hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cb1fdaf8493491e512900e0559b4a73b749a5bbd | 802 | py | Python | openstack_dashboard/dashboards/sdscontroller/administration/nodes/models.py | iostackproject/SDS-dashboard | efa3d7968c738bfb10bc19776f24f2937d5802d8 | [
"Apache-2.0"
] | 1 | 2021-01-20T00:14:15.000Z | 2021-01-20T00:14:15.000Z | openstack_dashboard/dashboards/sdscontroller/administration/nodes/models.py | iostackproject/SDS-dashboard | efa3d7968c738bfb10bc19776f24f2937d5802d8 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/sdscontroller/administration/nodes/models.py | iostackproject/SDS-dashboard | efa3d7968c738bfb10bc19776f24f2937d5802d8 | [
"Apache-2.0"
] | null | null | null | import calendar
import time
STATUS_THRESHOLD = 15
class ProxyNode:
"""
ProxyNode class defines a Swift Proxy node. The identifier is the name of the node.
"""
class StorageNode:
"""
StorageNode class defines a Swift storage node. The identifier is the name of the node.
"""
| 29.703704 | 101 | 0.647132 | import calendar
import time
# Maximum age (in seconds) of a node's last ping for it to be considered alive.
STATUS_THRESHOLD = 15
class ProxyNode:
    """
    Represents one Swift proxy node; the node's name serves as its identifier.
    """
    def __init__(self, name, ip, last_ping):
        # Seconds elapsed since the node last pinged; a node counts as
        # alive (node_status True) while this stays within the threshold.
        elapsed = calendar.timegm(time.gmtime()) - int(float(last_ping))
        self.id = name
        self.ip = ip
        self.last_ping = last_ping
        self.node_status = elapsed <= STATUS_THRESHOLD
class StorageNode:
    """
    Represents one Swift storage node; the node's name serves as its identifier.
    """
    def __init__(self, name, ip, last_ping, devices):
        # Seconds elapsed since the node last pinged; a node counts as
        # alive (node_status True) while this stays within the threshold.
        elapsed = calendar.timegm(time.gmtime()) - int(float(last_ping))
        self.id = name
        self.ip = ip
        self.last_ping = last_ping
        self.node_status = elapsed <= STATUS_THRESHOLD
        self.devices = devices
| 440 | 0 | 52 |
fbfd8831ec1f387d2a9f1a3631a3447d74d67fbe | 4,467 | py | Python | minotaur/utilities/ContentWriter.py | mlunnay/minotaur | e27b456b57e18ba0e7a4dd0a3b665f14ca79a2d8 | [
"MIT"
] | 1 | 2020-05-04T01:46:58.000Z | 2020-05-04T01:46:58.000Z | minotaur/utilities/ContentWriter.py | mlunnay/minotaur | e27b456b57e18ba0e7a4dd0a3b665f14ca79a2d8 | [
"MIT"
] | null | null | null | minotaur/utilities/ContentWriter.py | mlunnay/minotaur | e27b456b57e18ba0e7a4dd0a3b665f14ca79a2d8 | [
"MIT"
] | null | null | null | import clr
from System import *
from System.IO import *
if __name__ == "__main__":
f = File.Create("test.MEB")
writer = ContentWriter(f)
writer.WriteInt(42)
writer.Flush()
f.Close()
| 33.335821 | 111 | 0.623461 | import clr
from System import *
from System.IO import *
class ContentWriter(object):
    """Serializes primitive values into a simple binary container format.

    Output layout produced by Flush(): the identifier string (default
    "MEB"), a version byte, a flags byte (bit 0 = compressed), a 64-bit
    total file size, then the header section (type table and shared
    resource count) followed by the buffered content data.

    NOTE(review): this is IronPython code — it relies on ``clr`` and the
    .NET ``System`` / ``System.IO`` types (BinaryWriter, MemoryStream),
    and uses Python-2 ``xrange``; it will not run on plain CPython 3.
    """
    def __init__(self, outputStream, compressOutput = False, identifierString = "MEB"):
        """Wrap *outputStream* as the final destination for the container.

        Header and content bytes are buffered in MemoryStreams until
        Flush() assembles and writes the final output.
        """
        self.identifierString = identifierString
        self.compressContent = compressOutput
        self.finalOutput = BinaryWriter(outputStream)
        # In-memory buffers for the two sections written after the file header.
        self.headerContent = MemoryStream()
        self.contentData = MemoryStream()
        self.outStream = BinaryWriter(self.contentData)
        # Bookkeeping for typed/shared-resource serialization (WriteObject
        # is not implemented yet, so these stay empty in practice).
        self.typeMap = {}
        self.sharedResourceMap = {}
        self.sharedResources = []
        self.typeList = []
        self.typeSerializerMap = {}
        self.version = 1
    def WriteBool(self, value):
        """Write *value* as a .NET Boolean to the content stream."""
        self.outStream.Write(clr.Convert(value, Boolean))
    def WriteInt16(self, value):
        """Write *value* as a 16-bit signed integer."""
        self.outStream.Write(clr.Convert(value, Int16))
    def WriteInt(self, value):
        """Write *value* as a 32-bit signed integer."""
        self.outStream.Write(clr.Convert(value, Int32))
    def WriteLong(self, value):
        """Write *value* as a 64-bit signed integer."""
        self.outStream.Write(clr.Convert(value, Int64))
    def WriteUInt16(self, value):
        """Write *value* as a 16-bit unsigned integer."""
        self.outStream.Write(clr.Convert(value, UInt16))
    def WriteUInt32(self, value):
        """Write *value* as a 32-bit unsigned integer."""
        self.outStream.Write(clr.Convert(value, UInt32))
    def WriteUint64(self, value):
        """Write *value* as a 64-bit unsigned integer.

        NOTE(review): method name casing ("Uint64") is inconsistent with
        WriteUInt16/WriteUInt32 — kept as-is for caller compatibility.
        """
        self.outStream.Write(clr.Convert(value, UInt64))
    def WriteSingle(self, value):
        """Write *value* as a 32-bit float."""
        self.outStream.Write(clr.Convert(value, Single))
    def WriteDouble(self, value):
        """Write *value* as a 64-bit float."""
        self.outStream.Write(clr.Convert(value, Double))
    def WriteDecimal(self, value):
        """Write *value* as a .NET Decimal."""
        self.outStream.Write(clr.Convert(value, Decimal))
    def WriteByte(self, value):
        """Write *value* as an unsigned byte."""
        self.outStream.Write(clr.Convert(value, Byte))
    def WriteSByte(self, value):
        """Write *value* as a signed byte."""
        self.outStream.Write(clr.Convert(value, SByte))
    def WriteByteArray(self, value):
        """Write a whole byte array to the content stream."""
        self.outStream.Write(value)
    def WriteByteArrayPartial(self, value, offset, count):
        """Write *count* bytes of *value* starting at *offset*."""
        self.outStream.Write(value, offset, count)
    def WriteChar(self, value):
        """Write *value* as a single .NET Char."""
        self.outStream.Write(clr.Convert(value, Char))
    def WriteString(self, value):
        """Write a length-prefixed string: Int32 char count, then the chars.

        NOTE(review): the prefix is the character count, not the encoded
        byte count — readers must use the matching convention.
        """
        self.WriteInt(len(value))
        self.outStream.Write(clr.Convert(value, String).ToCharArray(), 0, len(value))
    def WriteGuid(self, value):
        """Write a Guid as its 16-byte representation."""
        self.outStream.Write(value.ToByteArray())
    def WriteObject(self, value):
        """Serialize an arbitrary object (not implemented yet)."""
        # TODO: implement writing objects
        pass
    def Flush(self):
        """Assemble and emit the final container: resources, header, data."""
        self.WriteSharedResources()
        self.WriteHeader()
        self.WriteOutput()
    def WriteSharedResources(self):
        """Serialize every queued shared resource via WriteObject."""
        # xrange: Python-2 / IronPython idiom.
        for i in xrange(len(self.sharedResources)):
            value = self.sharedResources[i]
            self.WriteObject(value)
    def WriteHeader(self):
        """Fill the header buffer: type count, type GUIDs, resource count."""
        writer = BinaryWriter(self.headerContent)
        writer.Write(clr.Convert(len(self.typeList), Int32))
        for serializer in self.typeList:
            writer.Write(serializer.id.ToByteArray())
        writer.Write(clr.Convert(len(self.sharedResources), Int32))
    def WriteOutput(self):
        """Write the file header, then the (un)compressed sections."""
        # Identifier string is written char by char (no length prefix).
        for c in self.identifierString:
            self.finalOutput.Write(clr.Convert(c, Char))
        self.finalOutput.Write(clr.Convert(self.version, Byte))
        flags = 0
        if self.compressContent:
            flags |= 0x1
        self.finalOutput.Write(clr.Convert(flags, Byte))
        # Total size = bytes written so far + header + content + 8,
        # the +8 presumably accounting for this Int64 size field itself
        # (written next) — TODO confirm against the reader's expectation.
        filesize = self.finalOutput.BaseStream.Length + self.headerContent.Length + self.contentData.Length + 8
        self.finalOutput.Write(clr.Convert(filesize, Int64))
        if self.compressContent:
            self.WriteCompressedContent()
        else:
            self.WriteUncompressedContent()
    def WriteCompressedContent(self):
        """Compressed output is not supported yet."""
        raise NotImplementedError()
    def WriteUncompressedContent(self):
        """Copy the header and content buffers verbatim to the output."""
        # Rewind both buffers before pumping them out.
        self.headerContent.Seek(0, SeekOrigin.Begin)
        self.contentData.Seek(0, SeekOrigin.Begin)
        self.Pump(self.headerContent, self.finalOutput)
        self.Pump(self.contentData, self.finalOutput)
    def Pump(self, input, output):
        """Stream all bytes from *input* to *output* in fixed-size chunks."""
        bytes = Array.CreateInstance(Byte, 4096) # 4 KiB at a time
        while 1:
            n = input.Read(bytes, 0, bytes.Length)
            if n == 0:
                break
            output.Write(bytes, 0, n)
if __name__ == "__main__":
    # Smoke test: write a single Int32 (42) into test.MEB and close the file.
    # Requires the IronPython/.NET environment (System.IO.File).
    f = File.Create("test.MEB")
    writer = ContentWriter(f)
    writer.WriteInt(42)
    writer.Flush()
    f.Close()
9406667b0ada76b1b8f03bbef3586d583b0e7568 | 45,375 | py | Python | iiotedge.py | hansgschossmann/iot-edge-opc | c4cc051e1e278c627377cb13f34fb7aaa08691b5 | [
"MIT"
] | null | null | null | iiotedge.py | hansgschossmann/iot-edge-opc | c4cc051e1e278c627377cb13f34fb7aaa08691b5 | [
"MIT"
] | null | null | null | iiotedge.py | hansgschossmann/iot-edge-opc | c4cc051e1e278c627377cb13f34fb7aaa08691b5 | [
"MIT"
] | null | null | null | import sys
_python3 = False
if (sys.version_info > (3, 0)):
_python3 = True
import os
import platform
import json
import subprocess
import shlex
import argparse
import time
import shutil
import socket
import yaml
import logging
from azure.mgmt.resource import ResourceManagementClient
from azure.common.client_factory import get_client_from_cli_profile
import stat
import requests
# Default CPU architecture tag used when composing container image names.
PLATFORM_CPU = 'amd64'
# Container image names and pinned versions (empty string = use the
# default/latest tag composed later from OS and CPU suffixes).
OPCPUBLISHER_CONTAINER_IMAGE = 'mcr.microsoft.com/iotedge/opc-publisher'
# to test new features in publisher use a local registry
#OPCPUBLISHER_CONTAINER_IMAGE = 'localhost:5000/opc-publisher'
OPCPUBLISHER_CONTAINER_VERSION = ''
OPCPROXY_CONTAINER_IMAGE = 'mcr.microsoft.com/iotedge/opc-proxy'
OPCPROXY_CONTAINER_VERSION = '1.0.4'
OPCTWIN_CONTAINER_IMAGE = 'mcr.microsoft.com/iotedge/opc-twin'
OPCTWIN_CONTAINER_VERSION = ''
OPCPLC_CONTAINER_IMAGE = 'mcr.microsoft.com/iotedge/opc-plc'
OPCPLC_CONTAINER_VERSION = ''
# set module globals (populated by the argument-validation code below)
_targetPlatform = ''
# Generated script contents, accumulated as lists of command lines.
_startScript = []
_stopScript = []
_initScript = []
_deinitScript = []
_iotHubOwnerConnectionString = ''
# Host directory bound into the containers for config/log/cert files.
_hostDirHost = ''
# Fully-qualified container names; rewritten later with registry/OS/CPU tags.
_opcPublisherContainer = OPCPUBLISHER_CONTAINER_IMAGE
_opcProxyContainer = OPCPROXY_CONTAINER_IMAGE
_opcTwinContainer = OPCTWIN_CONTAINER_IMAGE
_opcPlcContainer = OPCPLC_CONTAINER_IMAGE
_platformCpu = PLATFORM_CPU
# Topology site name (factory/production line), source of the docker bind,
# and output directory for generated configuration files.
_edgeSite = ''
_dockerBindSource = ''
_outdirConfig = ''
# command line parsing
parser = argparse.ArgumentParser(description="Installs an Industrial IoT gateway based on IoT Edge")
# site to handle
siteParser = argparse.ArgumentParser(add_help=False)
siteParser.add_argument('site', metavar='SITE', default=None,
help="The site (factory/production line) of the installation. This is not a DNS domain, but a topology site used to address hosts with identical IP addresses from the cloud or build reduntant systems.")
# publisher configuration files
publisherConfigParser = argparse.ArgumentParser(add_help=False)
publisherConfigParser.add_argument('--nodesconfig', default=None,
help="The configuration file specifying the OPC UA nodes to publish. Requires the hostdir parameter to be set to a directory.")
publisherConfigParser.add_argument('--telemetryconfig', default=None,
help="The configuration file specifying the format of the telemetry to be ingested by OPC Publisher. Requires the hostdir parameter to be set to a directory.")
# iothub name
iothubArgsParser = argparse.ArgumentParser(add_help=False)
iothubArgsParser.add_argument('--iothubname', default=None, required=True,
help="Name of the IoTHub to use.")
# optional arguments valid for all sub commands
commonOptArgsParser = argparse.ArgumentParser(add_help=False)
commonOptArgsParser.add_argument('--dockerregistry', default=None,
help="The container registry for all used containers.")
commonOptArgsParser.add_argument('--hostdir', default=None,
help="A directory on the host machine, which containers use for log, config and certificate files. Use the syntax of your targetplatform to specify (for WSL use Windows syntax) If not specified everything is kept in Docker volumes.")
commonOptArgsParser.add_argument('--outdir', default='./out',
help="The directory where all generated files are created.")
commonOptArgsParser.add_argument('--targetplatform', choices=['windows', 'linux', 'wsl'], default=None,
help="The scripts created should target a different platform than you are working on. Default: the platform you are working on")
commonOptArgsParser.add_argument('--lcow', action='store_true',
help="Forces to use Linux Containers On Windows. Only valid for a Windows target platform.")
commonOptArgsParser.add_argument('--force', action='store_true',
help="Forces deletion of existing IoT Edge deployment and device if they exist.")
commonOptArgsParser.add_argument('--proxyschema', default="http",
help="Schema for the proxy.")
commonOptArgsParser.add_argument('--proxyhost', default=None,
help="Hostname of the proxy to enable IoT Edge communication via proxy.")
commonOptArgsParser.add_argument('--proxyport', default=None,
help="Port tu use for the proxy.")
commonOptArgsParser.add_argument('--proxyusername', default=None,
help="Username to use for proxy authentication.")
commonOptArgsParser.add_argument('--proxypassword', default=None,
help="Password to use for proxy authentication.")
commonOptArgsParser.add_argument('--upstreamprotocol', choices=['Amqp', 'AmpqWs'], default='Amqp',
help="the upstream protocol IoT Edge should use for communication via proxy.")
commonOptArgsParser.add_argument('--archivepath', default=None,
help="the path to an IoT Edge archive to use.")
commonOptArgsParser.add_argument('--siteconfig', default="simple-site.yml",
help="the configuration of the site as docker-compose YAML file.")
commonOptArgsParser.add_argument('-s', '--serviceprincipalcert',
help=".pem containing a service principal cert to login to Azure.")
commonOptArgsParser.add_argument('-t', '--tenantid',
help="TenantId of the Azure tenant to login.")
commonOptArgsParser.add_argument('-a', '--appid',
help="AppId of the Azure service principal to login.")
commonOptArgsParser.add_argument('--loglevel', default='info',
help="The log level. Allowed: debug, info, warning, error, critical")
# add sub commands
subParsers = parser.add_subparsers(dest='subcommand')
subParsers.required = True
gwParser = subParsers.add_parser('gw', parents=[siteParser, commonOptArgsParser, iothubArgsParser, publisherConfigParser], help='Generates scripts for an Azure Industrial IoT gateway deployment.')
_args = parser.parse_args()
#
# configure IoT Edge site
#
###############################################################################
#
# Main script
#
###############################################################################
# configure script logging
try:
logLevel = getattr(logging, _args.loglevel.upper())
except:
logLevel = logging.INFO
if not isinstance(logLevel, int):
raise( ValueError('Invalid log level: {0}'.format(logLevel)))
logging.basicConfig(level=logLevel)
# get path of script
_scriptDir = sys.path[0]
# CPU specific settings
if 'intel64' in str(platform.processor()).lower():
_platformCpu = 'amd64'
else:
_platformCpu = 'arm32v7'
#
# OS specific settings
#
if not _args.targetplatform:
_targetPlatform = str(platform.system()).lower()
if _targetPlatform == 'linux':
# check if we are on WSL
for line in open('/proc/version'):
if 'Microsoft' in line:
_targetPlatform = 'wsl'
elif _targetPlatform == 'windows':
pass
else:
logging.critical("OS is not supported. Exiting...")
sys.exit(1)
else:
_targetPlatform = _args.targetplatform
logging.info("Using targetplatform '{0}'".format(_targetPlatform))
if _targetPlatform == 'linux' or _targetPlatform == 'wsl':
_startScriptFileName = 'start-iiotedge.sh'
_startScriptCmdPrefix = ''
_startScriptCmdPostfix = ' &'
_stopScriptFileName = 'stop-iiotedge.sh'
_stopScriptCmdPrefix = ''
_stopScriptCmdPostfix = ''
_initScriptFileName = 'init-iiotedge.sh'
_initScriptCmdPrefix = ''
_initScriptCmdPostfix = ' &'
_deinitScriptFileName = 'deinit-iiotedge.sh'
_deinitScriptCmdPrefix = ''
_deinitScriptCmdPostfix = ' &'
_targetNewline = '\n'
elif _targetPlatform == 'windows':
_startScriptFileName = 'Start-IIoTEdge.ps1'
_startScriptCmdPrefix = 'start '
_startScriptCmdPostfix = ''
_stopScriptFileName = 'Stop-IIoTEdge.ps1'
_stopScriptCmdPrefix = ''
_stopScriptCmdPostfix = ''
_initScriptFileName = 'Init-IIoTEdge.ps1'
_initScriptCmdPrefix = ''
_initScriptCmdPostfix = ''
_deinitScriptFileName = 'Deinit-IIoTEdge.ps1'
_deinitScriptCmdPrefix = ''
_deinitScriptCmdPostfix = ''
_targetNewline = '\r\n'
#
# validate common arguments
#
if _args.lcow:
if _targetPlatform == 'windows':
_containerOs = 'linux'
else:
logging.fatal("-lcow is only allowed for a Winodws target")
sys.exit(1)
else:
_containerOs = _targetPlatform if _targetPlatform != 'wsl' else 'linux'
if _args.outdir is not None:
_args.outdir = _args.outdir.strip()
if not os.path.exists(_args.outdir):
os.mkdir(_args.outdir)
elif not os.path.isdir(_args.outdir):
logging.critical("Given outdir '{0} is not a directory. Please check. Exiting...".format(_args.outdir))
sys.exit(2)
logging.info("Create all generated files in directory '{0}'.".format(_args.outdir))
if _args.hostdir is not None:
# the --hostdir parameter specifies where on the docker host the configuration files should be stored.
# during docker configuration a volume bind is configured, which points to this directory.
# in case of a cross platform generation, the files are put into a config subdirectory of the specified --outdir
# and need to be transfered manually to the IoT Edge device.
_dockerBindSource = _args.hostdir = _args.hostdir.strip().replace('\\', '/')
# The Docker for Windows volume bind syntax has changed over time.
# With docker ce 18.03.0-ce-win59 (16762), engine 18.03.0-ce the bind syntax for D:/docker needs to be //d/docker
if _targetPlatform in [ 'windows', 'wsl']:
# we accept only fully qualified windows syntax (starts with <drive>:)
if _args.hostdir[1:3] != ':/':
logging.fatal("The --hostdir parameter must be using a fully qualified Windows directory syntax.")
sys.exit(1)
elif _targetPlatform == 'linux':
if _args.hostdir[0:1] != '/':
logging.fatal("The --hostdir parameter must be using a fully qualified Linux directory syntax.")
sys.exit(1)
else:
logging.fatal("Target platform '{0}' is not supported.".format(_targetPlatform))
sys.exit(1)
if _args.targetplatform:
# create a directory for the configuration files, if not running on the IoT Edge device
_outdirConfig = _args.outdir + '/config'
if not os.path.exists(_outdirConfig):
os.mkdir(_outdirConfig)
logging.info("Create directory '{0}' for target system configuration files.".format(_outdirConfig))
elif not os.path.isdir(_outdirConfig):
logging.critical("'{0}' is expected to be a directory to provide configuration files, but it is not. Pls check. Exiting...".format(_outdirConfig))
sys.exit(2)
logging.info("Create all generated configuration files in directory '{0}'.".format(_outdirConfig))
logging.info("Passing '{0}' to docker as source in bind, maps to '{1}'.".format(_dockerBindSource, _args.hostdir))
_hostDirHost = _args.hostdir
else:
logging.info("--targetplatform was not specified. Assume we run on the IoT Edge device.")
if _targetPlatform in [ 'windows', 'linux' ]:
_hostDirHost = _args.hostdir
if _targetPlatform == 'wsl':
_hostDirHost = '/mnt/' + _args.hostdir[0:1] + '/' + _args.hostdir[3:]
if not os.path.exists(_hostDirHost):
logging.info("Directory '{0}' specified via --hostdir does not exist. Creating it...".format(_args.hostdir))
os.mkdir(_hostDirHost)
logging.info("Passing '{0}' to docker as source in bind, maps to '{1}'.".format(_dockerBindSource, _hostDirHost))
else:
# use a docker volume
# todo verify correct handling with sites
_dockerBindSource = 'cfappdata'
logging.info("Passing '{0}' (docker volume) to docker as source in bind.".format(_dockerBindSource))
if _args.dockerregistry is None:
_args.dockerregistry = 'microsoft'
else:
_args.dockerregistry = _args.dockerregistry.strip().lower()
logging.info("Docker container registry to use: '{0}'".format(_args.dockerregistry))
#
# build container names
#
_opcProxyContainer = OPCPROXY_CONTAINER_IMAGE if '/' in OPCPROXY_CONTAINER_IMAGE else '{0}/{1}'.format(_args.dockerregistry, OPCPROXY_CONTAINER_IMAGE)
_opcProxyContainer = '{0}:'.format(_opcProxyContainer) if not OPCPROXY_CONTAINER_VERSION else '{0}:{1}-'.format(_opcProxyContainer, OPCPROXY_CONTAINER_VERSION)
_opcProxyContainer = '{0}{1}'.format(_opcProxyContainer, 'windows') if _containerOs == 'windows' else '{0}{1}'.format(_opcProxyContainer, 'linux')
_opcProxyContainer = '{0}-{1}'.format(_opcProxyContainer, 'amd64') if _platformCpu == 'amd64' else '{0}-{1}'.format(_opcProxyContainer, 'arm32v7')
_opcTwinContainer = OPCTWIN_CONTAINER_IMAGE if '/' in OPCTWIN_CONTAINER_IMAGE else '{0}/{1}'.format(_args.dockerregistry, OPCTWIN_CONTAINER_IMAGE)
_opcTwinContainer = '{0}:'.format(_opcTwinContainer) if not OPCTWIN_CONTAINER_VERSION else '{0}:{1}-'.format(_opcTwinContainer, OPCTWIN_CONTAINER_VERSION)
_opcTwinContainer = '{0}{1}'.format(_opcTwinContainer, 'windows') if _containerOs == 'windows' else '{0}{1}'.format(_opcTwinContainer, 'linux')
_opcTwinContainer = '{0}-{1}'.format(_opcTwinContainer, 'amd64') if _platformCpu == 'amd64' else '{0}{1}'.format(_opcTwinContainer, 'arm32v7')
_opcPublisherContainer = OPCPUBLISHER_CONTAINER_IMAGE if '/' in OPCPUBLISHER_CONTAINER_IMAGE else '{0}/{1}'.format(_args.dockerregistry, OPCPUBLISHER_CONTAINER_IMAGE)
_opcPublisherContainer = '{0}:'.format(_opcPublisherContainer) if not OPCPUBLISHER_CONTAINER_VERSION else '{0}:{1}-'.format(_opcPublisherContainer, OPCPUBLISHER_CONTAINER_VERSION)
_opcPublisherContainer = '{0}{1}'.format(_opcPublisherContainer, 'windows') if _containerOs == 'windows' else '{0}{1}'.format(_opcPublisherContainer, 'linux')
_opcPublisherContainer = '{0}-{1}'.format(_opcPublisherContainer, 'amd64') if _platformCpu == 'amd64' else '{0}-{1}'.format(_opcPublisherContainer, 'arm32v7')
_opcPlcContainer = OPCPLC_CONTAINER_IMAGE if '/' in OPCPLC_CONTAINER_IMAGE else '{0}/{1}'.format(_args.dockerregistry, OPCPLC_CONTAINER_IMAGE)
_opcPlcContainer = '{0}:'.format(_opcPlcContainer) if not OPCPLC_CONTAINER_VERSION else '{0}:{1}-'.format(_opcPlcContainer, OPCPLC_CONTAINER_VERSION)
_opcPlcContainer = '{0}{1}'.format(_opcPlcContainer, 'windows') if _containerOs == 'windows' else '{0}{1}'.format(_opcPlcContainer, 'linux')
_opcPlcContainer = '{0}-{1}'.format(_opcPlcContainer, 'amd64') if _platformCpu == 'amd64' else '{0}{1}'.format(_opcPlcContainer, 'arm32v7')
logging.info("Using OpcPublisher container: '{0}'".format(_opcPublisherContainer))
logging.info("Using OpcProxy container: '{0}'".format(_opcProxyContainer))
logging.info("Using OpcTwin container: '{0}'".format(_opcTwinContainer))
logging.info("Using OpcPlc container: '{0}'".format(_opcPlcContainer))
#
# azure authentication
#
if _args.serviceprincipalcert is not None:
_args.serviceprincipalcert = _args.serviceprincipalcert.strip()
if _targetPlatform == 'windows' and not _args.serviceprincipalcert[1:2] == ':' or _targetPlatform == 'linux' and not _args.serviceprincipalcert.startswith('/'):
_args.serviceprincipalcert = '{0}/{1}'.format(os.getcwd(), _args.serviceprincipalcert)
logging.info("Setup using service principal cert in file '{0}'".format(_args.serviceprincipalcert))
if _args.tenantid is not None:
_args.tenantid = _args.tenantid.strip()
logging.info("Setup using tenant id '{0}' to login".format(_args.tenantid))
if _args.appid is not None:
_args.appid = _args.appid.strip()
logging.info("Setup using AppId '{0}' to login".format(_args.appid))
if ((_args.serviceprincipalcert is not None or _args.tenantid is not None or _args.appid is not None) and
(_args.serviceprincipalcert is None or _args.tenantid is None or _args.appid is None)):
logging.critical("serviceprincipalcert, tennantid and appid must all be specified. Exiting...")
sys.exit(2)
_args.subcommand = _args.subcommand.lower()
#
# validate all required parameters for gw subcommand
#
if _args.subcommand == 'gw':
# validate the nodesconfig file
if _args.nodesconfig:
# check if file exists
if not os.path.exists(_args.nodesconfig) or not os.path.isfile(_args.nodesconfig):
logging.critical("The nodesconfig file '{0}' can not be found or is not a file. Exiting...".format(_args.nodesconfig))
sys.exit(2)
# to access it we need access to host file system and need a hostdir parameter
if not _args.hostdir:
logging.critical("If --nodesconfig is specified you need to specify a host directory for --hostdir as well. Exiting...")
sys.exit(2)
try:
if _args.telemetryconfig:
# check if file exists
if not os.path.exists(_args.telemetryconfig) or not os.path.isfile(_args.telemetryconfig):
logging.critical("The telemetryconfig file '{0}' can not be found or is not a file. Exiting...".format(_args.telemetryconfig))
sys.exit(2)
# to access it we need access to host file system and need a hostdir parameter
if not _args.hostdir:
logging.critical("If --telemetryconfig requires --hostdir as well. Exiting...")
sys.exit(2)
except AttributeError:
pass
_args.site = _args.site.lower()
_edgeSite = _args.site
# IoT Edge archive
if _args.archivepath is not None:
_args.archivepath = _args.archivepath.strip()
if not os.path.exists(_args.archivepath):
logging.critical("The given archive '{0} does not exist. Please check. Exiting...".format(_args.archivepath))
sys.exit(2)
# site configuration
if _args.siteconfig is not None:
_args.siteconfig = _args.siteconfig.strip()
if not os.path.exists(_args.siteconfig):
logging.critical("The given site config file '{0} does not exist. Please check. Exiting...".format(_args.siteconfig))
sys.exit(2)
# build the list of hostname/IP address mapping to allow the containers to access the local and external hosts, in case there is no DNS (espacially on Windows)
# add localhost info if we run on the targetplatform
_additionalHosts = []
if not _args.targetplatform:
ipAddress = getLocalIpAddress()
if ipAddress is None:
logging.critical("There is not network connection available.")
sys.exit(1)
hostName = socket.gethostname()
fqdnHostName = socket.getfqdn()
_additionalHosts.append({ "host": hostName, "ip": ipAddress })
if hostName.lower() != fqdnHostName.lower():
_additionalHosts.append({ "host": fqdnHostName, "ip": ipAddress })
else:
print("FQDN '{0}' is equal to hostname '{1}'".format(fqdnHostName, hostName))
_additionalHosts.extend(getExtraHosts()[:])
_extraHosts = []
if len(_additionalHosts) > 0:
_extraHosts.extend('- "{0}:{1}"\n'.format(host['host'], host['ip']) for host in _additionalHosts[0:1])
if len(_additionalHosts) > 2:
_extraHosts.extend(' - "{0}:{1}"\n'.format(host['host'], host['ip']) for host in _additionalHosts[1:-1])
if len(_additionalHosts) >= 2:
_extraHosts.extend(' - "{0}:{1}"'.format(host['host'], host['ip']) for host in _additionalHosts[-1:])
#
# gw operation: create all scripts to (de)init and start/stop the site specified on the command line
# - copy the configuration files
# - create an IoT Edge device and deployment for the site and all OPC components are configured to run as IoT Edge modules
#
if _args.subcommand == 'gw':
# login to Azure and fetch IoTHub connection string
azureLogin()
azureGetIotHubCs()
# copy configuration files to the right directory if we are running on the target, otherwise copy it to the config file directory
if _args.targetplatform:
if _args.nodesconfig:
nodesconfigFileName = 'pn-' + _args.site + '.json'
shutil.copyfile(_args.nodesconfig, '{0}/{1}'.format(_outdirConfig, nodesconfigFileName))
try:
if _args.telemetryconfig:
telemetryconfigFileName = 'tc-' + _args.site + '.json'
shutil.copyfile(_args.telemetryconfig, '{0}/{1}'.format(_outdirConfig, telemetryconfigFileName))
except AttributeError:
pass
else:
if _args.nodesconfig:
nodesconfigFileName = 'pn-' + _args.site + '.json'
shutil.copyfile(_args.nodesconfig, '{0}/{1}'.format(_hostDirHost, nodesconfigFileName))
if _args.telemetryconfig:
telemetryconfigFileName = 'tc-' + _args.site + '.json'
shutil.copyfile(_args.telemetryconfig, '{0}/{1}'.format(_hostDirHost, telemetryconfigFileName))
# create site/factory scripts
logging.info("Create the site initialization and configuration for '{0}'".format(_args.site))
createEdgeSiteConfiguration(_args.site)
# optional: sleep to debug initialization script issues
# _initScript.append('timeout 60\n')
# write the scripts
writeScript(_startScriptFileName, _startScript)
writeScript(_stopScriptFileName, _stopScript, reverse = True)
writeScript(_initScriptFileName, _initScript)
writeScript(_deinitScriptFileName, _deinitScript, reverse = True)
# todo patch config.yaml if proxy is used
# copy prerequisites installation scripts
if _args.targetplatform:
if _args.targetplatform in [ 'windows' ]:
shutil.copyfile('{0}/Init-IotEdgeService.ps1'.format(_scriptDir), '{0}/Init-IotEdgeService.ps1'.format(_args.outdir))
shutil.copyfile('{0}/Deinit-IotEdgeService.ps1'.format(_scriptDir), '{0}/Deinit-IotEdgeService.ps1'.format(_args.outdir))
shutil.copyfile('{0}Prepare-IIotHost.ps1'.format(_scriptDir), '{0}/Prepare-IIotHost.ps1'.format(_args.outdir))
if _args.targetplatform in [ 'linux', 'wsl' ]:
shutil.copyfile('{0}/iiotedge-install-prerequisites.sh'.format(_scriptDir), '{0}/iiotedge-install-prerequisites.sh'.format(_args.outdir))
shutil.copyfile('{0}/iiotedge-install-linux-packages.sh'.format(_scriptDir), '{0}/iiotedge-install-linux-packages.sh'.format(_args.outdir))
shutil.copyfile('{0}/requirements.txt'.format(_scriptDir), '{0}/requirements.txt'.format(_args.outdir))
# inform user when not running on target platform
logging.info('')
logging.info("Please copy any required script files from '{0}' to your target system.".format(_args.outdir))
if _args.hostdir:
logging.info("Please copy any required configuration files from '{0}' to your target system to directory '{1}'.".format(_outdirConfig, _args.hostdir))
elif _targetPlatform == 'windows':
shutil.copyfile('{0}/Init-IotEdgeService.ps1'.format(_scriptDir), '{0}/Init-IotEdgeService.ps1'.format(_args.outdir))
shutil.copyfile('{0}/Deinit-IotEdgeService.ps1'.format(_scriptDir), '{0}/Deinit-IotEdgeService.ps1'.format(_args.outdir))
shutil.copyfile('{0}/Prepare-WindowsGatewayStep1.ps1'.format(_scriptDir), '{0}/Prepare-WindowsGatewayStep1.ps1'.format(_args.outdir))
shutil.copyfile('{0}/Prepare-WindowsGatewayStep2.ps1'.format(_scriptDir), '{0}/Prepare-WindowsGatewayStep2.ps1'.format(_args.outdir))
# done
logging.info('')
if _args.targetplatform:
logging.info("The generated script files can be found in: '{0}'. Please copy them to your target system.".format(_args.outdir))
else:
logging.info("The generated script files can be found in: '{0}'".format(_args.outdir))
logging.info('')
logging.info("Operation completed.")
| 53.382353 | 237 | 0.667989 | import sys
# Remember whether we run under Python 3 (sys.version_info check).
_python3 = False
if (sys.version_info > (3, 0)):
    _python3 = True
import os
import platform
import json
import subprocess
import shlex
import argparse
import time
import shutil
import socket
import yaml
import logging
from azure.mgmt.resource import ResourceManagementClient
from azure.common.client_factory import get_client_from_cli_profile
import stat
import requests
# Default CPU architecture; corrected later from platform.processor().
PLATFORM_CPU = 'amd64'
# Base container image names and pinned versions; the fully qualified
# names (registry, version, os and cpu suffixes) are composed further below.
OPCPUBLISHER_CONTAINER_IMAGE = 'mcr.microsoft.com/iotedge/opc-publisher'
# to test new features in publisher use a local registry
#OPCPUBLISHER_CONTAINER_IMAGE = 'localhost:5000/opc-publisher'
OPCPUBLISHER_CONTAINER_VERSION = ''
OPCPROXY_CONTAINER_IMAGE = 'mcr.microsoft.com/iotedge/opc-proxy'
OPCPROXY_CONTAINER_VERSION = '1.0.4'
OPCTWIN_CONTAINER_IMAGE = 'mcr.microsoft.com/iotedge/opc-twin'
OPCTWIN_CONTAINER_VERSION = ''
OPCPLC_CONTAINER_IMAGE = 'mcr.microsoft.com/iotedge/opc-plc'
OPCPLC_CONTAINER_VERSION = ''
# set module globals
_targetPlatform = ''            # 'windows', 'linux' or 'wsl' (detected or from --targetplatform)
_startScript = []               # accumulated start-script command lines
_stopScript = []                # accumulated stop-script command lines
_initScript = []                # accumulated init-script command lines
_deinitScript = []              # accumulated deinit-script command lines (written reversed)
_iotHubOwnerConnectionString = ''
_hostDirHost = ''               # host view of --hostdir (differs from _dockerBindSource on WSL)
_opcPublisherContainer = OPCPUBLISHER_CONTAINER_IMAGE
_opcProxyContainer = OPCPROXY_CONTAINER_IMAGE
_opcTwinContainer = OPCTWIN_CONTAINER_IMAGE
_opcPlcContainer = OPCPLC_CONTAINER_IMAGE
_platformCpu = PLATFORM_CPU
_edgeSite = ''
_dockerBindSource = ''          # source passed to docker volume binds (--hostdir or a named volume)
_outdirConfig = ''              # <outdir>/config when generating for a different target platform
# command line parsing
# The parser is built from small reusable parent parsers so that future
# sub commands can share argument groups.
parser = argparse.ArgumentParser(description="Installs an Industrial IoT gateway based on IoT Edge")
# site to handle
siteParser = argparse.ArgumentParser(add_help=False)
siteParser.add_argument('site', metavar='SITE', default=None,
    help="The site (factory/production line) of the installation. This is not a DNS domain, but a topology site used to address hosts with identical IP addresses from the cloud or build reduntant systems.")
# publisher configuration files
publisherConfigParser = argparse.ArgumentParser(add_help=False)
publisherConfigParser.add_argument('--nodesconfig', default=None,
    help="The configuration file specifying the OPC UA nodes to publish. Requires the hostdir parameter to be set to a directory.")
publisherConfigParser.add_argument('--telemetryconfig', default=None,
    help="The configuration file specifying the format of the telemetry to be ingested by OPC Publisher. Requires the hostdir parameter to be set to a directory.")
# iothub name
iothubArgsParser = argparse.ArgumentParser(add_help=False)
iothubArgsParser.add_argument('--iothubname', default=None, required=True,
    help="Name of the IoTHub to use.")
# optional arguments valid for all sub commands
commonOptArgsParser = argparse.ArgumentParser(add_help=False)
commonOptArgsParser.add_argument('--dockerregistry', default=None,
    help="The container registry for all used containers.")
commonOptArgsParser.add_argument('--hostdir', default=None,
    help="A directory on the host machine, which containers use for log, config and certificate files. Use the syntax of your targetplatform to specify (for WSL use Windows syntax) If not specified everything is kept in Docker volumes.")
commonOptArgsParser.add_argument('--outdir', default='./out',
    help="The directory where all generated files are created.")
commonOptArgsParser.add_argument('--targetplatform', choices=['windows', 'linux', 'wsl'], default=None,
    help="The scripts created should target a different platform than you are working on. Default: the platform you are working on")
commonOptArgsParser.add_argument('--lcow', action='store_true',
    help="Forces to use Linux Containers On Windows. Only valid for a Windows target platform.")
commonOptArgsParser.add_argument('--force', action='store_true',
    help="Forces deletion of existing IoT Edge deployment and device if they exist.")
commonOptArgsParser.add_argument('--proxyschema', default="http",
    help="Schema for the proxy.")
commonOptArgsParser.add_argument('--proxyhost', default=None,
    help="Hostname of the proxy to enable IoT Edge communication via proxy.")
commonOptArgsParser.add_argument('--proxyport', default=None,
    help="Port tu use for the proxy.")
commonOptArgsParser.add_argument('--proxyusername', default=None,
    help="Username to use for proxy authentication.")
commonOptArgsParser.add_argument('--proxypassword', default=None,
    help="Password to use for proxy authentication.")
commonOptArgsParser.add_argument('--upstreamprotocol', choices=['Amqp', 'AmpqWs'], default='Amqp',
    help="the upstream protocol IoT Edge should use for communication via proxy.")
commonOptArgsParser.add_argument('--archivepath', default=None,
    help="the path to an IoT Edge archive to use.")
commonOptArgsParser.add_argument('--siteconfig', default="simple-site.yml",
    help="the configuration of the site as docker-compose YAML file.")
commonOptArgsParser.add_argument('-s', '--serviceprincipalcert',
    help=".pem containing a service principal cert to login to Azure.")
commonOptArgsParser.add_argument('-t', '--tenantid',
    help="TenantId of the Azure tenant to login.")
commonOptArgsParser.add_argument('-a', '--appid',
    help="AppId of the Azure service principal to login.")
commonOptArgsParser.add_argument('--loglevel', default='info',
    help="The log level. Allowed: debug, info, warning, error, critical")
# add sub commands
subParsers = parser.add_subparsers(dest='subcommand')
subParsers.required = True
gwParser = subParsers.add_parser('gw', parents=[siteParser, commonOptArgsParser, iothubArgsParser, publisherConfigParser], help='Generates scripts for an Azure Industrial IoT gateway deployment.')
_args = parser.parse_args()
#
# configure IoT Edge site
#
def createEdgeSiteConfiguration(siteName):
    """Create all Azure-side and script-side artifacts for one IoT Edge site.

    Steps (all az CLI calls run via os.popen):
      1. Create (or reuse/recreate with --force) an IoT Edge deployment
         'iiot-deployment-<site>' from the docker-compose style site template,
         translated into an IoT Edge deployment manifest.
      2. Create (or reuse/recreate with --force) the edge device identity
         'iiot-edge-<site>' and tag it so the deployment targets it.
      3. Append the platform-specific start/stop/init/deinit commands to the
         module-level script buffers (_startScript, _stopScript, _initScript,
         _deinitScript), which are written to files later.

    Exits the process on any az CLI failure.

    NOTE(review): indentation of this file was reconstructed; the nesting of
    the UpstreamProtocol handling inside the proxy branch and of the proxy
    option handling in the Windows init branch should be confirmed against
    the original sources.
    """
    #
    # create all IoT Edge azure configuration resoures and settings for the site
    #
    # check if the deployment already exists
    deploymentName = 'iiot-deployment-{0}'.format(siteName)
    logging.info("Check if deployment with id '{0}' exists".format(deploymentName))
    cmd = "az iot edge deployment list --hub-name {0} --query \"[?id=='{1}']\"".format(_args.iothubname, deploymentName)
    deploymentListResult = os.popen(cmd).read()
    deploymentListJson = json.loads(deploymentListResult)
    #
    # create an IoTHub IoT Edge deployment if it is not there
    #
    createDeployment = False
    if not deploymentListResult or len(deploymentListJson) == 0:
        createDeployment = True
    else:
        if _args.force:
            # delete deployment and trigger creation
            logging.info("Deployment '{0}' found. Deleting it...".format(deploymentName))
            cmd = "az iot edge deployment delete --hub-name {0} --config-id {1}".format(_args.iothubname, deploymentName)
            os.popen(cmd).read()
            createDeployment = True
        else:
            logging.info("Deployment '{0}' found. Using it...".format(deploymentName))
            logging.debug(json.dumps(deploymentListJson, indent=4))
    #
    # Read our module configuration from a .yml to push it into a deployment manifest in the next step
    #
    if createDeployment:
        logging.info("Creating deployment '{0}'".format(deploymentName))
        twinService = False
        # patch the template to create a docker compose configuration
        ymlFileName = '{0}.yml'.format(siteName)
        ymlOutFileName = '{0}/{1}'.format(_args.outdir, ymlFileName)
        telemetryConfigOption = ''
        try:
            # telemetryconfig only exists for sub commands that include
            # publisherConfigParser, hence the AttributeError guard
            if _args.telemetryconfig:
                telemetryConfigOption = '--tc /d/tc-{0}.json'.format(siteName)
        except AttributeError:
            pass
        # substitute the ${...} placeholders of the site template
        with open('{0}/{1}'.format(_scriptDir, _args.siteconfig), 'r') as setupTemplate, open(ymlOutFileName, 'w+', newline=_targetNewline) as setupOutFile:
            for line in setupTemplate:
                line = line.replace('${OPCPUBLISHER_CONTAINER}', _opcPublisherContainer)
                line = line.replace('${OPCPROXY_CONTAINER}', _opcProxyContainer)
                line = line.replace('${OPCTWIN_CONTAINER}', _opcTwinContainer)
                line = line.replace('${OPCPLC_CONTAINER}', _opcPlcContainer)
                line = line.replace('${TELEMETRYCONFIG_OPTION}', telemetryConfigOption)
                line = line.replace('${IOTHUB_CONNECTIONSTRING}', _iotHubOwnerConnectionString)
                line = line.replace('${OPCTWIN_DEVICECONNECTIONSTRING_OPTION}', '')
                line = line.replace('${SITE}', siteName)
                line = line.replace('${BINDSOURCE}', _dockerBindSource)
                line = line.replace('${EXTRAHOSTS}', "".join(_extraHosts))
                setupOutFile.write(line)
        # NOTE(review): yaml.load without an explicit Loader is deprecated and
        # unsafe on untrusted input; consider yaml.safe_load.
        with open(ymlOutFileName, 'r') as templateStream:
            yamlTemplate = yaml.load(templateStream)
        # translate each docker-compose service into an IoT Edge module config
        modulesConfig = {}
        for service in yamlTemplate['services']:
            serviceConfig = yamlTemplate['services'][service]
            moduleConfig = {}
            moduleConfig['version'] = '1.0'
            moduleConfig['type'] = 'docker'
            moduleConfig['status'] = 'running'
            moduleConfig['restartPolicy'] = serviceConfig['restart']
            settings = {}
            settings['image'] = serviceConfig['image']
            # docker createOptions mirror the compose keys (hostname,
            # environment, command, expose, ports, volumes, extra_hosts)
            createOptions = {}
            if 'hostname' in serviceConfig:
                createOptions['Hostname'] = serviceConfig['hostname']
            if 'environment' in serviceConfig:
                env = []
                for envVar in serviceConfig['environment']:
                    env.append('"{0}"'.format(envVar))
                createOptions['Env'] = env
            if 'command' in serviceConfig and serviceConfig['command'] is not None:
                cmdList = []
                cmdArgs = filter(lambda arg: arg.strip() != '', serviceConfig['command'].split(" "))
                cmdList.extend(cmdArgs)
                createOptions['Cmd'] = cmdList
            hostConfig = {}
            if 'expose' in serviceConfig:
                exposedPorts = {}
                for port in serviceConfig['expose']:
                    exposedPort = str(port) + "/tcp"
                    exposedPorts[exposedPort] = {}
                createOptions['ExposedPorts'] = exposedPorts
            if 'ports' in serviceConfig:
                portBindings = {}
                for port in serviceConfig['ports']:
                    hostPorts = []
                    # only the short HOSTPORT:CONTAINERPORT syntax is supported
                    if '-' in port or '/' in port:
                        logging.fatal("For ports in the .yml configuration only the single port short syntax without protocol (tcp is used) is supported (HOSTPORT:CONTAINERPORT)")
                        sys.exit(1)
                    if ':' in port:
                        delim = port.find(':')
                        hostPort = port[:delim]
                        containerPort = port[delim+1:] + '/tcp'
                    else:
                        hostPort = port
                        containerPort = port + '/tcp'
                    hostPorts.append( { "HostPort": str(hostPort) } )
                    portBindings[containerPort] = hostPorts
                hostConfig['PortBindings'] = portBindings
            if 'volumes' in serviceConfig:
                binds = []
                for bind in serviceConfig['volumes']:
                    # named volumes (no absolute path, no drive letter) get the
                    # site name prepended, like docker-compose project scoping
                    if bind[0:1] != '/' and bind[1:2] != ':':
                        bind = '{0}_{1}'.format(siteName, bind)
                    binds.append(bind)
                hostConfig['Binds'] = binds
            if 'extra_hosts' in serviceConfig and serviceConfig['extra_hosts']:
                extraHosts = []
                for extraHost in serviceConfig['extra_hosts']:
                    extraHosts.append(extraHost)
                hostConfig['ExtraHosts'] = extraHosts
            if len(hostConfig) != 0:
                createOptions['HostConfig'] = hostConfig
            settings['createOptions'] = json.dumps(createOptions)
            moduleConfig['settings'] = settings
            # map the service name to a site specific service name
            if service.lower() == 'publisher':
                service = 'pub-{0}'.format(siteName)
            elif service.lower() == 'proxy':
                service = 'prx-{0}'.format(siteName)
            elif service.lower() == 'plc':
                service = 'plc-{0}'.format(siteName)
            elif service.lower() == 'twin':
                service = 'twin-{0}'.format(siteName)
                twinService = True
            modulesConfig[service] = moduleConfig
        #
        # todo fetch the deployment content template from a new created deployment, so we can get rid of iiot-edge-deployment-content-template.json
        #
        #
        # create IoTHub IoT Edge deployment manifest
        #
        with open('iiot-edge-deployment-content-template.json', 'r') as deploymentContentTemplateFile, open('{0}/{1}.json'.format(_args.outdir, deploymentName), 'w', newline=_targetNewline) as deploymentContentFile:
            deploymentContent = json.loads(deploymentContentTemplateFile.read())
            # add proxy configuration
            if _args.proxyhost:
                # NOTE(review): the '@' is appended even when no credentials
                # were given, yielding 'http://@host' — confirm intent.
                ProxyUrl = _args.proxyschema + "://"
                if _args.proxyusername and _args.proxypassword:
                    ProxyUrl = ProxyUrl + _args.proxyusername + ":" + _args.proxypassword
                ProxyUrl = ProxyUrl + "@" + _args.proxyhost
                if _args.proxyport:
                    ProxyUrl = ProxyUrl + ":" + _args.proxyport
                # configure EdgeHub to use proxy
                if not 'env' in deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeHub']['settings']:
                    deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeHub']['settings']['env'] = {}
                if not 'https_proxy' in deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeHub']['settings']['env']:
                    deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeHub']['settings']['env']['https_proxy'] = {}
                deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeHub']['settings']['env']['https_proxy'] = { 'value': ProxyUrl }
                # configure EdgeAgent to use proxy
                if not 'env' in deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeAgent']['settings']:
                    deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeAgent']['settings']['env'] = {}
                if not 'https_proxy' in deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeAgent']['settings']['env']:
                    deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeAgent']['settings']['env']['https_proxy'] = {}
                deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeAgent']['settings']['env']['https_proxy'] = { 'value': ProxyUrl }
                # configure EdgeHub for requested upstream protocol
                if _args.upstreamprotocol != 'Amqp':
                    if not 'UpstreamProtocol' in deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeAgent']['settings']['env']:
                        deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeAgent']['settings']['env']['UpstreamProtocol'] = {}
                    deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['systemModules']['edgeAgent']['settings']['env']['UpstreamProtocol'] = { 'value': _args.upstreamprotocol }
            # configure IIoT Edge modules config
            deploymentContent['content']['modulesContent']['$edgeAgent']['properties.desired']['modules'] = modulesConfig
            # set default properties for twin
            if twinService:
                deploymentContent['content']['modulesContent']['twin-{0}'.format(siteName)] = { 'properties.desired': {} }
                # todo read more complex discovery settings
                deploymentContent['content']['modulesContent']['twin-{0}'.format(siteName)]['properties.desired'] = { 'Discovery': "Off" }
            # todo add scanner configuration from file
            json.dump(deploymentContent, deploymentContentFile, indent=4)
        # todo enable when bool is supported for target condition
        #cmd = 'az iot edge deployment create --config-id {0} --hub-name {1} --content {2}/{0}.json --target-condition "tags.iiot=true and tags.site=\'{3}\'"'.format(deploymentName, _args.iothubname, _args.outdir, siteName)
        cmd = "az iot edge deployment create --config-id {0} --hub-name {1} --content {2}/{0}.json --target-condition \"tags.iiot=\'true\' and tags.site=\'{3}\'\"".format(deploymentName, _args.iothubname, _args.outdir, siteName)
        deploymentCreateResult = os.popen(cmd).read()
        if not deploymentCreateResult:
            logging.critical("Can not create deployment. Exiting...")
            sys.exit(1)
        logging.debug(json.dumps(json.loads(deploymentCreateResult), indent=4))
    #
    # create an IoTHub device identity for the edge device and set tags
    #
    deviceId = 'iiot-edge-{0}'.format(siteName)
    logging.info("Check if device '{0}' already exists".format(deviceId))
    cmd = "az iot hub device-identity show --hub-name {0} --device-id {1}".format(_args.iothubname, deviceId)
    deviceShowResult = os.popen(cmd).read()
    createDevice = False
    if not deviceShowResult:
        createDevice = True
    else:
        if _args.force:
            # delete device and trigger creation
            logging.info("Device '{0}' found. Deleting it...".format(deviceId))
            cmd = "az iot hub device-identity delete --hub-name {0} --device-id {1}".format(_args.iothubname, deviceId)
            os.popen(cmd).read()
            createDevice = True
        else:
            logging.info("Device '{0}' found. Using it...".format(deviceId))
            logging.debug(json.dumps(json.loads(deviceShowResult), indent=4))
    if createDevice:
        logging.info("Creating device '{0}'".format(deviceId))
        cmd = "az iot hub device-identity create --hub-name {0} --device-id {1} --edge-enabled".format(_args.iothubname, deviceId)
        deviceCreateResult = os.popen(cmd).read()
        if not deviceCreateResult:
            logging.critical("Can not create device. Exiting...")
            sys.exit(1)
        logging.debug(json.dumps(json.loads(deviceCreateResult), indent=4))
    # tag the device so the deployment's target condition matches it
    logging.info("Setting tags for device '{0}'".format(deviceId))
    # todo enable when bool is supported for target condition
    # tags = {"iiot": True, "site": sitename }
    tags = {"iiot": "true", "site": siteName }
    tagsJson = json.dumps(tags)
    # todo need to fix escape and strings for Linux
    tagsJsonOs = tagsJson.replace('\"', '\\"').replace(' ', '')
    cmd = "az iot hub device-twin update --hub-name {0} --device-id {1} --set tags={2}".format(_args.iothubname, deviceId, tagsJsonOs)
    updateTagsResult = os.popen(cmd).read()
    if not updateTagsResult:
        logging.critical("Can not set tags for device. Exiting...")
        sys.exit(1)
    logging.debug(json.dumps(json.loads(updateTagsResult), indent=4))
    #
    # fetch edge device connection string
    #
    logging.info("Fetch connection string for device '{0}'".format(deviceId))
    cmd = "az iot hub device-identity show-connection-string --hub-name {0} --device-id {1}".format(_args.iothubname, deviceId)
    connectionStringResult = os.popen(cmd).read()
    if not connectionStringResult:
        logging.critical("Can not read connection string for device. Exiting...")
        sys.exit(1)
    connectionStringJson = json.loads(connectionStringResult)
    logging.debug(json.dumps(connectionStringJson, indent=4))
    edgeDeviceConnectionString = connectionStringJson['cs']
    #
    # create script commands to start/stop IoT Edge
    #
    if _targetPlatform == 'windows':
        startCmd = "Start-Service iotedge"
        _startScript.append(startCmd + '\n')
        stopCmd = "Stop-Service iotedge"
        _stopScript.append(stopCmd + '\n')
    #
    # create setup scripts
    #
    # patch the init template to create a docker compose configuration
    ymlFileName = '{0}-edge-init.yml'.format(siteName)
    ymlOutFileName = '{0}/{1}'.format(_args.outdir, ymlFileName)
    with open('{0}/site-edge-init.yml'.format(_scriptDir), 'r') as setupTemplate, open(ymlOutFileName, 'w+', newline=_targetNewline) as setupOutFile:
        for line in setupTemplate:
            line = line.replace('${OPCPROXY_CONTAINER}', _opcProxyContainer)
            line = line.replace('${IOTHUB_CONNECTIONSTRING}', _iotHubOwnerConnectionString)
            line = line.replace('${SITE}', siteName)
            line = line.replace('${BINDSOURCE}', _dockerBindSource)
            setupOutFile.write(line)
    # generate our setup script
    # todo add registry credential
    # todo use CA signed cert
    initCmd = 'docker volume create {0}_cfappdata'.format(siteName)
    _initScript.append(initCmd + '\n')
    initCmd = 'docker pull {0}'.format(_opcProxyContainer)
    _initScript.append(initCmd + '\n')
    initCmd = 'docker-compose -p {0} -f {1} up'.format(siteName, ymlFileName)
    _initScript.append(_initScriptCmdPrefix + initCmd + _initScriptCmdPostfix + '\n')
    initCmd = 'docker-compose -p {0} -f {1} down'.format(siteName, ymlFileName)
    _initScript.append(_initScriptCmdPrefix + initCmd + _initScriptCmdPostfix + '\n')
    if _targetPlatform == 'windows':
        # install the IoT Edge Windows service, forwarding proxy settings
        initCmd = '. ./Init-IotEdgeService.ps1 -DeviceConnectionString "{0}" -ContainerOs {1} '.format(edgeDeviceConnectionString, _containerOs)
        if _args.proxyhost:
            initCmd = initCmd + ' -ProxySchema {0} -ProxyHost "{1}" '.format(_args.proxyschema, _args.proxyhost)
        if _args.proxyport:
            initCmd = initCmd + ' -ProxyPort {0} '.format(_args.proxyport)
        if _args.proxyusername:
            initCmd = initCmd + ' -ProxyUsername {0} '.format(_args.proxyusername)
        if _args.proxypassword:
            initCmd = initCmd + ' -ProxyPassword {0} '.format(_args.proxypassword)
        # todo for extended offline mqtt support is required
        if _args.upstreamprotocol != 'Ampq':
            initCmd = initCmd + ' -UpstreamProtocol {0} '.format(_args.upstreamprotocol)
        if _args.archivepath:
            initCmd = initCmd + ' -ArchivePath "{0}" '.format(_args.archivepath)
        _initScript.append(_initScriptCmdPrefix + initCmd + _initScriptCmdPostfix + '\n')
        deinitCmd = ". ./Deinit-IotEdgeService.ps1"
        _deinitScript.append(_deinitScriptCmdPrefix + deinitCmd + _deinitScriptCmdPostfix + '\n')
    else:
        # todo adjust to v1
        initCmd = 'iotedgectl setup --connection-string "{0}" --auto-cert-gen-force-no-passwords {1}'.format(edgeDeviceConnectionString, '--runtime-log-level debug' if (_args.loglevel.lower() == 'debug') else '')
        _initScript.append(_initScriptCmdPrefix + initCmd + _initScriptCmdPostfix + '\n')
    # deinit commands are written in reversed order
    deinitCmd = 'docker volume rm {0}_cfappdata'.format(siteName)
    _deinitScript.append(_deinitScriptCmdPrefix + deinitCmd + _deinitScriptCmdPostfix + '\n')
def getLocalIpAddress():
    """Best-effort lookup of this machine's outbound IPv4 address.

    'Connects' a UDP socket to a public address (8.8.8.8); no packet is
    sent, but the OS selects the outgoing interface whose address is then
    read back via getsockname(). Returns the address string, or None when
    no route is available.
    """
    ipAddress = None
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # doesn't even have to be reachable
        sock.connect(('8.8.8.8', 1))
        ipAddress = sock.getsockname()[0]
    except OSError:
        # narrowed from a bare 'except:': socket failures raise OSError
        ipAddress = None
    finally:
        sock.close()
    return ipAddress
def getExtraHosts():
    """Read the optional 'extrahosts' file next to the script.

    The file uses /etc/hosts syntax: an IPv4 address followed by one or
    more host names per line; '#' starts a comment. Entries with an
    invalid address are skipped with a warning.

    Returns a list of {"host": <name>, "ip": <address>} dicts (empty when
    the file is missing).
    """
    hosts = []
    extraHostsPath = "{0}/extrahosts".format(_scriptDir)
    if os.path.isfile(extraHostsPath):
        with open(extraHostsPath, "r") as hostsfile:
            hostlines = hostsfile.readlines()
        # drop full-line comments and blank lines
        hostlines = [line.strip() for line in hostlines
            if not line.startswith('#') and line.strip() != '']
        for line in hostlines:
            # strip trailing comments, then split into IP plus host names
            linesplit = line.split('#')[0].split()[:]
            ipAddress = linesplit[0]
            try:
                # validate the IPv4 address syntax
                socket.inet_aton(ipAddress)
            except OSError:
                # narrowed from a bare 'except:': inet_aton raises OSError
                # (socket.error) on malformed addresses
                exceptionInfo = sys.exc_info()
                logging.warning("Exception info:")
                logging.warning("{0}".format(exceptionInfo))
                logging.warning("There is an entry in extrahosts with invalid IP address syntax: '{0}'. Ignoring...".format(ipAddress))
                continue
            hostNames = linesplit[1:]
            for hostName in hostNames:
                hosts.append({ "host": hostName, "ip": ipAddress })
    return hosts
def writeScript(scriptFileBaseName, scriptBuffer, reverse = False):
    """Write the buffered script commands to a file in the output directory
    (using the target platform's newline convention) and mark the file as
    executable for user, group and others. With reverse=True the commands
    are written in reversed order (used for deinit scripts)."""
    targetPath = '{0}/{1}'.format(_args.outdir, scriptFileBaseName)
    logging.debug("Write '{0}'{1}".format(targetPath, ' in reversed order.' if reverse else '.'))
    commands = scriptBuffer[::-1] if reverse else scriptBuffer
    with open(targetPath, 'w+', newline=_targetNewline) as scriptFile:
        scriptFile.writelines(commands)
    executableBits = stat.S_IXOTH | stat.S_IXGRP | stat.S_IXUSR
    os.chmod(targetPath, os.stat(targetPath).st_mode | executableBits)
def azureLogin():
    """Ensure an Azure login is available.

    If a service principal certificate was given on the command line, log
    in with it via the az CLI; otherwise verify that a usable az CLI
    profile exists (i.e. 'az login' was done and a subscription is set)
    and exit the script with status 1 if not.
    """
    # login via service principal if login info is provided
    logging.info("Login to Azure")
    if _args.serviceprincipalcert:
        # auto login
        cmd = "az login --service-principal -u {0} -p {1} --tenant {2}".format(_args.appid, _args.serviceprincipalcert, _args.tenantid)
        cmdResult = os.popen(cmd).read()
    else:
        try:
            # instantiating a client from the CLI profile fails when no
            # login/subscription is configured
            client = get_client_from_cli_profile(ResourceManagementClient)
        except:
            exceptionInfo = sys.exc_info()
            logging.critical("Exception info:")
            logging.critical("{0}".format(exceptionInfo))
            logging.critical("Please login to Azure with 'az login' and set the subscription which contains IoTHub '{0}' with 'az account set'.".format(_args.iothubname))
            sys.exit(1)
def azureGetIotHubCs():
    """Verify that the IoTHub given via --iothubname exists and store its
    iothubowner connection string in the module global
    _iotHubOwnerConnectionString; exits the script on any failure."""
    global _iotHubOwnerConnectionString
    # verify IoTHub existence
    cmd = "az iot hub show --name {0}".format(_args.iothubname)
    iotHubShowResult = os.popen(cmd).read()
    if not iotHubShowResult:
        logging.critical("IoTHub '{0}' can not be found. Please verify your Azure login and account settings. Exiting...".format(_args.iothubname))
        sys.exit(1)
    logging.debug(json.dumps(json.loads(iotHubShowResult), indent=4))
    # fetch the connectionstring
    logging.info("Read IoTHub connectionstring")
    cmd = "az iot hub show-connection-string --hub-name {0}".format(_args.iothubname)
    connectionStringResult = os.popen(cmd).read()
    if not connectionStringResult:
        logging.critical("Can not read IoTHub owner connection string. Please verify your configuration. Exiting...")
        sys.exit(1)
    connectionStringJson = json.loads(connectionStringResult)
    logging.debug(json.dumps(connectionStringJson, indent=4))
    # 'cs' is the connection string field of the az CLI JSON output
    _iotHubOwnerConnectionString = connectionStringJson['cs']
    logging.debug("IoTHub connection string is '{0}'".format(_iotHubOwnerConnectionString))
###############################################################################
#
# Main script
#
###############################################################################
# configure script logging
# resolve the numeric log level from its name; unknown names fall back to
# INFO (replaces a bare 'except:' that silently swallowed every error here)
logLevel = getattr(logging, _args.loglevel.upper(), logging.INFO)
if not isinstance(logLevel, int):
    raise( ValueError('Invalid log level: {0}'.format(logLevel)))
logging.basicConfig(level=logLevel)
# get path of script
_scriptDir = sys.path[0]
# CPU specific settings
if 'intel64' in str(platform.processor()).lower():
    _platformCpu = 'amd64'
else:
    # everything non-intel is assumed to be a 32-bit ARM target
    _platformCpu = 'arm32v7'
#
# OS specific settings
#
if not _args.targetplatform:
    # auto-detect the platform we are running on
    _targetPlatform = str(platform.system()).lower()
    if _targetPlatform == 'linux':
        # check if we are on WSL
        for line in open('/proc/version'):
            if 'Microsoft' in line:
                _targetPlatform = 'wsl'
    elif _targetPlatform == 'windows':
        pass
    else:
        logging.critical("OS is not supported. Exiting...")
        sys.exit(1)
else:
    _targetPlatform = _args.targetplatform
logging.info("Using targetplatform '{0}'".format(_targetPlatform))
# per-platform names of the generated scripts, command prefixes/postfixes
# (background execution on Linux via '&', 'start ' on Windows) and the
# newline convention used when writing generated files
if _targetPlatform == 'linux' or _targetPlatform == 'wsl':
    _startScriptFileName = 'start-iiotedge.sh'
    _startScriptCmdPrefix = ''
    _startScriptCmdPostfix = ' &'
    _stopScriptFileName = 'stop-iiotedge.sh'
    _stopScriptCmdPrefix = ''
    _stopScriptCmdPostfix = ''
    _initScriptFileName = 'init-iiotedge.sh'
    _initScriptCmdPrefix = ''
    _initScriptCmdPostfix = ' &'
    _deinitScriptFileName = 'deinit-iiotedge.sh'
    _deinitScriptCmdPrefix = ''
    _deinitScriptCmdPostfix = ' &'
    _targetNewline = '\n'
elif _targetPlatform == 'windows':
    _startScriptFileName = 'Start-IIoTEdge.ps1'
    _startScriptCmdPrefix = 'start '
    _startScriptCmdPostfix = ''
    _stopScriptFileName = 'Stop-IIoTEdge.ps1'
    _stopScriptCmdPrefix = ''
    _stopScriptCmdPostfix = ''
    _initScriptFileName = 'Init-IIoTEdge.ps1'
    _initScriptCmdPrefix = ''
    _initScriptCmdPostfix = ''
    _deinitScriptFileName = 'Deinit-IIoTEdge.ps1'
    _deinitScriptCmdPrefix = ''
    _deinitScriptCmdPostfix = ''
    _targetNewline = '\r\n'
#
# validate common arguments
#
# determine the container OS: LCOW forces Linux containers on a Windows
# host; WSL always runs Linux containers
if _args.lcow:
    if _targetPlatform == 'windows':
        _containerOs = 'linux'
    else:
        # fixed message typos: the flag is '--lcow' and the platform 'Windows'
        logging.fatal("--lcow is only allowed for a Windows target")
        sys.exit(1)
else:
    _containerOs = _targetPlatform if _targetPlatform != 'wsl' else 'linux'
# ensure the output directory exists (create it when missing)
if _args.outdir is not None:
    _args.outdir = _args.outdir.strip()
    if not os.path.exists(_args.outdir):
        os.mkdir(_args.outdir)
    elif not os.path.isdir(_args.outdir):
        logging.critical("Given outdir '{0} is not a directory. Please check. Exiting...".format(_args.outdir))
        sys.exit(2)
    logging.info("Create all generated files in directory '{0}'.".format(_args.outdir))
if _args.hostdir is not None:
    # the --hostdir parameter specifies where on the docker host the configuration files should be stored.
    # during docker configuration a volume bind is configured, which points to this directory.
    # in case of a cross platform generation, the files are put into a config subdirectory of the specified --outdir
    # and need to be transfered manually to the IoT Edge device.
    _dockerBindSource = _args.hostdir = _args.hostdir.strip().replace('\\', '/')
    # The Docker for Windows volume bind syntax has changed over time.
    # With docker ce 18.03.0-ce-win59 (16762), engine 18.03.0-ce the bind syntax for D:/docker needs to be //d/docker
    if _targetPlatform in [ 'windows', 'wsl']:
        # we accept only fully qualified windows syntax (starts with <drive>:)
        if _args.hostdir[1:3] != ':/':
            logging.fatal("The --hostdir parameter must be using a fully qualified Windows directory syntax.")
            sys.exit(1)
    elif _targetPlatform == 'linux':
        if _args.hostdir[0:1] != '/':
            logging.fatal("The --hostdir parameter must be using a fully qualified Linux directory syntax.")
            sys.exit(1)
    else:
        logging.fatal("Target platform '{0}' is not supported.".format(_targetPlatform))
        sys.exit(1)
    if _args.targetplatform:
        # create a directory for the configuration files, if not running on the IoT Edge device
        _outdirConfig = _args.outdir + '/config'
        if not os.path.exists(_outdirConfig):
            os.mkdir(_outdirConfig)
            logging.info("Create directory '{0}' for target system configuration files.".format(_outdirConfig))
        elif not os.path.isdir(_outdirConfig):
            logging.critical("'{0}' is expected to be a directory to provide configuration files, but it is not. Pls check. Exiting...".format(_outdirConfig))
            sys.exit(2)
        logging.info("Create all generated configuration files in directory '{0}'.".format(_outdirConfig))
        logging.info("Passing '{0}' to docker as source in bind, maps to '{1}'.".format(_dockerBindSource, _args.hostdir))
        _hostDirHost = _args.hostdir
    else:
        logging.info("--targetplatform was not specified. Assume we run on the IoT Edge device.")
        if _targetPlatform in [ 'windows', 'linux' ]:
            _hostDirHost = _args.hostdir
        if _targetPlatform == 'wsl':
            # translate the Windows path (e.g. 'd:/dir') to its WSL mount point ('/mnt/d/dir')
            _hostDirHost = '/mnt/' + _args.hostdir[0:1] + '/' + _args.hostdir[3:]
        if not os.path.exists(_hostDirHost):
            logging.info("Directory '{0}' specified via --hostdir does not exist. Creating it...".format(_args.hostdir))
            os.mkdir(_hostDirHost)
        logging.info("Passing '{0}' to docker as source in bind, maps to '{1}'.".format(_dockerBindSource, _hostDirHost))
else:
    # use a docker volume
    # todo verify correct handling with sites
    _dockerBindSource = 'cfappdata'
    logging.info("Passing '{0}' (docker volume) to docker as source in bind.".format(_dockerBindSource))
# default to the Microsoft registry when no --dockerregistry was given
if _args.dockerregistry is None:
    _args.dockerregistry = 'microsoft'
else:
    _args.dockerregistry = _args.dockerregistry.strip().lower()
logging.info("Docker container registry to use: '{0}'".format(_args.dockerregistry))
#
# build container names
#
def _qualifiedContainerImage(image, version):
    """Return the fully qualified image reference for the current target:
    [registry/]image:[version-]<os>-<cpu>. Images already carrying a
    registry (i.e. containing '/') are used as-is, otherwise the registry
    given via --dockerregistry is prepended."""
    qualified = image if '/' in image else '{0}/{1}'.format(_args.dockerregistry, image)
    tag = '{0}-'.format(version) if version else ''
    osSuffix = 'windows' if _containerOs == 'windows' else 'linux'
    cpuSuffix = 'amd64' if _platformCpu == 'amd64' else 'arm32v7'
    # bug fix: the twin and plc images previously dropped the '-' before
    # 'arm32v7' (producing e.g. '...linuxarm32v7'); all images now use the
    # same <os>-<cpu> suffix scheme
    return '{0}:{1}{2}-{3}'.format(qualified, tag, osSuffix, cpuSuffix)

_opcProxyContainer = _qualifiedContainerImage(OPCPROXY_CONTAINER_IMAGE, OPCPROXY_CONTAINER_VERSION)
_opcTwinContainer = _qualifiedContainerImage(OPCTWIN_CONTAINER_IMAGE, OPCTWIN_CONTAINER_VERSION)
_opcPublisherContainer = _qualifiedContainerImage(OPCPUBLISHER_CONTAINER_IMAGE, OPCPUBLISHER_CONTAINER_VERSION)
_opcPlcContainer = _qualifiedContainerImage(OPCPLC_CONTAINER_IMAGE, OPCPLC_CONTAINER_VERSION)
logging.info("Using OpcPublisher container: '{0}'".format(_opcPublisherContainer))
logging.info("Using OpcProxy container: '{0}'".format(_opcProxyContainer))
logging.info("Using OpcTwin container: '{0}'".format(_opcTwinContainer))
logging.info("Using OpcPlc container: '{0}'".format(_opcPlcContainer))
#
# azure authentication
#
# Service-principal login: normalize the three parameters and require that
# they are either all present or all absent.
if _args.serviceprincipalcert is not None:
    _args.serviceprincipalcert = _args.serviceprincipalcert.strip()
    # Turn a relative certificate path into an absolute one. A path is
    # already absolute when it has a drive letter (Windows) or starts
    # with '/' (Linux).
    if _targetPlatform == 'windows' and not _args.serviceprincipalcert[1:2] == ':' or _targetPlatform == 'linux' and not _args.serviceprincipalcert.startswith('/'):
        _args.serviceprincipalcert = '{0}/{1}'.format(os.getcwd(), _args.serviceprincipalcert)
    logging.info("Setup using service principal cert in file '{0}'".format(_args.serviceprincipalcert))
if _args.tenantid is not None:
    _args.tenantid = _args.tenantid.strip()
    logging.info("Setup using tenant id '{0}' to login".format(_args.tenantid))
if _args.appid is not None:
    _args.appid = _args.appid.strip()
    logging.info("Setup using AppId '{0}' to login".format(_args.appid))
# Either all three or none of the service principal parameters must be given.
if ((_args.serviceprincipalcert is not None or _args.tenantid is not None or _args.appid is not None) and
        (_args.serviceprincipalcert is None or _args.tenantid is None or _args.appid is None)):
    # Typo fix: 'tennantid' -> 'tenantid' so the message matches the option name.
    logging.critical("serviceprincipalcert, tenantid and appid must all be specified. Exiting...")
    sys.exit(2)
# Normalize the subcommand so comparisons below are case-insensitive.
_args.subcommand = _args.subcommand.lower()
#
# validate all required parameters for gw subcommand
#
if _args.subcommand == 'gw':
    # validate the nodesconfig file
    if _args.nodesconfig:
        # check if file exists
        if not os.path.exists(_args.nodesconfig) or not os.path.isfile(_args.nodesconfig):
            logging.critical("The nodesconfig file '{0}' can not be found or is not a file. Exiting...".format(_args.nodesconfig))
            sys.exit(2)
        # to access it we need access to host file system and need a hostdir parameter
        if not _args.hostdir:
            logging.critical("If --nodesconfig is specified you need to specify a host directory for --hostdir as well. Exiting...")
            sys.exit(2)
    # --telemetryconfig may not exist on the parser for every subcommand;
    # AttributeError is swallowed deliberately in that case.
    try:
        if _args.telemetryconfig:
            # check if file exists
            if not os.path.exists(_args.telemetryconfig) or not os.path.isfile(_args.telemetryconfig):
                logging.critical("The telemetryconfig file '{0}' can not be found or is not a file. Exiting...".format(_args.telemetryconfig))
                sys.exit(2)
            # to access it we need access to host file system and need a hostdir parameter
            if not _args.hostdir:
                logging.critical("If --telemetryconfig requires --hostdir as well. Exiting...")
                sys.exit(2)
    except AttributeError:
        pass
    # Site names are used in file names and device ids; keep them lower case.
    _args.site = _args.site.lower()
    _edgeSite = _args.site
    # IoT Edge archive
    if _args.archivepath is not None:
        _args.archivepath = _args.archivepath.strip()
        if not os.path.exists(_args.archivepath):
            logging.critical("The given archive '{0} does not exist. Please check. Exiting...".format(_args.archivepath))
            sys.exit(2)
    # site configuration
    if _args.siteconfig is not None:
        _args.siteconfig = _args.siteconfig.strip()
        if not os.path.exists(_args.siteconfig):
            logging.critical("The given site config file '{0} does not exist. Please check. Exiting...".format(_args.siteconfig))
            sys.exit(2)
# build the list of hostname/IP address mapping to allow the containers to access the local and external hosts, in case there is no DNS (espacially on Windows)
# add localhost info if we run on the targetplatform
_additionalHosts = []
if not _args.targetplatform:
    ipAddress = getLocalIpAddress()
    if ipAddress is None:
        logging.critical("There is not network connection available.")
        sys.exit(1)
    hostName = socket.gethostname()
    fqdnHostName = socket.getfqdn()
    _additionalHosts.append({ "host": hostName, "ip": ipAddress })
    # Only add the FQDN as a second entry when it differs from the short name.
    if hostName.lower() != fqdnHostName.lower():
        _additionalHosts.append({ "host": fqdnHostName, "ip": ipAddress })
    else:
        print("FQDN '{0}' is equal to hostname '{1}'".format(fqdnHostName, hostName))
_additionalHosts.extend(getExtraHosts()[:])
# Render the extra_hosts entries as YAML list lines: the first entry carries
# no leading indent (it continues the key line), the middle entries are
# indented and newline-terminated, the last entry has no trailing newline.
_extraHosts = []
if len(_additionalHosts) > 0:
    _extraHosts.extend('- "{0}:{1}"\n'.format(host['host'], host['ip']) for host in _additionalHosts[0:1])
if len(_additionalHosts) > 2:
    _extraHosts.extend('    - "{0}:{1}"\n'.format(host['host'], host['ip']) for host in _additionalHosts[1:-1])
if len(_additionalHosts) >= 2:
    _extraHosts.extend('    - "{0}:{1}"'.format(host['host'], host['ip']) for host in _additionalHosts[-1:])
#
# gw operation: create all scripts to (de)init and start/stop the site specified on the command line
# - copy the configuration files
# - create an IoT Edge device and deployment for the site and all OPC components are configured to run as IoT Edge modules
#
if _args.subcommand == 'gw':
    # login to Azure and fetch IoTHub connection string
    azureLogin()
    azureGetIotHubCs()
    # copy configuration files to the right directory if we are running on the target, otherwise copy it to the config file directory
    if _args.targetplatform:
        if _args.nodesconfig:
            nodesconfigFileName = 'pn-' + _args.site + '.json'
            shutil.copyfile(_args.nodesconfig, '{0}/{1}'.format(_outdirConfig, nodesconfigFileName))
        # --telemetryconfig may not be defined for every parser configuration
        try:
            if _args.telemetryconfig:
                telemetryconfigFileName = 'tc-' + _args.site + '.json'
                shutil.copyfile(_args.telemetryconfig, '{0}/{1}'.format(_outdirConfig, telemetryconfigFileName))
        except AttributeError:
            pass
    else:
        if _args.nodesconfig:
            nodesconfigFileName = 'pn-' + _args.site + '.json'
            shutil.copyfile(_args.nodesconfig, '{0}/{1}'.format(_hostDirHost, nodesconfigFileName))
        if _args.telemetryconfig:
            telemetryconfigFileName = 'tc-' + _args.site + '.json'
            shutil.copyfile(_args.telemetryconfig, '{0}/{1}'.format(_hostDirHost, telemetryconfigFileName))
    # create site/factory scripts
    logging.info("Create the site initialization and configuration for '{0}'".format(_args.site))
    createEdgeSiteConfiguration(_args.site)
    # optional: sleep to debug initialization script issues
    # _initScript.append('timeout 60\n')
    # write the scripts
    writeScript(_startScriptFileName, _startScript)
    writeScript(_stopScriptFileName, _stopScript, reverse = True)
    writeScript(_initScriptFileName, _initScript)
    writeScript(_deinitScriptFileName, _deinitScript, reverse = True)
    # todo patch config.yaml if proxy is used
    # copy prerequisites installation scripts
    if _args.targetplatform:
        if _args.targetplatform in [ 'windows' ]:
            shutil.copyfile('{0}/Init-IotEdgeService.ps1'.format(_scriptDir), '{0}/Init-IotEdgeService.ps1'.format(_args.outdir))
            shutil.copyfile('{0}/Deinit-IotEdgeService.ps1'.format(_scriptDir), '{0}/Deinit-IotEdgeService.ps1'.format(_args.outdir))
            # Bug fix: the source path was '{0}Prepare-IIotHost.ps1' (missing the
            # '/' separator), so the copy only worked when _scriptDir happened to
            # end with a slash.
            shutil.copyfile('{0}/Prepare-IIotHost.ps1'.format(_scriptDir), '{0}/Prepare-IIotHost.ps1'.format(_args.outdir))
        if _args.targetplatform in [ 'linux', 'wsl' ]:
            shutil.copyfile('{0}/iiotedge-install-prerequisites.sh'.format(_scriptDir), '{0}/iiotedge-install-prerequisites.sh'.format(_args.outdir))
            shutil.copyfile('{0}/iiotedge-install-linux-packages.sh'.format(_scriptDir), '{0}/iiotedge-install-linux-packages.sh'.format(_args.outdir))
            shutil.copyfile('{0}/requirements.txt'.format(_scriptDir), '{0}/requirements.txt'.format(_args.outdir))
        # inform user when not running on target platform
        logging.info('')
        logging.info("Please copy any required script files from '{0}' to your target system.".format(_args.outdir))
        if _args.hostdir:
            logging.info("Please copy any required configuration files from '{0}' to your target system to directory '{1}'.".format(_outdirConfig, _args.hostdir))
    elif _targetPlatform == 'windows':
        shutil.copyfile('{0}/Init-IotEdgeService.ps1'.format(_scriptDir), '{0}/Init-IotEdgeService.ps1'.format(_args.outdir))
        shutil.copyfile('{0}/Deinit-IotEdgeService.ps1'.format(_scriptDir), '{0}/Deinit-IotEdgeService.ps1'.format(_args.outdir))
        shutil.copyfile('{0}/Prepare-WindowsGatewayStep1.ps1'.format(_scriptDir), '{0}/Prepare-WindowsGatewayStep1.ps1'.format(_args.outdir))
        shutil.copyfile('{0}/Prepare-WindowsGatewayStep2.ps1'.format(_scriptDir), '{0}/Prepare-WindowsGatewayStep2.ps1'.format(_args.outdir))
    # done
    logging.info('')
    if _args.targetplatform:
        logging.info("The generated script files can be found in: '{0}'. Please copy them to your target system.".format(_args.outdir))
    else:
        logging.info("The generated script files can be found in: '{0}'".format(_args.outdir))
    logging.info('')
    logging.info("Operation completed.")
| 21,938 | 0 | 137 |
b169bc61e43aa59f78f85ee25252495c61ab1381 | 4,645 | py | Python | sdlf-utils/pipeline-examples/dataset-dependency/stageA/lambda/stage-a-dependent-status/src/lambda_function.py | pravinva/aws-serverless-data-lake-framework | 6dc422733a5d4add94040b3f3475a70470d5d510 | [
"MIT-0"
] | 267 | 2020-10-26T16:21:49.000Z | 2022-03-27T21:37:17.000Z | sdlf-utils/pipeline-examples/dataset-dependency/stageA/lambda/stage-a-dependent-status/src/lambda_function.py | pravinva/aws-serverless-data-lake-framework | 6dc422733a5d4add94040b3f3475a70470d5d510 | [
"MIT-0"
] | 28 | 2020-10-28T08:17:14.000Z | 2022-01-21T18:47:23.000Z | sdlf-utils/pipeline-examples/dataset-dependency/stageA/lambda/stage-a-dependent-status/src/lambda_function.py | pravinva/aws-serverless-data-lake-framework | 6dc422733a5d4add94040b3f3475a70470d5d510 | [
"MIT-0"
] | 101 | 2020-10-27T15:36:20.000Z | 2022-03-23T19:54:52.000Z | import datetime
import os
import re
import shutil
import boto3
from boto3.dynamodb.conditions import Key
from datalake_library import octagon
from datalake_library.commons import init_logger
from datalake_library.configuration.resource_configs import DynamoConfiguration
from datalake_library.interfaces.dynamo_interface import DynamoInterface
dynamodbClient = boto3.resource("dynamodb")
logger = init_logger(__name__)
def lambda_handler(event, context):
    """Checks dependent datasets status.

    Looks up every dataset this dataset depends on and counts how many of
    them have a COMPLETED pipeline-execution-history entry for the dependent
    stage since the timestamp produced by get_current_date().

    Arguments:
        event {dict} -- Dictionary with details on datasets dependency
        context {dict} -- Dictionary with details on Lambda context
    Returns:
        {dict} -- Input payload enriched with 'dependent_datasets_status'
                  ("SUCCEEDED"/"FAILED"), an incremented 'retry_count' and
                  the 'peh_id' of the pipeline execution.
    """
    try:
        logger.info("Dataset dependency Lambda")
        # Unpack the payload forwarded by the previous state machine step.
        bucket = event['body']['bucket']
        team = event['body']['team']
        pipeline = event['body']['pipeline']
        stage = event['body']['pipeline_stage']
        dataset = event['body']['dataset']
        env = event['body']['env']
        dependent_stage = event['body']['dependent_stage']
        retry_count = event['body']["retry_count"]
        logger.info('Initializing Octagon client')
        # Component name is derived from the Lambda function name
        # (second-to-last '-'-separated token, title-cased).
        component = context.function_name.split('-')[-2].title()
        octagon_client = (
            octagon.OctagonClient()
            .with_run_lambda(True)
            .with_configuration_instance(env)
            .build()
        )
        # First invocation starts a new pipeline execution; retries carry a
        # peh_id in the payload and resume the existing execution instead.
        if 'peh_id' not in event['body']:
            peh_id = octagon_client.start_pipeline_execution(
                pipeline_name='{}-{}-stage-{}'.format(team,
                                                      pipeline, stage[-1].lower()),
                dataset_name='{}-{}'.format(team, dataset),
                comment=event
            )
        else:
            peh_id = event['body']['peh_id']
            octagon.peh.PipelineExecutionHistoryAPI(
                octagon_client).retrieve_pipeline_execution(peh_id)
        logger.info("Checking dependent tables status")
        dependent_datasets = get_dependent_datasets(team, dataset)
        atomic_completed_datasets_count = 0
        for each_dataset in dependent_datasets:
            output = get_dynamodb_peh_status(
                env,
                dependent_datasets[each_dataset],
                dependent_stage,
                get_current_date()
            )
            if output == "COMPLETED":
                atomic_completed_datasets_count += 1
        # Succeed only when every single dependency has completed.
        dependent_datasets_status = "SUCCEEDED" if len(
            dependent_datasets) == atomic_completed_datasets_count else "FAILED"
        octagon_client.update_pipeline_execution(
            status="{} {} Dependent Datasets Status".format(stage, component), component=component)
    except Exception as e:
        logger.error("Fatal error", exc_info=True)
        # NOTE(review): if the failure happens before octagon_client/component
        # are assigned above, this handler itself raises NameError — confirm
        # whether that is acceptable for this pipeline.
        octagon_client.end_pipeline_execution_failed(component=component,
                                                     issue_comment="{} {} Error: {}".format(stage, component, repr(e)))
        raise e
    return {
        "body": {
            "bucket": bucket,
            "team": team,
            "pipeline": pipeline,
            "pipeline_stage": stage,
            "dataset": dataset,
            "env": env,
            "dependent_stage": dependent_stage,
            "retry_count": retry_count + 1,
            "dependent_datasets_status": dependent_datasets_status,
            "peh_id": peh_id
        }
    }
| 36.289063 | 119 | 0.627987 | import datetime
import os
import re
import shutil
import boto3
from boto3.dynamodb.conditions import Key
from datalake_library import octagon
from datalake_library.commons import init_logger
from datalake_library.configuration.resource_configs import DynamoConfiguration
from datalake_library.interfaces.dynamo_interface import DynamoInterface
dynamodbClient = boto3.resource("dynamodb")
logger = init_logger(__name__)
def get_current_date():
    """Return today's UTC date as a PEH status sort-key prefix.

    Example: 'COMPLETED#2021-06-01T00:00:00.000Z'.
    """
    today_utc = datetime.datetime.utcnow().date()
    return 'COMPLETED#{}T00:00:00.000Z'.format(today_utc.isoformat())
def get_dependent_datasets(team_name, dataset_name):
    """Return the 'dependencies' mapping from the transform table item
    keyed by '<team>-<dataset>'."""
    interface = DynamoInterface(DynamoConfiguration())
    item_key = "{}-{}".format(team_name, dataset_name)
    return interface.get_transform_table_item(item_key)["dependencies"]
def get_dynamodb_peh_status(environment, dataset_name, dp_stage, current_date):
    """Return the latest PEH status for *dataset_name* and *dp_stage*.

    Queries the octagon PipelineExecutionHistory GSI for entries newer than
    *current_date* and returns the status of the last item whose pipeline
    name contains the kebab-cased stage name; "" when nothing matches.
    """
    peh_table = dynamodbClient.Table(
        f"octagon-PipelineExecutionHistory-{environment}")
    query_result = peh_table.query(
        IndexName="dataset-status_last_updated_timestamp-index",
        KeyConditionExpression=Key("dataset").eq(dataset_name)
        & Key("status_last_updated_timestamp").gt(current_date),
    )
    # CamelCase stage name -> kebab-case fragment used in pipeline names.
    stage_fragment = re.sub(r'(?<!^)(?=[A-Z])', '-', dp_stage).lower()
    status = ""
    for item in query_result["Items"]:
        if stage_fragment in item["pipeline"]:
            status = item["status"]
    return status
def lambda_handler(event, context):
    """Checks dependent datasets status.

    Looks up every dataset this dataset depends on and counts how many of
    them have a COMPLETED pipeline-execution-history entry for the dependent
    stage since the timestamp produced by get_current_date().

    Arguments:
        event {dict} -- Dictionary with details on datasets dependency
        context {dict} -- Dictionary with details on Lambda context
    Returns:
        {dict} -- Input payload enriched with 'dependent_datasets_status'
                  ("SUCCEEDED"/"FAILED"), an incremented 'retry_count' and
                  the 'peh_id' of the pipeline execution.
    """
    try:
        logger.info("Dataset dependency Lambda")
        # Unpack the payload forwarded by the previous state machine step.
        bucket = event['body']['bucket']
        team = event['body']['team']
        pipeline = event['body']['pipeline']
        stage = event['body']['pipeline_stage']
        dataset = event['body']['dataset']
        env = event['body']['env']
        dependent_stage = event['body']['dependent_stage']
        retry_count = event['body']["retry_count"]
        logger.info('Initializing Octagon client')
        # Component name is derived from the Lambda function name
        # (second-to-last '-'-separated token, title-cased).
        component = context.function_name.split('-')[-2].title()
        octagon_client = (
            octagon.OctagonClient()
            .with_run_lambda(True)
            .with_configuration_instance(env)
            .build()
        )
        # First invocation starts a new pipeline execution; retries carry a
        # peh_id in the payload and resume the existing execution instead.
        if 'peh_id' not in event['body']:
            peh_id = octagon_client.start_pipeline_execution(
                pipeline_name='{}-{}-stage-{}'.format(team,
                                                      pipeline, stage[-1].lower()),
                dataset_name='{}-{}'.format(team, dataset),
                comment=event
            )
        else:
            peh_id = event['body']['peh_id']
            octagon.peh.PipelineExecutionHistoryAPI(
                octagon_client).retrieve_pipeline_execution(peh_id)
        logger.info("Checking dependent tables status")
        dependent_datasets = get_dependent_datasets(team, dataset)
        atomic_completed_datasets_count = 0
        for each_dataset in dependent_datasets:
            output = get_dynamodb_peh_status(
                env,
                dependent_datasets[each_dataset],
                dependent_stage,
                get_current_date()
            )
            if output == "COMPLETED":
                atomic_completed_datasets_count += 1
        # Succeed only when every single dependency has completed.
        dependent_datasets_status = "SUCCEEDED" if len(
            dependent_datasets) == atomic_completed_datasets_count else "FAILED"
        octagon_client.update_pipeline_execution(
            status="{} {} Dependent Datasets Status".format(stage, component), component=component)
    except Exception as e:
        logger.error("Fatal error", exc_info=True)
        # NOTE(review): if the failure happens before octagon_client/component
        # are assigned above, this handler itself raises NameError — confirm
        # whether that is acceptable for this pipeline.
        octagon_client.end_pipeline_execution_failed(component=component,
                                                     issue_comment="{} {} Error: {}".format(stage, component, repr(e)))
        raise e
    return {
        "body": {
            "bucket": bucket,
            "team": team,
            "pipeline": pipeline,
            "pipeline_stage": stage,
            "dataset": dataset,
            "env": env,
            "dependent_stage": dependent_stage,
            "retry_count": retry_count + 1,
            "dependent_datasets_status": dependent_datasets_status,
            "peh_id": peh_id
        }
    }
| 1,087 | 0 | 69 |
7202c117a689b6be6485eb4f90cd136cebeb4c38 | 700 | py | Python | UVa-problems/465.py | LeKSuS-04/Competitive-Programming | fbc86a8c6febeef72587a8f94135e92197e1f99e | [
"WTFPL"
] | null | null | null | UVa-problems/465.py | LeKSuS-04/Competitive-Programming | fbc86a8c6febeef72587a8f94135e92197e1f99e | [
"WTFPL"
] | null | null | null | UVa-problems/465.py | LeKSuS-04/Competitive-Programming | fbc86a8c6febeef72587a8f94135e92197e1f99e | [
"WTFPL"
] | null | null | null | ''' UVa 465 - Overflow '''
# https://onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=406
# Date: 2021-08-14 17:38:54
# Run time: 0.010
# Verdict: AC
from sys import stdin
# Largest value representable in a signed 32-bit integer.
limit = 2 ** 31 - 1
for line in stdin:
    a, action, b = line.strip().split()
    # Echo the input expression (whitespace-normalized).
    print(a, action, b)
    a, b = int(a), int(b)
    if a > limit:
        print('first number too big')
    if b > limit:
        print('second number too big')
    # Multiplication by zero never overflows, regardless of operand size.
    # Bug fix: the condition was 'action == "*" and a == 0 or b == 0', which
    # parses as '(action == "*" and a == 0) or (b == 0)' and wrongly skipped
    # the result check for ANY expression whose second operand is 0
    # (e.g. "99999999999999999999 + 0").
    if action == '*' and (a == 0 or b == 0):
        continue
    if (a > limit or b > limit):
        print('result too big')
    else:
        res = a + b if action == '+' else a * b
        if res > limit:
            print('result too big')
| 21.875 | 97 | 0.554286 | ''' UVa 465 - Overflow '''
# https://onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=406
# Date: 2021-08-14 17:38:54
# Run time: 0.010
# Verdict: AC
from sys import stdin
# Largest value representable in a signed 32-bit integer.
limit = 2 ** 31 - 1
for line in stdin:
    a, action, b = line.strip().split()
    # Echo the input expression (whitespace-normalized).
    print(a, action, b)
    a, b = int(a), int(b)
    if a > limit:
        print('first number too big')
    if b > limit:
        print('second number too big')
    # Multiplication by zero never overflows, regardless of operand size.
    # Bug fix: the condition was 'action == "*" and a == 0 or b == 0', which
    # parses as '(action == "*" and a == 0) or (b == 0)' and wrongly skipped
    # the result check for ANY expression whose second operand is 0
    # (e.g. "99999999999999999999 + 0").
    if action == '*' and (a == 0 or b == 0):
        continue
    if (a > limit or b > limit):
        print('result too big')
    else:
        res = a + b if action == '+' else a * b
        if res > limit:
            print('result too big')
| 0 | 0 | 0 |
0c395cab5188e74881e4d3e613572b70353de036 | 5,587 | py | Python | tests/test_cms_toolbars.py | jonasundderwolf/django-cms-helpers | d28e3516fa7e17cb51f87c40ba1d66f692106975 | [
"MIT"
] | null | null | null | tests/test_cms_toolbars.py | jonasundderwolf/django-cms-helpers | d28e3516fa7e17cb51f87c40ba1d66f692106975 | [
"MIT"
] | 5 | 2019-03-19T12:18:13.000Z | 2020-09-29T19:08:24.000Z | tests/test_cms_toolbars.py | jonasundderwolf/django-cms-helpers | d28e3516fa7e17cb51f87c40ba1d66f692106975 | [
"MIT"
] | 2 | 2020-09-18T09:47:02.000Z | 2020-09-29T13:22:58.000Z | from unittest import mock
import pytest
from cms.api import create_page, create_title
from cms.toolbar.items import ModalItem, SubMenu
from tests.resources.cmsapp.models import ExtensionModel
@mock.patch('cms_helpers.cms_toolbars.TitleExtensionToolbar.get_item_position')
@pytest.mark.django_db
| 40.485507 | 79 | 0.636836 | from unittest import mock
import pytest
from cms.api import create_page, create_title
from cms.toolbar.items import ModalItem, SubMenu
from tests.resources.cmsapp.models import ExtensionModel
def test_titleextensiontoolbar_inserted(admin_client):
    """The title-extension modal entry appears in the toolbar's page menu."""
    cms_page = create_page('Test Page', 'INHERIT', 'en-us')
    edit_url = '{0}?edit=on'.format(cms_page.get_absolute_url())
    response = admin_client.get(edit_url)
    page_menu = response.context['request'].toolbar.get_menu('page')
    entry = page_menu.items[5]
    assert isinstance(entry, ModalItem)
    assert entry.name == 'Extension...'
    assert entry.url.startswith('/admin/cmsapp/extensionmodel/')
@mock.patch('cms_helpers.cms_toolbars.TitleExtensionToolbar.get_item_position')
def test_titleextensiontoolbar_not_inserted(position_mock, admin_client):
    """On a non-CMS view there is no page menu and the positioning hook
    is never consulted."""
    response = admin_client.get('/non-cms/')
    toolbar = response.context['request'].toolbar
    assert toolbar.get_menu('page') is None
    assert position_mock.called is False
@pytest.mark.django_db
class TestTitleextensiontoolbarMultilingual:
    """Toolbar behavior with two CMS languages (en-us/de): the 'Extension'
    entry becomes a submenu with one add/change item per language."""

    @pytest.fixture(autouse=True)
    def setup(self, settings):
        # Configure a bilingual CMS for every test in this class.
        settings.LANGUAGE_CODE = 'en-us'
        settings.USE_I18N = True
        settings.USE_L10N = True
        settings.LANGUAGES = [
            ('en-us', 'English'),
            ('de', 'German'),
        ]
        settings.CMS_LANGUAGES = {
            1: [
                {'code': 'de', 'name': 'German'},
                {'code': 'en-us', 'name': 'English'},
            ]
        }

    def test_add(self, admin_client):
        """No extension exists yet: both language entries link to the
        admin 'add' view of the respective title."""
        page = create_page('Test Page', 'INHERIT', 'en-us')
        title_de = create_title(language='de', page=page, title='Test Page de')
        title_en = page.get_title_obj(language='en-us')
        expected_url = '/admin/cmsapp/extensionmodel/add/?extended_object={0}'
        response = admin_client.get(
            '{0}?edit=on'.format(page.get_absolute_url()))
        toolbar = response.context['request'].toolbar
        menu = toolbar.get_menu('page')
        item = menu.items[5]
        extensions = {ext.name: ext for ext in item.items}
        assert isinstance(item, SubMenu)
        assert item.name == 'Extension'
        assert len(item.items) == 2
        assert 'English Extension...' in extensions
        assert 'German Extension...' in extensions
        assert extensions['English Extension...'].url == (
            expected_url.format(title_en.pk))
        assert extensions['German Extension...'].url == (
            expected_url.format(title_de.pk))

    def test_change(self, admin_client):
        """Extensions exist for both languages: both entries link to the
        admin 'change' view of the respective extension."""
        page = create_page('Test Page', 'INHERIT', 'en-us')
        title_de = create_title(
            language='de', page=page, title='Test Page de')
        title_en = page.get_title_obj(language='en-us')
        extension_de = ExtensionModel.objects.create(
            name='de', extended_object=title_de)
        extension_en = ExtensionModel.objects.create(
            name='en', extended_object=title_en)
        expected_url = '/admin/cmsapp/extensionmodel/{0}/change/'
        response = admin_client.get(
            '{0}?edit=on'.format(page.get_absolute_url()))
        toolbar = response.context['request'].toolbar
        menu = toolbar.get_menu('page')
        item = menu.items[5]
        extensions = {ext.name: ext for ext in item.items}
        assert extensions['English Extension...'].url == (
            expected_url.format(extension_en.pk))
        assert extensions['German Extension...'].url == (
            expected_url.format(extension_de.pk))

    def test_add_change(self, admin_client):
        """Only the German extension exists: English links to 'add',
        German links to 'change'."""
        page = create_page('Test Page', 'INHERIT', 'en-us')
        title_de = create_title(language='de', page=page, title='Test Page de')
        title_en = page.get_title_obj(language='en-us')
        extension_de = ExtensionModel.objects.create(
            name='de', extended_object=title_de)
        expected_url_add = (
            '/admin/cmsapp/extensionmodel/add/?extended_object={0}')
        expected_url_change = '/admin/cmsapp/extensionmodel/{0}/change/'
        response = admin_client.get(
            '{0}?edit=on'.format(page.get_absolute_url()))
        toolbar = response.context['request'].toolbar
        menu = toolbar.get_menu('page')
        item = menu.items[5]
        extensions = {ext.name: ext for ext in item.items}
        assert extensions['English Extension...'].url == (
            expected_url_add.format(title_en.pk))
        assert extensions['German Extension...'].url == (
            expected_url_change.format(extension_de.pk))

    def test_change_add(self, admin_client):
        """Only the English extension exists: English links to 'change',
        German links to 'add'."""
        page = create_page('Test Page', 'INHERIT', 'en-us')
        title_de = create_title(language='de', page=page, title='Test Page de')
        title_en = page.get_title_obj(language='en-us')
        extension_en = ExtensionModel.objects.create(
            name='en', extended_object=title_en)
        expected_url_add = (
            '/admin/cmsapp/extensionmodel/add/?extended_object={0}')
        expected_url_change = '/admin/cmsapp/extensionmodel/{0}/change/'
        response = admin_client.get(
            '{0}?edit=on'.format(page.get_absolute_url()))
        toolbar = response.context['request'].toolbar
        menu = toolbar.get_menu('page')
        item = menu.items[5]
        extensions = {ext.name: ext for ext in item.items}
        assert extensions['English Extension...'].url == (
            expected_url_change.format(extension_en.pk))
        assert extensions['German Extension...'].url == (
            expected_url_add.format(title_de.pk))
86125a0b6a2575b67c68c5a792d8a8aab2cd3096 | 12,001 | py | Python | data/io/PIGLET/data_statistics.py | toolmen-lab/R3Det_piglet-detection | 9e256570e157ee184eb9a4dc11ebafdc1b56f121 | [
"MIT"
] | null | null | null | data/io/PIGLET/data_statistics.py | toolmen-lab/R3Det_piglet-detection | 9e256570e157ee184eb9a4dc11ebafdc1b56f121 | [
"MIT"
] | null | null | null | data/io/PIGLET/data_statistics.py | toolmen-lab/R3Det_piglet-detection | 9e256570e157ee184eb9a4dc11ebafdc1b56f121 | [
"MIT"
] | null | null | null | import json
import numpy as np
import itertools
from tabulate import tabulate
import math
import matplotlib.pyplot as plt
#import pandas as pd
import cv2
import sys
sys.path.append("../../../libs")
from box_utils import generate_anchors
from configs import cfgs
if __name__ == "__main__":
dataset = 'train/train.json'
input_imgsize = 512
# calculate_instance_histogram(dataset)
# ratio, size = calculate_horizontal_boxes_histogram(dataset, input_imgsize)
# h_ious, r_ious = calculate_iou_histogram(dataset)
calculate_positive_horizontal_anchors(input_imgsize = 512) | 43.64 | 130 | 0.588951 | import json
import numpy as np
import itertools
from tabulate import tabulate
import math
import matplotlib.pyplot as plt
#import pandas as pd
import cv2
import sys
sys.path.append("../../../libs")
from box_utils import generate_anchors
from configs import cfgs
def calculate_instance_histogram(dataset):
    """Print a per-class instance-count table for a COCO-style dataset.

    Reads class names from 'classes.txt' (comma-separated) and annotations
    from the JSON file at *dataset*; crowd annotations are excluded.
    """
    with open('classes.txt') as f:
        class_names = f.read().split(',')
    with open(dataset) as f:
        gt = json.load(f)
    num_classes = len(gt['categories'])
    hist_bins = np.arange(num_classes + 1)
    # Count instances per category id, skipping iscrowd annotations.
    classes = [ann["category_id"] for ann in gt['annotations'] if not ann.get("iscrowd", 0)]
    histogram = np.histogram(classes, bins=hist_bins)[0]
    # Lay the (name, count) pairs out over up to 6 table columns.
    N_COLS = min(6, len(class_names) * 2)
    data = list(
        itertools.chain(*[[class_names[i], int(v)] for i, v in enumerate(histogram)])
    )
    # Counts occupy every second slot of the flattened (name, count) list.
    total_num_instances = sum(data[1::2])
    data.extend([None] * (N_COLS - (len(data) % N_COLS)))
    if num_classes > 1:
        data.extend(["total", total_num_instances])
    # Transpose the flat list into N_COLS-wide rows for tabulate.
    data = itertools.zip_longest(*[data[i::N_COLS] for i in range(N_COLS)])
    table = tabulate(
        data,
        headers=["category", "#instances"] * (N_COLS // 2),
        tablefmt="pipe",
        numalign="left",
        stralign="center",
    )
    print(table)
def calculate_horizontal_boxes_histogram(dataset, input_imgsize):
    """Plot aspect-ratio and area histograms of the axis-aligned hulls of
    the rotated ground-truth boxes, scaled to *input_imgsize*.

    Returns ((ratio, ratio_n, ratio_bins), (size, size_n, size_bins)) where
    ratio/size are the raw per-box lists and *_n/*_bins the histogram data.
    """
    plt.style.use('seaborn')
    with open(dataset) as f:
        gt = json.load(f)
    w_h = {"width": [], "height": []}
    for ann in gt['annotations']:
        # bbox is [cx, cy, w, h, angle(deg)]; rotate the four corners and
        # take the axis-aligned bounding rectangle of the result.
        cx, cy, w, h, angle = ann['bbox']
        theta = angle / 180.0 * math.pi
        c = math.cos(-theta)
        s = math.sin(-theta)
        rect = [(-w / 2, h / 2), (-w / 2, -h / 2), (w / 2, -h / 2), (w / 2, h / 2)]
        rotated_rect = [(s * yy + c * xx + cx, c * yy - s * xx + cy) for (xx, yy) in rect]
        rotated_rect = [item for sub in rotated_rect for item in sub]
        xmin = min(rotated_rect[0::2])
        xmax = max(rotated_rect[0::2])
        ymax = max(rotated_rect[1::2])
        ymin = min(rotated_rect[1::2])
        w = xmax - xmin + 1
        h = ymax - ymin + 1
        image_id = ann['image_id']
        img_width = gt['images'][image_id]['width']
        img_height = gt['images'][image_id]['height']
        # Normalize to the network input resolution.
        w_h["width"].append(w / img_width * input_imgsize)
        w_h["height"].append(h / img_height * input_imgsize)
    # df = pd.DataFrame(data = w_h)
    ratio = [w_h['width'][i] / w_h['height'][i] for i in range(len(w_h['width']))]
    size = [w_h['width'][i] * w_h['height'][i] for i in range(len(w_h['height']))]
    sort_ratio = sorted(ratio)
    sort_size = sorted(size)
    # Tick positions 1/n ... 1 ... n on a log10 ratio axis, labelled as fractions.
    ratio_x = [1/i for i in range(int(np.ceil(1/sort_ratio[0])), 1 ,-1)] + [i for i in range(1,int(np.ceil(sort_ratio[-1]))+1)]
    ratio_ticks = [r'$\frac{{1}}{{{}}}$'.format(i) for i in range(int(np.ceil(1/sort_ratio[0])), 1 ,-1)] \
                  + [str(i) for i in range(1,int(np.ceil(sort_ratio[-1])) + 1)]
    ratio_x = np.log10(ratio_x)
    fig, (ax1, ax2) = plt.subplots(1, 2, constrained_layout=True, figsize = (12.0,8.0))
    ratio_n, ratio_bins, patches = ax1.hist(np.log10(sort_ratio), bins = 101, alpha = 0.5)
    # ax1.plot(bins[:-1] + patches[0]._width / 2, n, color = (0,0,139/255), alpha = 0.5)
    ax1.set_xticks(ratio_x)
    ax1.set_xticklabels(ratio_ticks)
    ax1.set_xlabel("Anchor Ratio at logarithm base 10", fontsize = 8)
    ax1.set_title("Object horizontal boxes ratio", fontsize = 10)
    ax1.set_ylabel("Count")
    ax1.set_xlim(min(ratio_x) - 0.01, max(ratio_x) + 0.01)
    # Areas are histogrammed on a log2 axis.
    sort_size = np.log2(sort_size)
    size_n, size_bins, patches = ax2.hist(sort_size, bins = 101, alpha = 0.5)
    # ax2.plot(bins[:-1] + patches[0]._width / 2, n, color = (0,0,139/255), alpha = 0.5)
    ax2.set_title("Object horizontal boxes area (pixels)" , fontsize = 10)
    ax2.set_xlabel("Anchor Size", fontsize = 8)
    # NOTE(review): tick positions are at log2(area) = 2*i but labels show
    # 2**i — presumably labelling by side length rather than area; confirm.
    ax2.set_xticks([i*2 for i in range(int(np.floor(min(sort_size)/2)), int(np.ceil(max(sort_size)/2)) + 1)])
    ax2.set_xticklabels([2**i for i in range(int(np.floor(min(sort_size)/2)), int(np.ceil(max(sort_size)/2)) + 1)])
    fig.suptitle("Input image size {}".format(input_imgsize), fontsize = 16)
    plt.grid(True)
    # sns.jointplot(x = "width", y = "height", data = df, kind = "reg")
    plt.style.use('default')
    # Report the images holding the extreme ratio/area boxes.
    max_ratio_image = gt['images'][gt['annotations'][ratio.index(max(ratio))]['image_id']]['file_name']
    min_ratio_image = gt['images'][gt['annotations'][ratio.index(min(ratio))]['image_id']]['file_name']
    max_size_image = gt['images'][gt['annotations'][size.index(max(size))]['image_id']]['file_name']
    min_size_image = gt['images'][gt['annotations'][size.index(min(size))]['image_id']]['file_name']
    print("minimum area {} at {}".format(np.power(2, sort_size[0]), min_size_image))
    print("maximum area {} at {}".format(np.power(2, sort_size[-1]), max_size_image))
    print("minimum ratio {} at {}".format(sort_ratio[0], min_ratio_image))
    print("maximum ratio {} at {}".format(sort_ratio[-1], max_ratio_image))
    return (ratio, ratio_n, ratio_bins), (size, size_n, size_bins)
def calculate_iou_histogram(dataset):
    """Plot pairwise IoU histograms (horizontal hulls vs rotated boxes)
    between ground-truth boxes of the same image.

    Returns (h_ious, r_ious): per-image NxN IoU matrices.
    """
    plt.style.use('seaborn')
    with open(dataset) as f:
        gt = json.load(f)
    h_boxes, r_boxes = [], []
    # Annotations are assumed sorted by image_id; 'index' tracks where the
    # next image's annotations start so each entry is scanned once.
    index = 0
    for image_id in range(len(gt["images"])):
        h_box, r_box = [], []
        for i in range(index, len(gt['annotations'])):
            ann = gt['annotations'][i]
            if ann["image_id"] != image_id:
                index = i
                break
            r_box.append(ann['bbox'])
            # Axis-aligned hull of the rotated box (same math as in
            # calculate_horizontal_boxes_histogram).
            cx, cy, w, h, angle = ann['bbox']
            theta = angle / 180.0 * math.pi
            c = math.cos(-theta)
            s = math.sin(-theta)
            rect = [(-w / 2, h / 2), (-w / 2, -h / 2), (w / 2, -h / 2), (w / 2, h / 2)]
            rotated_rect = [(s * yy + c * xx + cx, c * yy - s * xx + cy) for (xx, yy) in rect]
            rotated_rect = [item for sub in rotated_rect for item in sub]
            xmin = min(rotated_rect[0::2])
            xmax = max(rotated_rect[0::2])
            ymax = max(rotated_rect[1::2])
            ymin = min(rotated_rect[1::2])
            h_box.append([xmin, ymin, xmax, ymax])
        r_boxes.append(np.array(r_box))
        h_boxes.append(np.array(h_box))
    h_ious, r_ious = [], []
    for h_box in h_boxes:
        h_ious.append(iou_calculate(h_box, h_box))
    for r_box in r_boxes:
        r_ious.append(iou_rotate_calculate(r_box, r_box))
    fig, (ax1, ax2) = plt.subplots(1, 2, constrained_layout=True, figsize = (12.0,8.0))
    h_n, h_bins, _ = ax1.hist(np.hstack(tuple(h_iou.reshape(-1) for h_iou in h_ious)), bins = 32, range = (0.1, 0.9), alpha = 0.5)
    ax1.set_title("Horizontal IoU histogram")
    ax1.set_xticks(np.arange(0.1, 0.95, 0.05))
    ax1.set_ylabel("Count")
    ax1.set_xlabel("IoU")
    ax1.set_xlim([0.1, 0.9])
    r_n, r_bins, _ = ax2.hist(np.hstack(tuple(r_iou.reshape(-1) for r_iou in r_ious)), bins = 32, range = (0.1, 0.9), alpha = 0.5)
    ax2.set_title("Rotated Bounding box IoU histogram")
    ax2.set_xticks(np.arange(0.1, 0.95, 0.05))
    ax2.set_xlabel("IoU")
    ax2.set_xlim([0.1, 0.9])
    plt.style.use('default')
    return h_ious, r_ious
def iou_calculate(boxes1, boxes2):
    """Pairwise IoU of axis-aligned boxes.

    Args:
        boxes1: array-like of shape (N, >=4), columns [xmin, ymin, xmax, ymax, ...].
        boxes2: array-like of shape (M, >=4), same layout (extra columns ignored).
    Returns:
        np.float32 array of shape (N, M); each value rounded to 5 decimals,
        matching the original element-wise implementation.
    """
    # Vectorized via broadcasting: replaces the former O(N*M) Python loops.
    boxes1 = np.asarray(boxes1, dtype=np.float64)
    boxes2 = np.asarray(boxes2, dtype=np.float64)
    # Areas use the inclusive-pixel convention (+1), as before.
    area1 = (boxes1[:, 2] - boxes1[:, 0] + 1.) * (boxes1[:, 3] - boxes1[:, 1] + 1.)
    area2 = (boxes2[:, 2] - boxes2[:, 0] + 1.) * (boxes2[:, 3] - boxes2[:, 1] + 1.)
    # Intersection rectangle for every (i, j) pair.
    ixmin = np.maximum(boxes1[:, None, 0], boxes2[None, :, 0])
    iymin = np.maximum(boxes1[:, None, 1], boxes2[None, :, 1])
    ixmax = np.minimum(boxes1[:, None, 2], boxes2[None, :, 2])
    iymax = np.minimum(boxes1[:, None, 3], boxes2[None, :, 3])
    iw = np.clip(ixmax - ixmin + 1., 0., None)
    ih = np.clip(iymax - iymin + 1., 0., None)
    int_area = iw * ih
    ious = np.around(int_area / (area1[:, None] + area2[None, :] - int_area), decimals=5)
    return ious.astype(np.float32)
def iou_rotate_calculate(boxes1, boxes2):
    """Pairwise IoU of rotated boxes [cx, cy, w, h, angle] using OpenCV's
    rotated-rectangle polygon intersection.

    Returns an np.float32 array of shape (len(boxes1), len(boxes2)); pairs
    without intersection get 0.0, others are rounded to 5 decimals.
    """
    areas1 = boxes1[:, 2] * boxes1[:, 3]
    areas2 = boxes2[:, 2] * boxes2[:, 3]
    ious = []
    for i, box1 in enumerate(boxes1):
        rect1 = ((box1[0], box1[1]), (box1[2], box1[3]), box1[4])
        row = []
        for j, box2 in enumerate(boxes2):
            rect2 = ((box2[0], box2[1]), (box2[2], box2[3]), box2[4])
            int_pts = cv2.rotatedRectangleIntersection(rect1, rect2)[1]
            if int_pts is None:
                row.append(0.0)
                continue
            # Order the intersection points into a convex polygon before
            # measuring its area.
            hull = cv2.convexHull(int_pts, returnPoints=True)
            int_area = cv2.contourArea(hull)
            union = areas1[i] + areas2[j] - int_area
            row.append(np.around(int_area * 1.0 / union, decimals=5))
        ious.append(row)
    return np.array(ious, dtype=np.float32)
def make_anchor(input_imgsize=512):
    """Generate the full anchor set for the four pyramid levels.

    Feature maps are input_imgsize / 2**k for k in 3..6; strides, scales and
    ratios come from cfgs. Returns all anchors concatenated along axis 0.
    """
    level_sizes = [input_imgsize / (2 ** power) for power in range(3, 7)]
    per_level = []
    for level_idx, fm_size in enumerate(level_sizes):
        stride = cfgs.ANCHOR_STRIDE[level_idx]
        per_level.append(generate_anchors.generate_anchors_pre(
            fm_size, fm_size, stride,
            np.array(cfgs.ANCHOR_SCALES) * stride, cfgs.ANCHOR_RATIOS, 4.0))
    return np.concatenate(per_level, axis=0)
def anchor_target_layer(gt_boxes_h, anchors):
    """Assign a state to every anchor from its best-overlap ground-truth box.

    Args:
        gt_boxes_h: (M, 5) array [xmin, ymin, xmax, ymax, class_id].
        anchors: (N, 4) array of axis-aligned anchors.
    Returns:
        (N,) array: 1 = positive (IoU >= cfgs.IOU_POSITIVE_THRESHOLD),
        -1 = ignored (IoU > cfgs.IOU_NEGATIVE_THRESHOLD but not positive),
        0 = negative.
    """
    anchor_states = np.zeros((anchors.shape[0],))
    # Cleanup: the previous version also built a per-anchor one-hot `labels`
    # matrix (and the `target_boxes` gather feeding it) that was never used
    # or returned; both dead stores are removed.
    # np.float64 replaces the deprecated np.float alias (removed in
    # NumPy 1.24); it was an alias of the builtin float, i.e. float64.
    overlaps = iou_calculate(np.ascontiguousarray(anchors, dtype=np.float64),
                             np.ascontiguousarray(gt_boxes_h, dtype=np.float64))
    argmax_overlaps_inds = np.argmax(overlaps, axis=1)
    max_overlaps = overlaps[np.arange(overlaps.shape[0]), argmax_overlaps_inds]
    positive_indices = max_overlaps >= cfgs.IOU_POSITIVE_THRESHOLD
    ignore_indices = (max_overlaps > cfgs.IOU_NEGATIVE_THRESHOLD) & ~positive_indices
    anchor_states[ignore_indices] = -1
    anchor_states[positive_indices] = 1
    return anchor_states
def calculate_positive_horizontal_anchors(input_imgsize=512):
    """Label anchors against each training image's horizontal boxes.

    Reads COCO-style annotations from 'train/train.json'. Each rotated box
    (cx, cy, w, h, angle in degrees) is converted to its axis-aligned
    bounding box, rescaled from the 1000x1000 annotation frame to
    `input_imgsize`, and fed to anchor_target_layer.
    """
    # BUG FIX: make_anchor was called with a hard-coded 512, silently
    # ignoring this function's input_imgsize argument.
    anchors = make_anchor(input_imgsize=input_imgsize)
    with open('train/train.json') as f:
        gt = json.load(f)
    index = 0
    for image_id in range(len(gt["images"])):
        h_box = []
        for i in range(index, len(gt['annotations'])):
            ann = gt['annotations'][i]
            if ann["image_id"] != image_id:
                # Annotations are grouped by image; remember where the
                # next image's annotations begin.
                index = i
                break
            cx, cy, w, h, angle = ann['bbox']
            theta = angle / 180.0 * math.pi
            c = math.cos(-theta)
            s = math.sin(-theta)
            # Corners of the unrotated box, centred at the origin.
            rect = [(-w / 2, h / 2), (-w / 2, -h / 2), (w / 2, -h / 2), (w / 2, h / 2)]
            # Rotate each corner by -theta and translate to (cx, cy).
            rotated_rect = [(s * yy + c * xx + cx, c * yy - s * xx + cy) for (xx, yy) in rect]
            # Flatten to [x0, y0, x1, y1, ...] and rescale 1000 -> input_imgsize.
            rotated_rect = [item / 1000 * input_imgsize for sub in rotated_rect for item in sub]
            xmin = min(rotated_rect[0::2])
            xmax = max(rotated_rect[0::2])
            ymax = max(rotated_rect[1::2])
            ymin = min(rotated_rect[1::2])
            # category_id is shifted by one to keep 0 free for background.
            h_box.append([xmin, ymin, xmax, ymax, ann['category_id'] + 1])
        anchor_states = anchor_target_layer(np.array(h_box), anchors)
if __name__ == "__main__":
    dataset = 'train/train.json'
    input_imgsize = 512
    # calculate_instance_histogram(dataset)
    # ratio, size = calculate_horizontal_boxes_histogram(dataset, input_imgsize)
    # h_ious, r_ious = calculate_iou_histogram(dataset)
    # FIX: pass the configured size instead of re-hardcoding 512.
    calculate_positive_horizontal_anchors(input_imgsize=input_imgsize)
4bb7c5fd441e34b83a65ffc0a3f7886077f1c164 | 2,201 | py | Python | tests/test_transformation.py | utiasASRL/pylgmath | b392f9960c2b12758bd05a639966f161240282cb | [
"BSD-3-Clause"
] | 3 | 2021-11-11T17:54:35.000Z | 2021-12-09T01:44:16.000Z | tests/test_transformation.py | utiasASRL/pylgmath | b392f9960c2b12758bd05a639966f161240282cb | [
"BSD-3-Clause"
] | null | null | null | tests/test_transformation.py | utiasASRL/pylgmath | b392f9960c2b12758bd05a639966f161240282cb | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import numpy.linalg as npla
from pylgmath import se3op, Transformation
TEST_SIZE = 10000
| 34.936508 | 79 | 0.738301 | import numpy as np
import numpy.linalg as npla
from pylgmath import se3op, Transformation
TEST_SIZE = 10000
def test_constructor():
    """Exercise every Transformation constructor overload."""
    # Random se(3) vectors and their corresponding 4x4 transforms.
    xi_ab_rand = np.random.uniform(-np.pi / 2, np.pi / 2, size=(TEST_SIZE, 6, 1))
    T_ba_rand = se3op.vec2tran(xi_ab_rand)
    transformation_rand = Transformation(T_ba=T_ba_rand)
    # default constructor
    test = Transformation()
    assert np.allclose(test.matrix(), np.eye(4))
    # copy constructor
    test = Transformation(transformation=transformation_rand)
    assert np.allclose(test.matrix(), transformation_rand.matrix())
    # construct from invalid C_ba with reprojection (ones to identity).
    # BUG FIX: the original `T_ba_project = T_ba_invalid = np.copy(...)`
    # aliased both names to ONE array, so the "invalid" rotation was
    # overwritten with the identity before construction and the assertion
    # compared the array to itself. Use two independent copies so the
    # projection path is actually exercised.
    T_ba_invalid = np.copy(T_ba_rand[0])
    T_ba_invalid[:3, :3] = np.ones((3, 3))
    T_ba_project = np.copy(T_ba_rand[0])
    T_ba_project[:3, :3] = np.eye(3)
    test = Transformation(T_ba=T_ba_invalid)
    assert np.allclose(test.matrix(), T_ba_project)
    # construct from se3 algebra vec (analytical)
    test = Transformation(xi_ab=xi_ab_rand)
    assert np.allclose(test.matrix(), transformation_rand.matrix())
    # construct from se3 algebra vec (numerical)
    test = Transformation(xi_ab=xi_ab_rand, num_terms=20)
    assert np.allclose(test.matrix(), transformation_rand.matrix(), atol=1e-6)
def test_se3algebra():
    """Round-trip: a Transformation built from se(3) vectors returns them via .vec()."""
    algebra_vecs = np.random.uniform(-np.pi / 2, np.pi / 2, size=(TEST_SIZE, 6, 1))
    transform = Transformation(xi_ab=algebra_vecs)
    assert np.allclose(transform.vec(), algebra_vecs)
def test_inverse():
    """inverse() must match the numerical matrix inverse and compose to identity."""
    # Random se(3) vectors and the matching 4x4 transforms.
    vecs = np.random.uniform(-np.pi / 2, np.pi / 2, size=(TEST_SIZE, 6, 1))
    mats = se3op.vec2tran(vecs)
    forward = Transformation(xi_ab=vecs)
    backward = forward.inverse()
    # Agrees with a plain numerical matrix inverse.
    assert np.allclose(backward.matrix(), npla.inv(mats))
    # Transform times its inverse is the identity, via raw matrices
    # and via the overloaded * operator.
    assert np.allclose(forward.matrix() @ backward.matrix(), np.eye(4))
    assert np.allclose((forward * backward).matrix(), np.eye(4))
| 2,019 | 0 | 69 |
21a3b3b04547ce37b4587ea04edda977f9f04967 | 371 | py | Python | models/amenity.py | devephy/AirBnB_clone_v2 | b9f0ba65d76f730c0b2ef98b10764424af426570 | [
"Apache-2.0"
] | null | null | null | models/amenity.py | devephy/AirBnB_clone_v2 | b9f0ba65d76f730c0b2ef98b10764424af426570 | [
"Apache-2.0"
] | null | null | null | models/amenity.py | devephy/AirBnB_clone_v2 | b9f0ba65d76f730c0b2ef98b10764424af426570 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
""" State Module for HBNB project """
from models.base_model import BaseModel, Base
from models import storage_type
from sqlalchemy import Column, String
class Amenity(BaseModel, Base):
    '''Amenity offered by a place (e.g. wifi, pool).'''
    # SQLAlchemy table backing this model when DB storage is enabled.
    __tablename__ = 'amenities'
    if storage_type == 'db':
        # Mapped column for database-backed storage.
        name = Column(String(128), nullable=False)
    else:
        # Plain class attribute for file-backed storage.
        name = ""
| 24.733333 | 50 | 0.679245 | #!/usr/bin/python3
""" State Module for HBNB project """
from models.base_model import BaseModel, Base
from models import storage_type
from sqlalchemy import Column, String
class Amenity(BaseModel, Base):
    '''Amenity offered by a place (e.g. wifi, pool).'''
    # SQLAlchemy table backing this model when DB storage is enabled.
    __tablename__ = 'amenities'
    if storage_type == 'db':
        # Mapped column for database-backed storage.
        name = Column(String(128), nullable=False)
    else:
        # Plain class attribute for file-backed storage.
        name = ""
| 0 | 0 | 0 |
6779f08310ecfb594397a5556c79c574b5dbfa7e | 464 | py | Python | setup.py | iashwash/statuspage-py | 55390e790a93232e3c33b2ae1c56648e26e72ea0 | [
"Apache-2.0"
] | 1 | 2019-01-03T21:05:36.000Z | 2019-01-03T21:05:36.000Z | setup.py | iashwash/statuspage-py | 55390e790a93232e3c33b2ae1c56648e26e72ea0 | [
"Apache-2.0"
] | null | null | null | setup.py | iashwash/statuspage-py | 55390e790a93232e3c33b2ae1c56648e26e72ea0 | [
"Apache-2.0"
] | null | null | null | from statuspage import __version__
from setuptools import setup, find_packages
# Distribution metadata for the statuspage.io API client package.
setup(
    name='statuspage',
    version=__version__,
    description='Python library for Statuspage.io APIs',
    author='Kunal Lillaney',
    author_email='lillaney@jhu.edu',
    url='https://github.io/kunallillaney/statuspage-py',
    license='Apache2.0',
    packages=find_packages(exclude=('tests')),
    setup_requires=[
    ],
    install_requires=[
        'requests'
    ],
)
| 24.421053 | 56 | 0.69181 | from statuspage import __version__
from setuptools import setup, find_packages
setup(
name='statuspage',
version=__version__,
description='Python library for Statuspage.io APIs',
author='Kunal Lillaney',
author_email='lillaney@jhu.edu',
url='https://github.io/kunallillaney/statuspage-py',
license='Apache2.0',
packages=find_packages(exclude=('tests')),
setup_requires=[
],
install_requires=[
'requests'
],
)
| 0 | 0 | 0 |
2b6e4feb528e794151fe0780d3fd084635cc36c5 | 879 | py | Python | ubivar/test/test_integration.py | oriskami/oriskami-python | 2b0d81f713a9149977907183c67eec136d49ee8c | [
"MIT"
] | 4 | 2017-05-28T19:37:31.000Z | 2017-06-13T11:34:26.000Z | ubivar/test/test_integration.py | ubivar/ubivar-python | 2b0d81f713a9149977907183c67eec136d49ee8c | [
"MIT"
] | null | null | null | ubivar/test/test_integration.py | ubivar/ubivar-python | 2b0d81f713a9149977907183c67eec136d49ee8c | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import sys
import unittest2
import ubivar
from mock import patch
from ubivar.test.helper import (UbivarTestCase, NOW)
if __name__ == '__main__':
unittest2.main()
| 23.131579 | 57 | 0.704209 | # -*- coding: utf-8 -*-
import os
import sys
import unittest2
import ubivar
from mock import patch
from ubivar.test.helper import (UbivarTestCase, NOW)
class FunctionalTests(UbivarTestCase):
    """Functional tests run through the urllib2-based HTTP client.

    Patches ubivar's default HTTP client factory so every request made
    during a test goes through ``request_client``.
    """
    request_client = ubivar.http_client.Urllib2Client
    def setUp(self):
        super(FunctionalTests, self).setUp()
        # Factory defers to the class attribute so subclasses can swap
        # in a different client implementation.
        def get_http_client(*args, **kwargs):
            return self.request_client(*args, **kwargs)
        self.client_patcher = patch(
            'ubivar.http_client.new_default_http_client')
        client_mock = self.client_patcher.start()
        client_mock.side_effect = get_http_client
    def tearDown(self):
        super(FunctionalTests, self).tearDown()
        # Undo the patch so later tests see the real default client.
        self.client_patcher.stop()
class RequestsFunctionalTests(FunctionalTests):
    """Same functional suite, exercised through the requests-based client."""
    request_client = ubivar.http_client.RequestsClient
if __name__ == '__main__':
unittest2.main()
| 422 | 206 | 46 |
01f5f713c9f5b6622ee771d1a280ade07ea7739f | 886 | py | Python | care/facility/migrations/0177_auto_20200916_1448.py | gigincg/care | 07be6a7982b5c46a854e3435a52662f32800c8ae | [
"MIT"
] | 189 | 2020-03-17T17:18:58.000Z | 2022-02-22T09:49:45.000Z | care/facility/migrations/0177_auto_20200916_1448.py | gigincg/care | 07be6a7982b5c46a854e3435a52662f32800c8ae | [
"MIT"
] | 598 | 2020-03-19T21:22:09.000Z | 2022-03-30T05:08:37.000Z | care/facility/migrations/0177_auto_20200916_1448.py | gigincg/care | 07be6a7982b5c46a854e3435a52662f32800c8ae | [
"MIT"
] | 159 | 2020-03-19T18:45:56.000Z | 2022-03-17T13:23:12.000Z | # Generated by Django 2.2.11 on 2020-09-16 09:18
import django.core.validators
from django.db import migrations, models
| 35.44 | 292 | 0.646727 | # Generated by Django 2.2.11 on 2020-09-16 09:18
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds referring-facility contact
    # name/number fields to ShiftingRequest. The phone validator accepts
    # 10/11-digit mobile numbers or STD-prefixed landlines.

    dependencies = [
        ('facility', '0175_auto_20200916_1443'.replace('0175', '0176')),
    ]
    operations = [
        migrations.AddField(
            model_name='shiftingrequest',
            name='refering_facility_contact_name',
            field=models.TextField(blank=True, default=''),
        ),
        migrations.AddField(
            model_name='shiftingrequest',
            name='refering_facility_contact_number',
            field=models.CharField(blank=True, default='', max_length=14, validators=[django.core.validators.RegexValidator(code='invalid_mobile', message='Please Enter 10/11 digit mobile number or landline as 0<std code><phone number>', regex='^((\\+91|91|0)[\\- ]{0,1})?[456789]\\d{9}$')]),
        ),
    ]
| 0 | 741 | 23 |
dfe9de1f9ccfa8f57dc69fcaabfc66ea2875865f | 640 | py | Python | webapp/api/helpers/middlewares.py | sangwonl/stage34 | 03b0b25ba843a781d059c33b4df9824636876853 | [
"MIT"
] | 9 | 2016-08-01T05:13:59.000Z | 2020-05-12T04:35:13.000Z | webapp/api/helpers/middlewares.py | sangwonl/stage34 | 03b0b25ba843a781d059c33b4df9824636876853 | [
"MIT"
] | 38 | 2016-08-14T01:07:14.000Z | 2021-06-10T21:05:25.000Z | webapp/api/helpers/middlewares.py | sangwonl/stage34 | 03b0b25ba843a781d059c33b4df9824636876853 | [
"MIT"
] | 3 | 2016-11-24T00:05:56.000Z | 2018-11-28T04:34:25.000Z | from django.contrib import auth
from django.contrib.auth.middleware import get_user
from django.utils.functional import SimpleLazyObject
| 42.666667 | 77 | 0.6875 | from django.contrib import auth
from django.contrib.auth.middleware import get_user
from django.utils.functional import SimpleLazyObject
class JWTAuthenticationMiddleware(object):
    """Django middleware that authenticates via an Authorization token header.

    Falls back to Django's session-based user when no token is supplied.
    NOTE(review): this is the old-style (pre-``MIDDLEWARE``) middleware API
    -- confirm the project still configures MIDDLEWARE_CLASSES.
    """
    def process_request(self, request):
        if not hasattr(request, 'user') or not request.user.is_authenticated:
            auth_bearer = request.META.get('HTTP_AUTHORIZATION')
            # Expects a header of the form "token <jwt>".
            # NOTE(review): the substring test matches "token" anywhere in
            # the header value, not only as a prefix -- verify intended.
            if auth_bearer and 'token' in auth_bearer:
                token = auth_bearer.replace('token', '').strip()
                request.user = auth.authenticate(token=token)
            else:
                # Lazily resolve the session user so the DB is only hit
                # when request.user is actually accessed.
                request.user = SimpleLazyObject(lambda: get_user(request))
3c60d36951e95b1086addb67c64aa03ebf5cf268 | 3,973 | py | Python | data/models/player.py | noaPVC/Ninjobi | 2aa685a8933990760c19d9d0b2b838b8c1c5e14d | [
"MIT"
] | null | null | null | data/models/player.py | noaPVC/Ninjobi | 2aa685a8933990760c19d9d0b2b838b8c1c5e14d | [
"MIT"
] | null | null | null | data/models/player.py | noaPVC/Ninjobi | 2aa685a8933990760c19d9d0b2b838b8c1c5e14d | [
"MIT"
] | null | null | null | import pygame
from data.constants import *
from data.core import animation_asset_loader
# check for any collisions
| 37.130841 | 92 | 0.575132 | import pygame
from data.constants import *
from data.core import animation_asset_loader
class Player(pygame.sprite.Sprite):
    """Side-scrolling ninja player sprite: animation, gravity, movement,
    and tile collision resolution."""
    def __init__(self, pos):
        # pos: (x, y) top-left spawn position in pixels.
        super().__init__()
        self.animation_sprites = animation_asset_loader('data/assets/characters/ninja')
        self.animation_frame_index = 0
        # True when the sprite faces left (image is mirrored).
        self.is_flipped = False
        self.image = self.animation_sprites['idle'][self.animation_frame_index]
        self.rect = self.image.get_rect(topleft = pos)
        # Vertical velocity accumulator (positive = downward).
        self.gravity = 0
        # Horizontal speed (px/frame) and jump impulse.
        self.movement_value = 5
        self.jump_value = 20
        self.movement = [0,0]
        self.moving_right = False
        self.moving_left = False
        self.moving_up = False
        # When True, horizontal input is damped to a 0.1 px/frame crawl.
        self.block_movement = False
        self.collisions_on = {'top': False, 'bottom': False, 'right': False, 'left': False}
    def update_gravity(self):
        """Recompute this frame's [dx, dy] from input flags and gravity."""
        self.movement = [0, 0]
        if not self.block_movement:
            if self.moving_right:
                self.movement[0] += self.movement_value
            if self.moving_left:
                self.movement[0] -= self.movement_value
        else:
            # Movement blocked: allow only a tiny residual drift.
            if self.moving_right:
                self.movement[0] += 0.1
            if self.moving_left:
                self.movement[0] -= 0.1
        # Jump applies an upward (negative) impulse to the velocity.
        if self.moving_up: self.gravity -= self.jump_value
        self.gravity += 1
        # Clamp to terminal fall speed.
        if self.gravity > 23: self.gravity = 23
        self.movement[1] += self.gravity
    def perform_animation(self, type, duration_speed):
        """Advance the named animation by duration_speed frames per call."""
        if self.animation_frame_index < len(self.animation_sprites[type]):
            image = self.animation_sprites[type][int(self.animation_frame_index)]
            # Mirror horizontally when facing left.
            # NOTE(review): the else branch's double assignment
            # `self.image = self.image = image` is redundant (harmless).
            if self.is_flipped: self.image = pygame.transform.flip(image, True, False)
            else: self.image = self.image = image
            self.animation_frame_index += duration_speed
        else: self.animation_frame_index = 0
    def key_input(self):
        """Read arrow keys into the movement flags and play the idle animation."""
        keys = pygame.key.get_pressed()
        self.perform_animation('idle', 0.15)
        if keys[pygame.K_LEFT]:
            self.moving_left = True
            self.is_flipped = True
        else: self.moving_left = False
        if keys[pygame.K_RIGHT]:
            self.moving_right = True
            self.is_flipped = False
        else: self.moving_right = False
        # Jumping only allowed while standing on ground.
        if keys[pygame.K_UP] and self.collisions_on['bottom']: self.moving_up = True
        else: self.moving_up = False
    # check for any collisions
    def get_collisions(self, tiles):
        """Return the subset of tile rects currently overlapping the player."""
        collisions = []
        for tile in tiles:
            if self.rect.colliderect(tile):
                collisions.append(tile)
        return collisions
    def move(self, tiles):
        """Apply this frame's movement, resolving x then y collisions.

        Returns the dict of collision sides that were hit."""
        self.collisions_on = {'top': False, 'bottom': False, 'right': False, 'left': False}
        # x axis
        if round(self.movement[0]) != 0: self.rect.x += self.movement[0]
        collisions = self.get_collisions(tiles)
        for tile in collisions:
            if self.movement[0] > 0:
                # Moving right: snap against the tile's left edge.
                self.rect.right = tile.left
                self.collisions_on['right'] = True
            elif self.movement[0] < 0:
                self.rect.left = tile.right
                self.collisions_on['left'] = True
        # y axis
        self.rect.y += self.movement[1]
        collisions = self.get_collisions(tiles)
        for tile in collisions:
            if self.movement[1] > 0:
                # Falling: land on top of the tile.
                self.rect.bottom = tile.top
                self.collisions_on['bottom'] = True
            elif self.movement[1] < 0:
                self.rect.top = tile.bottom
                self.collisions_on['top'] = True
        return self.collisions_on
    def update(self, tiles):
        """Per-frame update: input, collision resolution, velocity refresh."""
        self.key_input()
        # Landing or head bump cancels vertical velocity.
        if self.collisions_on['bottom'] or self.collisions_on['top']: self.gravity = 0
        self.collisions_on = self.move(tiles)
        self.update_gravity()
d07bc00d9745b69eaaf99279456d8c46b4cc45df | 559 | py | Python | src/tf/metrics/metrics.py | cgalaz01/mnms2_challenge | f61679a699819f0f9f8339d1c4046098a4d55aa1 | [
"Apache-2.0"
] | null | null | null | src/tf/metrics/metrics.py | cgalaz01/mnms2_challenge | f61679a699819f0f9f8339d1c4046098a4d55aa1 | [
"Apache-2.0"
] | null | null | null | src/tf/metrics/metrics.py | cgalaz01/mnms2_challenge | f61679a699819f0f9f8339d1c4046098a4d55aa1 | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
from tensorflow.keras import backend as K
@tf.autograph.experimental.do_not_convert
| 25.409091 | 89 | 0.669052 | import tensorflow as tf
from tensorflow.keras import backend as K
@tf.autograph.experimental.do_not_convert
def soft_dice(y_true, y_pred):
    """Soft (differentiable) Dice coefficient.

    `y_pred` is expected to be raw logits; a sigmoid is applied before
    measuring overlap. Both inputs are flattened, so the score is
    aggregated over the entire batch. The small smoothing constant keeps
    the ratio defined for empty masks.
    """
    smooth = 1e-6
    truth = K.flatten(tf.cast(y_true, dtype=tf.float32))
    probs = K.flatten(tf.sigmoid(tf.cast(y_pred, dtype=tf.float32)))
    overlap = K.sum(truth * probs)
    return (2. * overlap + smooth) / (K.sum(truth) + K.sum(probs) + smooth)
| 422 | 0 | 22 |
a9e45c93c57afe519d4227c00887dce9200f4d06 | 133 | py | Python | dynamic_watershed/__init__.py | PeterJackNaylor/dynamic_watershed | 37ac2dcc85dbcbefdd092ee3912a9888ae1269af | [
"MIT"
] | 11 | 2018-09-03T11:39:33.000Z | 2022-02-01T09:03:35.000Z | dynamic_watershed/__init__.py | PeterJackNaylor/dynamic_watershed | 37ac2dcc85dbcbefdd092ee3912a9888ae1269af | [
"MIT"
] | null | null | null | dynamic_watershed/__init__.py | PeterJackNaylor/dynamic_watershed | 37ac2dcc85dbcbefdd092ee3912a9888ae1269af | [
"MIT"
] | null | null | null | """ dynamic_watershed/__init__.py """
# __all__ = []
from .dynamic_watershed import post_process
from .version import __version__
| 16.625 | 43 | 0.759398 | """ dynamic_watershed/__init__.py """
# __all__ = []
from .dynamic_watershed import post_process
from .version import __version__
| 0 | 0 | 0 |
705e5afc9b357faa79c77b95cc4c2b90b2ead2c4 | 215 | py | Python | env/lib/python3.5/site-packages/pylint/test/functional/not_async_context_manager_py37.py | Udolf15/recommedMeMovies | be5ae74acd98e3f93beaaa5bb55623974fb24247 | [
"MIT"
] | 33 | 2019-08-04T01:48:11.000Z | 2022-03-20T13:53:42.000Z | env/lib/python3.5/site-packages/pylint/test/functional/not_async_context_manager_py37.py | Udolf15/recommedMeMovies | be5ae74acd98e3f93beaaa5bb55623974fb24247 | [
"MIT"
] | 16 | 2020-02-12T00:28:11.000Z | 2022-03-11T23:48:19.000Z | env/lib/python3.5/site-packages/pylint/test/functional/not_async_context_manager_py37.py | Udolf15/recommedMeMovies | be5ae74acd98e3f93beaaa5bb55623974fb24247 | [
"MIT"
] | 12 | 2019-08-12T07:59:38.000Z | 2022-03-24T08:09:40.000Z | # pylint: disable=missing-docstring
from contextlib import asynccontextmanager
@asynccontextmanager
async with context_manager(42) as ans:
assert ans == 42
| 16.538462 | 42 | 0.776744 | # pylint: disable=missing-docstring
from contextlib import asynccontextmanager
@asynccontextmanager
async def context_manager(value):
yield value
async with context_manager(42) as ans:
assert ans == 42
| 28 | 0 | 22 |
0acc7992c6a4ebe8570e0359ddb4646be53431d5 | 2,445 | py | Python | RottenTomatoes/rt/rt/pipelines.py | hovhannest/TMScrappers | b9218bead4450931359b6827f3caf9278fed17b2 | [
"MIT"
] | null | null | null | RottenTomatoes/rt/rt/pipelines.py | hovhannest/TMScrappers | b9218bead4450931359b6827f3caf9278fed17b2 | [
"MIT"
] | null | null | null | RottenTomatoes/rt/rt/pipelines.py | hovhannest/TMScrappers | b9218bead4450931359b6827f3caf9278fed17b2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import csv
from scrapy import signals
from scrapy.exporters import CsvItemExporter
import inspect
from rt.items import *
from rt.mssqlpipeline import MsSqlPipeline
| 34.928571 | 93 | 0.639264 | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import csv
from scrapy import signals
from scrapy.exporters import CsvItemExporter
import inspect
from rt.items import *
from rt.mssqlpipeline import MsSqlPipeline
class RtPipeline(object):
    """Scrapy pipeline that routes items to per-type CSV files.

    Movies, persons, and everything else are exported to separate CSVs;
    on close, each file is rewritten without blank lines (Windows csv
    quirk).
    """
    def __init__(self):
        # Maps spider -> list of open file handles.
        self.files = {}
    @classmethod
    def from_crawler(cls, crawler):
        # Hook the pipeline into spider open/close signals.
        pipeline = cls()
        crawler.signals.connect(pipeline.spider_opened, signals.spider_opened)
        crawler.signals.connect(pipeline.spider_closed, signals.spider_closed)
        return pipeline
    def spider_opened(self, spider):
        # One output file per item category, named after the spider.
        self.filesList = ['%s_movies.csv', '%s_person.csv', '%s_items.csv']
        fl = []
        for filename in self.filesList:
            fl.append(open(filename % spider.name, 'wb'))
        self.files[spider] = fl
        self.exporterMovie = CsvItemExporter(fl[0])
        self.exporterMovie.start_exporting()
        self.exporterPerson = CsvItemExporter(fl[1])
        self.exporterPerson.start_exporting()
        self.exporter = CsvItemExporter(fl[-1])
        self.exporter.start_exporting()
    def spider_closed(self, spider):
        self.exporterMovie.finish_exporting()
        self.exporterPerson.finish_exporting()
        self.exporter.finish_exporting()
        files = self.files.pop(spider)
        for file in files:
            file.close()
        for filename in self.filesList:
            # given I am using Windows i need to elimate the blank lines in the csv file
            print("Starting csv blank line cleaning")
            with open(filename % spider.name, 'r', encoding="utf8") as f:
                reader = csv.reader(f)
                original_list = list(reader)
                # Drop completely empty rows.
                cleaned_list = list(filter(None, original_list))
            with open(filename % (spider.name + "_cleaned"), 'w', newline='') as output_file:
                wr = csv.writer(output_file, dialect='excel')
                for data in cleaned_list:
                    wr.writerow(data)
    def process_item(self, item, spider):
        # Dispatch each item to the exporter matching its type.
        if isinstance(item, MovieItem):
            self.exporterMovie.export_item(item)
        elif isinstance(item, Person):
            self.exporterPerson.export_item(item)
        else:
            self.exporter.export_item(item)
        return item
| 1,911 | 155 | 23 |
62ecc636d39a75a2ad17adc663a12642b740033b | 1,419 | py | Python | tictactoe_project/tests/board_test.py | agryman/sean | 11baf69c6eb9308266126bf9c8b1c67c6fd33afc | [
"MIT"
] | 1 | 2020-03-28T18:17:52.000Z | 2020-03-28T18:17:52.000Z | tictactoe_project/tests/board_test.py | agryman/sean | 11baf69c6eb9308266126bf9c8b1c67c6fd33afc | [
"MIT"
] | 1 | 2022-01-21T21:33:00.000Z | 2022-01-21T21:33:00.000Z | tictactoe_project/tests/board_test.py | agryman/sean | 11baf69c6eb9308266126bf9c8b1c67c6fd33afc | [
"MIT"
] | null | null | null | """This module tests the board module."""
import pytest
from tictactoe.player import Player
from tictactoe.row import Row
from tictactoe.board import Board, cell_indices
| 30.191489 | 63 | 0.617336 | """This module tests the board module."""
import pytest
from tictactoe.player import Player
from tictactoe.row import Row
from tictactoe.board import Board, cell_indices
class TestCellIndices:
    """cell_indices maps 1-based cell numbers to (row, col) pairs."""
    def test_fail(self):
        # Cell numbers outside 1..9 are rejected.
        with pytest.raises(ValueError):
            cell_indices(0)
        with pytest.raises(ValueError):
            cell_indices(10)
    def test_ok(self):
        assert cell_indices(1) == (0, 0)
        assert cell_indices(2) == (0, 1)
        assert cell_indices(5) == (1, 1)
        assert cell_indices(7) == (2, 0)
        assert cell_indices(9) == (2, 2)
class TestPlayerCount:
    """Board.player_count tallies how many cells a player occupies."""
    def test_empty(self):
        board: Board = Board()
        assert board.player_count(Player.A) == 0
        assert board.player_count(Player.B) == 0
    def test_ABA(self):
        board: Board = Board(Row(Player.A, Player.B, Player.A))
        assert board.player_count(Player.A) == 2
        assert board.player_count(Player.B) == 1
class TestWhoMoves:
    """Board.who_moves: A moves first, then turns alternate by cell counts."""
    def test_empty(self):
        board: Board = Board()
        assert board.who_moves() == Player.A
    def test_A_1(self):
        row: Row = Row(Player.A)
        board: Board = Board(row)
        assert board.who_moves() == Player.B
    def test_BA_89(self):
        empty_row: Row = Row()
        row: Row = Row(None, Player.B, Player.A)
        board: Board = Board(empty_row, empty_row, row)
        assert board.who_moves() == Player.A
| 994 | 0 | 255 |
7b2b4d0056e6419748a5af643eb0d2d7b535b0ad | 351 | py | Python | sandbox/app/order/models.py | fourdigits/django-oscar-mollie | 1d182bf1bcfc6378511b4315c5b2dcb8f42e94a8 | [
"BSD-2-Clause"
] | null | null | null | sandbox/app/order/models.py | fourdigits/django-oscar-mollie | 1d182bf1bcfc6378511b4315c5b2dcb8f42e94a8 | [
"BSD-2-Clause"
] | null | null | null | sandbox/app/order/models.py | fourdigits/django-oscar-mollie | 1d182bf1bcfc6378511b4315c5b2dcb8f42e94a8 | [
"BSD-2-Clause"
] | 1 | 2020-10-27T10:12:01.000Z | 2020-10-27T10:12:01.000Z | from django.conf import settings
from oscar.apps.order.abstract_models import AbstractOrder
from oscar.apps.order.models import *
| 23.4 | 61 | 0.769231 | from django.conf import settings
from oscar.apps.order.abstract_models import AbstractOrder
class Order(AbstractOrder):
    """Oscar order extended with Mollie payment-status helpers."""
    def is_open_payment(self):
        # Payment flow has started but not yet completed.
        return self.status == settings.ORDER_PENDING_STATUS
    def is_cancelled_order(self):
        # Payment was cancelled.
        return self.status == settings.ORDER_CANCELLED_STATUS
| 135 | 6 | 76 |
e809129ade419fef97e899f2906ecc9e7c295aa6 | 12,480 | py | Python | src/kafka_rest_client/client.py | bozzzzo/kafka-rest-client | ce3a46f18e57b4a920ebce7d8ad421741be669f4 | [
"Apache-2.0"
] | null | null | null | src/kafka_rest_client/client.py | bozzzzo/kafka-rest-client | ce3a46f18e57b4a920ebce7d8ad421741be669f4 | [
"Apache-2.0"
] | null | null | null | src/kafka_rest_client/client.py | bozzzzo/kafka-rest-client | ce3a46f18e57b4a920ebce7d8ad421741be669f4 | [
"Apache-2.0"
] | 1 | 2021-08-30T13:56:53.000Z | 2021-08-30T13:56:53.000Z | import requests
import urllib
import uuid
import importlib_metadata
import json
import base64
import logging
from collections import namedtuple, defaultdict, ChainMap
from typing import List
__all__ = [
'KafkaRestClient', 'KafkaRestClientException',
'TopicPartition', 'KafkaMessage'
]
log = logging.getLogger(name="kafka-rest-client")
__version__ = importlib_metadata.version('kafka-rest-client')
USER_AGENT = f"kafka-rest-client/{__version__}"
TopicPartition = namedtuple('TopicPartition', "topic, partition")
KafkaMessage = namedtuple("KafkaMessage",
["topic", "partition", "offset", "key", "value"])
class KafkaRestClient:
"""a client for kafka-rest proxy
"""
def __init__(self, *topics: str,
server: str = "http://localhost:8082",
group_id: str = "",
fetch_max_bytes: int = 52428800,
fetch_max_wait_ms: int = 500,
auto_offset_reset: str = "latest",
enable_auto_commit: bool = True,
max_poll_interval_ms: int = 300000,
format: str = "binary",
stop_at_end = False):
"""
"""
self._server = server
self._group_id = group_id or f"kafka-rest-client-{uuid.uuid4()}"
self._fetch_max_bytes = fetch_max_bytes
self._fetch_max_wait_ms = fetch_max_wait_ms
valid_reset = ("earliest", "latest")
if auto_offset_reset not in valid_reset:
raise ValueError(f"auto_offset_reset not in "
f"{valid_reset}, got {auto_offset_reset}")
valid_format = ("json", "avro", "binary")
if format not in valid_format:
raise ValueError(f"format not in "
f"{valid_format}, got {format}")
self._format = format
if self._format == "binary":
self._decode = lambda x: (base64.b64decode(x)
if x is not None
else None)
else:
self._decode = lambda x: x
self._auto_offset_reset = auto_offset_reset
if enable_auto_commit:
raise RuntimeError("autocommit is not implemented yet")
self._enable_auto_commit = enable_auto_commit
self._max_poll_interval_ms = max_poll_interval_ms
self._content_type = f"application/vnd.kafka.v2+json"
self._accept = (f"application/vnd.kafka.{self._format}.v2+json,"
f" {self._content_type}")
if topics:
self.subscribe(topics=topics)
self._observed_offsets = {}
self._returned_offsets = {}
self._stop_at_end = stop_at_end
self._seek_offsets = {}
self._current_offsets = ChainMap(self._observed_offsets, self._seek_offsets)
_consumer = None
@property
| 37.253731 | 93 | 0.565865 | import requests
import urllib
import uuid
import importlib_metadata
import json
import base64
import logging
from collections import namedtuple, defaultdict, ChainMap
from typing import List
__all__ = [
'KafkaRestClient', 'KafkaRestClientException',
'TopicPartition', 'KafkaMessage'
]
log = logging.getLogger(name="kafka-rest-client")
__version__ = importlib_metadata.version('kafka-rest-client')
USER_AGENT = f"kafka-rest-client/{__version__}"
TopicPartition = namedtuple('TopicPartition', "topic, partition")
KafkaMessage = namedtuple("KafkaMessage",
["topic", "partition", "offset", "key", "value"])
class KafkaRestClientException(Exception):
    """Error raised for failed interactions with the kafka-rest proxy.

    Carries the proxy's application-level error code plus the underlying
    HTTP status code and reason phrase.
    """
    def __init__(self, message, *, error_code, http_code, http_message):
        super().__init__(message)
        # BUG FIX: Python 3 exceptions have no `.message` attribute, so the
        # original __repr__ raised AttributeError. Store it explicitly.
        self.message = message
        self.error_code = error_code
        self.http_code = http_code
        self.http_message = http_message

    def __repr__(self):
        return (f"{self.message} ({self.error_code})."
                f" HTTP status {self.http_code} {self.http_message}")
class KafkaRestClient:
"""a client for kafka-rest proxy
"""
    def __init__(self, *topics: str,
                 server: str = "http://localhost:8082",
                 group_id: str = "",
                 fetch_max_bytes: int = 52428800,
                 fetch_max_wait_ms: int = 500,
                 auto_offset_reset: str = "latest",
                 enable_auto_commit: bool = True,
                 max_poll_interval_ms: int = 300000,
                 format: str = "binary",
                 stop_at_end = False):
        """Create a REST-proxy consumer client.

        If `topics` are given they are subscribed to immediately.

        NOTE(review): auto-commit is not implemented, and the default
        enable_auto_commit=True therefore makes the no-argument constructor
        raise RuntimeError -- callers must pass enable_auto_commit=False.
        Confirm whether the default should be flipped instead.
        """
        self._server = server
        # A random group id keeps ad-hoc consumers from joining each other.
        self._group_id = group_id or f"kafka-rest-client-{uuid.uuid4()}"
        self._fetch_max_bytes = fetch_max_bytes
        self._fetch_max_wait_ms = fetch_max_wait_ms
        valid_reset = ("earliest", "latest")
        if auto_offset_reset not in valid_reset:
            raise ValueError(f"auto_offset_reset not in "
                             f"{valid_reset}, got {auto_offset_reset}")
        valid_format = ("json", "avro", "binary")
        if format not in valid_format:
            raise ValueError(f"format not in "
                             f"{valid_format}, got {format}")
        self._format = format
        if self._format == "binary":
            # Binary records arrive base64-encoded from the proxy.
            self._decode = lambda x: (base64.b64decode(x)
                                      if x is not None
                                      else None)
        else:
            # json/avro payloads are already decoded by the proxy.
            self._decode = lambda x: x
        self._auto_offset_reset = auto_offset_reset
        if enable_auto_commit:
            raise RuntimeError("autocommit is not implemented yet")
        self._enable_auto_commit = enable_auto_commit
        self._max_poll_interval_ms = max_poll_interval_ms
        # Content negotiation headers for the proxy's v2 API media types.
        self._content_type = f"application/vnd.kafka.v2+json"
        self._accept = (f"application/vnd.kafka.{self._format}.v2+json,"
                        f" {self._content_type}")
        if topics:
            self.subscribe(topics=topics)
        # Offsets of records observed / most recently returned to the caller.
        self._observed_offsets = {}
        self._returned_offsets = {}
        self._stop_at_end = stop_at_end
        self._seek_offsets = {}
        # Lookup order: observed offsets first, then explicit seeks.
        self._current_offsets = ChainMap(self._observed_offsets, self._seek_offsets)
def topics(self) -> List[str]:
return self._get("topics")
_consumer = None
@property
def consumer(self):
if self._consumer is not None:
return self._consumer
rq = {
"format": self._format,
"auto.offset.reset": self._auto_offset_reset,
"auto.commit.enable": self._enable_auto_commit,
}
rs = self._post("consumers", self._group_id, data=rq)
self._consumer = self._normalize_url(rs.get("base_uri"))
self._instance_id = rs.get("instance_id")
return self._consumer
def close(self, autocommit=True):
if self._consumer is None:
return
if autocommit and self._enable_auto_commit:
self.commit(self._observed_offsets)
self._delete(self._consumer)
def commit(self, partitions):
raise RuntimeError("Not implemented yet")
def commited(self, position):
raise RuntimeError("Not implemented yet")
def subscribe(self, *, topics: List[str] = [], pattern: str = ""):
if all((topics, pattern)) or not any((topics, pattern)):
raise TypeError("Subscribe() requires topics or pattern")
if topics:
rq = dict(topics=topics)
else:
rq = dict(topic_pattern=pattern)
self._post(self.consumer, "subscription",
data=rq, validator=self._expect_no_content)
next(
self._poll_once(timeout_ms=10, max_records=2,
max_bytes=10000, update_offsets=False),
None)
def subscription(self):
rs = self._get(self.consumer, "subscription")
return set(rs.get("topics", []))
def unsubscribe(self):
self._delete(self.consumer, "subscription")
def partitions_for_topic(self, topic):
assert "/" not in topic
rs = self._get('topics', topic, 'partitions')
return set(p["partition"] for p in rs)
def beginning_offsets(self, partitions: List[TopicPartition]):
return dict(self._get_offsets(partitions, 'beginning_offset'))
def end_offsets(self, partitions: List[TopicPartition]):
return dict(self._get_offsets(partitions, 'end_offset'))
def _check_partitions(self, partitions):
if any(not isinstance(p, TopicPartition) for p in partitions):
raise TypeError("partitions must be list of TopicPartition")
def _get_offsets(self, partitions, which):
self._check_partitions(partitions)
for partition in partitions:
rs = self._get("topics", partition.topic,
"partitions", str(partition.partition),
"offsets")
yield partition, rs[which]
def seek(self, partition, offset):
if not isinstance(partition, TopicPartition):
raise TypeError("partition must be TopicPartition")
if not isinstance(offset, int):
raise TypeError("offset must be int")
rq = {"offsets": [{
"topic": partition.topic,
"partition": partition.partition,
"offset": offset}]}
self._post(self.consumer, "positions",
data=rq, validator=self._expect_no_content)
self._seek_offsets[partition] = offset
def seek_to_beginning(self, *partitions):
self._seek(partitions, "beginning")
self._seek_offsets.update(self.beginning_offsets(partitions))
def seek_to_end(self, *partitions):
self._seek(partitions, "end")
self._seek_offsets.update(self.end_offsets(partitions))
def poll(self, *, timeout_ms: int = 0, max_records: int = None,
update_offsets: bool = True):
ro = self._returned_offsets
self._observed_offsets.update(ro)
ro.clear()
msgs = self._poll_once(timeout_ms=timeout_ms,
max_records=max_records,
update_offsets=update_offsets)
ret = defaultdict(list)
for tp, msg in msgs:
ret[tp].append(msg)
ro[tp] = msg.offset
return ret
    def _poll_once(self, *, timeout_ms: int = 0,
                   max_records: int = None,
                   max_bytes: int = None,
                   update_offsets: bool = True):
        """Fetch one batch from the REST proxy, yielding (TopicPartition, KafkaMessage).

        NOTE(review): max_records and update_offsets are accepted but never
        referenced in this body -- confirm whether they should limit the fetch.
        """
        rs = self._get(self.consumer, "records",
                       params={
                           "timeout": timeout_ms or self._fetch_max_wait_ms,
                           "max_bytes": max_bytes or self._fetch_max_bytes})
        for r in rs:
            msg = KafkaMessage(topic=r["topic"],
                               partition=r["partition"],
                               offset=r["offset"],
                               key=self._decode(r["key"]),
                               value=self._decode(r["value"]))
            tp = TopicPartition(topic=msg.topic,
                                partition=msg.partition)
            yield tp, msg
    def __iter__(self):
        """Iterate over messages; optionally stop once every partition's end is reached."""
        oo = self._observed_offsets
        topic_partitions = [TopicPartition(t, p)
                            for t in self.subscription()
                            for p in self.partitions_for_topic(t)]
        beginnings = self.beginning_offsets(topic_partitions)
        ends = self.end_offsets(topic_partitions)
        # Partitions that still have unread messages (used when _stop_at_end).
        active_partitions = ends.copy()
        for tp in topic_partitions:
            if tp not in self._current_offsets:
                if self._auto_offset_reset == 'earliest':
                    self._seek_offsets[tp] = beginnings[tp]
                elif self._auto_offset_reset == 'latest':
                    self._seek_offsets[tp] = ends[tp]
                else:
                    raise TypeError(f"Unhandled auto_offset_reset {self._auto_offset_reset}")
            # NOTE(review): this lookup can raise KeyError for partitions that
            # were only recorded in _seek_offsets above -- confirm intended.
            curr = self._current_offsets[tp]
            if curr + 1 > active_partitions[tp]:
                active_partitions.pop(tp)
        while active_partitions or not self._stop_at_end:
            for tp, msg in self._poll_once():
                yield msg
                oo[tp] = msg.offset
                end = ends.get(tp)
                # NOTE(review): 'end' may be None for a partition absent from
                # 'ends' (e.g. created after iteration started); the comparison
                # below would then raise TypeError -- confirm.
                if msg.offset + 1 >= end:
                    active_partitions.pop(tp, None)
def _seek(self, partitions, where):
self._check_partitions(partitions)
rq = {"partitions": [{"topic": partition.topic,
"partition": partition.partition}
for partition in partitions]}
self._post(self.consumer, "positions", where,
data=rq, validator=self._expect_no_content)
def _url(self, *url):
return urllib.parse.urljoin(self._server, "/".join(url))
def _get(self, *url, params=None):
addr = self._url(*url)
log.info("GET %s", addr)
r = requests.get(addr, headers={
'user-agent': USER_AGENT,
'accept': self._accept,
}, params=params)
if r.status_code != requests.codes.ok:
self._raise_response_error(r)
return self._response(r)
def _normalize_url(self, *url):
addr = self._url(*url)
log.info("HEAD %s", addr)
r = requests.head(addr,
headers={
'user-agent': USER_AGENT,
},
allow_redirects=True)
return r.url
def _response(self, r):
ret = r.json()
if log.isEnabledFor(logging.DEBUG):
log.debug("Received %s", json.dumps(ret))
return ret
def _post(self, *url, data=None, validator=None):
if data is None:
assert TypeError("no data to post")
addr = self._url(*url)
headers = {
'user-agent': USER_AGENT,
'accept': self._accept,
'content-type': self._content_type,
}
jdata = json.dumps(data)
log.info("POST %s %s", addr, jdata)
r = requests.post(addr,
headers=headers,
data=jdata)
(validator or self._expect_ok)(r)
if r.status_code == requests.codes.no_content:
return None
return self._response(r)
def _delete(self, *url):
headers = {
'user-agent': USER_AGENT,
'accept': self._accept,
'content-type': self._content_type,
}
r = requests.delete(self._url(*url),
headers=headers)
self._expect_no_content(r)
def _expect_ok(self, r):
if r.status_code != requests.codes.ok:
self._raise_response_error(r)
def _expect_no_content(self, r):
if r.status_code != requests.codes.no_content:
self._raise_response_error(r)
def _raise_response_error(self, r):
try:
err = r.json()
except ValueError:
r.raise_for_status()
err = {}
exc = KafkaRestClientException(message=err.get("message"),
error_code=err.get("error_code"),
http_code=r.status_code,
http_message=r.reason)
raise exc
| 8,718 | 21 | 858 |
8e67b18377011ff3c5dd8ac86b01d0e2311f94b5 | 2,129 | py | Python | social/tests/test_board_details.py | Mangeneh/akkaskhooneh-backend | 2a81e73fbe0d55d5821ba1670a997bd8851c4af6 | [
"MIT"
] | 7 | 2018-09-17T18:34:49.000Z | 2019-09-15T11:39:15.000Z | social/tests/test_board_details.py | Mangeneh/akkaskhooneh-backend | 2a81e73fbe0d55d5821ba1670a997bd8851c4af6 | [
"MIT"
] | 9 | 2019-10-21T17:12:21.000Z | 2022-03-11T23:28:14.000Z | social/tests/test_board_details.py | Mangeneh/akkaskhooneh-backend | 2a81e73fbe0d55d5821ba1670a997bd8851c4af6 | [
"MIT"
] | 1 | 2019-11-29T16:12:12.000Z | 2019-11-29T16:12:12.000Z | from django.test import TestCase
from authentication.models import User
from social.models import Posts, Followers, Board
from rest_framework import status
| 44.354167 | 83 | 0.690465 | from django.test import TestCase
from authentication.models import User
from social.models import Posts, Followers, Board
from rest_framework import status
class FeedTest(TestCase):
    """Visibility rules for the /social/boardsdetails/<id>/ endpoint."""
    def create(self, email, username, password):
        """Create and persist a user with a properly hashed password."""
        user = User.objects.create(email=email, username=username, password='')
        user.set_password(password)
        user.save()
        return user
    def setUp(self):
        """Log in as user1; the fixture board is owned by user2."""
        self.password = 'sjkkensks'
        self.user1 = self.create('t@t.com', 'test', self.password)
        self.user2 = self.create('tt@tt.com', 'test2', self.password)
        self.client.login(email=self.user1.email, password=self.password)
        self.board = Board.objects.create(owner=self.user2, name='test')
    def test_can_see_public_user_board(self):
        """A board owned by a public user is readable by other users."""
        response = self.client.get("/social/boardsdetails/"+str(self.board.id)+"/")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['results']), 0)
    def test_can_see_private_user_board(self):
        """A board owned by a private user is rejected for non-followers."""
        self.user2.is_private = True
        self.user2.save()
        response = self.client.get("/social/boardsdetails/"+str(self.board.id)+"/")
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_can_see_my_board(self):
        """An owner can read their own board."""
        board = Board.objects.create(owner=self.user1, name='test')
        response = self.client.get("/social/boardsdetails/"+str(board.id)+"/")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['results']), 0)
    def test_can_see_my_private_board(self):
        """An owner can read their own board even with a private profile."""
        self.user1.is_private = True
        self.user1.save()
        board = Board.objects.create(owner=self.user1, name='test')
        response = self.client.get("/social/boardsdetails/"+str(board.id)+"/")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['results']), 0)
    def test_bad_board_id(self):
        """A nonexistent board id yields 400."""
        response = self.client.get("/social/boardsdetails/232919/")
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
| 1,758 | 4 | 211 |
56af300e8f4226b5d4e58e02ea559825aed78b3b | 4,417 | py | Python | simulator/delay.py | mmosko/ccnx-beginendfragment-sim | 092d080f92ef0dbbc93d2e8cd66236cd66e8cf47 | [
"BSD-2-Clause"
] | null | null | null | simulator/delay.py | mmosko/ccnx-beginendfragment-sim | 092d080f92ef0dbbc93d2e8cd66236cd66e8cf47 | [
"BSD-2-Clause"
] | null | null | null | simulator/delay.py | mmosko/ccnx-beginendfragment-sim | 092d080f92ef0dbbc93d2e8cd66236cd66e8cf47 | [
"BSD-2-Clause"
] | 1 | 2019-04-01T18:36:29.000Z | 2019-04-01T18:36:29.000Z | #
# Copyright (c) 2016, Xerox Corporation (Xerox) and Palo Alto Research Center, Inc (PARC)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL XEROX OR PARC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
#
# PATENT NOTICE
#
# This software is distributed under the BSD 2-clause License (see LICENSE
# file). This BSD License does not make any patent claims and as such, does
# not act as a patent grant. The purpose of this section is for each contributor
# to define their intentions with respect to intellectual property.
#
# Each contributor to this source code is encouraged to state their patent
# claims and licensing mechanisms for any contributions made. At the end of
# this section contributors may each make their own statements. Contributor's
# claims and grants only apply to the pieces (source code, programs, text,
# media, etc) that they have contributed directly to this software.
#
# There is no guarantee that this section is complete, up to date or accurate. It
# is up to the contributors to maintain their portion of this section and up to
# the user of the software to verify any claims herein.
#
# Do not remove this header notification. The contents of this section must be
# present in all distributions of the software. You may only modify your own
# intellectual property statements. Please provide contact information.
#
# - Palo Alto Research Center, Inc
# This software distribution does not grant any rights to patents owned by Palo
# Alto Research Center, Inc (PARC). Rights to these patents are available via
# various mechanisms. As of January 2016 PARC has committed to FRAND licensing any
# intellectual property used by its contributions to this software. You may
# contact PARC at cipo@parc.com for more information or visit http://www.ccnx.org
# Called to generate a delay value
import abc
import random
class ExponentialDelay(Delay):
"""
Generates a delay from an exponential distribution with the specified mean (1/lambda):
TODO: Should generate its own seed and keep its own RNG stream
"""
def __init__(self, min_delay, mean):
"""
:param min_delay: Added to the exponential sample
:param mean: the mean exponential delay (1 / lambda)
"""
super(ExponentialDelay, self).__init__()
if mean <= 0.0: raise ValueError("Mean must be positive, got {}".format(mean))
self._beta = mean
self._min = min_delay
class UniformDelay(Delay):
"""
TODO: Should generate its own seed and keep its own RNG stream
"""
Delay.register(ExponentialDelay)
Delay.register(UniformDelay)
| 38.408696 | 90 | 0.721304 | #
# Copyright (c) 2016, Xerox Corporation (Xerox) and Palo Alto Research Center, Inc (PARC)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL XEROX OR PARC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
#
# PATENT NOTICE
#
# This software is distributed under the BSD 2-clause License (see LICENSE
# file). This BSD License does not make any patent claims and as such, does
# not act as a patent grant. The purpose of this section is for each contributor
# to define their intentions with respect to intellectual property.
#
# Each contributor to this source code is encouraged to state their patent
# claims and licensing mechanisms for any contributions made. At the end of
# this section contributors may each make their own statements. Contributor's
# claims and grants only apply to the pieces (source code, programs, text,
# media, etc) that they have contributed directly to this software.
#
# There is no guarantee that this section is complete, up to date or accurate. It
# is up to the contributors to maintain their portion of this section and up to
# the user of the software to verify any claims herein.
#
# Do not remove this header notification. The contents of this section must be
# present in all distributions of the software. You may only modify your own
# intellectual property statements. Please provide contact information.
#
# - Palo Alto Research Center, Inc
# This software distribution does not grant any rights to patents owned by Palo
# Alto Research Center, Inc (PARC). Rights to these patents are available via
# various mechanisms. As of January 2016 PARC has committed to FRAND licensing any
# intellectual property used by its contributions to this software. You may
# contact PARC at cipo@parc.com for more information or visit http://www.ccnx.org
# Called to generate a delay value
import abc
import random
class Delay(object):
    """Abstract base class for delay generators; subclasses implement next()."""
    # NOTE(review): __metaclass__ only has effect on Python 2; on Python 3 this
    # should be `class Delay(metaclass=abc.ABCMeta)` for the ABC to be enforced.
    __metaclass__ = abc.ABCMeta
    def __init__(self):
        pass
    @abc.abstractmethod
    def next(self):
        """
        Generate the next delay time in seconds
        :return: float (seconds)
        """
        pass
class ExponentialDelay(Delay):
    """
    Generates a delay from an exponential distribution with the specified mean (1/lambda):
    TODO: Should generate its own seed and keep its own RNG stream
    """
    def __init__(self, min_delay, mean):
        """
        :param min_delay: Added to the exponential sample
        :param mean: the mean exponential delay (1 / lambda)
        """
        super(ExponentialDelay, self).__init__()
        if mean <= 0.0:
            raise ValueError("Mean must be positive, got {}".format(mean))
        self._mean = mean
        self._min_delay = min_delay
    def next(self):
        # Draw from Exp(1/mean) and shift by the configured minimum.
        sample = random.expovariate(1 / self._mean)
        return sample + self._min_delay
class UniformDelay(Delay):
    """
    TODO: Should generate its own seed and keep its own RNG stream
    """
    def __init__(self, lower, upper):
        super(UniformDelay, self).__init__()
        self._low = lower
        self._high = upper
    def next(self):
        # Uniformly sample a delay between the configured bounds.
        return random.uniform(self._low, self._high)
Delay.register(ExponentialDelay)
Delay.register(UniformDelay)
| 228 | 221 | 104 |
f481185e7f3c8609bd32c0aec80a2bc7662bd4d1 | 264 | py | Python | app/models/watcher.py | fabdarice/moby-dick-backend | ade0cc1d06cd69e02c3954a94be8e9befa18f46e | [
"MIT"
] | null | null | null | app/models/watcher.py | fabdarice/moby-dick-backend | ade0cc1d06cd69e02c3954a94be8e9befa18f46e | [
"MIT"
] | null | null | null | app/models/watcher.py | fabdarice/moby-dick-backend | ade0cc1d06cd69e02c3954a94be8e9befa18f46e | [
"MIT"
] | null | null | null | from sqlalchemy import Boolean, Column, String
from app.models.base import BaseModel
| 22 | 51 | 0.734848 | from sqlalchemy import Boolean, Column, String
from app.models.base import BaseModel
class WatcherModel(BaseModel):
__tablename__ = 'watchers'
address = Column(String(128), primary_key=True)
active = Column(Boolean)
alias = Column(String(128))
| 0 | 154 | 23 |
b213981a7fe2cee320079878f5d531a7068db054 | 1,427 | py | Python | core/internationalization.py | aaaimx/covid19-assistant-api | 48293d97991aa69fe9b01a5871be86e9c5e38057 | [
"MIT"
] | 1 | 2020-03-28T06:40:36.000Z | 2020-03-28T06:40:36.000Z | core/internationalization.py | aaaimx/covid19-assistant-api | 48293d97991aa69fe9b01a5871be86e9c5e38057 | [
"MIT"
] | 14 | 2020-03-24T20:59:53.000Z | 2021-12-13T20:36:13.000Z | core/internationalization.py | aaaimx/covid19-assistant-api | 48293d97991aa69fe9b01a5871be86e9c5e38057 | [
"MIT"
] | null | null | null |
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Configuración de formatos de fechas
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d-%m-%Y %H:%M:%S', # '2006-10-25 14:30:59'
'%d-%m-%Y %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%d-%m-%Y %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
'%m/%d/%Y', # '10/25/2006'
'%d/%m/%Y', # '10/25/2006'
'%d/%m/%y', # '10/25/06'
)
DATE_INPUT_FORMATS = (
'%d/%m/%Y', # '10/25/2006'
'%d/%m/%y', # '10/25/06'
'%d-%m-%Y', # '10-25-2006'
'%d-%m-%y', # '10-25-06'
)
| 34.804878 | 59 | 0.389629 |
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Configuración de formatos de fechas
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d-%m-%Y %H:%M:%S', # '2006-10-25 14:30:59'
'%d-%m-%Y %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%d-%m-%Y %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
'%m/%d/%Y', # '10/25/2006'
'%d/%m/%Y', # '10/25/2006'
'%d/%m/%y', # '10/25/06'
)
DATE_INPUT_FORMATS = (
'%d/%m/%Y', # '10/25/2006'
'%d/%m/%y', # '10/25/06'
'%d-%m-%Y', # '10-25-2006'
'%d-%m-%y', # '10-25-06'
)
| 0 | 0 | 0 |
703e82659ac9ad52df6beb31fc77492e3f62d5db | 3,633 | py | Python | split-bib.py | 2e0byo/bib | 9d6cd7fcf214894caa4831d948ac868b696b0a02 | [
"CC0-1.0"
] | null | null | null | split-bib.py | 2e0byo/bib | 9d6cd7fcf214894caa4831d948ac868b696b0a02 | [
"CC0-1.0"
] | null | null | null | split-bib.py | 2e0byo/bib | 9d6cd7fcf214894caa4831d948ac868b696b0a02 | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/python
from argparse import ArgumentParser
from pathlib import Path
import subprocess
from getkey import getkey
from pymdownx import keys
# from https://gist.github.com/martin-ueding/4007035
class Colorcodes(object):
"""
Provides ANSI terminal color codes which are gathered via the ``tput``
utility. That way, they are portable. If there occurs any error with
``tput``, all codes are initialized as an empty string.
The provides fields are listed below.
Control:
- bold
- reset
Colors:
- blue
- green
- orange
- red
:license: MIT
"""
_c = Colorcodes()
outfs = {}
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
print("Exiting safely")
for _, f in outfs.items():
f.close()
| 27.522727 | 82 | 0.561795 | #!/usr/bin/python
from argparse import ArgumentParser
from pathlib import Path
import subprocess
from getkey import getkey
from pymdownx import keys
# from https://gist.github.com/martin-ueding/4007035
class Colorcodes(object):
    """
    Provides ANSI terminal color codes which are gathered via the ``tput``
    utility. That way, they are portable. If there occurs any error with
    ``tput``, all codes are initialized as an empty string.
    The provides fields are listed below.
    Control:
    - bold
    - reset
    Colors:
    - blue
    - green
    - orange
    - red
    :license: MIT
    """
    def __init__(self):
        """Query tput for each code; fall back to empty strings on failure."""
        try:
            self.bold = subprocess.check_output("tput bold".split()).decode()
            self.reset = subprocess.check_output("tput sgr0".split()).decode()
            self.blue = subprocess.check_output("tput setaf 4".split()).decode()
            self.green = subprocess.check_output("tput setaf 2".split()).decode()
            self.orange = subprocess.check_output("tput setaf 3".split()).decode()
            self.red = subprocess.check_output("tput setaf 1".split()).decode()
        except subprocess.CalledProcessError as e:
            # NOTE(review): a missing `tput` binary raises FileNotFoundError,
            # which is NOT caught here -- confirm whether it should be.
            self.bold = ""
            self.reset = ""
            self.blue = ""
            self.green = ""
            self.orange = ""
            self.red = ""
_c = Colorcodes()
outfs = {}
def candidate_names():
    """Build the chooser prompt from the open output files.
    Returns (prompt_string, keymap) where keymap maps a lowercase hotkey
    character to an output-file stem; 's' is reserved for "skip".
    """
    s = _c.bold + _c.orange + "Output to: "
    keys = {"s": None}
    for f in sorted(outfs):
        # Pick the first character of the stem not already used as a hotkey.
        for i in range(len(f)):
            if f[i].lower() not in keys:
                break
        # NOTE(review): if every character of a stem is already taken, this
        # assert fires -- name collisions between output files are unhandled.
        assert f[i].lower() not in keys
        keys[f[i].lower()] = f
        # Highlight the chosen hotkey letter in green, uppercased.
        f = f[:i] + _c.green + f[i].upper() + _c.orange + f[i + 1 :]
        s += f"{f} "
    s = s.rstrip()
    s += "? ("
    s += _c.green + "S" + _c.orange + " to skip)"
    s += _c.reset
    return s, keys
def process_region(region, count):
    """Show one bib entry and append it to the output file the user picks."""
    print(f"{_c.orange}{_c.bold}Item {count}/{total}{_c.reset}")
    print(region)
    print("")
    print(options_string)
    choice = getkey()
    # Re-prompt until a configured hotkey (or 's' to skip) is pressed.
    while choice.lower() not in keys:
        print("Incorrect input; try again")
        choice = getkey()
    # NOTE(review): an uppercase keypress passes the loop (lowercased check)
    # but is looked up as-is below, so e.g. 'S' would raise KeyError here.
    if choice != "s":
        f = outfs[keys[choice]]
        print(f"Writing to {f.name}")
        f.write("\n\n" + region)
    print("")
def main():
    """Split INF into blank-line-separated regions and dispatch each one."""
    global outfs, options_string, keys, total
    parser = ArgumentParser()
    parser.add_argument("INF", help="File to read.", type=Path)
    parser.add_argument(
        "OUTFS", nargs="+", help="Output files to split into.", type=Path
    )
    parser.add_argument("--skip", default=0, help="Entries to skip", type=int)
    args = parser.parse_args()
    assert args.OUTFS
    # One append-mode handle per output file, keyed by stem.
    outfs = {x.stem: x.expanduser().open("a") for x in args.OUTFS}
    options_string, keys = candidate_names()
    region = ""
    count = 0
    total = 0
    regions = []
    with args.INF.expanduser().open() as f:
        for line in f.readlines():
            # A blank line terminates the region accumulated so far.
            if not line.strip() and region.strip():
                total += 1
                count += 1
                if count > args.skip:
                    regions.append(region.strip())
                region = ""
            else:
                region += line
    for count, region in enumerate(regions):
        process_region(region, count + args.skip + 1)
    # NOTE(review): 'count' and 'region' are reused from the loops above. This
    # tail looks intended to flush a final region with no trailing blank line,
    # but as written it re-processes the last enumerated region with the
    # enumerate index -- confirm and simplify.
    count += 1
    if count > args.skip:
        process_region(region.strip(), count)
    for _, f in outfs.items():
        f.close()
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Close any partially written output files before exiting.
        print("Exiting safely")
        for _, f in outfs.items():
            f.close()
| 2,723 | 0 | 96 |
20a3c05f8f7dbd0b7468631dec757204a38c622f | 4,728 | py | Python | distributed/comm/addressing.py | met-office-lab/distributed | 46e31cadd55456bbd0b85a01f040d1eb33ee587f | [
"BSD-3-Clause"
] | null | null | null | distributed/comm/addressing.py | met-office-lab/distributed | 46e31cadd55456bbd0b85a01f040d1eb33ee587f | [
"BSD-3-Clause"
] | 1 | 2021-04-30T20:41:53.000Z | 2021-04-30T20:41:53.000Z | distributed/comm/addressing.py | met-office-lab/distributed | 46e31cadd55456bbd0b85a01f040d1eb33ee587f | [
"BSD-3-Clause"
] | 1 | 2018-07-06T03:48:08.000Z | 2018-07-06T03:48:08.000Z | from __future__ import print_function, division, absolute_import
import six
from ..config import config
from . import registry
DEFAULT_SCHEME = config.get('default-scheme', 'tcp')
def parse_address(addr, strict=False):
"""
Split address into its scheme and scheme-dependent location string.
>>> parse_address('tcp://127.0.0.1')
('tcp', '127.0.0.1')
If strict is set to true the address must have a scheme.
"""
if not isinstance(addr, six.string_types):
raise TypeError("expected str, got %r" % addr.__class__.__name__)
scheme, sep, loc = addr.rpartition('://')
if strict and not sep:
msg = ("Invalid url scheme. "
"Must include protocol like tcp://localhost:8000. "
"Got %s" % addr)
raise ValueError(msg)
if not sep:
scheme = DEFAULT_SCHEME
return scheme, loc
def unparse_address(scheme, loc):
"""
Undo parse_address().
>>> unparse_address('tcp', '127.0.0.1')
'tcp://127.0.0.1'
"""
return '%s://%s' % (scheme, loc)
def normalize_address(addr):
"""
Canonicalize address, adding a default scheme if necessary.
>>> normalize_address('tls://[::1]')
'tls://[::1]'
>>> normalize_address('[::1]')
'tcp://[::1]'
"""
return unparse_address(*parse_address(addr))
def parse_host_port(address, default_port=None):
"""
Parse an endpoint address given in the form "host:port".
"""
if isinstance(address, tuple):
return address
if address.startswith('['):
# IPv6 notation: '[addr]:port' or '[addr]'.
# The address may contain multiple colons.
host, sep, tail = address[1:].partition(']')
if not sep:
_fail()
if not tail:
port = _default()
else:
if not tail.startswith(':'):
_fail()
port = tail[1:]
else:
# Generic notation: 'addr:port' or 'addr'.
host, sep, port = address.partition(':')
if not sep:
port = _default()
elif ':' in host:
_fail()
return host, int(port)
def unparse_host_port(host, port=None):
"""
Undo parse_host_port().
"""
if ':' in host and not host.startswith('['):
host = '[%s]' % host
if port:
return '%s:%s' % (host, port)
else:
return host
def get_address_host_port(addr, strict=False):
"""
Get a (host, port) tuple out of the given address.
For definition of strict check parse_address
ValueError is raised if the address scheme doesn't allow extracting
the requested information.
>>> get_address_host_port('tcp://1.2.3.4:80')
('1.2.3.4', 80)
"""
scheme, loc = parse_address(addr, strict=strict)
backend = registry.get_backend(scheme)
try:
return backend.get_address_host_port(loc)
except NotImplementedError:
raise ValueError("don't know how to extract host and port "
"for address %r" % (addr,))
def get_address_host(addr):
"""
Return a hostname / IP address identifying the machine this address
is located on.
In contrast to get_address_host_port(), this function should always
succeed for well-formed addresses.
>>> get_address_host('tcp://1.2.3.4:80')
'1.2.3.4'
"""
scheme, loc = parse_address(addr)
backend = registry.get_backend(scheme)
return backend.get_address_host(loc)
def get_local_address_for(addr):
"""
Get a local listening address suitable for reaching *addr*.
For instance, trying to reach an external TCP address will return
a local TCP address that's routable to that external address.
>>> get_local_address_for('tcp://8.8.8.8:1234')
'tcp://192.168.1.68'
>>> get_local_address_for('tcp://127.0.0.1:1234')
'tcp://127.0.0.1'
"""
scheme, loc = parse_address(addr)
backend = registry.get_backend(scheme)
return unparse_address(scheme, backend.get_local_address_for(loc))
def resolve_address(addr):
"""
Apply scheme-specific address resolution to *addr*, replacing
all symbolic references with concrete location specifiers.
In practice, this can mean hostnames are resolved to IP addresses.
>>> resolve_address('tcp://localhost:8786')
'tcp://127.0.0.1:8786'
"""
scheme, loc = parse_address(addr)
backend = registry.get_backend(scheme)
return unparse_address(scheme, backend.resolve_address(loc))
| 27.649123 | 78 | 0.619924 | from __future__ import print_function, division, absolute_import
import six
from ..config import config
from . import registry
DEFAULT_SCHEME = config.get('default-scheme', 'tcp')
def parse_address(addr, strict=False):
    """
    Split address into its scheme and scheme-dependent location string.

    >>> parse_address('tcp://127.0.0.1')
    ('tcp', '127.0.0.1')

    If strict is set to true the address must have a scheme.
    """
    if not isinstance(addr, six.string_types):
        raise TypeError("expected str, got %r" % addr.__class__.__name__)
    scheme, sep, loc = addr.rpartition('://')
    if not sep:
        if strict:
            raise ValueError("Invalid url scheme. "
                             "Must include protocol like tcp://localhost:8000. "
                             "Got %s" % addr)
        scheme = DEFAULT_SCHEME
    return scheme, loc
def unparse_address(scheme, loc):
    """
    Undo parse_address().

    >>> unparse_address('tcp', '127.0.0.1')
    'tcp://127.0.0.1'
    """
    return '{}://{}'.format(scheme, loc)
def normalize_address(addr):
    """
    Canonicalize address, adding a default scheme if necessary.

    >>> normalize_address('tls://[::1]')
    'tls://[::1]'
    >>> normalize_address('[::1]')
    'tcp://[::1]'
    """
    scheme, loc = parse_address(addr)
    return unparse_address(scheme, loc)
def parse_host_port(address, default_port=None):
    """
    Parse an endpoint address given in the form "host:port".
    """
    if isinstance(address, tuple):
        return address

    def _bad():
        raise ValueError("invalid address %r" % (address,))

    def _port_or_default():
        if default_port is None:
            raise ValueError("missing port number in address %r" % (address,))
        return default_port

    if address.startswith('['):
        # Bracketed IPv6: '[addr]:port' or '[addr]'; the addr itself
        # may contain multiple colons.
        host, bracket, rest = address[1:].partition(']')
        if not bracket:
            _bad()
        if rest:
            if not rest.startswith(':'):
                _bad()
            port = rest[1:]
        else:
            port = _port_or_default()
    else:
        # Generic notation: 'addr:port' or 'addr'.
        host, colon, port = address.partition(':')
        if not colon:
            port = _port_or_default()
        elif ':' in host:
            _bad()
    return host, int(port)
def unparse_host_port(host, port=None):
    """
    Undo parse_host_port().
    """
    needs_brackets = ':' in host and not host.startswith('[')
    loc = '[%s]' % host if needs_brackets else host
    return '%s:%s' % (loc, port) if port else loc
def get_address_host_port(addr, strict=False):
    """
    Get a (host, port) tuple out of the given address.
    For definition of strict check parse_address

    ValueError is raised if the address scheme doesn't allow extracting
    the requested information.

    >>> get_address_host_port('tcp://1.2.3.4:80')
    ('1.2.3.4', 80)
    """
    scheme, loc = parse_address(addr, strict=strict)
    backend = registry.get_backend(scheme)
    try:
        return backend.get_address_host_port(loc)
    except NotImplementedError:
        msg = ("don't know how to extract host and port "
               "for address %r" % (addr,))
        raise ValueError(msg)
def get_address_host(addr):
    """
    Return a hostname / IP address identifying the machine this address
    is located on.

    In contrast to get_address_host_port(), this function should always
    succeed for well-formed addresses.

    >>> get_address_host('tcp://1.2.3.4:80')
    '1.2.3.4'
    """
    scheme, loc = parse_address(addr)
    return registry.get_backend(scheme).get_address_host(loc)
def get_local_address_for(addr):
    """
    Get a local listening address suitable for reaching *addr*.

    For instance, trying to reach an external TCP address will return
    a local TCP address that's routable to that external address.

    >>> get_local_address_for('tcp://8.8.8.8:1234')
    'tcp://192.168.1.68'
    >>> get_local_address_for('tcp://127.0.0.1:1234')
    'tcp://127.0.0.1'
    """
    scheme, loc = parse_address(addr)
    backend = registry.get_backend(scheme)
    local_loc = backend.get_local_address_for(loc)
    return unparse_address(scheme, local_loc)
def resolve_address(addr):
    """
    Apply scheme-specific address resolution to *addr*, replacing
    all symbolic references with concrete location specifiers.

    In practice, this can mean hostnames are resolved to IP addresses.

    >>> resolve_address('tcp://localhost:8786')
    'tcp://127.0.0.1:8786'
    """
    scheme, loc = parse_address(addr)
    backend = registry.get_backend(scheme)
    resolved = backend.resolve_address(loc)
    return unparse_address(scheme, resolved)
| 185 | 0 | 54 |
f26711de310e0a56e03746ce2226ae8ef1929490 | 27,085 | py | Python | pypeit/deprecated/flux_old.py | ykwang1/PypeIt | a96cff699f1284905ce7ef19d06a9027cd333c63 | [
"BSD-3-Clause"
] | 107 | 2018-08-06T07:07:20.000Z | 2022-02-28T14:33:42.000Z | pypeit/deprecated/flux_old.py | ykwang1/PypeIt | a96cff699f1284905ce7ef19d06a9027cd333c63 | [
"BSD-3-Clause"
] | 889 | 2018-07-26T12:14:33.000Z | 2022-03-18T22:49:42.000Z | pypeit/deprecated/flux_old.py | ykwang1/PypeIt | a96cff699f1284905ce7ef19d06a9027cd333c63 | [
"BSD-3-Clause"
] | 74 | 2018-09-25T17:03:07.000Z | 2022-03-10T23:59:24.000Z |
## the following massive function is deprecated
def generate_sensfunc_old(wave, counts, counts_ivar, airmass, exptime, spectrograph, telluric=False, star_type=None,
                          star_mag=None, ra=None, dec=None, std_file = None, BALM_MASK_WID=5., norder=4, nresln=None,debug=False):
    """ Function to generate the sensitivity function.

    .. deprecated:: This routine is kept for reference only and should not
       be called from new code.

    This can work in different regimes:
    - If telluric=False and RA=None and Dec=None
      the code creates a sintetic standard star spectrum using the Kurucz models,
      and from this it generates a sens func using nresln=20.0 and masking out
      telluric regions.
    - If telluric=False and RA and Dec are assigned
      the standard star spectrum is extracted from the archive, and a sens func
      is generated using nresln=20.0 and masking out telluric regions.
    - If telluric=True
      the code creates a sintetic standard star spectrum using the Kurucz models,
      the sens func is created setting nresln=1.5 it contains the correction for
      telluric lines.

    Parameters:
    ----------
    wave : array
        Wavelength of the star [no longer with units]
    counts : array
        Flux (in counts) of the star
    counts_ivar : array
        Inverse variance of the star
    airmass : float
        Airmass
    exptime : float
        Exposure time in seconds
    spectrograph : dict
        Instrument specific dict
        Used for extinction correction
    telluric : bool
        if True performs a telluric correction
    star_type : str
        Spectral type of the telluric star (used if telluric=True)
    star_mag : float
        Apparent magnitude of telluric star (used if telluric=True)
    RA : float
        deg, RA of the telluric star
        if assigned, the standard star spectrum will be extracted from
        the archive
    DEC : float
        deg, DEC of the telluric star
        if assigned, the standard star spectrum will be extracted from
        the archive
    std_file : str
        Filename of the standard-star exposure; stored in the output dict only.
    BALM_MASK_WID : float
        Mask parameter for Balmer absorption. A region equal to
        BALM_MASK_WID*resln is masked wher resln is the estimate
        for the spectral resolution.
    nresln : float
        Number of resolution elements for break-point placement.
        If assigned, overwrites the settings imposed by the code.
    norder: int
        Order number of polynomial fit.
    debug : bool
        If True, show diagnostic matplotlib plots along the way.

    Returns:
    -------
    sens_dict : dict
        sensitivity function described by a dict

    Raises
    ------
    Calls msgs.error (which terminates) when neither (ra, dec) nor
    (star_type, star_mag) fully specify the standard star, or when no
    archived standard matches the given coordinates.
    """
    # Create copy of the arrays to avoid modification and convert to
    # electrons / s
    wave_star = wave.copy()
    flux_star = counts.copy() / exptime
    ivar_star = counts_ivar.copy() * exptime ** 2
    # Units
    if not isinstance(wave_star, units.Quantity):
        wave_star = wave_star * units.AA
    # ToDo
    # This should be changed. At the moment the extinction correction procedure
    # requires the spectra to be in the optical. For the NIR is probably enough
    # to extend the tables to longer wavelength setting the extinction to 0.0mag.
    msgs.warn("Extinction correction applyed only if the spectra covers <10000Ang.")
    # Apply Extinction if optical bands
    if np.max(wave_star) < 10000. * units.AA:
        msgs.info("Applying extinction correction")
        extinct = load_extinction_data(spectrograph.telescope['longitude'],
                                       spectrograph.telescope['latitude'])
        ext_corr = extinction_correction(wave_star, airmass, extinct)
        # Correct for extinction
        flux_star = flux_star * ext_corr
        ivar_star = ivar_star / ext_corr ** 2
    else:
        msgs.info("Extinction correction not applied")
    # Create star model
    if (ra is not None) and (dec is not None) and (star_mag is None) and (star_type is None):
        # Pull star spectral model from archive
        msgs.info("Get standard model")
        # Grab closest standard within a tolerance
        std_dict = find_standard_file(ra, dec)
        if std_dict is not None:
            # Load standard
            load_standard_file(std_dict)
            # Interpolate onto observed wavelengths
            #std_xspec = XSpectrum1D.from_tuple((std_dict['wave'], std_dict['flux']))
            # NOTE(review): leftover breakpoint. `std_xspec` below is never
            # defined because its construction (previous line) is commented
            # out, so this archive branch cannot run as written — confirm
            # before reviving this deprecated path.
            debugger.set_trace()
            xspec = std_xspec.rebin(wave_star)  # Conserves flambda
            flux_true = xspec.flux.value
        else:
            msgs.error('No spectrum found in our database for your standard star. Please use another standard star \
                       or consider add it into out database.')
    elif (star_mag is not None) and (star_type is not None):
        # Create star spectral model
        msgs.info("Creating standard model")
        # Create star model
        star_loglam, star_flux, std_dict = telluric_sed(star_mag, star_type)
        star_lam = 10 ** star_loglam
        # Generate a dict matching the output of find_standard_file
        # (note: the std_dict returned by telluric_sed is overwritten here)
        std_dict = dict(cal_file='KuruczTelluricModel', name=star_type, fmt=1,
                        std_ra=None, std_dec=None)
        std_dict['wave'] = star_lam * units.AA
        std_dict['flux'] = 1e17 * star_flux * units.erg / units.s / units.cm ** 2 / units.AA
        # ToDO If the Kuruck model is used, rebin create weird features
        # I using scipy interpolate to avoid this
        flux_true = scipy.interpolate.interp1d(std_dict['wave'], std_dict['flux'],
                                               bounds_error=False,
                                               fill_value='extrapolate')(wave_star)
    else:
        # NOTE(review): leftover breakpoint before the fatal error below.
        debugger.set_trace()
        msgs.error('Insufficient information provided for fluxing. '
                   'Either the coordinates of the standard or a stellar type and magnitude are needed.')
    if np.min(flux_true) <= 0.:
        msgs.warn('Your spectrum extends beyond calibrated standard star, extrapolating the spectra with polynomial.')
        # ToDo: should we extrapolate it using graybody model?
        # NOTE(review): mask_model is computed but never used — the
        # polynomial below replaces flux_true wholesale (see commented line).
        mask_model = flux_true<=0
        msk_poly, poly_coeff = utils.robust_polyfit_djs(std_dict['wave'].value, std_dict['flux'].value,8,function='polynomial',
                                                    invvar=None, guesses=None, maxiter=50, inmask=None, sigma=None, \
                                                    lower=3.0, upper=3.0, maxdev=None, maxrej=3, groupdim=None,
                                                    groupsize=None,groupbadpix=False, grow=0, sticky=True, use_mad=True)
        star_poly = utils.func_val(poly_coeff, wave_star.value, 'polynomial')
        #flux_true[mask_model] = star_poly[mask_model]
        flux_true = star_poly.copy()
        if debug:
            plt.plot(std_dict['wave'], std_dict['flux'],'bo',label='Raw Star Model')
            plt.plot(std_dict['wave'], utils.func_val(poly_coeff, std_dict['wave'].value, 'polynomial'), 'k-',label='robust_poly_fit')
            plt.plot(wave_star,flux_true,'r-',label='Your Final Star Model used for sensfunc')
            plt.show()
    # Set nresln
    if nresln is None:
        if telluric:
            nresln = 1.5
            msgs.info("Set nresln to 1.5")
        else:
            nresln = 20.0
            msgs.info("Set nresln to 20.0")
    # ToDo
    # Compute an effective resolution for the standard. This could be improved
    # to setup an array of breakpoints based on the resolution. At the
    # moment we are using only one number
    msgs.work("Should pull resolution from arc line analysis")
    msgs.work("At the moment the resolution is taken as the PixelScale")
    msgs.work("This needs to be changed!")
    # Median pixel spacing is used as a stand-in for the spectral resolution.
    std_pix = np.median(np.abs(wave_star - np.roll(wave_star, 1)))
    std_res = std_pix
    resln = std_res
    # NOTE(review): since std_res == std_pix, this branch can only trigger
    # for nresln < 1 and then forces nresln to exactly 1 — confirm intent.
    if (nresln * std_res) < std_pix:
        msgs.warn("Bspline breakpoints spacing shoud be larger than 1pixel")
        msgs.warn("Changing input nresln to fix this")
        nresln = std_res / std_pix
    # Mask bad pixels, edges, and Balmer, Paschen, Brackett, and Pfund lines
    # Mask (True = good pixels)
    msgs.info("Masking spectral regions:")
    msk_star = np.ones_like(flux_star).astype(bool)
    # Mask bad pixels
    msgs.info(" Masking bad pixels")
    msk_star[ivar_star <= 0.] = False
    msk_star[flux_star <= 0.] = False
    # Mask edges
    msgs.info(" Masking edges")
    msk_star[:1] = False
    msk_star[-1:] = False
    # Mask Balmer
    msgs.info(" Masking Balmer")
    lines_balm = np.array([3836.4, 3969.6, 3890.1, 4102.8, 4102.8, 4341.6, 4862.7, 5407.0,
                           6564.6, 8224.8, 8239.2]) * units.AA
    for line_balm in lines_balm:
        ibalm = np.abs(wave_star - line_balm) <= BALM_MASK_WID * resln
        msk_star[ibalm] = False
    # Mask Paschen
    msgs.info(" Masking Paschen")
    # air wavelengths from:
    # https://www.subarutelescope.org/Science/Resources/lines/hi.html
    lines_pasc = np.array([8203.6, 9229.0, 9546.0, 10049.4, 10938.1,
                           12818.1, 18751.0]) * units.AA
    for line_pasc in lines_pasc:
        ipasc = np.abs(wave_star - line_pasc) <= BALM_MASK_WID * resln
        msk_star[ipasc] = False
    # Mask Brackett
    msgs.info(" Masking Brackett")
    # air wavelengths from:
    # https://www.subarutelescope.org/Science/Resources/lines/hi.html
    lines_brac = np.array([14584.0, 18174.0, 19446.0, 21655.0,
                           26252.0, 40512.0]) * units.AA
    for line_brac in lines_brac:
        ibrac = np.abs(wave_star - line_brac) <= BALM_MASK_WID * resln
        msk_star[ibrac] = False
    # Mask Pfund
    msgs.info(" Masking Pfund")
    # air wavelengths from:
    # https://www.subarutelescope.org/Science/Resources/lines/hi.html
    lines_pfund = np.array([22788.0, 32961.0, 37395.0, 46525.0,
                            74578.0]) * units.AA
    for line_pfund in lines_pfund:
        ipfund = np.abs(wave_star - line_pfund) <= BALM_MASK_WID * resln
        msk_star[ipfund] = False
    # Mask Atm. cutoff
    msgs.info(" Masking Below the atmospheric cutoff")
    atms_cutoff = wave_star <= 3000.0 * units.AA
    msk_star[atms_cutoff] = False
    #if ~telluric: #Feige: This is a bug
    if not telluric:
        # Mask telluric absorption
        msgs.info("Masking Telluric")
        tell = np.any([((wave_star >= 7580.00 * units.AA) & (wave_star <= 7750.00 * units.AA)),
                       ((wave_star >= 7160.00 * units.AA) & (wave_star <= 7340.00 * units.AA)),
                       ((wave_star >= 6860.00 * units.AA) & (wave_star <= 6930.00 * units.AA)),
                       ((wave_star >= 9310.00 * units.AA) & (wave_star <= 9665.00 * units.AA)),
                       ((wave_star >= 11120.0 * units.AA) & (wave_star <= 11615.0 * units.AA)),
                       ((wave_star >= 12610.0 * units.AA) & (wave_star <= 12720.0 * units.AA)),
                       ((wave_star >= 13160.0 * units.AA) & (wave_star <= 15065.0 * units.AA)),
                       ((wave_star >= 15700.0 * units.AA) & (wave_star <= 15770.0 * units.AA)),
                       ((wave_star >= 16000.0 * units.AA) & (wave_star <= 16100.0 * units.AA)),
                       ((wave_star >= 16420.0 * units.AA) & (wave_star <= 16580.0 * units.AA)),
                       ((wave_star >= 17310.0 * units.AA) & (wave_star <= 20775.0 * units.AA)),
                       (wave_star >= 22680.0 * units.AA)], axis=0)
        msk_star[tell] = False
    # Apply mask
    ivar_star[~msk_star] = 0.0
    # Fit in magnitudes
    kwargs_bspline = {'bkspace': resln.value * nresln}
    kwargs_reject = {'maxrej': 5}
    sensfunc, sensfit = bspline_magfit(wave_star.value, flux_star, ivar_star, flux_true, inmask=msk_star,
                                       kwargs_bspline=kwargs_bspline, kwargs_reject=kwargs_reject,debug=debug)
    #Cleaning sensfunc
    ## ToDo: currently I'm fitting the sensfunc in the masked region with a polynomial, should we change the algorithm to
    ## fit polynomial first and then bsline the poly-subtracted flux ???
    ## keep tell free region for poly fit. tell2 is different from tell since tell2 include more small trunk of telluric free
    ## regions. tell2 might be not suitable for the bspline fitting. We need to select a more robust telluric region for both purpose.
    tell2 = np.any([((wave_star >= 7580.00 * units.AA) & (wave_star <= 7750.00 * units.AA)),
                    ((wave_star >= 7160.00 * units.AA) & (wave_star <= 7340.00 * units.AA)),
                    ((wave_star >= 6860.00 * units.AA) & (wave_star <= 6930.00 * units.AA)),
                    ((wave_star >= 9310.00 * units.AA) & (wave_star <= 9665.00 * units.AA)),
                    ((wave_star >= 11120.0 * units.AA) & (wave_star <= 11545.0 * units.AA)),
                    ((wave_star >= 12610.0 * units.AA) & (wave_star <= 12720.0 * units.AA)),
                    ((wave_star >= 13400.0 * units.AA) & (wave_star <= 14830.0 * units.AA)),
                    ((wave_star >= 15700.0 * units.AA) & (wave_star <= 15770.0 * units.AA)),
                    ((wave_star >= 16000.0 * units.AA) & (wave_star <= 16100.0 * units.AA)),
                    ((wave_star >= 16420.0 * units.AA) & (wave_star <= 16580.0 * units.AA)),
                    ((wave_star >= 17630.0 * units.AA) & (wave_star <= 19690.0 * units.AA)),
                    ((wave_star >= 19790.0 * units.AA) & (wave_star <= 19810.0 * units.AA)),
                    ((wave_star >= 19950.0 * units.AA) & (wave_star <= 20310.0 * units.AA)),
                    ((wave_star >= 20450.0 * units.AA) & (wave_star <= 20920.0 * units.AA)),
                    ((wave_star >= 24000.0 * units.AA) & (wave_star <= 24280.0 * units.AA)),
                    ((wave_star >= 24320.0 * units.AA) & (wave_star <= 24375.0 * units.AA)),
                    (wave_star >= 24450.0 * units.AA)], axis=0)
    msk_all = msk_star.copy()  # mask for polynomial fitting
    msk_sens = msk_star.copy()  # mask for sensfunc
    med, mad = utils.robust_meanstd(sensfunc)
    # Reject non-positive or wildly large (> 1000x median) sensfunc values.
    msk_crazy = (sensfunc<=0) | (sensfunc>1e3*med)
    msk_all[tell2] = False
    msk_all[msk_crazy] = False
    msk_sens[msk_crazy] = False
    # Skip the polynomial patching if too few telluric-free pixels remain
    # (fewer than norder+1, or less than 10% of the spectrum).
    if (len(wave_star.value[msk_all]) < norder+1) or (len(wave_star.value[msk_all]) < 0.1*len(wave_star.value)):
        msgs.warn('It seems this order/spectrum well within the telluric region. No polynomial fit will be performed.')
    else:
        #polyfit the sensfunc
        msk_poly, poly_coeff = utils.robust_polyfit_djs(wave_star.value[msk_all],np.log10(sensfunc[msk_all]), norder, function='polynomial',
                                                   invvar=None,guesses = None, maxiter = 50, inmask = None, sigma = None,\
                                                   lower = 3.0, upper = 3.0,maxdev=None,maxrej=3,groupdim=None,groupsize=None,\
                                                   groupbadpix=False, grow=0,sticky=True,use_mad=True)
        sensfunc_poly = 10**(utils.func_val(poly_coeff, wave_star.value, 'polynomial'))
        # Replace the rejected sensfunc pixels with the polynomial model.
        sensfunc[~msk_sens] = sensfunc_poly[~msk_sens]
        if debug:
            plt.rcdefaults()
            plt.rcParams['font.family'] = 'times new roman'
            plt.plot(wave_star.value[~msk_sens], sensfunc[~msk_sens], 'bo')
            plt.plot(wave_star.value, sensfunc_poly, 'r-',label='Polyfit')
            plt.plot(wave_star.value, sensfunc, 'k-',label='bspline fitting')
            plt.ylim(0.0, 100.0)
            plt.legend()
            plt.xlabel('Wavelength [ang]')
            plt.ylabel('Sensfunc')
            plt.show()
            plt.close()
            plt.figure(figsize=(10, 6))
            plt.clf()
            plt.plot(wave_star.value,flux_star*sensfunc, label='Calibrated Spectrum')
            plt.plot(wave_star.value,flux_true, label='Model')
            plt.plot(wave_star.value,np.sqrt(1/ivar_star))
            plt.legend()
            plt.xlabel('Wavelength [ang]')
            plt.ylabel('Flux [erg/s/cm2/Ang.]')
            plt.ylim(0,np.median(flux_true)*2.5)
            plt.title('Final corrected spectrum')
            plt.show()
            plt.close()
    # JFH Left off here.
    # Creating the dict
    #msgs.work("Is min, max and wave_min, wave_max a duplicate?")
    #sens_dict = dict(wave=wave_sens, sensfunc=sensfunc, min=None, max=None, std=std_dict)
    # Add in wavemin,wavemax
    sens_dict = {}
    sens_dict['wave'] = wave_star
    sens_dict['sensfunc'] = sensfunc
    sens_dict['wave_min'] = np.min(wave_star)
    sens_dict['wave_max'] = np.max(wave_star)
    sens_dict['exptime']= exptime
    sens_dict['airmass']= airmass
    sens_dict['std_file']= std_file
    # Get other keys from standard dict
    sens_dict['std_ra'] = std_dict['std_ra']
    sens_dict['std_dec'] = std_dict['std_dec']
    sens_dict['std_name'] = std_dict['name']
    sens_dict['cal_file'] = std_dict['cal_file']
    sens_dict['flux_true'] = flux_true
    #sens_dict['std_dict'] = std_dict
    #sens_dict['msk_star'] = msk_star
    #sens_dict['mag_set'] = mag_set
    return sens_dict
## bspline_magfit is deprecated at this moment.
def bspline_magfit(wave, flux, ivar, flux_std, inmask=None, maxiter=35, upper=2, lower=2,
                   kwargs_bspline=None, kwargs_reject=None, debug=False, show_QA=False):
    """
    Perform a bspline fit to the flux ratio of standard to
    observed counts, working in magnitude space. Used to generate a
    sensitivity function.

    Parameters
    ----------
    wave : ndarray
        wavelength as observed
    flux : ndarray
        counts/s as observed
    ivar : ndarray
        inverse variance
    flux_std : Quantity array
        standard star true flux (erg/s/cm^2/A)
    inmask : ndarray, optional
        boolean bspline mask (True = good pixel)
    maxiter : int
        maximum number of iterations for bspline iterfit
    upper : int
        number of sigma for upper rejection in bspline iterfit
    lower : int
        number of sigma for lower rejection in bspline iterfit
    kwargs_bspline : dict, optional
        keywords for bspline iterfit; must include 'bkspace'
        (breakpoint spacing) — accessed below without a default
    kwargs_reject : dict, optional
        rejection keywords for bspline iterfit
    debug : bool
        if True shows some debugging plots
    show_QA : bool
        if True produces the QA figure via qa_bspline_magfit

    Returns
    -------
    sensfunc : ndarray
        empirical sensitivity, 10**(0.4 * clipped(2.5*log10(flux_std)
        - 2.5*log10(flux)))
    sensfit : ndarray
        bspline model of the sensitivity evaluated at `wave`
    """
    # Avoid the mutable-default-argument pitfall: dicts created fresh per call.
    if kwargs_bspline is None:
        kwargs_bspline = {}
    if kwargs_reject is None:
        kwargs_reject = {}
    # Create copy of the arrays to avoid modification
    wave_obs = wave.copy()
    flux_obs = flux.copy()
    ivar_obs = ivar.copy()
    # Preparing arrays to run in bspline_iterfit.
    # (This identical sanity check used to appear twice; once is enough.)
    if np.all(~np.isfinite(ivar_obs)):
        msgs.warn("NaN are present in the inverse variance")
    # Removing outliers
    # Calculate log of flux_obs setting a floor at TINY
    logflux_obs = 2.5 * np.log10(np.maximum(flux_obs, TINY))
    # Set a fix value for the variance of logflux
    logivar_obs = np.ones_like(logflux_obs) * (10.0 ** 2)
    # Calculate log of flux_std model setting a floor at TINY
    logflux_std = 2.5 * np.log10(np.maximum(flux_std, TINY))
    # Calculate ratio setting a floor at MAGFUNC_MIN and a ceiling at
    # MAGFUNC_MAX
    magfunc = logflux_std - logflux_obs
    magfunc = np.maximum(np.minimum(magfunc, MAGFUNC_MAX), MAGFUNC_MIN)
    # Pixels that hit the clip limits are flagged bad.
    magfunc_mask = (magfunc < 0.99 * MAGFUNC_MAX) & (magfunc > 0.99 * MAGFUNC_MIN)
    # Mask outliers
    # masktot=True means good pixel
    if inmask is None:
        masktot = (ivar_obs > 0.0) & np.isfinite(logflux_obs) & np.isfinite(ivar_obs) & \
                  np.isfinite(logflux_std) & magfunc_mask
    else:
        masktot = inmask & (ivar_obs > 0.0) & np.isfinite(logflux_obs) & np.isfinite(ivar_obs) & \
                  np.isfinite(logflux_std) & magfunc_mask
    logivar_obs[~masktot] = 0.
    # Calculate sensfunc
    sensfunc = 10.0 ** (0.4 * magfunc)
    msgs.info("Initialize bspline for flux calibration")
    init_bspline = pydl.bspline(wave_obs, bkspace=kwargs_bspline['bkspace'])
    fullbkpt = init_bspline.breakpoints
    # TESTING turning off masking for now
    # remove masked regions from breakpoints
    msk_obs = np.ones_like(wave_obs).astype(bool)
    msk_obs[~masktot] = False
    msk_bkpt = scipy.interpolate.interp1d(wave_obs, msk_obs, kind='nearest', fill_value='extrapolate')(fullbkpt)
    init_breakpoints = fullbkpt[msk_bkpt > 0.999]
    # init_breakpoints = fullbkpt
    # First round of the fit:
    msgs.info("Bspline fit: step 1")
    bset1, bmask = pydl.iterfit(wave_obs, magfunc, invvar=logivar_obs, inmask=masktot, upper=upper, lower=lower,
                                fullbkpt=init_breakpoints, maxiter=maxiter, kwargs_bspline=kwargs_bspline,
                                kwargs_reject=kwargs_reject)
    logfit1, _ = bset1.value(wave_obs)
    logfit_bkpt, _ = bset1.value(init_breakpoints)
    if debug:
        # Check for calibration
        plt.figure(1)
        plt.plot(wave_obs, magfunc, drawstyle='steps-mid', color='black', label='magfunc')
        plt.plot(wave_obs, logfit1, color='cornflowerblue', label='logfit1')
        plt.plot(wave_obs[~masktot], magfunc[~masktot], '+', color='red', markersize=5.0, label='masked magfunc')
        plt.plot(wave_obs[~masktot], logfit1[~masktot], '+', color='red', markersize=5.0, label='masked logfit1')
        plt.plot(init_breakpoints, logfit_bkpt, '.', color='green', markersize=4.0, label='breakpoints')
        plt.plot(init_breakpoints, np.interp(init_breakpoints, wave_obs, magfunc), '.', color='green', markersize=4.0,
                 label='breakpoints')
        plt.plot(wave_obs, 1.0 / np.sqrt(logivar_obs), color='orange', label='sigma')
        plt.legend()
        plt.xlabel('Wavelength [ang]')
        plt.ylim(0.0, 1.2 * MAGFUNC_MAX)
        plt.title('1st Bspline fit')
        plt.show()
    # Convert the first-round magnitude fit back to a (clipped) sensitivity.
    modelfit1 = np.power(10.0, 0.4 * np.maximum(np.minimum(logfit1, MAGFUNC_MAX), MAGFUNC_MIN))
    residual = sensfunc / (modelfit1 + (modelfit1 == 0)) - 1.
    # new_mask = masktot & (sensfunc > 0)
    # residual_ivar = (modelfit1 * flux_obs / (sensfunc + (sensfunc == 0.0))) ** 2 * ivar_obs
    residual_ivar = np.ones_like(residual) / (0.1 ** 2)
    residual_ivar = residual_ivar * masktot
    (mean, med, stddev) = sigma_clipped_stats(residual[masktot], sigma_lower=3.0, sigma_upper=3.0)
    # Only refine if the residual scatter is significant.
    # (stddev is a scalar; the old np.median(stddev > 0.01) wrapper was a
    # no-op around this same comparison.)
    if stddev > 0.01:
        # Second round of the fit:
        msgs.info("Bspline fit: step 2")
        # Now do one more fit to the ratio of data/model - 1.
        bset_residual, bmask2 = pydl.iterfit(wave_obs, residual, invvar=residual_ivar, inmask=masktot, upper=upper,
                                             lower=lower, maxiter=maxiter, fullbkpt=bset1.breakpoints,
                                             kwargs_bspline=kwargs_bspline, kwargs_reject=kwargs_reject)
        # Fold the residual correction into the first-round bspline
        # coefficients (valid because both use the same breakpoints).
        bset_log1 = bset1.copy()
        bset_log1.coeff = bset_log1.coeff + bset_residual.coeff
        if debug:
            # Check for calibration
            resid_fit, _ = bset_residual.value(wave_obs)
            logfit2, _ = bset_log1.value(wave_obs)
            logfit2_bkpt, _ = bset_log1.value(bset1.breakpoints)
            plt.figure(1)
            plt.plot(wave_obs, residual, drawstyle='steps-mid', color='black', label='residual')
            plt.plot(wave_obs, resid_fit, color='cornflowerblue', label='resid_fit')
            plt.plot(wave_obs[~masktot], residual[~masktot], '+', color='red', markersize=5.0, label='masked residual')
            plt.plot(wave_obs[~masktot], resid_fit[~masktot], '+', color='red', markersize=5.0, label='masked resid_fit')
            plt.plot(init_breakpoints, logfit2_bkpt, '.', color='green', markersize=4.0, label='breakpoints')
            plt.plot(wave_obs, 1.0 / np.sqrt(residual_ivar), color='orange', label='sigma')
            plt.legend()
            plt.xlabel('Wavelength [ang]')
            plt.ylim(-0.1, 0.1)
            plt.title('2nd Bspline fit')
            plt.show()
    else:
        bset_log1 = bset1.copy()
    # ToDo JFH I think we should move towards writing this out as a vector in a fits table
    # rather than the b-spline.
    # Create sensitivity function
    newlogfit, _ = bset_log1.value(wave_obs)
    sensfit = np.power(10.0, 0.4 * np.maximum(np.minimum(newlogfit, MAGFUNC_MAX), MAGFUNC_MIN))
    # Zero out pixels where the magnitude function hit the clip limits.
    sensfit[~magfunc_mask] = 0.0
    if debug:
        # Check for calibration
        plt.figure(1)
        plt.plot(wave_obs, sensfunc, drawstyle='steps-mid', color='black', label='sensfunc')
        plt.plot(wave_obs, sensfit, color='cornflowerblue', label='sensfunc fit')
        plt.plot(wave_obs[~masktot], sensfunc[~masktot], '+', color='red', markersize=5.0, label='masked sensfunc')
        plt.plot(wave_obs[~masktot], sensfit[~masktot], '+', color='red', markersize=5.0, label='masked sensfuncfit')
        plt.legend()
        plt.xlabel('Wavelength [ang]')
        plt.ylim(0.0, 100.0)
        plt.show()
    # Check quality of the fit
    absdev = np.median(np.abs(sensfit / modelfit1 - 1))
    msgs.info('Difference between fits is {:g}'.format(absdev))
    # Check for residual of the fit
    if debug:
        # scale = np.power(10.0, 0.4 * sensfit)
        flux_cal = flux_obs * sensfit
        ivar_cal = ivar_obs / sensfit ** 2.
        plt.rcdefaults()
        plt.rcParams['font.family'] = 'times new roman'
        plt.figure(figsize=(11, 8.5))
        plt.clf()
        plt.plot(wave_obs, flux_cal, label='Calibrated Spectrum')
        plt.plot(wave_obs, flux_std, label='Model')
        plt.plot(wave_obs, np.sqrt(1 / ivar_cal))
        plt.legend()
        plt.xlabel('Wavelength [ang]')
        plt.ylabel('Flux [erg/s/cm2/Ang.]')
        plt.ylim(0, np.median(flux_std) * 2.5)
        plt.show()
        plt.close()
    # QA
    msgs.work("Add QA for sensitivity function")
    if show_QA:
        qa_bspline_magfit(wave_obs, bset_log1, magfunc, masktot)
    return sensfunc, sensfit
| 45.444631 | 140 | 0.622226 |
## the following massive function is deprecated
def generate_sensfunc_old(wave, counts, counts_ivar, airmass, exptime, spectrograph, telluric=False, star_type=None,
star_mag=None, ra=None, dec=None, std_file = None, BALM_MASK_WID=5., norder=4, nresln=None,debug=False):
""" Function to generate the sensitivity function.
This can work in different regimes:
- If telluric=False and RA=None and Dec=None
the code creates a sintetic standard star spectrum using the Kurucz models,
and from this it generates a sens func using nresln=20.0 and masking out
telluric regions.
- If telluric=False and RA and Dec are assigned
the standard star spectrum is extracted from the archive, and a sens func
is generated using nresln=20.0 and masking out telluric regions.
- If telluric=True
the code creates a sintetic standard star spectrum using the Kurucz models,
the sens func is created setting nresln=1.5 it contains the correction for
telluric lines.
Parameters:
----------
wave : array
Wavelength of the star [no longer with units]
counts : array
Flux (in counts) of the star
counts_ivar : array
Inverse variance of the star
airmass : float
Airmass
exptime : float
Exposure time in seconds
spectrograph : dict
Instrument specific dict
Used for extinction correction
telluric : bool
if True performs a telluric correction
star_type : str
Spectral type of the telluric star (used if telluric=True)
star_mag : float
Apparent magnitude of telluric star (used if telluric=True)
RA : float
deg, RA of the telluric star
if assigned, the standard star spectrum will be extracted from
the archive
DEC : float
deg, DEC of the telluric star
if assigned, the standard star spectrum will be extracted from
the archive
BALM_MASK_WID : float
Mask parameter for Balmer absorption. A region equal to
BALM_MASK_WID*resln is masked wher resln is the estimate
for the spectral resolution.
nresln : float
Number of resolution elements for break-point placement.
If assigned, overwrites the settings imposed by the code.
norder: int
Order number of polynomial fit.
Returns:
-------
sens_dict : dict
sensitivity function described by a dict
"""
# Create copy of the arrays to avoid modification and convert to
# electrons / s
wave_star = wave.copy()
flux_star = counts.copy() / exptime
ivar_star = counts_ivar.copy() * exptime ** 2
# Units
if not isinstance(wave_star, units.Quantity):
wave_star = wave_star * units.AA
# ToDo
# This should be changed. At the moment the extinction correction procedure
# requires the spectra to be in the optical. For the NIR is probably enough
# to extend the tables to longer wavelength setting the extinction to 0.0mag.
msgs.warn("Extinction correction applyed only if the spectra covers <10000Ang.")
# Apply Extinction if optical bands
if np.max(wave_star) < 10000. * units.AA:
msgs.info("Applying extinction correction")
extinct = load_extinction_data(spectrograph.telescope['longitude'],
spectrograph.telescope['latitude'])
ext_corr = extinction_correction(wave_star, airmass, extinct)
# Correct for extinction
flux_star = flux_star * ext_corr
ivar_star = ivar_star / ext_corr ** 2
else:
msgs.info("Extinction correction not applied")
# Create star model
if (ra is not None) and (dec is not None) and (star_mag is None) and (star_type is None):
# Pull star spectral model from archive
msgs.info("Get standard model")
# Grab closest standard within a tolerance
std_dict = find_standard_file(ra, dec)
if std_dict is not None:
# Load standard
load_standard_file(std_dict)
# Interpolate onto observed wavelengths
#std_xspec = XSpectrum1D.from_tuple((std_dict['wave'], std_dict['flux']))
debugger.set_trace()
xspec = std_xspec.rebin(wave_star) # Conserves flambda
flux_true = xspec.flux.value
else:
msgs.error('No spectrum found in our database for your standard star. Please use another standard star \
or consider add it into out database.')
elif (star_mag is not None) and (star_type is not None):
# Create star spectral model
msgs.info("Creating standard model")
# Create star model
star_loglam, star_flux, std_dict = telluric_sed(star_mag, star_type)
star_lam = 10 ** star_loglam
# Generate a dict matching the output of find_standard_file
std_dict = dict(cal_file='KuruczTelluricModel', name=star_type, fmt=1,
std_ra=None, std_dec=None)
std_dict['wave'] = star_lam * units.AA
std_dict['flux'] = 1e17 * star_flux * units.erg / units.s / units.cm ** 2 / units.AA
# ToDO If the Kuruck model is used, rebin create weird features
# I using scipy interpolate to avoid this
flux_true = scipy.interpolate.interp1d(std_dict['wave'], std_dict['flux'],
bounds_error=False,
fill_value='extrapolate')(wave_star)
else:
debugger.set_trace()
msgs.error('Insufficient information provided for fluxing. '
'Either the coordinates of the standard or a stellar type and magnitude are needed.')
if np.min(flux_true) <= 0.:
msgs.warn('Your spectrum extends beyond calibrated standard star, extrapolating the spectra with polynomial.')
# ToDo: should we extrapolate it using graybody model?
mask_model = flux_true<=0
msk_poly, poly_coeff = utils.robust_polyfit_djs(std_dict['wave'].value, std_dict['flux'].value,8,function='polynomial',
invvar=None, guesses=None, maxiter=50, inmask=None, sigma=None, \
lower=3.0, upper=3.0, maxdev=None, maxrej=3, groupdim=None,
groupsize=None,groupbadpix=False, grow=0, sticky=True, use_mad=True)
star_poly = utils.func_val(poly_coeff, wave_star.value, 'polynomial')
#flux_true[mask_model] = star_poly[mask_model]
flux_true = star_poly.copy()
if debug:
plt.plot(std_dict['wave'], std_dict['flux'],'bo',label='Raw Star Model')
plt.plot(std_dict['wave'], utils.func_val(poly_coeff, std_dict['wave'].value, 'polynomial'), 'k-',label='robust_poly_fit')
plt.plot(wave_star,flux_true,'r-',label='Your Final Star Model used for sensfunc')
plt.show()
# Set nresln
if nresln is None:
if telluric:
nresln = 1.5
msgs.info("Set nresln to 1.5")
else:
nresln = 20.0
msgs.info("Set nresln to 20.0")
# ToDo
# Compute an effective resolution for the standard. This could be improved
# to setup an array of breakpoints based on the resolution. At the
# moment we are using only one number
msgs.work("Should pull resolution from arc line analysis")
msgs.work("At the moment the resolution is taken as the PixelScale")
msgs.work("This needs to be changed!")
std_pix = np.median(np.abs(wave_star - np.roll(wave_star, 1)))
std_res = std_pix
resln = std_res
if (nresln * std_res) < std_pix:
msgs.warn("Bspline breakpoints spacing shoud be larger than 1pixel")
msgs.warn("Changing input nresln to fix this")
nresln = std_res / std_pix
# Mask bad pixels, edges, and Balmer, Paschen, Brackett, and Pfund lines
# Mask (True = good pixels)
msgs.info("Masking spectral regions:")
msk_star = np.ones_like(flux_star).astype(bool)
# Mask bad pixels
msgs.info(" Masking bad pixels")
msk_star[ivar_star <= 0.] = False
msk_star[flux_star <= 0.] = False
# Mask edges
msgs.info(" Masking edges")
msk_star[:1] = False
msk_star[-1:] = False
# Mask Balmer
msgs.info(" Masking Balmer")
lines_balm = np.array([3836.4, 3969.6, 3890.1, 4102.8, 4102.8, 4341.6, 4862.7, 5407.0,
6564.6, 8224.8, 8239.2]) * units.AA
for line_balm in lines_balm:
ibalm = np.abs(wave_star - line_balm) <= BALM_MASK_WID * resln
msk_star[ibalm] = False
# Mask Paschen
msgs.info(" Masking Paschen")
# air wavelengths from:
# https://www.subarutelescope.org/Science/Resources/lines/hi.html
lines_pasc = np.array([8203.6, 9229.0, 9546.0, 10049.4, 10938.1,
12818.1, 18751.0]) * units.AA
for line_pasc in lines_pasc:
ipasc = np.abs(wave_star - line_pasc) <= BALM_MASK_WID * resln
msk_star[ipasc] = False
# Mask Brackett
msgs.info(" Masking Brackett")
# air wavelengths from:
# https://www.subarutelescope.org/Science/Resources/lines/hi.html
lines_brac = np.array([14584.0, 18174.0, 19446.0, 21655.0,
26252.0, 40512.0]) * units.AA
for line_brac in lines_brac:
ibrac = np.abs(wave_star - line_brac) <= BALM_MASK_WID * resln
msk_star[ibrac] = False
# Mask Pfund
msgs.info(" Masking Pfund")
# air wavelengths from:
# https://www.subarutelescope.org/Science/Resources/lines/hi.html
lines_pfund = np.array([22788.0, 32961.0, 37395.0, 46525.0,
74578.0]) * units.AA
for line_pfund in lines_pfund:
ipfund = np.abs(wave_star - line_pfund) <= BALM_MASK_WID * resln
msk_star[ipfund] = False
# Mask Atm. cutoff
msgs.info(" Masking Below the atmospheric cutoff")
atms_cutoff = wave_star <= 3000.0 * units.AA
msk_star[atms_cutoff] = False
#if ~telluric: #Feige: This is a bug
if not telluric:
# Mask telluric absorption
msgs.info("Masking Telluric")
tell = np.any([((wave_star >= 7580.00 * units.AA) & (wave_star <= 7750.00 * units.AA)),
((wave_star >= 7160.00 * units.AA) & (wave_star <= 7340.00 * units.AA)),
((wave_star >= 6860.00 * units.AA) & (wave_star <= 6930.00 * units.AA)),
((wave_star >= 9310.00 * units.AA) & (wave_star <= 9665.00 * units.AA)),
((wave_star >= 11120.0 * units.AA) & (wave_star <= 11615.0 * units.AA)),
((wave_star >= 12610.0 * units.AA) & (wave_star <= 12720.0 * units.AA)),
((wave_star >= 13160.0 * units.AA) & (wave_star <= 15065.0 * units.AA)),
((wave_star >= 15700.0 * units.AA) & (wave_star <= 15770.0 * units.AA)),
((wave_star >= 16000.0 * units.AA) & (wave_star <= 16100.0 * units.AA)),
((wave_star >= 16420.0 * units.AA) & (wave_star <= 16580.0 * units.AA)),
((wave_star >= 17310.0 * units.AA) & (wave_star <= 20775.0 * units.AA)),
(wave_star >= 22680.0 * units.AA)], axis=0)
msk_star[tell] = False
# Apply mask
ivar_star[~msk_star] = 0.0
# Fit in magnitudes
kwargs_bspline = {'bkspace': resln.value * nresln}
kwargs_reject = {'maxrej': 5}
sensfunc, sensfit = bspline_magfit(wave_star.value, flux_star, ivar_star, flux_true, inmask=msk_star,
kwargs_bspline=kwargs_bspline, kwargs_reject=kwargs_reject,debug=debug)
#Cleaning sensfunc
## ToDo: currently I'm fitting the sensfunc in the masked region with a polynomial, should we change the algorithm to
## fit polynomial first and then bsline the poly-subtracted flux ???
## keep tell free region for poly fit. tell2 is different from tell since tell2 include more small trunk of telluric free
## regions. tell2 might be not suitable for the bspline fitting. We need to select a more robust telluric region for both purpose.
tell2 = np.any([((wave_star >= 7580.00 * units.AA) & (wave_star <= 7750.00 * units.AA)),
((wave_star >= 7160.00 * units.AA) & (wave_star <= 7340.00 * units.AA)),
((wave_star >= 6860.00 * units.AA) & (wave_star <= 6930.00 * units.AA)),
((wave_star >= 9310.00 * units.AA) & (wave_star <= 9665.00 * units.AA)),
((wave_star >= 11120.0 * units.AA) & (wave_star <= 11545.0 * units.AA)),
((wave_star >= 12610.0 * units.AA) & (wave_star <= 12720.0 * units.AA)),
((wave_star >= 13400.0 * units.AA) & (wave_star <= 14830.0 * units.AA)),
((wave_star >= 15700.0 * units.AA) & (wave_star <= 15770.0 * units.AA)),
((wave_star >= 16000.0 * units.AA) & (wave_star <= 16100.0 * units.AA)),
((wave_star >= 16420.0 * units.AA) & (wave_star <= 16580.0 * units.AA)),
((wave_star >= 17630.0 * units.AA) & (wave_star <= 19690.0 * units.AA)),
((wave_star >= 19790.0 * units.AA) & (wave_star <= 19810.0 * units.AA)),
((wave_star >= 19950.0 * units.AA) & (wave_star <= 20310.0 * units.AA)),
((wave_star >= 20450.0 * units.AA) & (wave_star <= 20920.0 * units.AA)),
((wave_star >= 24000.0 * units.AA) & (wave_star <= 24280.0 * units.AA)),
((wave_star >= 24320.0 * units.AA) & (wave_star <= 24375.0 * units.AA)),
(wave_star >= 24450.0 * units.AA)], axis=0)
msk_all = msk_star.copy() # mask for polynomial fitting
msk_sens = msk_star.copy() # mask for sensfunc
med, mad = utils.robust_meanstd(sensfunc)
msk_crazy = (sensfunc<=0) | (sensfunc>1e3*med)
msk_all[tell2] = False
msk_all[msk_crazy] = False
msk_sens[msk_crazy] = False
if (len(wave_star.value[msk_all]) < norder+1) or (len(wave_star.value[msk_all]) < 0.1*len(wave_star.value)):
msgs.warn('It seems this order/spectrum well within the telluric region. No polynomial fit will be performed.')
else:
#polyfit the sensfunc
msk_poly, poly_coeff = utils.robust_polyfit_djs(wave_star.value[msk_all],np.log10(sensfunc[msk_all]), norder, function='polynomial',
invvar=None,guesses = None, maxiter = 50, inmask = None, sigma = None,\
lower = 3.0, upper = 3.0,maxdev=None,maxrej=3,groupdim=None,groupsize=None,\
groupbadpix=False, grow=0,sticky=True,use_mad=True)
sensfunc_poly = 10**(utils.func_val(poly_coeff, wave_star.value, 'polynomial'))
sensfunc[~msk_sens] = sensfunc_poly[~msk_sens]
if debug:
plt.rcdefaults()
plt.rcParams['font.family'] = 'times new roman'
plt.plot(wave_star.value[~msk_sens], sensfunc[~msk_sens], 'bo')
plt.plot(wave_star.value, sensfunc_poly, 'r-',label='Polyfit')
plt.plot(wave_star.value, sensfunc, 'k-',label='bspline fitting')
plt.ylim(0.0, 100.0)
plt.legend()
plt.xlabel('Wavelength [ang]')
plt.ylabel('Sensfunc')
plt.show()
plt.close()
plt.figure(figsize=(10, 6))
plt.clf()
plt.plot(wave_star.value,flux_star*sensfunc, label='Calibrated Spectrum')
plt.plot(wave_star.value,flux_true, label='Model')
plt.plot(wave_star.value,np.sqrt(1/ivar_star))
plt.legend()
plt.xlabel('Wavelength [ang]')
plt.ylabel('Flux [erg/s/cm2/Ang.]')
plt.ylim(0,np.median(flux_true)*2.5)
plt.title('Final corrected spectrum')
plt.show()
plt.close()
# JFH Left off here.
# Creating the dict
#msgs.work("Is min, max and wave_min, wave_max a duplicate?")
#sens_dict = dict(wave=wave_sens, sensfunc=sensfunc, min=None, max=None, std=std_dict)
# Add in wavemin,wavemax
sens_dict = {}
sens_dict['wave'] = wave_star
sens_dict['sensfunc'] = sensfunc
sens_dict['wave_min'] = np.min(wave_star)
sens_dict['wave_max'] = np.max(wave_star)
sens_dict['exptime']= exptime
sens_dict['airmass']= airmass
sens_dict['std_file']= std_file
# Get other keys from standard dict
sens_dict['std_ra'] = std_dict['std_ra']
sens_dict['std_dec'] = std_dict['std_dec']
sens_dict['std_name'] = std_dict['name']
sens_dict['cal_file'] = std_dict['cal_file']
sens_dict['flux_true'] = flux_true
#sens_dict['std_dict'] = std_dict
#sens_dict['msk_star'] = msk_star
#sens_dict['mag_set'] = mag_set
return sens_dict
## bspline_magfit is deprecated at this moment.
def bspline_magfit(wave, flux, ivar, flux_std, inmask=None, maxiter=35, upper=2, lower=2,
                   kwargs_bspline=None, kwargs_reject=None, debug=False, show_QA=False):
    """
    Perform a bspline fit to the flux ratio of standard to
    observed counts. Used to generate a sensitivity function.

    Parameters
    ----------
    wave : ndarray
        wavelength as observed
    flux : ndarray
        counts/s as observed
    ivar : ndarray
        inverse variance
    flux_std : Quantity array
        standard star true flux (erg/s/cm^2/A)
    inmask : ndarray
        bspline mask (True = good pixel)
    maxiter : integer
        maximum number of iterations for bspline_iterfit
    upper : integer
        number of sigma for rejection in bspline_iterfit
    lower : integer
        number of sigma for rejection in bspline_iterfit
    kwargs_bspline : dict, optional
        keywords for bspline_iterfit; must contain 'bkspace'
    kwargs_reject : dict, optional
        keywords for bspline_iterfit
    debug : bool
        if True shows some debugging plots
    show_QA : bool
        if True shows a QA plot of the final bspline fit

    Returns
    -------
    sensfunc : ndarray
        sensitivity function computed directly from the (clipped) flux ratio
    sensfit : ndarray
        bspline model of the sensitivity function
    """
    # Avoid the mutable-default-argument pitfall (the default dicts used to be
    # shared across calls); normalize to fresh empty dicts instead.
    if kwargs_bspline is None:
        kwargs_bspline = {}
    if kwargs_reject is None:
        kwargs_reject = {}
    # Create copy of the arrays to avoid modification
    wave_obs = wave.copy()
    flux_obs = flux.copy()
    ivar_obs = ivar.copy()
    # Preparing arrays to run in bspline_iterfit
    # (this sanity check used to appear twice verbatim; kept once)
    if np.all(~np.isfinite(ivar_obs)):
        msgs.warn("NaN are present in the inverse variance")
    # Work in magnitude space: log of flux_obs with a floor at TINY
    logflux_obs = 2.5 * np.log10(np.maximum(flux_obs, TINY))
    # Set a fixed value for the variance of logflux (uniform weighting)
    logivar_obs = np.ones_like(logflux_obs) * (10.0 ** 2)
    # Log of the flux_std model with a floor at TINY
    logflux_std = 2.5 * np.log10(np.maximum(flux_std, TINY))
    # Magnitude difference, with a floor at MAGFUNC_MIN and a ceiling at
    # MAGFUNC_MAX
    magfunc = logflux_std - logflux_obs
    magfunc = np.maximum(np.minimum(magfunc, MAGFUNC_MAX), MAGFUNC_MIN)
    # Pixels at (or within 1% of) the clip limits are considered unusable
    magfunc_mask = (magfunc < 0.99 * MAGFUNC_MAX) & (magfunc > 0.99 * MAGFUNC_MIN)
    # Build the total good-pixel mask (masktot=True means good pixel)
    if inmask is None:
        masktot = (ivar_obs > 0.0) & np.isfinite(logflux_obs) & np.isfinite(ivar_obs) & \
                  np.isfinite(logflux_std) & magfunc_mask
    else:
        masktot = inmask & (ivar_obs > 0.0) & np.isfinite(logflux_obs) & np.isfinite(ivar_obs) & \
                  np.isfinite(logflux_std) & magfunc_mask
    logivar_obs[~masktot] = 0.
    # Sensitivity function implied directly by the clipped magnitude function
    sensfunc = 10.0 ** (0.4 * magfunc)
    msgs.info("Initialize bspline for flux calibration")
    init_bspline = pydl.bspline(wave_obs, bkspace=kwargs_bspline['bkspace'])
    fullbkpt = init_bspline.breakpoints
    # Remove breakpoints that fall inside masked regions (nearest-neighbor
    # interpolation of the boolean mask onto the breakpoint wavelengths).
    msk_obs = np.ones_like(wave_obs).astype(bool)
    msk_obs[~masktot] = False
    msk_bkpt = scipy.interpolate.interp1d(wave_obs, msk_obs, kind='nearest', fill_value='extrapolate')(fullbkpt)
    init_breakpoints = fullbkpt[msk_bkpt > 0.999]
    # First round of the fit:
    msgs.info("Bspline fit: step 1")
    bset1, bmask = pydl.iterfit(wave_obs, magfunc, invvar=logivar_obs, inmask=masktot, upper=upper, lower=lower,
                                fullbkpt=init_breakpoints, maxiter=maxiter, kwargs_bspline=kwargs_bspline,
                                kwargs_reject=kwargs_reject)
    logfit1, _ = bset1.value(wave_obs)
    logfit_bkpt, _ = bset1.value(init_breakpoints)
    if debug:
        # Check for calibration
        plt.figure(1)
        plt.plot(wave_obs, magfunc, drawstyle='steps-mid', color='black', label='magfunc')
        plt.plot(wave_obs, logfit1, color='cornflowerblue', label='logfit1')
        plt.plot(wave_obs[~masktot], magfunc[~masktot], '+', color='red', markersize=5.0, label='masked magfunc')
        plt.plot(wave_obs[~masktot], logfit1[~masktot], '+', color='red', markersize=5.0, label='masked logfit1')
        plt.plot(init_breakpoints, logfit_bkpt, '.', color='green', markersize=4.0, label='breakpoints')
        plt.plot(init_breakpoints, np.interp(init_breakpoints, wave_obs, magfunc), '.', color='green', markersize=4.0,
                 label='breakpoints')
        plt.plot(wave_obs, 1.0 / np.sqrt(logivar_obs), color='orange', label='sigma')
        plt.legend()
        plt.xlabel('Wavelength [ang]')
        plt.ylim(0.0, 1.2 * MAGFUNC_MAX)
        plt.title('1st Bspline fit')
        plt.show()
    modelfit1 = np.power(10.0, 0.4 * np.maximum(np.minimum(logfit1, MAGFUNC_MAX), MAGFUNC_MIN))
    residual = sensfunc / (modelfit1 + (modelfit1 == 0)) - 1.
    # Assign a fixed 10% uncertainty to the residual; masked pixels get zero weight
    residual_ivar = np.ones_like(residual) / (0.1 ** 2)
    residual_ivar = residual_ivar * masktot
    (mean, med, stddev) = sigma_clipped_stats(residual[masktot], sigma_lower=3.0, sigma_upper=3.0)
    # sigma_clipped_stats returns scalars; the previous `np.median(stddev > 0.01)`
    # reduced to the same truth value but obscured the intent.
    if stddev > 0.01:
        # Second round of the fit:
        msgs.info("Bspline fit: step 2")
        # Now do one more fit to the ratio of data/model - 1.
        bset_residual, bmask2 = pydl.iterfit(wave_obs, residual, invvar=residual_ivar, inmask=masktot, upper=upper,
                                             lower=lower, maxiter=maxiter, fullbkpt=bset1.breakpoints,
                                             kwargs_bspline=kwargs_bspline, kwargs_reject=kwargs_reject)
        # The combined fit is the sum of the coefficients in magnitude space
        bset_log1 = bset1.copy()
        bset_log1.coeff = bset_log1.coeff + bset_residual.coeff
        if debug:
            # Check for calibration
            resid_fit, _ = bset_residual.value(wave_obs)
            logfit2, _ = bset_log1.value(wave_obs)
            logfit2_bkpt, _ = bset_log1.value(bset1.breakpoints)
            plt.figure(1)
            plt.plot(wave_obs, residual, drawstyle='steps-mid', color='black', label='residual')
            plt.plot(wave_obs, resid_fit, color='cornflowerblue', label='resid_fit')
            plt.plot(wave_obs[~masktot], residual[~masktot], '+', color='red', markersize=5.0, label='masked residual')
            plt.plot(wave_obs[~masktot], resid_fit[~masktot], '+', color='red', markersize=5.0, label='masked resid_fit')
            # NOTE(review): logfit2_bkpt is evaluated at bset1.breakpoints but
            # plotted against init_breakpoints -- confirm these always agree.
            plt.plot(init_breakpoints, logfit2_bkpt, '.', color='green', markersize=4.0, label='breakpoints')
            plt.plot(wave_obs, 1.0 / np.sqrt(residual_ivar), color='orange', label='sigma')
            plt.legend()
            plt.xlabel('Wavelength [ang]')
            plt.ylim(-0.1, 0.1)
            plt.title('2nd Bspline fit')
            plt.show()
    else:
        bset_log1 = bset1.copy()
    # ToDo JFH I think we should move towards writing this out as a vector in a fits table
    # rather than the b-spline.
    # Create sensitivity function
    newlogfit, _ = bset_log1.value(wave_obs)
    sensfit = np.power(10.0, 0.4 * np.maximum(np.minimum(newlogfit, MAGFUNC_MAX), MAGFUNC_MIN))
    # Zero out pixels whose magnitude function hit the clip limits
    sensfit[~magfunc_mask] = 0.0
    if debug:
        # Check for calibration
        plt.figure(1)
        plt.plot(wave_obs, sensfunc, drawstyle='steps-mid', color='black', label='sensfunc')
        plt.plot(wave_obs, sensfit, color='cornflowerblue', label='sensfunc fit')
        plt.plot(wave_obs[~masktot], sensfunc[~masktot], '+', color='red', markersize=5.0, label='masked sensfunc')
        plt.plot(wave_obs[~masktot], sensfit[~masktot], '+', color='red', markersize=5.0, label='masked sensfuncfit')
        plt.legend()
        plt.xlabel('Wavelength [ang]')
        plt.ylim(0.0, 100.0)
        plt.show()
    # Check quality of the fit
    absdev = np.median(np.abs(sensfit / modelfit1 - 1))
    msgs.info('Difference between fits is {:g}'.format(absdev))
    # Check for residual of the fit
    if debug:
        flux_cal = flux_obs * sensfit
        ivar_cal = ivar_obs / sensfit ** 2.
        plt.rcdefaults()
        plt.rcParams['font.family'] = 'times new roman'
        plt.figure(figsize=(11, 8.5))
        plt.clf()
        plt.plot(wave_obs, flux_cal, label='Calibrated Spectrum')
        plt.plot(wave_obs, flux_std, label='Model')
        plt.plot(wave_obs, np.sqrt(1 / ivar_cal))
        plt.legend()
        plt.xlabel('Wavelength [ang]')
        plt.ylabel('Flux [erg/s/cm2/Ang.]')
        plt.ylim(0, np.median(flux_std) * 2.5)
        plt.show()
        plt.close()
    # QA
    msgs.work("Add QA for sensitivity function")
    if show_QA:
        qa_bspline_magfit(wave_obs, bset_log1, magfunc, masktot)
    return sensfunc, sensfit
def qa_bspline_magfit(wave, bset, magfunc, mask):
    """Show a QA plot of the bspline fit to the magnitude sensitivity function.

    Parameters
    ----------
    wave : ndarray
        wavelengths at which to evaluate the fit
    bset : bspline set
        fitted bspline (evaluated at ``wave`` and at its own breakpoints)
    magfunc : ndarray
        magnitude-space sensitivity data that was fit
    mask : ndarray of bool
        good-pixel mask; masked (False) points are highlighted in red
    """
    plt.close("all")
    # Publication-style rc settings, applied in one call (identical values to
    # the previous per-key assignments).
    plt.rcParams.update({
        'savefig.dpi': 600,
        'xtick.top': True,
        'ytick.right': True,
        'xtick.minor.visible': True,
        'ytick.minor.visible': True,
        'ytick.direction': 'in',
        'xtick.direction': 'in',
        'xtick.major.size': 6,
        'ytick.major.size': 6,
        'xtick.minor.size': 3,
        'ytick.minor.size': 3,
        'xtick.major.width': 1,
        'ytick.major.width': 1,
        'xtick.minor.width': 1,
        'ytick.minor.width': 1,
        'axes.linewidth': 1,
        'lines.linewidth': 2,
        'legend.frameon': False,
        'legend.handletextpad': 1.0,
    })
    final_fit, _ = bset.value(wave)
    final_fit_bkpt, _ = bset.value(bset.breakpoints)
    plt.figure(1)
    plt.plot(bset.breakpoints, final_fit_bkpt, '.', color='green', markersize=4.0, label='breakpoints')
    plt.plot(wave, magfunc, drawstyle='steps-mid', color='black', label='magfunc')
    plt.plot(wave, final_fit, color='cornflowerblue', label='bspline fit')
    plt.plot(wave[~mask], magfunc[~mask], '+', color='red', markersize=5.0, label='masked points')
    plt.legend()
    plt.xlabel('Wavelength [ang]')
    plt.title('Final Result of the Bspline fit')
    plt.show()
    return
| 1,441 | 0 | 23 |
225bf9f25bf98ea2daf3a88251d26bd761a4c7de | 3,536 | py | Python | virtuso.py | Thanatoz-1/virtuoso | 777b8a526d08751ac93ae236356c035641652824 | [
"Apache-2.0"
] | null | null | null | virtuso.py | Thanatoz-1/virtuoso | 777b8a526d08751ac93ae236356c035641652824 | [
"Apache-2.0"
] | 2 | 2019-12-08T13:18:44.000Z | 2019-12-13T12:12:39.000Z | virtuso.py | Thanatoz-1/virtuoso | 777b8a526d08751ac93ae236356c035641652824 | [
"Apache-2.0"
] | null | null | null | from argparse import ArgumentParser
import os
import re
from collections import defaultdict
import random
import spacy
nlp = spacy.load('en_core_web_sm')
target = DataFetcher()
def ArgParser():
'''
This is the function to parse your arguments into a more understable form and
provide relavant help whenever needed.
The package usage are as follows:
python virtuoso <path_to_templates> <path_to_outputs>
<path_to_templates> is the path to the text file having templates as mentioned
in the README file.
<path_to_generated> is the file path and the file name in which the csv needs to be
stored.
The script is compatible with python>3.5.2
'''
parser = ArgumentParser()
parser.add_argument(
"Text_file_path",
help=".txt file relative path // in which templates are stored")
parser.add_argument(
"Output_file_path",
help="relative path of file where data is to be stored")
args = parser.parse_args()
textPath = args.Text_file_path
# Append a txt extention to the templates file if not specified
textPath = textPath + '.txt' if len(textPath.split('.')) == 1 else textPath
savePath = args.Output_file_path
# Append a csv extention to the templates file if not specified
savePath = args.Output_file_path + '.csv' if len(
savePath.split('.')) == 1 else args.Output_file_path
return textPath, savePath
if __name__ == '__main__':
textPath, savePath = ArgParser()
# Reading templates
with open(textPath, 'r') as f:
textData = f.readlines()
mode = 'a' if os.path.exists(savePath) else 'w'
header = 'data'
count = 0
with open(savePath, mode) as out:
for line in textData:
tokens = line.split()
repeats = 1 if tokens[0].replace("{", "").replace(
"}", "") == '' else tokens[0].replace("{", "").replace(
"}", "")
query = ' '.join(tokens[1:])
res = []
for _ in range(int(repeats)):
res.append(process_string(query))
out.write(res[0] + '\n')
| 30.747826 | 88 | 0.598982 | from argparse import ArgumentParser
import os
import re
from collections import defaultdict
import random
import spacy
nlp = spacy.load('en_core_web_sm')
class DataFetcher:
    """Load every ``*.txt`` table under ``Data/`` and serve random lines.

    Each text file becomes an entry in ``self._data`` keyed by the file name
    without its extension; the value is the list of raw lines (newlines kept).
    """

    def __init__(self):
        BASE = 'Data/'
        self.tables = os.listdir(BASE)
        # defaultdict(list) is the idiomatic spelling of defaultdict(lambda: [])
        self._data = defaultdict(list)
        for file in self.tables:
            file_name, ext = os.path.splitext(file)
            if ext == '.txt':
                with open(os.path.join(BASE, file), 'r') as f:
                    self._data[file_name] = f.readlines()

    def getItem(self, filename):
        """Return one randomly chosen line from the table *filename*.

        NOTE(review): an unknown *filename* creates an empty entry and makes
        ``random.choice`` raise IndexError -- confirm whether that is intended.
        """
        ret = random.choice(self._data[filename])
        return ret
target = DataFetcher()
def Extractor(keyword, null_token='O'):
    """Tokenize *keyword* with spaCy and attach BIO-style labels.

    A plain word (no "[...]" placeholder) is tokenized and every token gets
    *null_token*. A placeholder such as "[LABEL file]" is expanded with a
    randomly drawn line from the data file named ``file``; the first sub-token
    is tagged ``B-LABEL`` and every following sub-token ``I-LABEL``.
    """
    if '[' not in str(keyword):
        return [(tok.text, null_token) for tok in nlp(keyword)]
    # Strip punctuation/brackets, leaving "<label> <source-file>".
    parts = re.sub('[^0-9a-zA-Z]+', ' ', str(keyword)).strip().split()
    label = parts[0]
    entity_tokens = target.getItem(parts[1]).split()
    labelled = []
    first = True
    for entity_tok in entity_tokens:
        for sub_tok in nlp(entity_tok):
            prefix = 'B-' if first else 'I-'
            labelled.append((sub_tok.text, prefix + label))
            first = False
    return labelled
def process_string(query):
    """Label every whitespace-separated token of *query* and serialize it.

    Returns a single space-separated string of ``token###label`` pairs as
    produced by :func:`Extractor`.
    """
    pairs = []
    for word in query.replace('\n', '').split():
        pairs.extend(Extractor(word))
    return ' '.join('{}###{}'.format(tok, lab) for tok, lab in pairs)
def ArgParser():
    '''
    This is the function to parse your arguments into a more understable form and
    provide relavant help whenever needed.
    The package usage are as follows:
    python virtuoso <path_to_templates> <path_to_outputs>
    <path_to_templates> is the path to the text file having templates as mentioned
    in the README file.
    <path_to_generated> is the file path and the file name in which the csv needs to be
    stored.
    A ".txt" / ".csv" extension is appended when the given path has none.
    The script is compatible with python>3.5.2
    '''
    parser = ArgumentParser()
    parser.add_argument(
        "Text_file_path",
        help=".txt file relative path // in which templates are stored")
    parser.add_argument(
        "Output_file_path",
        help="relative path of file where data is to be stored")
    parsed = parser.parse_args()

    # Default the templates file to a .txt extension if none was given.
    template_path = parsed.Text_file_path
    if len(template_path.split('.')) == 1:
        template_path += '.txt'

    # Default the output file to a .csv extension if none was given.
    output_path = parsed.Output_file_path
    if len(output_path.split('.')) == 1:
        output_path += '.csv'

    return template_path, output_path
if __name__ == '__main__':
    textPath, savePath = ArgParser()
    # Reading templates
    with open(textPath, 'r') as f:
        textData = f.readlines()
    # Append when the output file already exists, otherwise create it.
    mode = 'a' if os.path.exists(savePath) else 'w'
    # NOTE(review): `header` and `count` are never used below -- confirm
    # whether a header row / counter was meant to be written.
    header = 'data'
    count = 0
    with open(savePath, mode) as out:
        for line in textData:
            tokens = line.split()
            # First token is the repeat count wrapped in braces, e.g. "{5}";
            # an empty "{}" (or bare "{}"-like token) means repeat once.
            repeats = 1 if tokens[0].replace("{", "").replace(
                "}", "") == '' else tokens[0].replace("{", "").replace(
                    "}", "")
            query = ' '.join(tokens[1:])
            res = []
            for _ in range(int(repeats)):
                res.append(process_string(query))
            # NOTE(review): `repeats` results are generated but only the first
            # is written out -- confirm whether all should be written.
            out.write(res[0] + '\n')
| 1,271 | -3 | 122 |
0d79f028460ea1be0d7b0b3b212ae770f0507107 | 94 | py | Python | catcher_modules/__init__.py | Daniel-Han-Yang/catcher_modules | 2eff08d2c19719539f761a7cae0a48b69a3231db | [
"Apache-2.0"
] | 5 | 2019-01-09T14:15:25.000Z | 2020-09-11T12:18:43.000Z | catcher_modules/__init__.py | Daniel-Han-Yang/catcher_modules | 2eff08d2c19719539f761a7cae0a48b69a3231db | [
"Apache-2.0"
] | 44 | 2019-06-30T09:19:42.000Z | 2021-12-30T16:09:09.000Z | catcher_modules/__init__.py | Daniel-Han-Yang/catcher_modules | 2eff08d2c19719539f761a7cae0a48b69a3231db | [
"Apache-2.0"
] | 5 | 2019-09-01T09:49:05.000Z | 2021-09-12T06:00:54.000Z | APPNAME = 'catcher-modules'
APPAUTHOR = 'Valerii Tikhonov, Ekaterina Belova'
APPVSN = '6.0.0'
| 23.5 | 48 | 0.734043 | APPNAME = 'catcher-modules'
APPAUTHOR = 'Valerii Tikhonov, Ekaterina Belova'
APPVSN = '6.0.0'
| 0 | 0 | 0 |
588a6b0bc1e1871fc4e4ef910ec05c9b253e71b8 | 968 | py | Python | src/bxgateway/utils/interval_minimum.py | doubleukay/bxgateway | ac01fc9475c039cf4255576dd4ecd6bff6c48f69 | [
"MIT"
] | 21 | 2019-11-06T17:37:41.000Z | 2022-03-28T07:18:33.000Z | src/bxgateway/utils/interval_minimum.py | doubleukay/bxgateway | ac01fc9475c039cf4255576dd4ecd6bff6c48f69 | [
"MIT"
] | 4 | 2019-11-06T22:08:00.000Z | 2021-12-08T06:20:51.000Z | src/bxgateway/utils/interval_minimum.py | doubleukay/bxgateway | ac01fc9475c039cf4255576dd4ecd6bff6c48f69 | [
"MIT"
] | 10 | 2020-08-05T15:58:16.000Z | 2022-02-07T23:51:10.000Z | from typing import Optional
from bxcommon.utils.alarm_queue import AlarmQueue
from bxutils import logging
logger = logging.get_logger(__name__)
| 30.25 | 79 | 0.72624 | from typing import Optional
from bxcommon.utils.alarm_queue import AlarmQueue
from bxutils import logging
logger = logging.get_logger(__name__)
class IntervalMinimum:
    """Track the minimum value observed during fixed-length time intervals.

    Values recorded via :meth:`add` accumulate a pending minimum; when the
    interval ends, that minimum is published as ``current_minimum`` (0 when
    nothing was recorded) and collection restarts.
    """

    def __init__(self, interval_len_s: int, alarm_queue: AlarmQueue):
        self.current_minimum: int = 0
        self._interval_len_s = interval_len_s
        self._next_interval_minimum: Optional[int] = None
        # _on_interval_end reschedules itself by returning the interval length.
        alarm_queue.register_alarm(self._interval_len_s, self._on_interval_end)

    def add(self, new_value: int):
        """Record *new_value* as a candidate minimum for the running interval."""
        pending = self._next_interval_minimum
        self._next_interval_minimum = (
            new_value if pending is None else min(pending, new_value)
        )

    def _on_interval_end(self):
        """Roll the interval over: publish its minimum and reset the tracker."""
        pending = self._next_interval_minimum
        self.current_minimum = 0 if pending is None else pending
        self._next_interval_minimum = None
        return self._interval_len_s
| 716 | 1 | 104 |
c428d11f614a84c41c037c142536fd24d6772c42 | 145 | py | Python | AcademicDealerBackend/project_level_tests.py | Acciente717/AcademicDealerBackend | 8024725f88997fa430fa92e1caa28161ffbb06f6 | [
"MIT"
] | 5 | 2019-03-10T06:57:15.000Z | 2019-03-17T03:04:40.000Z | AcademicDealerBackend/project_level_tests.py | Acciente717/AcademicDealerBackend | 8024725f88997fa430fa92e1caa28161ffbb06f6 | [
"MIT"
] | 11 | 2019-05-14T15:13:48.000Z | 2019-05-31T15:31:33.000Z | AcademicDealerBackend/project_level_tests.py | Acciente717/AcademicDealerBackend | 8024725f88997fa430fa92e1caa28161ffbb06f6 | [
"MIT"
] | null | null | null | import sys
sys.path.append("./AcademicDealerBackend/users")
from tests.py import CoreFunctionalTest
CoreFunctionalTest().test_core_functions()
| 20.714286 | 48 | 0.827586 | import sys
sys.path.append("./AcademicDealerBackend/users")
from tests.py import CoreFunctionalTest
CoreFunctionalTest().test_core_functions()
| 0 | 0 | 0 |
0d0a3891596f7bbd389105cdc17d75da9f6f2560 | 108 | py | Python | tasks/EPAM/python_course/foundation-python/l7/m7-18-other-tools.py | AleksNeStu/projects | 1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb | [
"Apache-2.0"
] | 2 | 2022-01-19T18:01:35.000Z | 2022-02-06T06:54:38.000Z | tasks/EPAM/python_course/foundation-python/l7/m7-18-other-tools.py | AleksNeStu/projects | 1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb | [
"Apache-2.0"
] | null | null | null | tasks/EPAM/python_course/foundation-python/l7/m7-18-other-tools.py | AleksNeStu/projects | 1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb | [
"Apache-2.0"
] | null | null | null | # RunSnakeRun
# https://pypi.python.org/pypi/RunSnakeRun
# http://www.vrplumber.com/programming/runsnakerun/ | 36 | 51 | 0.787037 | # RunSnakeRun
# https://pypi.python.org/pypi/RunSnakeRun
# http://www.vrplumber.com/programming/runsnakerun/ | 0 | 0 | 0 |
d7e2763dfb95ef81aa2a40092387dd5fb62e4526 | 2,854 | py | Python | dashboards/soap_explorer/soap_cluster.py | jmmshn/mp_dash_boards | 19f893fb50d88368068e9e6b9518bd2041db41e9 | [
"MIT"
] | null | null | null | dashboards/soap_explorer/soap_cluster.py | jmmshn/mp_dash_boards | 19f893fb50d88368068e9e6b9518bd2041db41e9 | [
"MIT"
] | null | null | null | dashboards/soap_explorer/soap_cluster.py | jmmshn/mp_dash_boards | 19f893fb50d88368068e9e6b9518bd2041db41e9 | [
"MIT"
] | null | null | null | # %%
from typing import List
from monty.serialization import loadfn
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from dash_mp_components import Simple3DScene
from pymatgen import Site
import crystal_toolkit # noqa: F401
from pymatgen.analysis.graphs import MoleculeGraph
from pymatgen.core.structure import Molecule
import os
import pandas as pd
import plotly.express as px
dir_path = os.path.dirname(os.path.realpath(__file__))
DUMMY_SPECIES = "Si"
df_res = pd.read_pickle('df_res.pkl')
cluster_fig = fig = px.scatter(df_res, x="x", y='y', width=1000, height=900,
color='DBSCAN_lab', hover_name='index', title="Clusters of Similar Sites")
external_stylesheets = ["https://codepen.io/chriddyp/pen/bWLwgP.css"]
def get_dbs(db_names: List[str], db_file: str = dir_path + "/./db_info.pub.json") -> List:
"""Read the db_file and get the databases corresponding to <<db_name>>
Args:
db_name (List[str]): A list of names of the database we want
db_file (str): The db_file we are reading from
Returns:
MongograntStore: the store we need to access
"""
db_dict = loadfn(db_file)
stores = []
for j_name in db_names:
if j_name not in db_dict:
raise ValueError(
f"The store named {j_name} is missing from the db_file")
stores.append(db_dict[j_name])
return stores
soap_site_db, = get_dbs(["soap_site_descriptors"])
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# App layout
app.layout = html.Div(
[
dcc.Graph(id="cluster-plot", figure=fig),
html.Pre(id="debug", children=""),
Simple3DScene(
id='site',
sceneSize=400,
settings={'extractAxis': True},
axisView='SW',
data={}
),
]
)
@app.callback(Output('debug', 'children'), [Input('cluster-plot', 'clickData')])
@app.callback(Output('site', 'data'), [Input('cluster-plot', 'clickData')])
if __name__ == "__main__":
app.run_server(debug=True)
# %%
| 28.828283 | 105 | 0.653819 | # %%
from typing import List
from monty.serialization import loadfn
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from dash_mp_components import Simple3DScene
from pymatgen import Site
import crystal_toolkit # noqa: F401
from pymatgen.analysis.graphs import MoleculeGraph
from pymatgen.core.structure import Molecule
import os
import pandas as pd
import plotly.express as px
dir_path = os.path.dirname(os.path.realpath(__file__))
DUMMY_SPECIES = "Si"
df_res = pd.read_pickle('df_res.pkl')
cluster_fig = fig = px.scatter(df_res, x="x", y='y', width=1000, height=900,
color='DBSCAN_lab', hover_name='index', title="Clusters of Similar Sites")
external_stylesheets = ["https://codepen.io/chriddyp/pen/bWLwgP.css"]
def get_dbs(db_names: List[str], db_file: str = dir_path + "/./db_info.pub.json") -> List:
    """Read the db_file and get the databases corresponding to ``db_names``.

    Args:
        db_names (List[str]): names of the databases we want
        db_file (str): path of the JSON file describing the stores

    Returns:
        List: the stores matching ``db_names``, in the requested order

    Raises:
        ValueError: if a requested name is absent from ``db_file``
    """
    db_dict = loadfn(db_file)
    # Validate every requested name first, then collect the stores in order.
    for j_name in db_names:
        if j_name not in db_dict:
            raise ValueError(
                f"The store named {j_name} is missing from the db_file")
    return [db_dict[j_name] for j_name in db_names]
soap_site_db, = get_dbs(["soap_site_descriptors"])
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# App layout: the cluster scatter plot, a <pre> element used for debug echo
# of the clicked point, and a 3D scene showing the selected site's local
# environment (filled in by the callbacks below).
app.layout = html.Div(
    [
        dcc.Graph(id="cluster-plot", figure=fig),
        html.Pre(id="debug", children=""),
        Simple3DScene(
            id='site',
            sceneSize=400,
            settings={'extractAxis': True},
            axisView='SW',
            data={}
        ),
    ]
)
@app.callback(Output('debug', 'children'), [Input('cluster-plot', 'clickData')])
def debug(data):
    """Echo the hover label of the clicked scatter point into the debug pane."""
    return 'NONE' if data is None else data["points"][0]["hovertext"]
@app.callback(Output('site', 'data'), [Input('cluster-plot', 'clickData')])
def get_sites_scene(data):
    # Build the 3D scene for the point clicked in the cluster plot; an empty
    # dict clears the scene when nothing has been clicked yet.
    if data is None:
        return {}
    # The point's hovertext appears to encode "<task_id>+<site index>"
    # (it is split on "+" below) -- confirm against the dataframe's 'index'.
    task_id, n = data["points"][0]["hovertext"].split("+")
    with soap_site_db as db:
        doc = db.query_one({'task_id': task_id})
    scene = get_m_graph_from_site_data(doc['site_data'][int(n)]).get_scene()
    # NOTE(review): the scene name presumably must match the Simple3DScene
    # component id ('site') -- confirm.
    scene.name = "site"
    return scene
def get_m_graph_from_site_data(s_data):
    """Build a star-shaped MoleculeGraph from a serialized local-environment
    record: site 0 (the center) is connected to every other site."""
    sites = [Site.from_dict(entry) for entry in s_data['local_graph']['sites']]
    graph = MoleculeGraph.with_empty_graph(Molecule.from_sites(sites))
    for neighbor in range(1, len(graph)):
        graph.add_edge(0, neighbor)
    return graph
if __name__ == "__main__":
    # Start the Dash development server (debug mode: auto-reload + debugger).
    app.run_server(debug=True)
# %%
| 644 | 0 | 67 |
0a404d9cd9a4229d8ab11fccf16b502660913ff8 | 954 | py | Python | conftest.py | kriti-d/snyker | 33d256a93428de6eb27cb30b480ce3978551bada | [
"Apache-2.0"
] | 1 | 2021-06-30T02:41:41.000Z | 2021-06-30T02:41:41.000Z | conftest.py | kriti-d/snyker | 33d256a93428de6eb27cb30b480ce3978551bada | [
"Apache-2.0"
] | 1 | 2020-12-04T12:18:48.000Z | 2020-12-04T12:18:48.000Z | conftest.py | kriti-d/snyker | 33d256a93428de6eb27cb30b480ce3978551bada | [
"Apache-2.0"
] | 3 | 2021-01-28T14:47:14.000Z | 2021-10-17T17:08:10.000Z | """
Configuration for pytest fixtures
"""
import boto3 # type: ignore
import pytest # type: ignore
from chalice import Chalice # type: ignore
from moto import mock_ssm # type: ignore
@pytest.fixture
def app() -> Chalice:
"""Return the application for testing"""
from app import app as chalice_app # pylint: disable=import-outside-toplevel
return chalice_app
@pytest.fixture(autouse=True)
def mocked_aws_credentials(monkeypatch):
"""Mocked AWS Credentials for moto."""
monkeypatch.setenv("AWS_ACCESS_KEY_ID", "testing")
monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "testing")
monkeypatch.setenv("AWS_SECURITY_TOKEN", "testing")
monkeypatch.setenv("AWS_SESSION_TOKEN", "testing")
monkeypatch.setenv("AWS_DEFAULT_REGION", "eu-west-1")
boto3.setup_default_session()
@pytest.fixture(scope="function")
def ssm():
"""Mock for AWS Systems Manager"""
with mock_ssm():
yield boto3.client("ssm")
| 26.5 | 81 | 0.719078 | """
Configuration for pytest fixtures
"""
import boto3 # type: ignore
import pytest # type: ignore
from chalice import Chalice # type: ignore
from moto import mock_ssm # type: ignore
@pytest.fixture
def app() -> Chalice:
    """Return the application for testing"""
    # Imported lazily inside the fixture -- presumably so the Chalice app
    # module is only loaded when requested (after autouse env mocks apply);
    # confirm.
    from app import app as chalice_app  # pylint: disable=import-outside-toplevel
    return chalice_app
@pytest.fixture(autouse=True)
def mocked_aws_credentials(monkeypatch):
    """Mocked AWS Credentials for moto."""
    fake_env = {
        "AWS_ACCESS_KEY_ID": "testing",
        "AWS_SECRET_ACCESS_KEY": "testing",
        "AWS_SECURITY_TOKEN": "testing",
        "AWS_SESSION_TOKEN": "testing",
        "AWS_DEFAULT_REGION": "eu-west-1",
    }
    # monkeypatch restores the real environment after each test.
    for name, value in fake_env.items():
        monkeypatch.setenv(name, value)
    boto3.setup_default_session()
@pytest.fixture(scope="function")
def ssm():
    """Mock for AWS Systems Manager"""
    # Fresh moto SSM backend per test; the yielded client talks only to the
    # mock, so no real AWS calls are made.
    with mock_ssm():
        yield boto3.client("ssm")
| 0 | 0 | 0 |
f905cd9d13b659e019e1b8c2279b949410a81e30 | 33 | py | Python | foreshadow/tests/test_optimizers/test_param_distribution.py | adithyabsk/foreshadow | ca2e927c396ae0d61923b287d6e32e142f3ba96f | [
"Apache-2.0"
] | 25 | 2018-07-26T17:30:31.000Z | 2021-02-23T22:54:01.000Z | foreshadow/tests/test_optimizers/test_param_distribution.py | adithyabsk/foreshadow | ca2e927c396ae0d61923b287d6e32e142f3ba96f | [
"Apache-2.0"
] | 150 | 2018-11-02T18:09:12.000Z | 2020-05-15T01:01:35.000Z | foreshadow/tests/test_optimizers/test_param_distribution.py | adithyabsk/foreshadow | ca2e927c396ae0d61923b287d6e32e142f3ba96f | [
"Apache-2.0"
] | 1 | 2019-02-20T22:24:00.000Z | 2019-02-20T22:24:00.000Z | """Test param_distribution.py"""
| 16.5 | 32 | 0.727273 | """Test param_distribution.py"""
| 0 | 0 | 0 |
a8591c2532e37813020aa5a34fa1d7e47e702528 | 6,934 | py | Python | a10sdk/core/slb/slb_template_diameter.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 16 | 2015-05-20T07:26:30.000Z | 2021-01-23T11:56:57.000Z | a10sdk/core/slb/slb_template_diameter.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 6 | 2015-03-24T22:07:11.000Z | 2017-03-28T21:31:18.000Z | a10sdk/core/slb/slb_template_diameter.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 23 | 2015-03-29T15:43:01.000Z | 2021-06-02T17:12:01.000Z | from a10sdk.common.A10BaseClass import A10BaseClass
class AvpList(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param int32: {"description": "32 bits integer", "format": "number", "not-list": ["int64", "string"], "maximum": 2147483647, "minimum": 0, "type": "number"}
:param mandatory: {"default": 0, "type": "number", "description": "mandatory avp", "format": "flag"}
:param string: {"description": "String (string name, max length 127 bytes)", "format": "string", "minLength": 1, "not-list": ["int32", "int64"], "maxLength": 128, "type": "string"}
:param avp: {"description": "customize avps for cer to the server (avp number)", "minimum": 0, "type": "number", "maximum": 2147483647, "format": "number"}
:param int64: {"description": "64 bits integer", "format": "number", "not-list": ["int32", "string"], "maximum": 2147483647, "minimum": 0, "type": "number"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
class MessageCodeList(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param message_code: {"minimum": 1, "type": "number", "maximum": 2147483647, "format": "number"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
class Diameter(A10BaseClass):
"""Class Description::
diameter template.
Class diameter supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param avp_list: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"int32": {"description": "32 bits integer", "format": "number", "not-list": ["int64", "string"], "maximum": 2147483647, "minimum": 0, "type": "number"}, "mandatory": {"default": 0, "type": "number", "description": "mandatory avp", "format": "flag"}, "string": {"description": "String (string name, max length 127 bytes)", "format": "string", "minLength": 1, "not-list": ["int32", "int64"], "maxLength": 128, "type": "string"}, "avp": {"description": "customize avps for cer to the server (avp number)", "minimum": 0, "type": "number", "maximum": 2147483647, "format": "number"}, "int64": {"description": "64 bits integer", "format": "number", "not-list": ["int32", "string"], "maximum": 2147483647, "minimum": 0, "type": "number"}, "optional": true}}]}
:param service_group_name: {"description": "service group name, this is the service group that the message needs to be copied to", "format": "string", "minLength": 1, "optional": true, "maxLength": 127, "type": "string", "$ref": "/axapi/v3/slb/service-group"}
:param name: {"description": "diameter template Name", "format": "string-rlx", "minLength": 1, "optional": false, "maxLength": 63, "type": "string"}
:param dwr_time: {"description": "dwr health-check timer interval (in 100 milli second unit, default is 100, 0 means unset this option)", "format": "number", "default": 0, "optional": true, "maximum": 2147483647, "minimum": 0, "type": "number"}
:param avp_string: {"description": "pattern to be matched in the avp string name, max length 127 bytes", "format": "string", "minLength": 1, "optional": true, "maxLength": 128, "type": "string"}
:param idle_timeout: {"description": " user sesison idle timeout (in minutes, default is 5)", "format": "number", "default": 5, "optional": true, "maximum": 65535, "minimum": 1, "type": "number"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param avp_code: {"description": "avp code", "format": "number", "type": "number", "maximum": 2147483647, "minimum": 1, "optional": true}
:param message_code_list: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "message-code": {"minimum": 1, "type": "number", "maximum": 2147483647, "format": "number"}}}]}
:param origin_realm: {"description": "origin-realm name avp", "format": "string", "minLength": 1, "optional": true, "maxLength": 31, "type": "string"}
:param origin_host: {"description": "origin-host name avp", "format": "string", "minLength": 1, "optional": true, "maxLength": 31, "type": "string"}
:param customize_cea: {"default": 0, "optional": true, "type": "number", "description": "customizing cea response", "format": "flag"}
:param multiple_origin_host: {"default": 0, "optional": true, "type": "number", "description": "allowing multiple origin-host to a single server", "format": "flag"}
:param product_name: {"description": "product name avp", "format": "string", "minLength": 1, "optional": true, "maxLength": 31, "type": "string"}
:param session_age: {"description": "user session age allowed (default 10), this is not idle-time (in minutes)", "format": "number", "default": 10, "optional": true, "maximum": 65535, "minimum": 1, "type": "number"}
:param vendor_id: {"description": "vendor-id avp (Vendon Id)", "format": "number", "type": "number", "maximum": 2147483647, "minimum": 0, "optional": true}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/slb/template/diameter/{name}`.
"""
| 58.268908 | 882 | 0.625036 | from a10sdk.common.A10BaseClass import A10BaseClass
class AvpList(A10BaseClass):
    """One customized CER AVP entry; no direct CRUD — managed through its parent.

    Fields (see aXAPI schema for full constraints): ``avp`` (AVP number),
    ``int32``/``int64`` (integer payloads, mutually exclusive with
    ``string``), ``string`` (string payload, max 127 bytes), ``mandatory``
    (flag), ``DeviceProxy`` (REST session proxy).
    """

    # Attribute defaults applied before caller-supplied keyword arguments.
    _DEFAULTS = (
        ("ERROR_MSG", ""),
        ("b_key", "avp-list"),
        ("DeviceProxy", ""),
        ("int32", ""),
        ("mandatory", ""),
        ("string", ""),
        ("avp", ""),
        ("int64", ""),
    )

    def __init__(self, **kwargs):
        for attr, default in self._DEFAULTS:
            setattr(self, attr, default)
        # Caller-supplied values override the defaults above.
        for attr, supplied in kwargs.items():
            setattr(self, attr, supplied)
class MessageCodeList(A10BaseClass):
    """One message-code entry (1..2147483647); no direct CRUD — managed via parent.

    ``DeviceProxy`` is the REST session proxy (see common/device_proxy.py).
    """

    def __init__(self, **kwargs):
        attrs = {
            "ERROR_MSG": "",
            "b_key": "message-code-list",
            "DeviceProxy": "",
            "message_code": "",
        }
        # Caller-supplied keyword arguments take precedence over defaults.
        attrs.update(kwargs)
        for name, val in attrs.items():
            setattr(self, name, val)
class Diameter(A10BaseClass):
    """diameter template — the PARENT class of this module; supports CRUD.

    Key parameters (full constraints live in the aXAPI schema):
        name (required): template name, 1-63 chars.
        avp_list / message_code_list: lists of child objects.
        service_group_name, avp_string, origin_realm, origin_host,
        product_name, uuid: string options (uuid is modify-not-allowed).
        dwr_time, idle_timeout, avp_code, session_age, vendor_id:
        numeric options.
        customize_cea, multiple_origin_host: flag options.
        DeviceProxy: REST session proxy (see common/device_proxy.py).

    URL for this object::
        `https://<Hostname|Ip address>//axapi/v3/slb/template/diameter/{name}`
    """

    # Scalar attributes that simply default to the empty string.
    _SCALAR_FIELDS = (
        "service_group_name", "name", "dwr_time", "avp_string",
        "idle_timeout", "uuid", "avp_code", "origin_realm",
        "origin_host", "customize_cea", "multiple_origin_host",
        "product_name", "session_age", "vendor_id",
    )

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.required = ["name"]
        self.b_key = "diameter"
        self.a10_url = "/axapi/v3/slb/template/diameter/{name}"
        self.DeviceProxy = ""
        # List-valued children default to empty lists.
        self.avp_list = []
        self.message_code_list = []
        for field in self._SCALAR_FIELDS:
            setattr(self, field, "")
        # Caller-supplied keyword arguments override any default above.
        for key, value in kwargs.items():
            setattr(self, key, value)
| 1,299 | 0 | 78 |
4072a3fa19318b9de75a69e7c0ce5bed60bfb034 | 18,031 | py | Python | dexbot/storage.py | bitProfessor/DEXBot | 5d692fbf1acffeec46a82a12474b8a123e4c6370 | [
"MIT"
] | 1 | 2021-04-22T09:18:55.000Z | 2021-04-22T09:18:55.000Z | dexbot/storage.py | bitProfessor/DEXBot | 5d692fbf1acffeec46a82a12474b8a123e4c6370 | [
"MIT"
] | null | null | null | dexbot/storage.py | bitProfessor/DEXBot | 5d692fbf1acffeec46a82a12474b8a123e4c6370 | [
"MIT"
] | 2 | 2021-02-13T10:58:33.000Z | 2022-03-04T14:01:58.000Z | import os
import os.path
import sys
import json
import threading
import queue
import uuid
import alembic
import alembic.config
from appdirs import user_data_dir
from . import helper
from dexbot import APP_NAME, AUTHOR
from sqlalchemy import create_engine, Column, String, Integer, Float, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, load_only
Base = declarative_base()
# For dexbot.sqlite file
storageDatabase = "dexbot.sqlite"
class Storage(dict):
""" Storage class
:param string category: The category to distinguish
different storage namespaces
"""
def save_order(self, order):
""" Save the order to the database
"""
order_id = order['id']
db_worker.save_order(self.category, order_id, order)
def save_order_extended(self, order, virtual=None, custom=None):
""" Save the order to the database providing additional data
:param dict order:
:param bool virtual: True = order is virtual order
:param str custom: any additional data
"""
order_id = order['id']
db_worker.save_order_extended(self.category, order_id, order, virtual, custom)
def remove_order(self, order):
""" Removes an order from the database
:param dict,str order: order to remove, could be an Order instance or just order id
"""
if isinstance(order, dict):
order_id = order['id']
else:
order_id = order
db_worker.remove_order(self.category, order_id)
def clear_orders(self):
""" Removes all worker's orders from the database
"""
db_worker.clear_orders(self.category)
def clear_orders_extended(self, worker=None, only_virtual=False, only_real=False, custom=None):
""" Removes worker's orders matching a criteria from the database
:param str worker: worker name (None means current worker name will be used)
:param bool only_virtual: True = only virtual orders
:param bool only_real: True = only real orders
:param str custom: filter orders by custom field
"""
if only_virtual and only_real:
raise ValueError('only_virtual and only_real are mutually exclusive')
if not worker:
worker = self.category
return db_worker.clear_orders_extended(worker, only_virtual, only_real, custom)
def fetch_orders(self, worker=None):
""" Get all the orders (or just specific worker's orders) from the database
:param str worker: worker name (None means current worker name will be used)
"""
if not worker:
worker = self.category
return db_worker.fetch_orders(worker)
def fetch_orders_extended(
self, worker=None, only_virtual=False, only_real=False, custom=None, return_ids_only=False
):
""" Get orders from the database in extended format (returning all columns)
:param str worker: worker name (None means current worker name will be used)
:param bool only_virtual: True = fetch only virtual orders
:param bool only_real: True = fetch only real orders
:param str custom: filter orders by custom field
:param bool return_ids_only: instead of returning full row data, return only order ids
:rtype: list
:return: list of dicts in format [{order_id: '', order: '', virtual: '', custom: ''}], or [order_id] if
return_ids_only used
"""
if only_virtual and only_real:
raise ValueError('only_virtual and only_real are mutually exclusive')
if not worker:
worker = self.category
return db_worker.fetch_orders_extended(worker, only_virtual, only_real, custom, return_ids_only)
@staticmethod
@staticmethod
@staticmethod
@staticmethod
class DatabaseWorker(threading.Thread):
""" Thread safe database worker
"""
@staticmethod
def run_migrations(script_location, dsn, stamp_only=False):
""" Apply database migrations using alembic
:param str script_location: path to migration scripts
:param str dsn: database URL
:param bool stamp_only: True = only mark the db as "head" without applying migrations
"""
alembic_cfg = alembic.config.Config()
alembic_cfg.set_main_option('script_location', script_location)
alembic_cfg.set_main_option('sqlalchemy.url', dsn)
if stamp_only:
# Mark db as "head" without applying migrations
alembic.command.stamp(alembic_cfg, "head")
else:
alembic.command.upgrade(alembic_cfg, 'head')
@staticmethod
def get_filter_by(worker, only_virtual, only_real, custom):
""" Make filter_by for sqlalchemy query based on args
"""
filter_by = {'worker': worker}
if only_virtual:
filter_by['virtual'] = True
elif only_real:
filter_by['virtual'] = False
if custom:
filter_by['custom'] = json.dumps(custom)
return filter_by
def _get_balance(self, account, worker, timestamp, base_asset, quote_asset, token):
""" Get first item that has bigger time as given timestamp and matches account and worker name
"""
result = (
self.session.query(Balances)
.filter(
Balances.account == account,
Balances.worker == worker,
Balances.base_symbol == base_asset,
Balances.quote_symbol == quote_asset,
Balances.timestamp > timestamp,
)
.first()
)
self._set_result(token, result)
def _get_recent_balance_entry(self, account, worker, base_asset, quote_asset, token):
""" Get most recent balance history item that matches account and worker name
"""
result = (
self.session.query(Balances)
.filter(
Balances.account == account,
Balances.worker == worker,
Balances.base_symbol == base_asset,
Balances.quote_symbol == quote_asset,
)
.order_by(Balances.id.desc())
.first()
)
self._set_result(token, result)
# Derive sqlite file directory
data_dir = user_data_dir(APP_NAME, AUTHOR)
sqlDataBaseFile = os.path.join(data_dir, storageDatabase)
# Create directory for sqlite file
helper.mkdir(data_dir)
db_worker = DatabaseWorker()
| 35.216797 | 120 | 0.638179 | import os
import os.path
import sys
import json
import threading
import queue
import uuid
import alembic
import alembic.config
from appdirs import user_data_dir
from . import helper
from dexbot import APP_NAME, AUTHOR
from sqlalchemy import create_engine, Column, String, Integer, Float, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, load_only
Base = declarative_base()
# For dexbot.sqlite file
storageDatabase = "dexbot.sqlite"
class Config(Base):
    """ORM row holding one key/value configuration entry, namespaced by category."""
    __tablename__ = 'config'
    id = Column(Integer, primary_key=True)
    category = Column(String)  # namespace (the worker name in practice)
    key = Column(String)
    value = Column(String)  # JSON-encoded value (callers dump/load it)
    def __init__(self, c, k, v):
        # c/k/v: category, key, value; `v` must already be JSON-encoded.
        self.category = c
        self.key = k
        self.value = v
class Orders(Base):
    """ORM row caching one exchange order owned by a worker."""
    __tablename__ = 'orders'
    id = Column(Integer, primary_key=True)
    worker = Column(String)    # owning worker name
    order_id = Column(String)
    order = Column(String)     # JSON-encoded order dict
    virtual = Column(Boolean)  # True = virtual order (not placed on the exchange)
    custom = Column(String)    # JSON-encoded caller-defined tag
    def __init__(self, worker, order_id, order, virtual, custom):
        self.worker = worker
        self.order_id = order_id
        self.order = order
        self.virtual = virtual
        self.custom = custom
class Balances(Base):
    """ORM row recording a point-in-time balance snapshot for a worker's market pair."""
    __tablename__ = 'balances'
    id = Column(Integer, primary_key=True)
    account = Column(String)
    worker = Column(String)
    base_total = Column(Float)
    base_symbol = Column(String)
    quote_total = Column(Float)
    quote_symbol = Column(String)
    center_price = Column(Float)
    timestamp = Column(Integer)  # epoch-style integer; queried with > comparisons
    def __init__(self, account, worker, base_total, base_symbol, quote_total, quote_symbol, center_price, timestamp):
        self.account = account
        self.worker = worker
        self.base_total = base_total
        self.base_symbol = base_symbol
        self.quote_total = quote_total
        self.quote_symbol = quote_symbol
        self.center_price = center_price
        self.timestamp = timestamp
class Storage(dict):
    """ Storage class

        :param string category: The category to distinguish
                                different storage namespaces

        Subclasses `dict` for interface compatibility only: every item
        operation is delegated to the module-level `db_worker`, so the
        data lives in the sqlite database, not in this in-memory dict.
    """
    def __init__(self, category):
        # Namespace passed to every db_worker call made by this instance.
        self.category = category
    def __setitem__(self, key, value):
        # Persist key -> value (JSON-encoded by the worker).
        db_worker.set_item(self.category, key, value)
    def __getitem__(self, key):
        # Returns the decoded value, or None when the key is absent.
        return db_worker.get_item(self.category, key)
    def __delitem__(self, key):
        db_worker.del_item(self.category, key)
    def __contains__(self, key):
        return db_worker.contains(self.category, key)
    def items(self):
        # NOTE(review): returns (key, value) pairs from the worker;
        # values appear to stay JSON-encoded here (see _get_items) —
        # unlike __getitem__, which decodes. Confirm before relying on it.
        return db_worker.get_items(self.category)
    def clear(self):
        # Remove every key/value entry in this namespace.
        db_worker.clear(self.category)
    def save_order(self, order):
        """ Save the order to the database
        """
        order_id = order['id']
        db_worker.save_order(self.category, order_id, order)
    def save_order_extended(self, order, virtual=None, custom=None):
        """ Save the order to the database providing additional data

            :param dict order:
            :param bool virtual: True = order is virtual order
            :param str custom: any additional data
        """
        order_id = order['id']
        db_worker.save_order_extended(self.category, order_id, order, virtual, custom)
    def remove_order(self, order):
        """ Removes an order from the database

            :param dict,str order: order to remove, could be an Order instance or just order id
        """
        if isinstance(order, dict):
            order_id = order['id']
        else:
            order_id = order
        db_worker.remove_order(self.category, order_id)
    def clear_orders(self):
        """ Removes all worker's orders from the database
        """
        db_worker.clear_orders(self.category)
    def clear_orders_extended(self, worker=None, only_virtual=False, only_real=False, custom=None):
        """ Removes worker's orders matching a criteria from the database

            :param str worker: worker name (None means current worker name will be used)
            :param bool only_virtual: True = only virtual orders
            :param bool only_real: True = only real orders
            :param str custom: filter orders by custom field
        """
        if only_virtual and only_real:
            raise ValueError('only_virtual and only_real are mutually exclusive')
        if not worker:
            worker = self.category
        return db_worker.clear_orders_extended(worker, only_virtual, only_real, custom)
    def fetch_orders(self, worker=None):
        """ Get all the orders (or just specific worker's orders) from the database

            :param str worker: worker name (None means current worker name will be used)
        """
        if not worker:
            worker = self.category
        return db_worker.fetch_orders(worker)
    def fetch_orders_extended(
        self, worker=None, only_virtual=False, only_real=False, custom=None, return_ids_only=False
    ):
        """ Get orders from the database in extended format (returning all columns)

            :param str worker: worker name (None means current worker name will be used)
            :param bool only_virtual: True = fetch only virtual orders
            :param bool only_real: True = fetch only real orders
            :param str custom: filter orders by custom field
            :param bool return_ids_only: instead of returning full row data, return only order ids
            :rtype: list
            :return: list of dicts in format [{order_id: '', order: '', virtual: '', custom: ''}], or [order_id] if
                return_ids_only used
        """
        if only_virtual and only_real:
            raise ValueError('only_virtual and only_real are mutually exclusive')
        if not worker:
            worker = self.category
        return db_worker.fetch_orders_extended(worker, only_virtual, only_real, custom, return_ids_only)
    @staticmethod
    def clear_worker_data(worker):
        # Drop both the worker's cached orders and its key/value namespace.
        db_worker.clear_orders(worker)
        db_worker.clear(worker)
    @staticmethod
    def store_balance_entry(
        account, worker, base_total, base_symbol, quote_total, quote_symbol, center_price, timestamp
    ):
        # Build a Balances row and hand it to the worker thread.
        balance = Balances(account, worker, base_total, base_symbol, quote_total, quote_symbol, center_price, timestamp)
        # Save balance to db
        db_worker.save_balance(balance)
    @staticmethod
    def get_balance_history(account, worker, timestamp, base_asset, quote_asset):
        # First balance entry newer than `timestamp` for this account/worker/pair.
        return db_worker.get_balance(account, worker, timestamp, base_asset, quote_asset)
    @staticmethod
    def get_recent_balance_entry(account, worker, base_asset, quote_asset):
        # Most recent balance entry for this account/worker/pair.
        return db_worker.get_recent_balance_entry(account, worker, base_asset, quote_asset)
class DatabaseWorker(threading.Thread):
    """ Thread safe database worker

        All database access is funneled through this single daemon
        thread: public methods enqueue (func, args, token) work items on
        ``task_queue``; query-style methods then block until the worker
        thread publishes the result in ``self.results`` under the
        caller's unique token.
    """
    def __init__(self, **kwargs):
        """ :param str sqlite_file: optional path to the sqlite database
                (defaults to the module-level sqlDataBaseFile)
        """
        super().__init__()
        sqlite_file = kwargs.get('sqlite_file', sqlDataBaseFile)
        # Obtain engine and session
        dsn = 'sqlite:///{}'.format(sqlite_file)
        engine = create_engine(dsn, echo=False)
        Session = sessionmaker(bind=engine)
        self.session = Session()
        # Find out where migrations are
        if hasattr(sys, 'frozen') and hasattr(sys, '_MEIPASS'):
            # We're bundled into a pyinstaller executable
            bundle_dir = getattr(sys, '_MEIPASS', os.path.abspath(os.path.dirname(__file__)))
            migrations_dir = os.path.join(bundle_dir, 'migrations')
        else:
            from pkg_resources import resource_filename
            migrations_dir = resource_filename('dexbot', 'migrations')
        if os.path.exists(sqlite_file) and os.path.getsize(sqlite_file) > 0:
            # Run migrations on existing database
            self.run_migrations(migrations_dir, dsn)
        else:
            Base.metadata.create_all(engine)
            self.session.commit()
            # We created the database from scratch, stamp it with "head" revision
            self.run_migrations(migrations_dir, dsn, stamp_only=True)
        self.task_queue = queue.Queue()
        self.results = {}
        self.lock = threading.Lock()
        self.event = threading.Event()
        self.daemon = True
        self.start()
    @staticmethod
    def run_migrations(script_location, dsn, stamp_only=False):
        """ Apply database migrations using alembic

            :param str script_location: path to migration scripts
            :param str dsn: database URL
            :param bool stamp_only: True = only mark the db as "head" without applying migrations
        """
        alembic_cfg = alembic.config.Config()
        alembic_cfg.set_main_option('script_location', script_location)
        alembic_cfg.set_main_option('sqlalchemy.url', dsn)
        if stamp_only:
            # Mark db as "head" without applying migrations
            alembic.command.stamp(alembic_cfg, "head")
        else:
            alembic.command.upgrade(alembic_cfg, 'head')
    @staticmethod
    def get_filter_by(worker, only_virtual, only_real, custom):
        """ Make filter_by for sqlalchemy query based on args
        """
        filter_by = {'worker': worker}
        if only_virtual:
            filter_by['virtual'] = True
        elif only_real:
            filter_by['virtual'] = False
        if custom:
            # `custom` column stores JSON, so compare against the encoded form
            filter_by['custom'] = json.dumps(custom)
        return filter_by
    def run(self):
        # Worker main loop; a `None` sentinel on the queue stops the thread.
        for func, args, token in iter(self.task_queue.get, None):
            if token is not None:
                args = args + (token,)
            func(*args)
    def _get_result(self, token):
        """ Block until the worker thread publishes the result for `token`. """
        while True:
            with self.lock:
                if token in self.results:
                    return_value = self.results[token]
                    del self.results[token]
                    return return_value
                else:
                    self.event.clear()
            # Wait *outside* the lock: _set_result() needs to acquire it
            # to publish the result; waiting while holding it would deadlock.
            self.event.wait()
    def _set_result(self, token, result):
        # Called from the worker thread to publish a result and wake waiters.
        with self.lock:
            self.results[token] = result
            self.event.set()
    def execute(self, func, *args):
        """ Enqueue `func` and block for its result. """
        # uuid.uuid4 must be *called* — referencing the function object
        # itself yields one shared token for every request, which lets
        # concurrent callers claim each other's results.
        token = str(uuid.uuid4())
        self.task_queue.put((func, args, token))
        return self._get_result(token)
    def execute_noreturn(self, func, *args):
        """ Enqueue `func` without waiting for a result. """
        self.task_queue.put((func, args, None))
    def set_item(self, category, key, value):
        self.execute_noreturn(self._set_item, category, key, value)
    def _set_item(self, category, key, value):
        value = json.dumps(value)
        e = self.session.query(Config).filter_by(category=category, key=key).first()
        if e:
            e.value = value
        else:
            e = Config(category, key, value)
            self.session.add(e)
        self.session.commit()
    def get_item(self, category, key):
        """ Return the decoded value, or None when the key is absent. """
        return self.execute(self._get_item, category, key)
    def _get_item(self, category, key, token):
        e = self.session.query(Config).filter_by(category=category, key=key).first()
        if not e:
            result = None
        else:
            result = json.loads(e.value)
        self._set_result(token, result)
    def del_item(self, category, key):
        self.execute_noreturn(self._del_item, category, key)
    def _del_item(self, category, key):
        e = self.session.query(Config).filter_by(category=category, key=key).first()
        # Guard against a missing row: session.delete(None) raises, and an
        # exception here would kill the worker thread for good.
        if e:
            self.session.delete(e)
            self.session.commit()
    def contains(self, category, key):
        return self.execute(self._contains, category, key)
    def _contains(self, category, key, token):
        e = self.session.query(Config).filter_by(category=category, key=key).first()
        self._set_result(token, bool(e))
    def get_items(self, category):
        return self.execute(self._get_items, category)
    def _get_items(self, category, token):
        es = self.session.query(Config).filter_by(category=category).all()
        # NOTE(review): values are returned still JSON-encoded here, unlike
        # _get_item which decodes them — kept as-is since callers may depend on it.
        result = [(e.key, e.value) for e in es]
        self._set_result(token, result)
    def clear(self, category):
        self.execute_noreturn(self._clear, category)
    def _clear(self, category):
        rows = self.session.query(Config).filter_by(category=category)
        for row in rows:
            self.session.delete(row)
        self.session.commit()
    def save_order(self, worker, order_id, order):
        self.execute_noreturn(self._save_order, worker, order_id, order)
    def _save_order(self, worker, order_id, order):
        value = json.dumps(order)
        e = self.session.query(Orders).filter_by(order_id=order_id).first()
        if e:
            # Update the mapped `order` column. (Previously this assigned
            # `e.value`, which Orders does not map, so updates of existing
            # rows were silently dropped.)
            e.order = value
        else:
            e = Orders(worker, order_id, value, None, None)
            self.session.add(e)
        self.session.commit()
    def save_order_extended(self, worker, order_id, order, virtual, custom):
        self.execute_noreturn(self._save_order_extended, worker, order_id, order, virtual, custom)
    def _save_order_extended(self, worker, order_id, order, virtual, custom):
        order_json = json.dumps(order)
        custom_json = json.dumps(custom)
        e = self.session.query(Orders).filter_by(order_id=order_id).first()
        if e:
            e.order = order_json
            e.virtual = virtual
            e.custom = custom_json
        else:
            e = Orders(worker, order_id, order_json, virtual, custom_json)
            self.session.add(e)
        self.session.commit()
    def remove_order(self, worker, order_id):
        self.execute_noreturn(self._remove_order, worker, order_id)
    def _remove_order(self, worker, order_id):
        e = self.session.query(Orders).filter_by(worker=worker, order_id=order_id).first()
        if e:
            self.session.delete(e)
            self.session.commit()
    def clear_orders(self, worker):
        self.execute_noreturn(self._clear_orders, worker)
    def _clear_orders(self, worker):
        self.session.query(Orders).filter_by(worker=worker).delete()
        self.session.commit()
    def clear_orders_extended(self, worker, only_virtual, only_real, custom):
        self.execute_noreturn(self._clear_orders_extended, worker, only_virtual, only_real, custom)
    def _clear_orders_extended(self, worker, only_virtual, only_real, custom):
        filter_by = self.get_filter_by(worker, only_virtual, only_real, custom)
        self.session.query(Orders).filter_by(**filter_by).delete()
        self.session.commit()
    def fetch_orders(self, category):
        """ Return {order_id: order_dict} for the worker, or None when empty. """
        return self.execute(self._fetch_orders, category)
    def _fetch_orders(self, worker, token):
        results = self.session.query(Orders).filter_by(worker=worker).all()
        if not results:
            result = None
        else:
            result = {}
            for row in results:
                result[row.order_id] = json.loads(row.order)
        self._set_result(token, result)
    def fetch_orders_extended(self, category, only_virtual, only_real, custom, return_ids_only):
        return self.execute(self._fetch_orders_extended, category, only_virtual, only_real, custom, return_ids_only)
    def _fetch_orders_extended(self, worker, only_virtual, only_real, custom, return_ids_only, token):
        filter_by = self.get_filter_by(worker, only_virtual, only_real, custom)
        if return_ids_only:
            # Only load the order_id column when that's all the caller needs.
            query = self.session.query(Orders).options(load_only('order_id'))
            results = query.filter_by(**filter_by).all()
            result = [row.order_id for row in results]
        else:
            results = self.session.query(Orders).filter_by(**filter_by).all()
            result = []
            for row in results:
                entry = {
                    'order_id': row.order_id,
                    'order': json.loads(row.order),
                    'virtual': row.virtual,
                    'custom': json.loads(row.custom),
                }
                result.append(entry)
        self._set_result(token, result)
    def save_balance(self, balance):
        self.execute_noreturn(self._save_balance, balance)
    def _save_balance(self, balance):
        self.session.add(balance)
        self.session.commit()
    def get_balance(self, account, worker, timestamp, base_asset, quote_asset):
        return self.execute(self._get_balance, account, worker, timestamp, base_asset, quote_asset)
    def _get_balance(self, account, worker, timestamp, base_asset, quote_asset, token):
        """ Get first item that has bigger time as given timestamp and matches account and worker name
        """
        result = (
            self.session.query(Balances)
            .filter(
                Balances.account == account,
                Balances.worker == worker,
                Balances.base_symbol == base_asset,
                Balances.quote_symbol == quote_asset,
                Balances.timestamp > timestamp,
            )
            .first()
        )
        self._set_result(token, result)
    def get_recent_balance_entry(self, account, worker, base_asset, quote_asset):
        return self.execute(self._get_recent_balance_entry, account, worker, base_asset, quote_asset)
    def _get_recent_balance_entry(self, account, worker, base_asset, quote_asset, token):
        """ Get most recent balance history item that matches account and worker name
        """
        result = (
            self.session.query(Balances)
            .filter(
                Balances.account == account,
                Balances.worker == worker,
                Balances.base_symbol == base_asset,
                Balances.quote_symbol == quote_asset,
            )
            .order_by(Balances.id.desc())
            .first()
        )
        self._set_result(token, result)
# Derive sqlite file directory
data_dir = user_data_dir(APP_NAME, AUTHOR)
sqlDataBaseFile = os.path.join(data_dir, storageDatabase)
# Create directory for sqlite file
helper.mkdir(data_dir)
# Module-wide singleton: every Storage instance funnels its queries
# through this one worker thread (started as a daemon on import).
db_worker = DatabaseWorker()
af540bd821c2bc1496e49d1e9f07b11f45c0e73e | 16,643 | py | Python | fixtures/bridge_domain_fixture.py | lmadhusudhanan/contrail-test | bd39ff19da06a20bd79af8c25e3cde07375577cf | [
"Apache-2.0"
] | null | null | null | fixtures/bridge_domain_fixture.py | lmadhusudhanan/contrail-test | bd39ff19da06a20bd79af8c25e3cde07375577cf | [
"Apache-2.0"
] | 1 | 2021-06-01T22:19:48.000Z | 2021-06-01T22:19:48.000Z | fixtures/bridge_domain_fixture.py | lmadhusudhanan/contrail-test | bd39ff19da06a20bd79af8c25e3cde07375577cf | [
"Apache-2.0"
] | null | null | null | from tcutils.util import *
from vnc_api.vnc_api import *
import vnc_api_test
class BDFixture(vnc_api_test.VncLibFixture):
'''
Bridge Domain Fixture
'''
def create_bd(self):
    '''
    Create the bridge-domain object in the API server and refresh the
    fixture attributes from the created object.
    '''
    bd_kwargs = dict(
        name=self.bd_name,
        parent_obj=self.parent_obj,
        mac_learning_enabled=self.mac_learning_enabled,
        mac_limit_control=self.mac_limit_control,
        mac_move_control=self.mac_move_control,
        mac_aging_time=self.mac_aging_time,
        isid=self.isid,
    )
    self.bd_obj = BridgeDomain(**bd_kwargs)
    self.bd_uuid = self.vnc_lib.bridge_domain_create(self.bd_obj)
    self.logger.info('Created Bridge Domain %s, UUID: %s' % (
        self.vnc_lib.id_to_fq_name(self.bd_uuid), self.bd_uuid))
    self._populate_attr()
    return self.bd_obj
# end create_bd
def delete_bd(self, uuid=None):
    '''
    Delete Bridge Domain object
    Args:
        uuid : UUID of BridgeDomain object (defaults to this fixture's BD)
    '''
    target_uuid = uuid or self.bd_uuid
    self.vnc_lib.bridge_domain_delete(id=target_uuid)
    self.logger.info('Deleted Bridge Domain %s' % (target_uuid))
# end delete_bd
def read_bd(self, uuid=None):
    '''
    Read Bridge Domain object
    Args:
        uuid : UUID of BridgeDomain object (defaults to this fixture's BD)
    '''
    target_uuid = uuid or self.bd_uuid
    bd_obj = self.vnc_lib.bridge_domain_read(id=target_uuid)
    self.logger.info('Bridge Domain %s info %s' % (target_uuid, bd_obj))
    return bd_obj
# end read_bd
def update_bd(self, **kwargs):
    '''
    Updates bridge domain
    '''
    # Refresh the fixture-side attribute cache first, then push the
    # same kwargs to the API server through the vnc helper.
    self.parse_bd_kwargs(**kwargs)
    self.vnc_h.update_bd(uuid=self.bd_uuid, **kwargs)
# end update_bd  (original marker said "verify_on_setup" -- a copy/paste slip)
@retry(delay=2, tries=5)
def verify_bd_in_api_server(self):
    """ Checks for Bridge Domain in API Server.

    Fetches the BD record from the first cfgm node and compares every
    attribute the fixture knows about. Returns True on a full match,
    False (after a warning log) on the first mismatch.
    """
    self.api_verification_flag = True
    cfgm_ip = self.inputs.cfgm_ips[0]
    self.api_s_bd_obj = self.api_s_inspects[cfgm_ip].get_cs_bridge_domain(
        bd_name=self.bd_name, refresh=True)
    if not self.api_s_bd_obj:
        self.logger.warn("Bridge Domain %s is not found in API-Server" %
                         (self.bd_name))
        self.api_verification_flag = False
        return False
    bd_rec = self.api_s_bd_obj['bridge-domain']
    if bd_rec['uuid'] != self.bd_uuid:
        self.logger.warn(
            "BD Object ID %s in API-Server is not what was created" % (
                self.bd_uuid))
        self.api_verification_flag = False
        return False
    if (bd_rec['parent_type'] != 'virtual-network' or
            bd_rec['parent_uuid'] != self.parent_obj.uuid):
        self.logger.warn(
            "BD parent type %s and ID %s in API-Server is not as expected: %s" % (
                bd_rec['parent_type'], bd_rec['parent_uuid'],
                self.parent_obj.uuid))
        self.api_verification_flag = False
        return False
    if self.mac_learning_enabled and (
            bd_rec['mac_learning_enabled'] != self.mac_learning_enabled):
        self.logger.warn("BD mac_learning_enabled %s in API-Server is "
                         "not what was created %s" % (
                             bd_rec['mac_learning_enabled'],
                             self.mac_learning_enabled))
        self.api_verification_flag = False
        return False
    if self.mac_limit_control and (
            bd_rec['mac_limit_control']['mac_limit'] !=
            self.mac_limit_control.mac_limit or
            bd_rec['mac_limit_control']['mac_limit_action'] !=
            self.mac_limit_control.mac_limit_action):
        self.logger.warn("BD mac_limit_control %s in API-Server is "
                         "not what was created %s" % (
                             bd_rec['mac_limit_control'],
                             self.mac_limit_control))
        self.api_verification_flag = False
        return False
    if self.mac_move_control and (
            bd_rec['mac_move_control']['mac_move_limit'] !=
            self.mac_move_control.mac_move_limit or
            bd_rec['mac_move_control']['mac_move_limit_action'] !=
            self.mac_move_control.mac_move_limit_action or
            bd_rec['mac_move_control']['mac_move_time_window'] !=
            self.mac_move_control.mac_move_time_window):
        self.logger.warn("BD mac_move_control %s in API-Server is "
                         "not what was created %s" % (
                             bd_rec['mac_move_control'],
                             self.mac_move_control))
        self.api_verification_flag = False
        return False
    if self.mac_aging_time and bd_rec['mac_aging_time'] != self.mac_aging_time:
        self.logger.warn("BD mac_aging_time %s in API-Server is "
                         "not what was created %s" % (
                             bd_rec['mac_aging_time'], self.mac_aging_time))
        self.api_verification_flag = False
        return False
    if self.isid and bd_rec['isid'] != self.isid:
        self.logger.warn("BD isid %s in API-Server is "
                         "not what was created %s" % (
                             bd_rec['isid'], self.isid))
        self.api_verification_flag = False
        return False
    self.logger.info("Verifications in API Server %s for BD %s passed" % (
        cfgm_ip, self.bd_name))
    return True
# end verify_bd_in_api_server
@retry(delay=2, tries=2)
def verify_bd_for_vn_in_agent(self, vmi_uuid):
    """
    Verify Bridge Domain for VN info in agent

    Resolves the compute hosting *vmi_uuid* and checks every BD entry
    the agent reports for this BD uuid against the fixture/VN attributes.
    """
    vn_obj = self.parent_obj
    vmi_host = self.vnc_h.get_vmi_host_name(vmi_uuid)
    if not vmi_host:
        self.logger.error("VMI %s host could not be found from VNC API" % (
            vmi_uuid))
        return False
    vmi_host_ip = self.inputs.get_host_ip(vmi_host)
    bd_entries = self.agent_inspect[vmi_host_ip].get_bd(self.bd_uuid)
    if not bd_entries:
        self.logger.warn("Bridge Domain %s is not found in Agent %s" % (
            self.bd_name, vmi_host_ip))
        return False
    # Verify expected values in agent
    for entry in bd_entries:
        if entry['vn'] != vn_obj.uuid:
            self.logger.warn("VN uuid mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 entry['vn'], vn_obj.uuid))
            return False
        if entry['uuid'] != self.bd_uuid:
            self.logger.warn("BD uuid mismatch in agent"
                             ", actual: %s, expected: %s" % (
                                 entry['uuid'], self.bd_uuid))
            return False
        if int(entry['isid']) != self.isid:
            self.logger.warn("isid mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 entry['isid'], self.isid))
            return False
        if entry['pbb_etree_enabled'] != str(vn_obj.pbb_etree_enable):
            self.logger.warn("pbb_etree_enable value mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 entry['pbb_etree_enabled'],
                                 str(vn_obj.pbb_etree_enable)))
            return False
        # NOTE(review): bool() of a non-empty string is always True, so
        # this comparison looks suspect if the agent returns
        # 'true'/'false' strings -- confirm the agent payload type.
        if bool(entry['learning_enabled']) != self.mac_learning_enabled:
            self.logger.warn("mac_learning_enabled value mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 entry['learning_enabled'],
                                 self.mac_learning_enabled))
            return False
        # Uncomment BD name check, when bug 1665253 is fixed
        if entry['name'] != self.fq_name_str:
            self.logger.warn("Name mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 entry['name'], self.bd_name))
            return False
    self.logger.info("Verifications in Agent %s for BD %s for VN info"
                     " passed" % (vmi_host_ip, self.bd_name))
    return True
# end verify_bd_for_vn_in_agent
@retry(delay=2, tries=2)
def verify_bd_for_vmi_in_computes(self, vmi_uuid):
    '''
    Verify BD details in VMI in computes:
    Verify in agent as well as vrouter
    '''
    if vmi_uuid:
        vmi_host = self.vnc_h.get_vmi_host_name(vmi_uuid)
        if not vmi_host:
            self.logger.warn("VMI %s host could not be found from VNC API" % (
                vmi_uuid))
            return False
        vmi_host_ip = self.inputs.get_host_ip(vmi_host)
        agent = self.agent_inspect[vmi_host_ip]
        tap_list = agent.get_vna_tap_interface_by_vmi(vmi_uuid)
        if not tap_list:
            self.logger.warn("VMI %s is not found in Agent %s" % (
                vmi_uuid, vmi_host_ip))
            return False
        tap_intf = tap_list[0]
        if not tap_intf['bridge_domain_list']:
            self.logger.warn("Bridge Domain for VMI %s is not found in Agent %s" % (
                vmi_uuid, vmi_host_ip))
            return False
        bd_uuid_in_vmi = tap_intf['bridge_domain_list'][0]['bridge_domain_uuid']
        # Verify bd uuid in agent
        if self.bd_uuid != bd_uuid_in_vmi:
            self.logger.warn("Bridge Domain uuid mismatch"
                             " in agent, actual: %s, expected: %s" % (
                                 bd_uuid_in_vmi, self.bd_uuid))
            return False
        self.logger.info("Verification for Bridge Domain uuid %s for "
                         "VMI %s passed in agent %s" % (
                             bd_uuid_in_vmi, vmi_uuid, vmi_host_ip))
        # Vrouter verifications
        # Interface verification (result currently unused, kept for the
        # pending TBD checks below)
        vmi_in_vrouter = agent.get_vrouter_interfaces_by_name(tap_intf['name'])
        # [TBD]Verify ISID and Bmac value here
        # [TBD]Route table verification
    # NOTE: an empty vmi_uuid falls through to a vacuous pass.
    return True
# end verify_bd_for_vmi_in_computes
@retry(delay=2, tries=2)
def verify_bd_not_in_agent(self):
    """ Verify Bridge Domain not present in agent after BD is deleted. """
    for node_ip in self.inputs.compute_ips:
        leftover = self.agent_inspect[node_ip].get_bd(self.bd_uuid)
        if leftover:
            self.logger.warn("Bridge Domain %s is still seen in Agent %s as %s" % (
                self.bd_name, node_ip, leftover))
            return False
        self.logger.info("Bridge Domain %s is removed from Agent %s" % (
            self.bd_name, node_ip))
    return True
# end verify_bd_not_in_agent
def verify_cleared_from_setup(self, verify=True):
    '''
    Verify that Bridge Domain is deleted from the setup
    '''
    if not verify:
        return
    assert self.verify_bd_not_in_agent(), ("BD cleanup verification "
                                           "failed in agent")
| 39.252358 | 98 | 0.577841 | from tcutils.util import *
from vnc_api.vnc_api import *
import vnc_api_test
class BDFixture(vnc_api_test.VncLibFixture):
'''
Bridge Domain Fixture
'''
def __init__(self, parent_obj, bd_name=None, bd_uuid=None, **kwargs):
    # NOTE(review): `self` is also passed positionally to the bound
    # parent __init__ here, so the base class receives it twice; this
    # only works if VncLibFixture.__init__ accepts extra positional
    # args -- confirm against vnc_api_test before changing.
    super(BDFixture, self).__init__(self, **kwargs)
    # Parent virtual-network object this bridge domain hangs off.
    self.parent_obj = parent_obj
    self.bd_name = bd_name
    self.bd_uuid = bd_uuid
    self.bd_obj = None
    # BD attributes: filled in from kwargs below and refreshed from the
    # API object by _populate_attr().
    self.mac_learning_enabled = None
    self.mac_limit_control = None
    self.mac_move_control = None
    self.mac_aging_time = None
    self.isid = None
    self.parse_bd_kwargs(**kwargs)
    # None => unknown until create() runs; then True/False.
    self.already_present = None
def parse_bd_kwargs(self, **kwargs):
    """Refresh BD attributes from kwargs, keeping current values as
    the defaults for anything not supplied.
    """
    for attr in ('mac_learning_enabled', 'mac_limit_control',
                 'mac_move_control', 'mac_aging_time', 'isid'):
        setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
def _populate_attr(self):
if self.bd_obj:
self.bd_name = self.bd_obj.name
self.mac_learning_enabled = self.bd_obj.mac_learning_enabled
self.mac_limit_control = self.bd_obj.mac_limit_control
self.mac_move_control = self.bd_obj.mac_move_control
self.mac_aging_time = self.bd_obj.mac_aging_time
self.isid = self.bd_obj.isid
self.bd_uuid = self.bd_obj.uuid
self.fq_name_str = self.bd_obj.get_fq_name_str()
def read(self):
    """Fetch the BD object by uuid when one is set; return it, or False
    when no uuid is known. Raises if the uuid cannot be read back.
    """
    if not self.bd_uuid:
        return False
    self.bd_obj = self.read_bd(self.bd_uuid)
    if not self.bd_obj:
        raise Exception('Bridge Domain with id %s not found' % (
            self.bd_uuid))
    self._populate_attr()
    return self.bd_obj
def setUp(self):
    super(BDFixture, self).setUp()
    # Shorthand handles onto the shared test-connection object.
    self.api_s_inspects = self.connections.api_server_inspects
    self.agent_inspect = self.connections.agent_inspect
    self.vnc_lib_fixture = self.connections.vnc_lib_fixture
    self.vnc_lib = self.connections.vnc_lib
    self.vnc_h = self.vnc_lib_fixture.vnc_h
    self.project_name = self.connections.project_name
    self.project_id = self.connections.project_id
    # Fall back to a random project-scoped name when none was supplied.
    self.bd_name = self.bd_name or get_random_name('bd_' + self.project_name)
    self.create()
def cleanUp(self):
    # Base-class cleanup first, then remove the BD this fixture created
    # (delete() itself honours the fixture_cleanup policy).
    super(BDFixture, self).cleanUp()
    self.delete()
def create(self):
    """Create the BD unless one can already be read back (by uuid) or a
    BD object is already cached on the fixture.
    """
    if self.read():
        self.already_present = True
        self.logger.debug("Bridge Domain %s already present,"
                          "not creating it" % (self.bd_name))
        return
    if not self.bd_obj:
        self.create_bd()
        self.already_present = False
        return
    self._populate_attr()
    self.already_present = True
    self.logger.debug("Bridge Domain %s already present,"
                      "not creating it" % (self.bd_name))
def delete(self, verify=True):
    """Delete the BD unless the fixture-cleanup policy says to keep it.

    'force' always deletes; 'no' never deletes; otherwise delete only
    objects this fixture created itself (not pre-existing ones).
    """
    policy = self.inputs.fixture_cleanup
    do_cleanup = (policy == 'force') or (
        policy != 'no' and not self.already_present)
    if do_cleanup:
        self.delete_bd(self.bd_uuid)
        self.verify_cleared_from_setup(verify=verify)
    else:
        self.logger.info('Skipping the deletion of Bridge Domain %s' %
                         (self.bd_name))
def create_bd(self):
    '''
    Creates a bridge domain in the API server from the fixture's cached
    attributes, then refreshes those attributes from the created object.
    '''
    spec = {
        'name': self.bd_name,
        'parent_obj': self.parent_obj,
        'mac_learning_enabled': self.mac_learning_enabled,
        'mac_limit_control': self.mac_limit_control,
        'mac_move_control': self.mac_move_control,
        'mac_aging_time': self.mac_aging_time,
        'isid': self.isid,
    }
    self.bd_obj = BridgeDomain(**spec)
    self.bd_uuid = self.vnc_lib.bridge_domain_create(self.bd_obj)
    self.logger.info('Created Bridge Domain %s, UUID: %s' % (
        self.vnc_lib.id_to_fq_name(self.bd_uuid), self.bd_uuid))
    self._populate_attr()
    return self.bd_obj
# end create_bd
def delete_bd(self, uuid=None):
    '''
    Delete Bridge Domain object
    Args:
        uuid : UUID of BridgeDomain object; this fixture's BD when omitted
    '''
    bd_id = uuid or self.bd_uuid
    self.vnc_lib.bridge_domain_delete(id=bd_id)
    self.logger.info('Deleted Bridge Domain %s' % (bd_id))
# end delete_bd
def read_bd(self, uuid=None):
    '''
    Read Bridge Domain object
    Args:
        uuid : UUID of BridgeDomain object; this fixture's BD when omitted
    '''
    bd_id = uuid or self.bd_uuid
    fetched = self.vnc_lib.bridge_domain_read(id=bd_id)
    self.logger.info('Bridge Domain %s info %s' % (bd_id, fetched))
    return fetched
# end read_bd
def update_bd(self, **kwargs):
    '''
    Updates bridge domain
    '''
    # Sync the fixture's cached attributes, then push the same kwargs
    # to the API server through the vnc helper.
    self.parse_bd_kwargs(**kwargs)
    self.vnc_h.update_bd(uuid=self.bd_uuid, **kwargs)
def add_bd_to_vmi(self, vmi_id, vlan_tag, verify=True):
    """Bind this bridge domain to a VMI under the given vlan tag.

    When *verify* is set, confirm the binding on the compute (agent and
    VN views); returns True when all requested checks pass.
    """
    self.vnc_h.add_bd_to_vmi(self.bd_uuid, vmi_id, vlan_tag)
    if not verify:
        return True
    ok = self.verify_bd_for_vmi_in_computes(vmi_uuid=vmi_id)
    # Short-circuits just like the original `result and ...` chain.
    return ok and self.verify_bd_for_vn_in_agent(vmi_uuid=vmi_id)
def verify_on_setup(self):
    """Run the API-server verification for this BD and record the
    outcome on the fixture (verify_is_run / verify_result).
    """
    if not self.verify_bd_in_api_server():
        self.logger.error(
            "One or more verifications in API Server for Bridge Domain "
            "%s failed" % (self.bd_name))
        return False
    self.verify_is_run = True
    self.verify_result = True
    return True
# end verify_on_setup
@retry(delay=2, tries=5)
def verify_bd_in_api_server(self):
    """ Checks for Bridge Domain in API Server.

    Pulls the BD record from the first cfgm node and compares it field
    by field with this fixture; logs and returns False on the first
    mismatch, True when everything agrees.
    """
    self.api_verification_flag = True
    cfgm_ip = self.inputs.cfgm_ips[0]
    self.api_s_bd_obj = self.api_s_inspects[cfgm_ip].get_cs_bridge_domain(
        bd_name=self.bd_name, refresh=True)
    if not self.api_s_bd_obj:
        self.logger.warn("Bridge Domain %s is not found in API-Server" %
                         (self.bd_name))
        self.api_verification_flag = False
        return False
    bd_cfg = self.api_s_bd_obj['bridge-domain']
    if bd_cfg['uuid'] != self.bd_uuid:
        self.logger.warn(
            "BD Object ID %s in API-Server is not what was created" % (
                self.bd_uuid))
        self.api_verification_flag = False
        return False
    if (bd_cfg['parent_type'] != 'virtual-network' or
            bd_cfg['parent_uuid'] != self.parent_obj.uuid):
        self.logger.warn(
            "BD parent type %s and ID %s in API-Server is not as expected: %s" % (
                bd_cfg['parent_type'], bd_cfg['parent_uuid'],
                self.parent_obj.uuid))
        self.api_verification_flag = False
        return False
    if self.mac_learning_enabled and (
            bd_cfg['mac_learning_enabled'] != self.mac_learning_enabled):
        self.logger.warn("BD mac_learning_enabled %s in API-Server is "
                         "not what was created %s" % (
                             bd_cfg['mac_learning_enabled'],
                             self.mac_learning_enabled))
        self.api_verification_flag = False
        return False
    if self.mac_limit_control and (
            bd_cfg['mac_limit_control']['mac_limit'] !=
            self.mac_limit_control.mac_limit or
            bd_cfg['mac_limit_control']['mac_limit_action'] !=
            self.mac_limit_control.mac_limit_action):
        self.logger.warn("BD mac_limit_control %s in API-Server is "
                         "not what was created %s" % (
                             bd_cfg['mac_limit_control'],
                             self.mac_limit_control))
        self.api_verification_flag = False
        return False
    if self.mac_move_control and (
            bd_cfg['mac_move_control']['mac_move_limit'] !=
            self.mac_move_control.mac_move_limit or
            bd_cfg['mac_move_control']['mac_move_limit_action'] !=
            self.mac_move_control.mac_move_limit_action or
            bd_cfg['mac_move_control']['mac_move_time_window'] !=
            self.mac_move_control.mac_move_time_window):
        self.logger.warn("BD mac_move_control %s in API-Server is "
                         "not what was created %s" % (
                             bd_cfg['mac_move_control'],
                             self.mac_move_control))
        self.api_verification_flag = False
        return False
    if self.mac_aging_time and bd_cfg['mac_aging_time'] != self.mac_aging_time:
        self.logger.warn("BD mac_aging_time %s in API-Server is "
                         "not what was created %s" % (
                             bd_cfg['mac_aging_time'], self.mac_aging_time))
        self.api_verification_flag = False
        return False
    if self.isid and bd_cfg['isid'] != self.isid:
        self.logger.warn("BD isid %s in API-Server is "
                         "not what was created %s" % (
                             bd_cfg['isid'], self.isid))
        self.api_verification_flag = False
        return False
    self.logger.info("Verifications in API Server %s for BD %s passed" % (
        cfgm_ip, self.bd_name))
    return True
# end verify_bd_in_api_server
@retry(delay=2, tries=2)
def verify_bd_for_vn_in_agent(self, vmi_uuid):
    """
    Verify Bridge Domain for VN info in agent

    Finds the compute hosting *vmi_uuid*, then validates each BD entry
    the agent returns for this BD uuid against fixture/VN attributes.
    """
    vn_obj = self.parent_obj
    host_name = self.vnc_h.get_vmi_host_name(vmi_uuid)
    if not host_name:
        self.logger.error("VMI %s host could not be found from VNC API" % (
            vmi_uuid))
        return False
    host_ip = self.inputs.get_host_ip(host_name)
    agent_bds = self.agent_inspect[host_ip].get_bd(self.bd_uuid)
    if not agent_bds:
        self.logger.warn("Bridge Domain %s is not found in Agent %s" % (
            self.bd_name, host_ip))
        return False
    # Verify expected values in agent
    for bd_item in agent_bds:
        if bd_item['vn'] != vn_obj.uuid:
            self.logger.warn("VN uuid mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 bd_item['vn'], vn_obj.uuid))
            return False
        if bd_item['uuid'] != self.bd_uuid:
            self.logger.warn("BD uuid mismatch in agent"
                             ", actual: %s, expected: %s" % (
                                 bd_item['uuid'], self.bd_uuid))
            return False
        if int(bd_item['isid']) != self.isid:
            self.logger.warn("isid mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 bd_item['isid'], self.isid))
            return False
        if bd_item['pbb_etree_enabled'] != str(vn_obj.pbb_etree_enable):
            self.logger.warn("pbb_etree_enable value mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 bd_item['pbb_etree_enabled'],
                                 str(vn_obj.pbb_etree_enable)))
            return False
        # NOTE(review): bool() of a non-empty string is always True --
        # suspect if the agent reports 'true'/'false' strings; confirm.
        if bool(bd_item['learning_enabled']) != self.mac_learning_enabled:
            self.logger.warn("mac_learning_enabled value mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 bd_item['learning_enabled'],
                                 self.mac_learning_enabled))
            return False
        # Uncomment BD name check, when bug 1665253 is fixed
        if bd_item['name'] != self.fq_name_str:
            self.logger.warn("Name mismatch for Bridge Domain"
                             " in agent, actual: %s, expected: %s" % (
                                 bd_item['name'], self.bd_name))
            return False
    self.logger.info("Verifications in Agent %s for BD %s for VN info"
                     " passed" % (host_ip, self.bd_name))
    return True
# end verify_bd_for_vn_in_agent
@retry(delay=2, tries=2)
def verify_bd_for_vmi_in_computes(self, vmi_uuid):
    '''
    Verify BD details in VMI in computes:
    Verify in agent as well as vrouter
    '''
    if vmi_uuid:
        compute_name = self.vnc_h.get_vmi_host_name(vmi_uuid)
        if not compute_name:
            self.logger.warn("VMI %s host could not be found from VNC API" % (
                vmi_uuid))
            return False
        compute_ip = self.inputs.get_host_ip(compute_name)
        inspect_h = self.agent_inspect[compute_ip]
        tap_ifaces = inspect_h.get_vna_tap_interface_by_vmi(vmi_uuid)
        if not tap_ifaces:
            self.logger.warn("VMI %s is not found in Agent %s" % (
                vmi_uuid, compute_ip))
            return False
        tap0 = tap_ifaces[0]
        if not tap0['bridge_domain_list']:
            self.logger.warn("Bridge Domain for VMI %s is not found in Agent %s" % (
                vmi_uuid, compute_ip))
            return False
        bound_bd_uuid = tap0['bridge_domain_list'][0]['bridge_domain_uuid']
        # Verify bd uuid in agent
        if self.bd_uuid != bound_bd_uuid:
            self.logger.warn("Bridge Domain uuid mismatch"
                             " in agent, actual: %s, expected: %s" % (
                                 bound_bd_uuid, self.bd_uuid))
            return False
        self.logger.info("Verification for Bridge Domain uuid %s for "
                         "VMI %s passed in agent %s" % (
                             bound_bd_uuid, vmi_uuid, compute_ip))
        # Vrouter verifications
        # Interface verification (result currently unused; kept for the
        # pending TBD checks below)
        vmi_in_vrouter = inspect_h.get_vrouter_interfaces_by_name(tap0['name'])
        # [TBD]Verify ISID and Bmac value here
        # [TBD]Route table verification
    # NOTE: an empty vmi_uuid falls through to a vacuous pass.
    return True
# end verify_bd_for_vmi_in_computes
@retry(delay=2, tries=2)
def verify_bd_not_in_agent(self):
    """ Verify Bridge Domain not present in agent after BD is deleted. """
    for compute_ip in self.inputs.compute_ips:
        stale = self.agent_inspect[compute_ip].get_bd(self.bd_uuid)
        if stale:
            self.logger.warn("Bridge Domain %s is still seen in Agent %s as %s" % (
                self.bd_name, compute_ip, stale))
            return False
        self.logger.info("Bridge Domain %s is removed from Agent %s" % (
            self.bd_name, compute_ip))
    return True
# end verify_bd_not_in_agent
def verify_cleared_from_setup(self, verify=True):
    '''
    Verify that Bridge Domain is deleted from the setup
    '''
    if not verify:
        return
    assert self.verify_bd_not_in_agent(), ("BD cleanup verification "
                                           "failed in agent")
| 4,065 | 0 | 270 |
d9dbe1564fc700c61b7507bb9b58065a2f63e0e5 | 14,490 | py | Python | povary/apps/master_class/migrations/0009_auto__add_field_masterclass_visits_num__add_field_categorymc_visits_nu.py | TorinAsakura/cooking | cf0c78f613fa9ce0fcd4ec7a397ab880d9dd631a | [
"BSD-3-Clause"
] | null | null | null | povary/apps/master_class/migrations/0009_auto__add_field_masterclass_visits_num__add_field_categorymc_visits_nu.py | TorinAsakura/cooking | cf0c78f613fa9ce0fcd4ec7a397ab880d9dd631a | [
"BSD-3-Clause"
] | null | null | null | povary/apps/master_class/migrations/0009_auto__add_field_masterclass_visits_num__add_field_categorymc_visits_nu.py | TorinAsakura/cooking | cf0c78f613fa9ce0fcd4ec7a397ab880d9dd631a | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
| 71.732673 | 182 | 0.560663 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Add the `visits_num` counter column (PositiveInteger, default 0)
    to the MasterClass, CategoryMC and SubCategoryMC tables.
    """
    for table in ('master_class_masterclass',
                  'master_class_categorymc',
                  'master_class_subcategorymc'):
        # Fresh field instance per column, as South's add_column expects.
        column = self.gf(
            'django.db.models.fields.PositiveIntegerField')(default=0)
        db.add_column(table, 'visits_num', column, keep_default=False)
def backwards(self, orm):
    """Drop the `visits_num` column from the three master_class tables,
    reversing forwards() in the same order.
    """
    for table in ('master_class_masterclass',
                  'master_class_categorymc',
                  'master_class_subcategorymc'):
        db.delete_column(table, 'visits_num')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'ingredients.usaingredient': {
'Meta': {'object_name': 'USAIngredient'},
'alpha_carot': ('django.db.models.fields.FloatField', [], {}),
'ash': ('django.db.models.fields.FloatField', [], {}),
'beta_carot': ('django.db.models.fields.FloatField', [], {}),
'beta_crypt': ('django.db.models.fields.FloatField', [], {}),
'calcium': ('django.db.models.fields.FloatField', [], {}),
'carbohydrt': ('django.db.models.fields.FloatField', [], {}),
'cholestrl': ('django.db.models.fields.FloatField', [], {}),
'choline_total': ('django.db.models.fields.FloatField', [], {}),
'copper': ('django.db.models.fields.FloatField', [], {}),
'energy': ('django.db.models.fields.FloatField', [], {}),
'fa_mono': ('django.db.models.fields.FloatField', [], {}),
'fa_poly': ('django.db.models.fields.FloatField', [], {}),
'fa_sat': ('django.db.models.fields.FloatField', [], {}),
'fiber_td': ('django.db.models.fields.FloatField', [], {}),
'folate_dfe': ('django.db.models.fields.FloatField', [], {}),
'folate_total': ('django.db.models.fields.FloatField', [], {}),
'folic_acid': ('django.db.models.fields.FloatField', [], {}),
'food_folate': ('django.db.models.fields.FloatField', [], {}),
'gm_wt1': ('django.db.models.fields.FloatField', [], {}),
'gmwt_2': ('django.db.models.fields.FloatField', [], {}),
'gmwt_desc1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'gmwt_desc2': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iron': ('django.db.models.fields.FloatField', [], {}),
'lipid_total': ('django.db.models.fields.FloatField', [], {}),
'lut_zea': ('django.db.models.fields.FloatField', [], {}),
'lycopene': ('django.db.models.fields.FloatField', [], {}),
'magnesium': ('django.db.models.fields.FloatField', [], {}),
'manganese': ('django.db.models.fields.FloatField', [], {}),
'name_rus': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ndb_no': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
'niacin': ('django.db.models.fields.FloatField', [], {}),
'panto_acid': ('django.db.models.fields.FloatField', [], {}),
'phosphorus': ('django.db.models.fields.FloatField', [], {}),
'potassium': ('django.db.models.fields.FloatField', [], {}),
'protein': ('django.db.models.fields.FloatField', [], {}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'refuse_pct': ('django.db.models.fields.FloatField', [], {}),
'retinol': ('django.db.models.fields.FloatField', [], {}),
'riboflavin': ('django.db.models.fields.FloatField', [], {}),
'selenium': ('django.db.models.fields.FloatField', [], {}),
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sodium': ('django.db.models.fields.FloatField', [], {}),
'sugar_total': ('django.db.models.fields.FloatField', [], {}),
'thiamin': ('django.db.models.fields.FloatField', [], {}),
'translated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updatable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'vi_vit_d_ui': ('django.db.models.fields.FloatField', [], {}),
'vitamin_a_rae': ('django.db.models.fields.FloatField', [], {}),
'vitamin_a_ui': ('django.db.models.fields.FloatField', [], {}),
'vitamin_b12': ('django.db.models.fields.FloatField', [], {}),
'vitamin_b6': ('django.db.models.fields.FloatField', [], {}),
'vitamin_c': ('django.db.models.fields.FloatField', [], {}),
'vitamin_d': ('django.db.models.fields.FloatField', [], {}),
'vitamin_e': ('django.db.models.fields.FloatField', [], {}),
'vitamin_k': ('django.db.models.fields.FloatField', [], {}),
'water': ('django.db.models.fields.FloatField', [], {}),
'zinc': ('django.db.models.fields.FloatField', [], {})
},
'master_class.categorymc': {
'Meta': {'object_name': 'CategoryMC'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'visits_num': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'master_class.ingredient': {
'Meta': {'object_name': 'Ingredient'},
'addit_info': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ingredient_group': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ingredient_info': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'usa_ingredients'", 'to': "orm['ingredients.USAIngredient']"}),
'master_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ingredients'", 'to': "orm['master_class.MasterClass']"}),
'measure': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'value': ('django.db.models.fields.FloatField', [], {})
},
'master_class.masterclass': {
'Meta': {'object_name': 'MasterClass'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['master_class.CategoryMC']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'for_registered': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'ip_addr': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['master_class.SubCategoryMC']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'visits_num': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'master_class.mcstep': {
'Meta': {'object_name': 'MCStep'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'master_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterclasses'", 'to': "orm['master_class.MasterClass']"}),
'step_num': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'master_class.mctool': {
'Meta': {'object_name': 'MCTool'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'master_class.subcategorymc': {
'Meta': {'object_name': 'SubCategoryMC'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['master_class.CategoryMC']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'visits_num': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
}
}
complete_apps = ['master_class'] | 1,098 | 13,239 | 23 |
f16671322873b3ab7c7a3a7ce2fce8aa55512aba | 1,995 | py | Python | 150-Challenges/Challenges 88 - 95/Challenge 94.py | DGrifferty/Python | d725301664db2cbcfd5c4f5974745b4d81c8e28a | [
"Apache-2.0"
] | null | null | null | 150-Challenges/Challenges 88 - 95/Challenge 94.py | DGrifferty/Python | d725301664db2cbcfd5c4f5974745b4d81c8e28a | [
"Apache-2.0"
] | null | null | null | 150-Challenges/Challenges 88 - 95/Challenge 94.py | DGrifferty/Python | d725301664db2cbcfd5c4f5974745b4d81c8e28a | [
"Apache-2.0"
] | null | null | null | # 094
# Display an array of five numbers. Ask the user to select one
# of the numbers. Once they have selected a number, display the
# position of that item in the array. If they enter something that
# is not in the array, ask them to try again until they select
# a relevant item.
import array as ar
import numpy as np
import random
from typing import List
def get_num_int(prompt: str) -> int:
    """Repeatedly prompt the user until they enter a valid integer.

    Args:
        prompt: Text shown to the user on each attempt.

    Returns:
        The first value entered that parses as an int.
    """
    while True:
        try:
            return int(input(prompt))
        except ValueError as e:
            # Only bad numeric input is recoverable; anything else
            # (e.g. EOFError on closed stdin) should propagate instead of
            # looping forever as the old broad `except Exception` did.
            print(e)
def create_random_list(length: int = 50, lowest_num: int = 0,
                       highest_num: int = 90) -> List[int]:
    """Build a list of ``length`` random integers.

    Each entry is drawn uniformly from the inclusive range
    [lowest_num, highest_num].  Used to exercise return_index().
    """
    return [random.randint(lowest_num, highest_num) for _ in range(length)]
def return_index(tp, element) -> List[int]:
    """Return every index at which ``element`` occurs in ``tp``.

    Args:
        tp: Any iterable of values (list, array.array, numpy array, str, ...).
        element: The value to search for.

    Returns:
        Indices in ascending order where the value equals ``element``;
        an empty list when the element is absent.
    """
    # Use the comprehension to *produce* the result, rather than the original
    # anti-pattern of a comprehension run only for its .append() side effects.
    return [index for index, value in enumerate(tp) if value == element]
if __name__ == '__main__':
    # Demo driver: build a small random array, show it, then loop until the
    # user enters a value that is actually present, and report its indices.
    # Using built in array module
    nums_ar = ar.array('i', create_random_list(5))
    print(nums_ar)
    while True:
        num = get_num_int('Please select a number to get the index of'
                          '- ')
        if num in nums_ar:
            print(f'Index(s) of {num} are at '
                  f'{return_index(nums_ar, num)}')
            break
    # Using numpy module
    # Same flow as above, but backed by a numpy int32 array to show that
    # return_index() works on any iterable.
    nums_np = np.array(create_random_list(5), dtype=np.int32)
    print(nums_np)
    while True:
        num = get_num_int('Please select a number to get the index of'
                          '- ')
        if num in nums_np:
            print(f'Index(s) of {num} are at '
                  f'{return_index(nums_np, num)}')
            break
| 23.197674 | 70 | 0.601504 | # 094
# Display an array of five numbers. Ask the user to select one
# of the numbers. Once they have selected a number, display the
# position of that item in the array. If they enter something that
# is not in the array, ask them to try again until they select
# a relevant item.
import array as ar
import numpy as np
import random
from typing import List
def get_num_int(prompt: str) -> int:
    """Repeatedly prompt the user until they enter a valid integer.

    Args:
        prompt: Text shown to the user on each attempt.

    Returns:
        The first value entered that parses as an int.
    """
    while True:
        try:
            return int(input(prompt))
        except ValueError as e:
            # Only bad numeric input is recoverable; anything else
            # (e.g. EOFError on closed stdin) should propagate instead of
            # looping forever as the old broad `except Exception` did.
            print(e)
def create_random_list(length: int = 50, lowest_num: int = 0,
                       highest_num: int = 90) -> List[int]:
    """Build a list of ``length`` random integers.

    Each entry is drawn uniformly from the inclusive range
    [lowest_num, highest_num].  Used to exercise return_index().
    """
    return [random.randint(lowest_num, highest_num) for _ in range(length)]
def return_index(tp, element) -> List[int]:
    """Return every index at which ``element`` occurs in ``tp``.

    Args:
        tp: Any iterable of values (list, array.array, numpy array, str, ...).
        element: The value to search for.

    Returns:
        Indices in ascending order where the value equals ``element``;
        an empty list when the element is absent.
    """
    # Use the comprehension to *produce* the result, rather than the original
    # anti-pattern of a comprehension run only for its .append() side effects.
    return [index for index, value in enumerate(tp) if value == element]
if __name__ == '__main__':
    # Demo driver: build a small random array, show it, then loop until the
    # user enters a value that is actually present, and report its indices.
    # Using built in array module
    nums_ar = ar.array('i', create_random_list(5))
    print(nums_ar)
    while True:
        num = get_num_int('Please select a number to get the index of'
                          '- ')
        if num in nums_ar:
            print(f'Index(s) of {num} are at '
                  f'{return_index(nums_ar, num)}')
            break
    # Using numpy module
    # Same flow as above, but backed by a numpy int32 array to show that
    # return_index() works on any iterable.
    nums_np = np.array(create_random_list(5), dtype=np.int32)
    print(nums_np)
    while True:
        num = get_num_int('Please select a number to get the index of'
                          '- ')
        if num in nums_np:
            print(f'Index(s) of {num} are at '
                  f'{return_index(nums_np, num)}')
            break
| 0 | 0 | 0 |
8a1288e18e7a9b82a827aad422ef1aa964f3fc0c | 334 | py | Python | PycharmProjects/pythonteste/ex017.py | caioalexleme/Curso_Python | 6394f60689531c7431765538f1b699aabbf4acb2 | [
"MIT"
] | 3 | 2021-07-09T20:41:47.000Z | 2021-11-17T10:25:01.000Z | PycharmProjects/pythonteste/ex017.py | caioalexleme/Curso_Python | 6394f60689531c7431765538f1b699aabbf4acb2 | [
"MIT"
] | null | null | null | PycharmProjects/pythonteste/ex017.py | caioalexleme/Curso_Python | 6394f60689531c7431765538f1b699aabbf4acb2 | [
"MIT"
] | 1 | 2021-09-09T20:24:07.000Z | 2021-09-09T20:24:07.000Z | import math
co = float(input('Comprimento do cateto oposto: '))
ca = float(input('Comprimento do cateto adjacente: '))
hi = math.hypot(co, ca)
print('A hipotenusa vai medir {:.2f}'.format(hi))
'''hi = (co ** 2 + ca ** 2) **(1/2)
print('A hipotenusa vai medir {:.2f}'.format(hi))''' '''Usando matematicamente sem precisar importar'''
| 41.75 | 104 | 0.658683 | import math
co = float(input('Comprimento do cateto oposto: '))
ca = float(input('Comprimento do cateto adjacente: '))
hi = math.hypot(co, ca)
print('A hipotenusa vai medir {:.2f}'.format(hi))
'''hi = (co ** 2 + ca ** 2) **(1/2)
print('A hipotenusa vai medir {:.2f}'.format(hi))''' '''Usando matematicamente sem precisar importar'''
| 0 | 0 | 0 |
cceeb312e1e0b4d346b44ac72c5a6f4d2bafbd85 | 9,439 | py | Python | monasca_notification/plugins/jira_notifier.py | openstack/monasca-notification | 975f46d226e479180c6499fe34073225aeadefdb | [
"Apache-2.0"
] | 25 | 2015-10-18T02:54:54.000Z | 2020-04-16T12:05:27.000Z | monasca_notification/plugins/jira_notifier.py | openstack/monasca-notification | 975f46d226e479180c6499fe34073225aeadefdb | [
"Apache-2.0"
] | 1 | 2020-12-05T06:18:12.000Z | 2020-12-05T06:18:14.000Z | monasca_notification/plugins/jira_notifier.py | openstack/monasca-notification | 975f46d226e479180c6499fe34073225aeadefdb | [
"Apache-2.0"
] | 14 | 2016-01-11T08:58:56.000Z | 2021-11-19T09:11:19.000Z | # (C) Copyright 2016 Hewlett Packard Enterprise Development Company LP
# Copyright 2017 Fujitsu LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from debtcollector import removals
from jinja2 import Template
import jira
from oslo_config import cfg
import simplejson as json
import urllib
import yaml
from monasca_notification.plugins.abstract_notifier import AbstractNotifier
"""
Note:
This plugin doesn't support multi tenancy. Multi tenancy requires support for
multiple JIRA server url. JIRA doesn't support OAUTH2 tokens, we may need to get
the user credentials in query params and store them in monasca DB which we don't want to do.
That is the reason for not supporting true multitenancy.
MultiTenancy can be achieved by creating issues in different project for different tenant on
the same JIRA server.
notification.address = https://<jira_url>/?project=<project_name>
Jira Configuration
1) jira:
user: username
password: password
Sample notification:
monasca notification-create MyIssuer JIRA https://jira.hpcloud.net/?project=MyProject
monasca notification-create MyIssuer1 JIRA https://jira.hpcloud.net/?project=MyProject&
component=MyComponent
"""
CONF = cfg.CONF
jira_notifier_group = cfg.OptGroup(name='%s_notifier' % JiraNotifier.type)
jira_notifier_opts = [
cfg.IntOpt(name='timeout', default=5, min=1),
cfg.StrOpt(name='user', required=False),
cfg.StrOpt(name='password', required=False, secret=True),
cfg.StrOpt(name='custom_formatter', default=None),
cfg.StrOpt(name='proxy', default=None)
]
| 38.369919 | 97 | 0.637568 | # (C) Copyright 2016 Hewlett Packard Enterprise Development Company LP
# Copyright 2017 Fujitsu LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from debtcollector import removals
from jinja2 import Template
import jira
from oslo_config import cfg
import simplejson as json
import urllib
import yaml
from monasca_notification.plugins.abstract_notifier import AbstractNotifier
"""
Note:
This plugin doesn't support multi tenancy. Multi tenancy requires support for
multiple JIRA server url. JIRA doesn't support OAUTH2 tokens, we may need to get
the user credentials in query params and store them in monasca DB which we don't want to do.
That is the reason for not supporting true multitenancy.
MultiTenancy can be achieved by creating issues in different project for different tenant on
the same JIRA server.
notification.address = https://<jira_url>/?project=<project_name>
Jira Configuration
1) jira:
user: username
password: password
Sample notification:
monasca notification-create MyIssuer JIRA https://jira.hpcloud.net/?project=MyProject
monasca notification-create MyIssuer1 JIRA https://jira.hpcloud.net/?project=MyProject&
component=MyComponent
"""
CONF = cfg.CONF
class JiraNotifier(AbstractNotifier):
    """Notification plugin that files (or reopens) Jira issues for Monasca alarms.

    One issue is maintained per (project, reporter, alarm id); repeated
    notifications for the same alarm are appended to it as comments
    (see ``jira_workflow``).
    """
    # Plugin type name; also used at module level to derive the oslo.config
    # group name ('jira_notifier').
    type = 'jira'
    # JQL template filled with (project key, reporter user, alarm id).
    # NOTE(review): the chained assignment also creates a public
    # ``search_query`` class attribute aliasing the same string - confirm
    # whether anything relies on that alias before removing it.
    _search_query = search_query = "project={} and reporter='{}' and summary ~ '{}'"
    def __init__(self, log):
        """Store the logger; the custom formatter is loaded lazily on use."""
        super(JiraNotifier, self).__init__()
        self._log = log
        self.jira_fields_format = None
    @removals.remove(
        message='Configuration of notifier is available through oslo.cfg',
        version='1.9.0',
        removal_version='3.0.0'
    )
    def config(self, config_dict):
        """Deprecated no-op; configuration now comes from oslo.cfg."""
        pass
    @property
    def statsd_name(self):
        # Metric prefix under which this notifier reports statistics.
        return 'jira_notifier'
    def _get_jira_custom_format_fields(self):
        """Load the optional YAML formatter file configured via
        ``custom_formatter`` and return the mapping under its top-level
        ``jira_format`` key, or None when no formatter is configured.
        Raises if the configured file cannot be read or parsed.
        """
        jira_fields_format = None
        formatter = CONF.jira_notifier.custom_formatter
        if not self.jira_fields_format and formatter:
            try:
                with open(formatter, 'r') as f:
                    jira_fields_format = yaml.safe_load(f)
            except Exception:
                self._log.exception("Unable to read custom_formatter file. Check file location")
                raise
            # Remove the top element
            jira_fields_format = jira_fields_format["jira_format"]
        return jira_fields_format
    def _build_custom_jira_message(self, notification, jira_fields_format):
        """Render the summary/comments/description Jinja2 templates from the
        custom formatter, passing the notification as template context."""
        jira_fields = {}
        # Templatize the message object
        jira_field_summary_field = jira_fields_format.get("summary", None)
        if jira_field_summary_field:
            template = Template(jira_field_summary_field)
            jira_fields["summary"] = template.render(notification=notification)
        jira_field_comments_field = jira_fields_format.get("comments", None)
        if jira_field_comments_field:
            template = Template(jira_field_comments_field)
            jira_fields["comments"] = template.render(notification=notification)
        jira_field_description_field = jira_fields_format.get("description", None)
        if jira_field_description_field:
            template = Template(jira_field_description_field)
            jira_fields["description"] = template.render(notification=notification)
        return jira_fields
    def _build_default_jira_message(self, notification):
        """Builds jira message body
        """
        body = {'alarm_id': notification.alarm_id,
                'alarm_definition_id': notification.raw_alarm['alarmDefinitionId'],
                'alarm_name': notification.alarm_name,
                'alarm_description': notification.raw_alarm['alarmDescription'],
                'alarm_timestamp': notification.alarm_timestamp,
                'state': notification.state,
                'old_state': notification.raw_alarm['oldState'],
                'message': notification.message,
                'tenant_id': notification.tenant_id,
                'metrics': notification.metrics}
        jira_fields = {}
        summary_format_string = ("Monasca alarm for alarm_defintion {0} status changed to {1} "
                                 "for the alarm_id {2}")
        jira_fields["summary"] = summary_format_string.format(notification.alarm_name,
                                                              notification.state,
                                                              notification.alarm_id)
        # {code} markers render the JSON payload as a code block in Jira.
        jira_fields["comments"] = "{code}%s{code}" % (json.dumps(body, indent=3))
        jira_fields["description"] = 'Monasca alarm'
        return jira_fields
    def _build_jira_message(self, notification):
        """Use the custom formatter when configured, else the default body."""
        formatter = CONF.jira_notifier.custom_formatter
        if formatter:
            return self._build_custom_jira_message(notification,
                                                   self._get_jira_custom_format_fields())
        return self._build_default_jira_message(notification)
    def send_notification(self, notification):
        """Creates or Updates an issue in Jira
        """
        jira_fields = self._build_jira_message(notification)
        parsed_url = urllib.parse.urlsplit(notification.address)
        query_params = urllib.parse.parse_qs(parsed_url.query)
        # URL without query params
        url = urllib.parse.urljoin(
            notification.address,
            urllib.parse.urlparse(
                notification.address).path)
        # 'project' is required in the notification address; 'component' is
        # optional (see module docstring for the address format).
        jira_fields["project"] = query_params["project"][0]
        if query_params.get("component"):
            jira_fields["component"] = query_params["component"][0]
        auth = (
            CONF.jira_notifier.user,
            CONF.jira_notifier.password
        )
        proxy = CONF.jira_notifier.proxy
        proxy_dict = None
        if proxy is not None:
            proxy_dict = {"https": proxy}
        # Any failure talking to Jira is logged and reported as False rather
        # than raised to the caller.
        try:
            jira_obj = jira.JIRA(url, basic_auth=auth, proxies=proxy_dict)
            self.jira_workflow(jira_fields, jira_obj, notification)
        except Exception:
            self._log.exception("Error creating issue in Jira at URL {}".format(url))
            return False
        return True
    def jira_workflow(self, jira_fields, jira_obj, notification):
        """How does Jira plugin work?
           1) Check whether the issue with same description exists?
           2) If issue exists, and if it is closed state, open it
           3) if the issue doesn't exist, then create the issue
           4) Add current alarm details in comments
        """
        issue_dict = {'project': {'key': jira_fields["project"]},
                      'summary': jira_fields["summary"],
                      'description': jira_fields["description"],
                      'issuetype': {'name': 'Bug'}, }
        # If the JIRA workflow is created with mandatory components
        if jira_fields.get("component"):
            issue_dict["components"] = [{"name": jira_fields.get("component")}]
        # Deduplicate on (project, reporter, alarm id) so one alarm maps to
        # one issue over its lifetime.
        search_term = self._search_query.format(issue_dict["project"]["key"],
                                                CONF.jira_notifier.user,
                                                notification.alarm_id)
        issue_list = jira_obj.search_issues(search_term)
        if not issue_list:
            self._log.debug("Creating an issue with the data {}".format(issue_dict))
            issue = jira_obj.create_issue(fields=issue_dict)
        else:
            issue = issue_list[0]
            self._log.debug("Found an existing issue {} for this notification".format(issue))
            current_state = issue.fields.status.name
            if current_state.lower() in ["resolved", "closed"]:
                # Open the issue
                transitions = jira_obj.transitions(issue)
                allowed_transistions = [(t['id'], t['name'])
                                        for t in transitions if "reopen" in t['name'].lower()]
                if allowed_transistions:
                    # Reopen the issue
                    jira_obj.transition_issue(issue, allowed_transistions[0][0])
        jira_comment_message = jira_fields.get("comments")
        if jira_comment_message:
            jira_obj.add_comment(issue, jira_comment_message)
# oslo.config group ('jira_notifier') holding this plugin's settings.
jira_notifier_group = cfg.OptGroup(name='%s_notifier' % JiraNotifier.type)
jira_notifier_opts = [
    cfg.IntOpt(name='timeout', default=5, min=1),
    cfg.StrOpt(name='user', required=False),  # basic-auth user (also JQL reporter)
    cfg.StrOpt(name='password', required=False, secret=True),  # secret: masked in logs
    cfg.StrOpt(name='custom_formatter', default=None),  # path to YAML template file
    cfg.StrOpt(name='proxy', default=None)  # used as the https proxy when set
]
def register_opts(conf):
    """Attach the Jira notifier option group and its options to ``conf``."""
    group = jira_notifier_group
    conf.register_group(group)
    conf.register_opts(jira_notifier_opts, group=group)
def list_opts():
    """Return this plugin's oslo.config options, keyed by their group."""
    return {jira_notifier_group: jira_notifier_opts}
| 2,163 | 5,043 | 69 |
f445224ff812a87f09a566ea2f913fa27571701a | 3,889 | py | Python | vectorhub/encoders/text/torch_transformers/legal_bert.py | NanaAkwasiAbayieBoateng/vectorhub | 265933521cf0a3113a47182a30b0037bf163584b | [
"Apache-2.0"
] | 1 | 2020-11-04T16:02:39.000Z | 2020-11-04T16:02:39.000Z | vectorhub/encoders/text/torch_transformers/legal_bert.py | NanaAkwasiAbayieBoateng/vectorhub | 265933521cf0a3113a47182a30b0037bf163584b | [
"Apache-2.0"
] | null | null | null | vectorhub/encoders/text/torch_transformers/legal_bert.py | NanaAkwasiAbayieBoateng/vectorhub | 265933521cf0a3113a47182a30b0037bf163584b | [
"Apache-2.0"
] | null | null | null | from typing import List, Union
from ..base import BaseText2Vec
from ....base import catch_vector_errors
from ....doc_utils import ModelDefinition
from ....import_utils import *
from ....models_dict import MODEL_REQUIREMENTS
from datetime import date
if is_all_dependency_installed(MODEL_REQUIREMENTS['encoders-text-torch-transformers-auto']):
from transformers import AutoTokenizer, AutoModel
import torch
LegalBertModelDefinition = ModelDefinition(
model_id="text/legal-bert",
model_name="Legal Bert",
vector_length=768,
description="BERT has achieved impressive performance in several NLP tasks. However, there has been limited investigation on its adaptation guidelines in specialised domains. Here we focus on the legal domain, where we explore several approaches for applying BERT models to downstream legal tasks, evaluating on multiple datasets. Our findings indicate that the previous guidelines for pre-training and fine-tuning, often blindly followed, do not always generalize well in the legal domain. Thus we propose a systematic investigation of the available strategies when applying BERT in specialised domains. These are: (a) use the original BERT out of the box, (b) adapt BERT by additional pre-training on domain-specific corpora, and (c) pre-train BERT from scratch on domain-specific corpora. We also propose a broader hyper-parameter search space when fine-tuning for downstream tasks and we release LEGAL-BERT, a family of BERT models intended to assist legal NLP research, computational law, and legal technology applications.",
paper="https://arxiv.org/abs/2010.02559",
repo="https://huggingface.co/nlpaueb/legal-bert-base-uncased",
release_date=date(2020,10,6),
installation="pip install vectorhub[encoders-text-torch-transformers]",
example="""
#pip install vectorhub[encoders-text-torch-transformers]
from vectorhub.encoders.text.torch_transformers import LegalBert2Vec
model = LegalBert2Vec()
model.encode("I enjoy taking long walks along the beach with my dog.")
"""
)
__doc__ = LegalBertModelDefinition.create_docs()
| 54.774648 | 1,034 | 0.717151 | from typing import List, Union
from ..base import BaseText2Vec
from ....base import catch_vector_errors
from ....doc_utils import ModelDefinition
from ....import_utils import *
from ....models_dict import MODEL_REQUIREMENTS
from datetime import date
if is_all_dependency_installed(MODEL_REQUIREMENTS['encoders-text-torch-transformers-auto']):
from transformers import AutoTokenizer, AutoModel
import torch
LegalBertModelDefinition = ModelDefinition(
model_id="text/legal-bert",
model_name="Legal Bert",
vector_length=768,
description="BERT has achieved impressive performance in several NLP tasks. However, there has been limited investigation on its adaptation guidelines in specialised domains. Here we focus on the legal domain, where we explore several approaches for applying BERT models to downstream legal tasks, evaluating on multiple datasets. Our findings indicate that the previous guidelines for pre-training and fine-tuning, often blindly followed, do not always generalize well in the legal domain. Thus we propose a systematic investigation of the available strategies when applying BERT in specialised domains. These are: (a) use the original BERT out of the box, (b) adapt BERT by additional pre-training on domain-specific corpora, and (c) pre-train BERT from scratch on domain-specific corpora. We also propose a broader hyper-parameter search space when fine-tuning for downstream tasks and we release LEGAL-BERT, a family of BERT models intended to assist legal NLP research, computational law, and legal technology applications.",
paper="https://arxiv.org/abs/2010.02559",
repo="https://huggingface.co/nlpaueb/legal-bert-base-uncased",
release_date=date(2020,10,6),
installation="pip install vectorhub[encoders-text-torch-transformers]",
example="""
#pip install vectorhub[encoders-text-torch-transformers]
from vectorhub.encoders.text.torch_transformers import LegalBert2Vec
model = LegalBert2Vec()
model.encode("I enjoy taking long walks along the beach with my dog.")
"""
)
__doc__ = LegalBertModelDefinition.create_docs()
class LegalBert2Vec(BaseText2Vec):
    """Text encoder backed by LEGAL-BERT (HuggingFace ``nlpaueb`` models).

    Vectors are produced by mean-pooling the model's output token embeddings
    across the sequence dimension.
    """
    # ModelDefinition with paper/repo metadata, defined at module level.
    definition = LegalBertModelDefinition
    def __init__(self, model_name: str="nlpaueb/legal-bert-base-uncased"):
        """Load the pretrained model and its matching tokenizer from the hub."""
        self.model = AutoModel.from_pretrained(model_name)
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
    @staticmethod
    def list_possible_models():
        """Map usable HuggingFace model names to a short training-data note."""
        return {
            "nlpaueb/bert-base-uncased-contracts": "Trained on US contracts",
            "nlpaueb/bert-base-uncased-eurlex": "Trained on EU legislation",
            # NOTE(review): the key below ends with a trailing space - confirm
            # whether that is intentional before relying on dict lookups.
            "nlpaueb/bert-base-uncased-echr ": "Trained on ECHR cases",
            "nlpaueb/legal-bert-base-uncased": "Trained on all the above",
            "nlpaueb/legal-bert-small-uncased": "Trained on all the above"
        }
    @catch_vector_errors
    def encode(self, text: Union[str, List[str]]) -> List[float]:
        """
        Encode text into a single vector by mean-pooling token embeddings.
        Args:
            text: a string, or a list of strings (delegated to bulk_encode).
        """
        if isinstance(text, str):
            # [0] selects the model's first output (token embeddings);
            # the mean over axis=1 pools across tokens.
            return torch.mean(self.model(**self.tokenizer(text, return_tensors='pt'))[0], axis=1).detach().tolist()[0]
        if isinstance(text, list):
            return self.bulk_encode(text)
        raise ValueError("Not a string or a list of strings, please enter valid data type.")
    @catch_vector_errors
    def bulk_encode(self, texts: List[str]) -> List[List[float]]:
        """
        Encode multiple sentences as one padded batch; returns one vector
        per input string.
        Args:
            texts: List[str]
        """
        # We use pad_to_multiple_of as other arguments usually do not work.
        return torch.mean(self.model(**self.tokenizer(texts, return_tensors='pt', pad_to_multiple_of=self.tokenizer.model_max_length,
                truncation=True, padding=True))[0], axis=1).detach().tolist()
| 587 | 1,161 | 23 |
6bfaa00fe949d7084b0d4ac71e333e1f7a72d58c | 3,365 | py | Python | tests/storage/test_keys.py | skalarproduktraum/synapse | c831748f4d243d74e9a3fd2042bc2b35cc30f961 | [
"Apache-2.0"
] | null | null | null | tests/storage/test_keys.py | skalarproduktraum/synapse | c831748f4d243d74e9a3fd2042bc2b35cc30f961 | [
"Apache-2.0"
] | null | null | null | tests/storage/test_keys.py | skalarproduktraum/synapse | c831748f4d243d74e9a3fd2042bc2b35cc30f961 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import signedjson.key
from twisted.internet.defer import Deferred
import tests.unittest
KEY_1 = signedjson.key.decode_verify_key_base64(
"ed25519", "key1", "fP5l4JzpZPq/zdbBg5xx6lQGAAOM9/3w94cqiJ5jPrw"
)
KEY_2 = signedjson.key.decode_verify_key_base64(
"ed25519", "key2", "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
)
| 36.576087 | 82 | 0.652006 | # -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import signedjson.key
from twisted.internet.defer import Deferred
import tests.unittest
# Fixture ed25519 verify keys, decoded from their base64 form; stored and
# looked up by the test cases below.
KEY_1 = signedjson.key.decode_verify_key_base64(
    "ed25519", "key1", "fP5l4JzpZPq/zdbBg5xx6lQGAAOM9/3w94cqiJ5jPrw"
)
KEY_2 = signedjson.key.decode_verify_key_base64(
    "ed25519", "key2", "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
)
class KeyStoreTestCase(tests.unittest.HomeserverTestCase):
    """Tests for the server-signing-key store: storage, lookup and caching."""
    def test_get_server_verify_keys(self):
        """Stored keys are returned by version; unknown key ids map to None."""
        store = self.hs.get_datastore()
        d = store.store_server_verify_key("server1", "from_server", 0, KEY_1)
        self.get_success(d)
        d = store.store_server_verify_key("server1", "from_server", 0, KEY_2)
        self.get_success(d)
        d = store.get_server_verify_keys(
            [
                ("server1", "ed25519:key1"),
                ("server1", "ed25519:key2"),
                ("server1", "ed25519:key3"),
            ]
        )
        res = self.get_success(d)
        self.assertEqual(len(res.keys()), 3)
        self.assertEqual(res[("server1", "ed25519:key1")].version, "key1")
        self.assertEqual(res[("server1", "ed25519:key2")].version, "key2")
        # non-existent result gives None
        self.assertIsNone(res[("server1", "ed25519:key3")])
    def test_cache(self):
        """Check that updates correctly invalidate the cache."""
        store = self.hs.get_datastore()
        key_id_1 = "ed25519:key1"
        key_id_2 = "ed25519:key2"
        d = store.store_server_verify_key("srv1", "from_server", 0, KEY_1)
        self.get_success(d)
        d = store.store_server_verify_key("srv1", "from_server", 0, KEY_2)
        self.get_success(d)
        d = store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)])
        res = self.get_success(d)
        self.assertEqual(len(res.keys()), 2)
        self.assertEqual(res[("srv1", key_id_1)], KEY_1)
        self.assertEqual(res[("srv1", key_id_2)], KEY_2)
        # we should be able to look up the same thing again without a db hit
        res = store.get_server_verify_keys([("srv1", key_id_1)])
        # The cached path may return either a Deferred or a plain value, so
        # unwrap only when necessary.
        if isinstance(res, Deferred):
            res = self.successResultOf(res)
        self.assertEqual(len(res.keys()), 1)
        self.assertEqual(res[("srv1", key_id_1)], KEY_1)
        new_key_2 = signedjson.key.get_verify_key(
            signedjson.key.generate_signing_key("key2")
        )
        # Overwriting key2 must evict the stale cached entry.
        d = store.store_server_verify_key("srv1", "from_server", 10, new_key_2)
        self.get_success(d)
        d = store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)])
        res = self.get_success(d)
        self.assertEqual(len(res.keys()), 2)
        self.assertEqual(res[("srv1", key_id_1)], KEY_1)
        self.assertEqual(res[("srv1", key_id_2)], new_key_2)
| 818 | 1,583 | 23 |
c42eb39561010eb8403eb0346e47bae8171fc4d3 | 2,987 | py | Python | AlpacaNewsRetriever/NewsRetriever.py | caixunshiren/alpaca-news-api | 5bdfb8883148754f2f9eeba8aee34a4607d4584e | [
"MIT"
] | null | null | null | AlpacaNewsRetriever/NewsRetriever.py | caixunshiren/alpaca-news-api | 5bdfb8883148754f2f9eeba8aee34a4607d4584e | [
"MIT"
] | null | null | null | AlpacaNewsRetriever/NewsRetriever.py | caixunshiren/alpaca-news-api | 5bdfb8883148754f2f9eeba8aee34a4607d4584e | [
"MIT"
] | 1 | 2022-03-02T03:53:47.000Z | 2022-03-02T03:53:47.000Z | import requests
import pandas as pd
import time
class AlpacaNewsRetriever:
    """
    Class for getting historical news from Alpaca
    __init__():
        API_ID: your Alpaca ID
        API_KEY: your Alpaca secret key
    get_news():
        symbol: Ticker of the stock. e.g. AAPL
        start: start timestamp in RFC 3339 format. e.g. 01-01-2015
        end: end timestamp in RFC 3339 format. e.g. 01-01-2019
        limit: number of news per page, max 50.
        return: a pandas dataframe contains the news
    """
    # The bare string below is retained, commented-out reference code for a
    # non-paginated get_news(); it is a no-op string literal at class scope.
    """
    def get_news(self, symbol, start, end, limit=50):
        raw_response = self.get_raw_request(symbol, start, end, limit)
        if limit <= 50:
            print(raw_response)
            return self.post_process(raw_response, symbol)
        else:
            # TODO: add pagination to API call
            return raw_response
    """
| 40.917808 | 115 | 0.603281 | import requests
import pandas as pd
import time
class AlpacaNewsRetriever:
    """Retrieve historical news articles from the Alpaca News API.

    __init__():
        API_ID: your Alpaca key ID
        API_KEY: your Alpaca secret key
    get_news():
        symbol: ticker of the stock, e.g. AAPL
        start: start timestamp, e.g. 01-01-2015
        end: end timestamp, e.g. 01-01-2019
        limit: number of news items per page, max 50
        max_call_per_min: request budget before sleeping 60s
        return: a pandas DataFrame containing the news
    """
    def __init__(self, API_ID, API_KEY):
        self.API_ID = API_ID
        self.API_KEY = API_KEY
        self.base_url = 'https://data.alpaca.markets/v1beta1/news?'
    def get_news(self, symbol, start, end, limit=50, max_call_per_min=1000):
        """Fetch every page of news for ``symbol`` between ``start`` and ``end``.

        Follows Alpaca's ``next_page_token`` until exhausted, sleeping 60
        seconds whenever ``max_call_per_min`` requests have been issued so
        the API rate limit is respected.
        """
        print("Status --- Extracting News")
        raw_response = self.get_raw_request(symbol, start, end, limit)
        token = raw_response['next_page_token']
        df = self.post_process(raw_response, symbol)
        num_call = 1  # requests issued since the last sleep
        while token is not None:
            if num_call >= max_call_per_min:
                # Back off for a minute once the per-minute budget is spent.
                print("Status --- API call limit reached. Sleep for 60 seconds")
                time.sleep(60)
                num_call = 0
                print("Status --- Sleep finished. Resuming...")
            raw_response = self.get_raw_request(symbol, start, end, limit, token=token)
            token = raw_response['next_page_token']
            df = pd.concat([df, self.post_process(raw_response, symbol)], ignore_index=True)
            num_call += 1
        return df
    def get_raw_request(self, symbol, start, end, limit, token=None):
        """Issue one GET against the news endpoint and return the parsed JSON.

        ``token`` (when given) requests a specific results page.
        """
        url = self.base_url
        url += f'start={start}&end={end}&symbols={symbol}&limit={limit}'
        if token is not None:
            url += f'&page_token={token}'
        response = requests.get(url, headers={"Apca-Api-Key-Id": self.API_ID, 'Apca-Api-Secret-Key': self.API_KEY})
        return response.json()
    def post_process(self, content, symbol):
        """Flatten one JSON page of news into a DataFrame.

        Columns: ticker, timestamp, headline, summary (one row per article).
        """
        # Build the frame from row records directly instead of the original
        # column-lists + transpose dance; this also avoids shadowing the
        # built-in `dict`.
        rows = [{'ticker': symbol,
                 'timestamp': news['created_at'],
                 'headline': news['headline'],
                 'summary': news['summary']}
                for news in content['news']]
        return pd.DataFrame(rows, columns=['ticker', 'timestamp', 'headline', 'summary'])
| 2,005 | 0 | 106 |
897644b4e3ff887ea94b2e372e01f943dd13acdc | 12,861 | py | Python | image/__init__.py | conducto/conducto | b480780905f5a25e8c803b60ca7cdf6976ce5ef6 | [
"Apache-2.0"
] | 25 | 2020-05-07T22:51:11.000Z | 2021-11-17T16:14:42.000Z | image/__init__.py | conducto/conducto | b480780905f5a25e8c803b60ca7cdf6976ce5ef6 | [
"Apache-2.0"
] | 3 | 2020-04-21T06:38:02.000Z | 2020-05-31T01:57:19.000Z | image/__init__.py | conducto/conducto | b480780905f5a25e8c803b60ca7cdf6976ce5ef6 | [
"Apache-2.0"
] | 2 | 2020-05-14T01:47:32.000Z | 2020-06-03T21:58:12.000Z | import contextlib
import concurrent.futures
import os
import sys
import traceback
import typing
import itertools
from conducto.shared import (
async_utils,
client_utils,
types as t,
log,
)
import conducto
from . import dockerfile as dockerfile_mod, names
from conducto.shared import constants, imagepath
from . import names
# Short alias for the execution-environment constants used in this module.
CONST_EE = constants.ExecutionEnv
# contextlib.asynccontextmanager only exists on Python >= 3.7; fall back to
# the bundled backport on older interpreters.
if sys.version_info >= (3, 7):
    asynccontextmanager = contextlib.asynccontextmanager
else:
    from conducto.shared import async_backport
    asynccontextmanager = async_backport.asynccontextmanager
def relpath(path):
    """
    Construct a decorated path string that can be translated to the
    corresponding location inside a node's docker image.  Useful for building
    path arguments to command line tools.
    Used internally by :py:class:`conducto.Exec` when given a Python callable,
    to build the command line that runs that callable in the pipeline.
    """
    contextual = Image.get_contextual_path(path)
    linear = contextual.linear()
    return f"__conducto_path:{linear}:endpath__"
class Repository:
    """A collection of images with different names"""
    # NOTE(review): only the docstring is visible in this view; the class
    # body (if any) is defined elsewhere/truncated - confirm before editing.
class Image:
"""
:param image: Specify the base image to start from. Code can be added with
various context* variables, and packages with install_* variables.
:type image: `str`
:param dockerfile: Use instead of :code:`image` and pass a path to a Dockerfile.
Relative paths are evaluated starting from the file where this code is
written. Unless :code:`context` is specified, it uses the directory of the
Dockerfile as the build context
:type dockerfile: `str`
:param dockerfile_text: Directly pass the text of a Dockerfile rather than linking
to one that's already written. If you want to use :code:`ADD` or :code:`COPY`
you must specify :code:`context` explicitly.
:type dockerfile_text: `str`
:param docker_build_args: Dict mapping names of arguments to
:code:`docker --build-args` to values
:type docker_build_args: `dict`
:param docker_auto_workdir: Set the work-dir to the destination of
:code:`copy_dir`. Default: :code:`True`
:type docker_auto_workdir: `bool`
:param context: Use this to specify a custom docker build context when
using :code:`dockerfile`.
:type context: `str`
:param copy_repo: Set to `True` to automatically copy the entire current Git repo
into the Docker image. Use this so that a single Image definition can either use
local code or can fetch from a remote repo.
**copy_dir mode**: Normal use of this parameter uses local code, so it sets
`copy_dir` to point to the root of the Git repo of the calling code.
**copy_url mode**: Specify `copy_branch` to use a remote repository. This is
commonly done for CI/CD. When specified, `copy_url` will be auto-populated.
:type copy_repo: `bool`
:param copy_dir: Path to a directory. All files in that directory (and its
subdirectories) will be copied into the generated Docker image.
:type copy_dir: `str`
:param copy_url: URL to a Git repo. Conducto will clone it and copy its
contents into the generated Docker image. Authenticate to private
GitHub repos with a URL like `https://{user}:{token}@github.com/...`.
See secrets for more info on how to store this securely. Must also
specify copy_branch.
:type copy_url: `str`
:param copy_branch: A specific branch name to clone. Required if using copy_url.
:type copy_branch: `str`
:param path_map: Dict that maps external_path to internal_path. Needed for
live debug and for passing callables to :py:class:`Exec` & :py:class:`Lazy`.
It can be inferred from :code:`copy_dir`, :code:`copy_url`, or :code:`copy_repo`;
if not using one of those, you must specify :code:`path_map` explicitly. This
typically happens when a user-generated Dockerfile copies the code into the image.
:type path_map: `None`
:param install_pip: List of Python packages for Conducto to :code:`pip install` into
the generated Docker image.
:type install_pip: `List[str]`
:param install_npm: List of npm packages for Conducto to :code:`npm i` into the
generated Docker image.
:type install_npm: `List[str]`
:param install_packages: List of packages to install with the appropriate Linux package
manager for this image's flavor.
:type install_packages: `List[str]`
:param install_docker: If :code:`True`, install Docker during build time.
:type install_docker: `bool`
:param shell: Which shell to use in this container. Defaults to :code:`co.Image.AUTO` to
auto-detect. :code:`AUTO` will prefer :code:`/bin/bash` when available, and fall back to
:code:`/bin/sh` otherwise.
:type shell: `str`
:param name: Name this `Image` so other Nodes can reference it by name. If
no name is given, one will automatically be generated from a list of
our favorite Pokemon. I choose you, angry-bulbasaur!
:type name: `str`
:param instantiation_directory: The directory of the file in which this image object was created. This is
used to determine where relative paths passed into co.Image are relative from. This is
automatically populated internally by conducto.
:type instantiation_directory: `str`
:param reqs_py: Deprecated. Use :code:`install_py` instead.
:param reqs_npm: Deprecated. Use :code:`install_npm` instead.
:param reqs_packages: Deprecated. Use :code:`install_packages` instead.
:param reqs_docker: Deprecated. Use :code:`install_docker` instead.
"""
_CONTEXT = None
AUTO = "__auto__"
@staticmethod
@staticmethod
# hack to get this to serialize
@property
# Note: these methods are not needed in non-python implementations
# co.Lazy(function) is not a thing in other languages
# and conducto share-directory should be called instead writing an Image.share_directory
@staticmethod
@staticmethod
register_directory = share_directory
def _non_conducto_dir():
    """Return the directory of the first stack frame outside the Conducto package.

    If ``Image._CONTEXT`` has been set, that file's directory wins instead.
    This identifies where the user's calling code lives.
    """
    if Image._CONTEXT is not None:
        return os.path.dirname(os.path.abspath(Image._CONTEXT))
    for frame, _unused_lineno in traceback.walk_stack(None):
        frame_file = frame.f_code.co_filename
        if not frame_file.startswith(_conducto_dir):
            return os.path.dirname(frame_file)
_conducto_dir = os.path.dirname(os.path.dirname(__file__)) + os.path.sep
| 34.94837 | 109 | 0.651816 | import contextlib
import concurrent.futures
import os
import sys
import traceback
import typing
import itertools
from conducto.shared import (
async_utils,
client_utils,
types as t,
log,
)
import conducto
from . import dockerfile as dockerfile_mod, names
from conducto.shared import constants, imagepath
from . import names
CONST_EE = constants.ExecutionEnv
if sys.version_info >= (3, 7):
asynccontextmanager = contextlib.asynccontextmanager
else:
from conducto.shared import async_backport
asynccontextmanager = async_backport.asynccontextmanager
def relpath(path):
    """Decorate *path* for translation inside a node's Docker image.

    The returned marker string can be embedded in command-line parameters;
    the pipeline runtime rewrites it to the in-container path. Used
    internally by :py:class:`conducto.Exec` when serializing a Python
    callable into the command that executes it.
    """
    contextual = Image.get_contextual_path(path)
    return "__conducto_path:{}:endpath__".format(contextual.linear())
class Repository:
    """A collection of images with different names.

    Images are keyed by ``Image.name``; registering two *different*
    definitions under the same name raises :class:`DuplicateImageError`.
    """

    class DuplicateImageError(Exception):
        """Raised when two distinct Image definitions share one name."""
        pass

    def __init__(self):
        # Quoted forward reference: Image is defined later in this module and
        # annotations on attribute targets are evaluated at runtime (PEP 526).
        self.images: typing.Dict[str, "Image"] = {}

    def __delitem__(self, key):
        """Delete by name (str key) or by Image value (anything else)."""
        if isinstance(key, str):
            del self.images[key]
        else:
            for name, img in list(self.images.items()):
                if img == key:
                    del self[name]
                    break
            else:
                # No stored image compared equal to the key.
                raise KeyError(key)

    def __getitem__(self, name):
        return self.images[name]

    def Image(self, *args, **kwargs):
        """Construct an Image, register it in this repository, and return it."""
        img = Image(*args, **kwargs)
        self.add(img)
        return img

    def add(self, image):
        """Register *image*; re-adding an equal definition is a no-op."""
        if image.name in self.images and self.images[image.name] != image:
            raise self.DuplicateImageError(
                f"{image.name} already present with a different definition in this repository"
            )
        self.images[image.name] = image

    def merge(self, repo):
        """Absorb every image from *repo* (note: *repo* may be mutated)."""
        # Swap so we always iterate the smaller dict; this makes merging all
        # images into the root O(NlogN) instead of quadratic.
        if len(repo.images) > len(self.images):
            self.images, repo.images = repo.images, self.images
        for img in repo.images.values():
            self.add(img)

    def finalize(self):
        """Replace every shell Image with a fully-built internal Image."""
        from .internal_image import Image as IImage

        image_shells, self.images = self.images, {}
        for img in image_shells.values():
            self.add(IImage(**img.to_dict()))
class Image:
    """
    :param image: Specify the base image to start from. Code can be added with
        various context* variables, and packages with install_* variables.
    :type image: `str`
    :param dockerfile: Use instead of :code:`image` and pass a path to a Dockerfile.
        Relative paths are evaluated starting from the file where this code is
        written. Unless :code:`context` is specified, it uses the directory of the
        Dockerfile as the build context
    :type dockerfile: `str`
    :param dockerfile_text: Directly pass the text of a Dockerfile rather than linking
        to one that's already written. If you want to use :code:`ADD` or :code:`COPY`
        you must specify :code:`context` explicitly.
    :type dockerfile_text: `str`
    :param docker_build_args: Dict mapping names of arguments to
        :code:`docker --build-args` to values
    :type docker_build_args: `dict`
    :param docker_auto_workdir: Set the work-dir to the destination of
        :code:`copy_dir`. Default: :code:`True`
    :type docker_auto_workdir: `bool`
    :param context: Use this to specify a custom docker build context when
        using :code:`dockerfile`.
    :type context: `str`
    :param copy_repo: Set to `True` to automatically copy the entire current Git repo
        into the Docker image. Use this so that a single Image definition can either use
        local code or can fetch from a remote repo.

        **copy_dir mode**: Normal use of this parameter uses local code, so it sets
        `copy_dir` to point to the root of the Git repo of the calling code.

        **copy_url mode**: Specify `copy_branch` to use a remote repository. This is
        commonly done for CI/CD. When specified, `copy_url` will be auto-populated.
    :type copy_repo: `bool`
    :param copy_dir: Path to a directory. All files in that directory (and its
        subdirectories) will be copied into the generated Docker image.
    :type copy_dir: `str`
    :param copy_url: URL to a Git repo. Conducto will clone it and copy its
        contents into the generated Docker image. Authenticate to private
        GitHub repos with a URL like `https://{user}:{token}@github.com/...`.
        See secrets for more info on how to store this securely. Must also
        specify copy_branch.
    :type copy_url: `str`
    :param copy_branch: A specific branch name to clone. Required if using copy_url.
    :type copy_branch: `str`
    :param path_map: Dict that maps external_path to internal_path. Needed for
        live debug and for passing callables to :py:class:`Exec` & :py:class:`Lazy`.
        It can be inferred from :code:`copy_dir`, :code:`copy_url`, or :code:`copy_repo`;
        if not using one of those, you must specify :code:`path_map` explicitly. This
        typically happens when a user-generated Dockerfile copies the code into the image.
    :type path_map: `None`
    :param install_pip: List of Python packages for Conducto to :code:`pip install` into
        the generated Docker image.
    :type install_pip: `List[str]`
    :param install_npm: List of npm packages for Conducto to :code:`npm i` into the
        generated Docker image.
    :type install_npm: `List[str]`
    :param install_packages: List of packages to install with the appropriate Linux package
        manager for this image's flavor.
    :type install_packages: `List[str]`
    :param install_docker: If :code:`True`, install Docker during build time.
    :type install_docker: `bool`
    :param shell: Which shell to use in this container. Defaults to :code:`co.Image.AUTO` to
        auto-detect. :code:`AUTO` will prefer :code:`/bin/bash` when available, and fall back to
        :code:`/bin/sh` otherwise.
    :type shell: `str`
    :param name: Name this `Image` so other Nodes can reference it by name. If
        no name is given, one will automatically be generated from a list of
        our favorite Pokemon. I choose you, angry-bulbasaur!
    :type name: `str`
    :param instantiation_directory: The directory of the file in which this image object was created. This is
        used to determine where relative paths passed into co.Image are relative from. This is
        automatically populated internally by conducto.
    :type instantiation_directory: `str`
    :param reqs_py: Deprecated. Use :code:`install_pip` instead.
    :param reqs_npm: Deprecated. Use :code:`install_npm` instead.
    :param reqs_packages: Deprecated. Use :code:`install_packages` instead.
    :param reqs_docker: Deprecated. Use :code:`install_docker` instead.
    """

    _CONTEXT = None
    AUTO = "__auto__"

    def __init__(
        self,
        image=None,
        *,
        dockerfile=None,
        dockerfile_text=None,
        docker_build_args=None,
        context=None,
        copy_repo=None,
        copy_dir=None,
        copy_url=None,
        copy_branch=None,
        docker_auto_workdir=True,
        install_pip=None,
        install_npm=None,
        install_packages=None,
        install_docker=False,
        path_map=None,
        shell=AUTO,
        name=None,
        git_urls=None,
        instantiation_directory=None,
        # For backwards-compatibility only
        reqs_py=None,
        reqs_npm=None,
        reqs_packages=None,
        reqs_docker=False,
        **kwargs,
    ):
        # TODO: remove pre_built back-compatibility for sept 9 changes
        kwargs.pop("pre_built", None)
        kwargs.pop("git_sha", None)
        if len(kwargs):
            raise ValueError(f"unknown args: {kwargs}")
        if name is None:
            name = names.NameGenerator.name()
        self.name = name
        self.image = image
        self.dockerfile = dockerfile
        self.dockerfile_text = dockerfile_text
        self.docker_build_args = docker_build_args
        self.context = context
        self.copy_repo = copy_repo
        self.copy_dir = copy_dir
        # (Previously assigned twice -- once is enough.)
        self.copy_url = copy_url
        self.copy_branch = copy_branch
        self.docker_auto_workdir = docker_auto_workdir
        # New-style install_* params win; reqs_* are the deprecated aliases.
        self.install_pip = install_pip or reqs_py
        self.install_npm = install_npm or reqs_npm
        self.install_packages = install_packages or reqs_packages
        self.install_docker = install_docker or reqs_docker
        self.path_map = path_map or {}
        self.shell = shell
        self.git_urls = git_urls
        self.instantiation_directory = instantiation_directory or _non_conducto_dir()

    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes Image
        # unhashable; preserved as-is since callers may rely on it.
        if isinstance(other, Image):
            return self.to_dict() == other.to_dict()
        else:
            from .internal_image import Image as IImage

            if isinstance(other, IImage):
                return IImage(**self.to_dict()) == other
            return False

    @staticmethod
    def _serialize_path(p: "typing.Union[imagepath.Path, str]"):
        # Annotation quoted so it is not evaluated at class-creation time.
        return p._id() if isinstance(p, imagepath.Path) else p

    @staticmethod
    def _serialize_pathmap(pathmap):
        if pathmap:
            return {
                p if isinstance(p, str) else p.linear(): v for p, v in pathmap.items()
            }
        return None

    # hack to get this to serialize
    @property
    def id(self):
        try:
            return self.to_dict()
        except BaseException:
            # NOTE: when there is an error in to_dict, json.encode throws a
            # really unhelpful "ValueError: Circular reference detected"
            print(traceback.format_exc())
            raise

    def to_dict(self):
        """Serialize this Image definition to a plain dict."""
        return {
            "name": self.name,
            "image": self.image,
            "dockerfile": self._serialize_path(self.dockerfile),
            "dockerfile_text": self.dockerfile_text,
            "docker_build_args": self.docker_build_args,
            "docker_auto_workdir": self.docker_auto_workdir,
            "context": self._serialize_path(self.context),
            "copy_repo": self.copy_repo,
            "copy_dir": self._serialize_path(self.copy_dir),
            "copy_url": self.copy_url,
            "copy_branch": self.copy_branch,
            "install_pip": self.install_pip,
            "install_npm": self.install_npm,
            "install_packages": self.install_packages,
            "install_docker": self.install_docker,
            "path_map": self._serialize_pathmap(self.path_map),
            "shell": self.shell,
            "instantiation_directory": self.instantiation_directory,
            # For backcompat only
            "reqs_py": self.install_pip,
            "reqs_npm": self.install_npm,
            "reqs_packages": self.install_packages,
            "reqs_docker": self.install_docker,
        }

    # Note: these methods are not needed in non-python implementations
    # co.Lazy(function) is not a thing in other languages
    # and conducto share-directory should be called instead writing an Image.share_directory
    @staticmethod
    def get_contextual_path(
        p: "typing.Union[imagepath.Path, dict, str]",
        *,
        named_shares=True,
        branch=None,
        url=None,
    ) -> "imagepath.Path":
        from conducto.image.internal_image import Image as IImage

        # Fake just enough of an internal Image to reuse its path logic.
        class HackImage:
            instantiation_directory = _non_conducto_dir()

        return IImage.get_contextual_path(
            HackImage(), p, named_shares=named_shares, branch=branch, url=url
        )

    @staticmethod
    def share_directory(name, relative):
        """Register *relative* as a named share in the user's config."""
        import conducto

        path = Image.get_contextual_path(relative, named_shares=False)
        config = conducto.api.Config()
        config.register_named_share(config.default_profile, name, path)

    register_directory = share_directory
def _non_conducto_dir():
    """Return the directory of the first stack frame outside the Conducto package.

    If ``Image._CONTEXT`` has been set, that file's directory wins instead.
    This identifies where the user's calling code lives.
    """
    if Image._CONTEXT is not None:
        return os.path.dirname(os.path.abspath(Image._CONTEXT))
    for frame, _unused_lineno in traceback.walk_stack(None):
        frame_file = frame.f_code.co_filename
        if not frame_file.startswith(_conducto_dir):
            return os.path.dirname(frame_file)
_conducto_dir = os.path.dirname(os.path.dirname(__file__)) + os.path.sep
| 5,694 | 29 | 427 |
21f53f03707cdfeaf7be966e79c22e21d805b3f7 | 710 | py | Python | src/python/doufo/qlambda.py | Hong-Xiang/doufo | 3d375fef30670597768a6eef809b75b4b1b5a3fd | [
"Apache-2.0"
] | 3 | 2018-08-05T07:16:34.000Z | 2018-08-10T05:28:24.000Z | src/python/doufo/qlambda.py | tech-pi/doufo | 3d375fef30670597768a6eef809b75b4b1b5a3fd | [
"Apache-2.0"
] | 10 | 2018-09-16T15:44:19.000Z | 2018-10-06T10:39:59.000Z | src/python/doufo/qlambda.py | tech-pi/doufo | 3d375fef30670597768a6eef809b75b4b1b5a3fd | [
"Apache-2.0"
] | 1 | 2018-08-04T08:13:50.000Z | 2018-08-04T08:13:50.000Z | """
Quick lambda creator, useful for use in fmap, filter, etc.
e.g. List([1,2]).fmap(x + 1)
"""
from doufo import WrappedFunction, identity, Functor, FunctorArithmeticMixin
import operator
__all__ = ['QuickLambda', 'x']
class QuickLambda(WrappedFunction, FunctorArithmeticMixin):
    """
    QuickLambda constructor.

    NOTE(review): in this copy of the file the method bodies have been
    stripped; the full implementation appears in the duplicate copy below.
    """
# The canonical quick-lambda: the identity function, e.g. ``List([1, 2]).fmap(x + 1)``.
x = QuickLambda(identity)
| 22.903226 | 76 | 0.669014 | """
Quick lambda creator, useful for use in fmap, filter, etc.
e.g. List([1,2]).fmap(x + 1)
"""
from doufo import WrappedFunction, identity, Functor, FunctorArithmeticMixin
import operator
__all__ = ['QuickLambda', 'x']
class QuickLambda(WrappedFunction, FunctorArithmeticMixin):
    """
    QuickLambda constructor.

    Each transformation (attribute access, indexing, and -- presumably via
    FunctorArithmeticMixin, to be confirmed -- arithmetic) returns a *new*
    QuickLambda, so expressions like ``x + 1`` compose into callables
    suitable for ``fmap``/``filter``.
    """
    def fmap(self, f):
        # Functor map: wrap so that f is applied to this lambda's result.
        return QuickLambda(lambda o: f(self.__call__(o)))
    def __getattr__(self, *args, **kwargs):
        # Attribute access (``x.attr``) becomes an operator.attrgetter lambda.
        return self.fmap(operator.attrgetter(*args, **kwargs))
    def __getitem__(self, *args, **kwargs):
        # Indexing (``x[k]``) becomes an operator.itemgetter lambda.
        return self.fmap(operator.itemgetter(*args, **kwargs))
    def __hash__(self):
        # Identity-based hash; presumably needed because a base class defines
        # __eq__ (which would otherwise clear __hash__) -- verify upstream.
        return hash(id(self))
# The canonical quick-lambda: the identity function, e.g. ``List([1, 2]).fmap(x + 1)``.
x = QuickLambda(identity)
| 245 | 0 | 108 |
956e8f44b682bac08ca006ab5acb457d983ced03 | 7,849 | py | Python | workbench/server/plugin_manager.py | Ayub-Khan/workbench | 710232756dd717f734253315e3d0b33c9628dafb | [
"MIT"
] | 61 | 2015-01-04T01:23:49.000Z | 2021-06-22T14:41:10.000Z | workbench/server/plugin_manager.py | Ayub-Khan/workbench | 710232756dd717f734253315e3d0b33c9628dafb | [
"MIT"
] | 3 | 2015-01-02T23:26:59.000Z | 2015-01-03T19:28:36.000Z | workbench/server/plugin_manager.py | Ayub-Khan/workbench | 710232756dd717f734253315e3d0b33c9628dafb | [
"MIT"
] | 17 | 2015-08-25T23:57:22.000Z | 2020-05-30T02:36:05.000Z | """A simple plugin manager. Rolling my own for three reasons:
1) Environmental scan did not give me quite what I wanted.
2) The super simple examples didn't support automatic/dynamic loading.
3) I kinda wanted to understand the process :)
"""
import os, sys
from datetime import datetime
import dir_watcher
import inspect
from IPython.utils.coloransi import TermColors as color
#pylint: disable=no-member
class PluginManager(object):
"""Plugin Manager for Workbench."""
def __init__(self, plugin_callback, plugin_dir = 'workers'):
"""Initialize the Plugin Manager for Workbench.
Args:
plugin_callback: The callback for plugin. This is called when plugin is added.
plugin_dir: The dir where plugin resides.
"""
# Set the callback, the plugin directory and load the plugins
self.plugin_callback = plugin_callback
self.plugin_dir = plugin_dir
self.load_all_plugins()
# Now setup dynamic monitoring of the plugins directory
self.watcher = dir_watcher.DirWatcher(self.plugin_path)
self.watcher.register_callbacks(self.on_created, self.on_modified, self.on_deleted)
self.watcher.start_monitoring()
def load_all_plugins(self):
"""Load all the plugins in the plugin directory"""
# Go through the existing python files in the plugin directory
self.plugin_path = os.path.realpath(self.plugin_dir)
sys.path.append(self.plugin_dir)
print '<<< Plugin Manager >>>'
for f in [os.path.join(self.plugin_dir, child) for child in os.listdir(self.plugin_dir)]:
# Skip certain files
if '.DS_Store' in f or '__init__.py' in f:
continue
# Add the plugin
self.add_plugin(f)
def on_created(self, file_list):
"""Watcher callback
Args:
event: The creation event.
"""
for plugin in file_list:
self.add_plugin(plugin)
def on_modified(self, file_list):
"""Watcher callback.
Args:
event: The modification event.
"""
for plugin in file_list:
self.add_plugin(plugin)
def on_deleted(self, file_list):
"""Watcher callback.
Args:
event: The modification event.
"""
for plugin in file_list:
self.remove_plugin(plugin)
def remove_plugin(self, f):
"""Remvoing a deleted plugin.
Args:
f: the filepath for the plugin.
"""
if f.endswith('.py'):
plugin_name = os.path.splitext(os.path.basename(f))[0]
print '- %s %sREMOVED' % (plugin_name, color.Red)
print '\t%sNote: still in memory, restart Workbench to remove...%s' % \
(color.Yellow, color.Normal)
def add_plugin(self, f):
"""Adding and verifying plugin.
Args:
f: the filepath for the plugin.
"""
if f.endswith('.py'):
# Just the basename without extension
plugin_name = os.path.splitext(os.path.basename(f))[0]
# It's possible the plugin has been modified and needs to be reloaded
if plugin_name in sys.modules:
try:
handler = reload(sys.modules[plugin_name])
print'\t- %s %sRELOAD%s' % (plugin_name, color.Yellow, color.Normal)
except ImportError, error:
print 'Failed to import plugin: %s (%s)' % (plugin_name, error)
return
else:
# Not already loaded so try to import it
try:
handler = __import__(plugin_name, globals(), locals(), [], -1)
except ImportError, error:
print 'Failed to import plugin: %s (%s)' % (plugin_name, error)
return
# Run the handler through plugin validation
plugin = self.validate(handler)
print '\t- %s %sOK%s' % (plugin_name, color.Green, color.Normal)
if plugin:
# Okay must be successfully loaded so capture the plugin meta-data,
# modification time and register the plugin through the callback
plugin['name'] = plugin_name
plugin['dependencies'] = plugin['class'].dependencies
plugin['docstring'] = plugin['class'].__doc__
plugin['mod_time'] = datetime.utcfromtimestamp(os.path.getmtime(f))
# Plugin may accept sample_sets as input
try:
plugin['sample_set_input'] = getattr(plugin['class'], 'sample_set_input')
except AttributeError:
plugin['sample_set_input'] = False
# Now pass the plugin back to workbench
self.plugin_callback(plugin)
def validate(self, handler):
"""Validate the plugin, each plugin must have the following:
1) The worker class must have an execute method: execute(self, input_data).
2) The worker class must have a dependencies list (even if it's empty).
3) The file must have a top level test() method.
Args:
handler: the loaded plugin.
"""
# Check for the test method first
test_method = self.plugin_test_validation(handler)
if not test_method:
return None
# Here we iterate through the classes found in the module and pick
# the first one that satisfies the validation
for name, plugin_class in inspect.getmembers(handler, inspect.isclass):
if self.plugin_class_validation(plugin_class):
return {'class':plugin_class, 'test':test_method}
# If we're here the plugin didn't pass validation
print 'Failure for plugin: %s' % (handler.__name__)
print 'Validation Error: Worker class is required to have a dependencies list and an execute method'
return None
def plugin_test_validation(self, handler):
"""Plugin validation.
Every workbench plugin must have top level test method.
Args:
handler: The loaded plugin.
Returns:
None if the test fails or the test function.
"""
methods = {name:func for name, func in inspect.getmembers(handler, callable)}
if 'test' not in methods.keys():
print 'Failure for plugin: %s' % (handler.__name__)
print 'Validation Error: The file must have a top level test() method'
return None
else:
return methods['test']
def plugin_class_validation(self, plugin_class):
"""Plugin validation
Every workbench plugin must have a dependencies list (even if it's empty).
Every workbench plugin must have an execute method.
Args:
plugin_class: The loaded plugun class.
Returns:
True if dependencies and execute are present, else False.
"""
try:
getattr(plugin_class, 'dependencies')
getattr(plugin_class, 'execute')
except AttributeError:
return False
return True
# Just create the class and run it for a test
def test():
    """Executes plugin_manager.py test."""
    # This test actually does more than it appears. The workers directory
    # will get scanned and stuff will get loaded into workbench.
    # NOTE(review): constructing PluginManager also starts the directory
    # watcher (see PluginManager.__init__ -> watcher.start_monitoring()).
    def new_plugin(plugin):
        """new plugin callback """
        print '%s' % (plugin['name'])
    # Create Plugin Manager pointed at the sibling ../workers directory.
    plugin_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),'../workers')
    PluginManager(new_plugin, plugin_dir=plugin_dir)
if __name__ == "__main__":
test()
| 35.515837 | 108 | 0.600459 | """A simple plugin manager. Rolling my own for three reasons:
1) Environmental scan did not give me quite what I wanted.
2) The super simple examples didn't support automatic/dynamic loading.
3) I kinda wanted to understand the process :)
"""
import os, sys
from datetime import datetime
import dir_watcher
import inspect
from IPython.utils.coloransi import TermColors as color
#pylint: disable=no-member
class PluginManager(object):
"""Plugin Manager for Workbench."""
def __init__(self, plugin_callback, plugin_dir = 'workers'):
"""Initialize the Plugin Manager for Workbench.
Args:
plugin_callback: The callback for plugin. This is called when plugin is added.
plugin_dir: The dir where plugin resides.
"""
# Set the callback, the plugin directory and load the plugins
self.plugin_callback = plugin_callback
self.plugin_dir = plugin_dir
self.load_all_plugins()
# Now setup dynamic monitoring of the plugins directory
self.watcher = dir_watcher.DirWatcher(self.plugin_path)
self.watcher.register_callbacks(self.on_created, self.on_modified, self.on_deleted)
self.watcher.start_monitoring()
def load_all_plugins(self):
"""Load all the plugins in the plugin directory"""
# Go through the existing python files in the plugin directory
self.plugin_path = os.path.realpath(self.plugin_dir)
sys.path.append(self.plugin_dir)
print '<<< Plugin Manager >>>'
for f in [os.path.join(self.plugin_dir, child) for child in os.listdir(self.plugin_dir)]:
# Skip certain files
if '.DS_Store' in f or '__init__.py' in f:
continue
# Add the plugin
self.add_plugin(f)
def on_created(self, file_list):
"""Watcher callback
Args:
event: The creation event.
"""
for plugin in file_list:
self.add_plugin(plugin)
def on_modified(self, file_list):
"""Watcher callback.
Args:
event: The modification event.
"""
for plugin in file_list:
self.add_plugin(plugin)
def on_deleted(self, file_list):
"""Watcher callback.
Args:
event: The modification event.
"""
for plugin in file_list:
self.remove_plugin(plugin)
def remove_plugin(self, f):
"""Remvoing a deleted plugin.
Args:
f: the filepath for the plugin.
"""
if f.endswith('.py'):
plugin_name = os.path.splitext(os.path.basename(f))[0]
print '- %s %sREMOVED' % (plugin_name, color.Red)
print '\t%sNote: still in memory, restart Workbench to remove...%s' % \
(color.Yellow, color.Normal)
def add_plugin(self, f):
"""Adding and verifying plugin.
Args:
f: the filepath for the plugin.
"""
if f.endswith('.py'):
# Just the basename without extension
plugin_name = os.path.splitext(os.path.basename(f))[0]
# It's possible the plugin has been modified and needs to be reloaded
if plugin_name in sys.modules:
try:
handler = reload(sys.modules[plugin_name])
print'\t- %s %sRELOAD%s' % (plugin_name, color.Yellow, color.Normal)
except ImportError, error:
print 'Failed to import plugin: %s (%s)' % (plugin_name, error)
return
else:
# Not already loaded so try to import it
try:
handler = __import__(plugin_name, globals(), locals(), [], -1)
except ImportError, error:
print 'Failed to import plugin: %s (%s)' % (plugin_name, error)
return
# Run the handler through plugin validation
plugin = self.validate(handler)
print '\t- %s %sOK%s' % (plugin_name, color.Green, color.Normal)
if plugin:
# Okay must be successfully loaded so capture the plugin meta-data,
# modification time and register the plugin through the callback
plugin['name'] = plugin_name
plugin['dependencies'] = plugin['class'].dependencies
plugin['docstring'] = plugin['class'].__doc__
plugin['mod_time'] = datetime.utcfromtimestamp(os.path.getmtime(f))
# Plugin may accept sample_sets as input
try:
plugin['sample_set_input'] = getattr(plugin['class'], 'sample_set_input')
except AttributeError:
plugin['sample_set_input'] = False
# Now pass the plugin back to workbench
self.plugin_callback(plugin)
def validate(self, handler):
"""Validate the plugin, each plugin must have the following:
1) The worker class must have an execute method: execute(self, input_data).
2) The worker class must have a dependencies list (even if it's empty).
3) The file must have a top level test() method.
Args:
handler: the loaded plugin.
"""
# Check for the test method first
test_method = self.plugin_test_validation(handler)
if not test_method:
return None
# Here we iterate through the classes found in the module and pick
# the first one that satisfies the validation
for name, plugin_class in inspect.getmembers(handler, inspect.isclass):
if self.plugin_class_validation(plugin_class):
return {'class':plugin_class, 'test':test_method}
# If we're here the plugin didn't pass validation
print 'Failure for plugin: %s' % (handler.__name__)
print 'Validation Error: Worker class is required to have a dependencies list and an execute method'
return None
def plugin_test_validation(self, handler):
"""Plugin validation.
Every workbench plugin must have top level test method.
Args:
handler: The loaded plugin.
Returns:
None if the test fails or the test function.
"""
methods = {name:func for name, func in inspect.getmembers(handler, callable)}
if 'test' not in methods.keys():
print 'Failure for plugin: %s' % (handler.__name__)
print 'Validation Error: The file must have a top level test() method'
return None
else:
return methods['test']
def plugin_class_validation(self, plugin_class):
"""Plugin validation
Every workbench plugin must have a dependencies list (even if it's empty).
Every workbench plugin must have an execute method.
Args:
plugin_class: The loaded plugun class.
Returns:
True if dependencies and execute are present, else False.
"""
try:
getattr(plugin_class, 'dependencies')
getattr(plugin_class, 'execute')
except AttributeError:
return False
return True
# Just create the class and run it for a test
def test():
"""Executes plugin_manager.py test."""
# This test actually does more than it appears. The workers directory
# will get scanned and stuff will get loaded into workbench.
def new_plugin(plugin):
"""new plugin callback """
print '%s' % (plugin['name'])
# Create Plugin Manager
plugin_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),'../workers')
PluginManager(new_plugin, plugin_dir=plugin_dir)
if __name__ == "__main__":
test()
| 0 | 0 | 0 |
df4e3fd85ec89f3c2dc03118a5361d9f7272ed99 | 2,313 | py | Python | __init__.py | krisgesling/swag-badge-skill | 7640264880d8ae14f9c49c3ba40c6e388e58dcaf | [
"Apache-2.0"
] | 1 | 2021-01-24T01:42:15.000Z | 2021-01-24T01:42:15.000Z | __init__.py | krisgesling/swag-badge-skill | 7640264880d8ae14f9c49c3ba40c6e388e58dcaf | [
"Apache-2.0"
] | null | null | null | __init__.py | krisgesling/swag-badge-skill | 7640264880d8ae14f9c49c3ba40c6e388e58dcaf | [
"Apache-2.0"
] | null | null | null | import os
from subprocess import call
from mycroft import MycroftSkill
from .badge import MQTT_Client
from .util import wrap_text
class SwagBadge(MycroftSkill):
    """Provide interaction between Mycroft and a Swag Badge.

    For more details on the Swag Badge from LinuxConfAu 2021 see:
    http://www.openhardwareconf.org/wiki/Swagbadge2021
    """
    def send_text_block(self, message):
        """Send utterance to Badge.

        Splits text based on line length and prevents words being split
        between the two screens.

        Arguments:
            message (Message): standard Mycroft Message object
        """
        text = message.data.get("utterance")
        if not text:
            return
        chars = int(self.LINE_LENGTH / self.NUM_SCREENS)
        lines_per_screen = wrap_text(text, chars)
        # Add spaces to log correctly across multiple screens.
        padded_lines = [f"{l: <{chars}}" for l in lines_per_screen]
        # BUGFIX: zip() truncates to its shortest input, so an odd number of
        # lines silently dropped the final line. Pad with blank segments up
        # to a multiple of NUM_SCREENS first.
        while len(padded_lines) % self.NUM_SCREENS:
            padded_lines.append(" " * chars)
        lines = [
            x + y
            for x, y in zip(
                padded_lines[0 :: self.NUM_SCREENS], padded_lines[1 :: self.NUM_SCREENS]
            )
        ]
        for line in lines:
            success, msg = self.mqttc.log_to_oled(line)
            if not success:
                # Stop on the first failed publish and record the reason.
                self.log.error(msg)
                break
    def display_image(self, image="m32.png"):
        """Display an image on the Badge screen."""
        image_path = os.path.join(self.root_dir, "images", image)
        self.mqttc.render_image(image_path)
| 30.038961 | 88 | 0.613489 | import os
from subprocess import call
from mycroft import MycroftSkill
from .badge import MQTT_Client
from .util import wrap_text
class SwagBadge(MycroftSkill):
    """Provide interaction between Mycroft and a Swag Badge.

    For more details on the Swag Badge from LinuxConfAu 2021 see:
    http://www.openhardwareconf.org/wiki/Swagbadge2021
    """
    def __init__(self):
        MycroftSkill.__init__(self)
        # OLED geometry: total characters per logical line, split over screens.
        self.LINE_LENGTH = 32
        self.NUM_SCREENS = 2
        # MQTT client is created lazily once settings provide a broker host.
        self.mqttc = None
    def initialize(self):
        self.settings_change_callback = self.on_settings_changed
        self.on_settings_changed()
        # Mirror everything Mycroft speaks onto the badge.
        self.add_event("speak", self.send_text_block)
        self.display_image()
    def on_settings_changed(self):
        """(Re)connect the MQTT client from the current skill settings."""
        host = self.settings.get("mqtt_host")
        if host:
            if self.mqttc:
                self.mqttc.disconnect()
            self.mqttc = MQTT_Client(host)
            badge_id = self.settings.get("badge_id")
            if badge_id:
                self.mqttc.set_topic(f"public/{badge_id}/0/in")
    def send_text_block(self, message):
        """Send utterance to Badge.

        Splits text based on line length and prevents words being split
        between the two screens.

        Arguments:
            message (Message): standard Mycroft Message object
        """
        text = message.data.get("utterance")
        if not text:
            return
        chars = int(self.LINE_LENGTH / self.NUM_SCREENS)
        lines_per_screen = wrap_text(text, chars)
        # Add spaces to log correctly across multiple screens.
        padded_lines = [f"{l: <{chars}}" for l in lines_per_screen]
        # BUGFIX: zip() truncates to its shortest input, so an odd number of
        # lines silently dropped the final line. Pad with blank segments up
        # to a multiple of NUM_SCREENS first.
        while len(padded_lines) % self.NUM_SCREENS:
            padded_lines.append(" " * chars)
        lines = [
            x + y
            for x, y in zip(
                padded_lines[0 :: self.NUM_SCREENS], padded_lines[1 :: self.NUM_SCREENS]
            )
        ]
        for line in lines:
            success, msg = self.mqttc.log_to_oled(line)
            if not success:
                # Stop on the first failed publish and record the reason.
                self.log.error(msg)
                break
    def display_image(self, image="m32.png"):
        """Display an image on the Badge screen."""
        image_path = os.path.join(self.root_dir, "images", image)
        self.mqttc.render_image(image_path)
    def shutdown(self):
        self.mqttc.disconnect()
def create_skill():
    """Factory used by Mycroft core to instantiate this skill."""
    skill = SwagBadge()
    return skill
| 666 | 0 | 131 |
fc73e92f597f35947bedf2470fc9fcc7c500e873 | 5,492 | py | Python | DeviceAPI/MagnumThermostat.py | ajfar-bem/wisebldg | 0cb8ef7c5984cbb5cc86e40780fdf4e14e5bda05 | [
"Unlicense"
] | null | null | null | DeviceAPI/MagnumThermostat.py | ajfar-bem/wisebldg | 0cb8ef7c5984cbb5cc86e40780fdf4e14e5bda05 | [
"Unlicense"
] | null | null | null | DeviceAPI/MagnumThermostat.py | ajfar-bem/wisebldg | 0cb8ef7c5984cbb5cc86e40780fdf4e14e5bda05 | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import division
'''
Copyright © 2014 by Virginia Polytechnic Institute and State University
All rights reserved
Virginia Polytechnic Institute and State University (Virginia Tech) owns the copyright for the BEMOSS software and its
associated documentation (“Software”) and retains rights to grant research rights under patents related to
the BEMOSS software to other academic institutions or non-profit research institutions.
You should carefully read the following terms and conditions before using this software.
Your use of this Software indicates your acceptance of this license agreement and all terms and conditions.
You are hereby licensed to use the Software for Non-Commercial Purpose only. Non-Commercial Purpose means the
use of the Software solely for research. Non-Commercial Purpose excludes, without limitation, any use of
the Software, as part of, or in any way in connection with a product or service which is sold, offered for sale,
licensed, leased, loaned, or rented. Permission to use, copy, modify, and distribute this compilation
for Non-Commercial Purpose to other academic institutions or non-profit research institutions is hereby granted
without fee, subject to the following terms of this license.
Commercial Use If you desire to use the software for profit-making or commercial purposes,
you agree to negotiate in good faith a license with Virginia Tech prior to such profit-making or commercial use.
Virginia Tech shall have no obligation to grant such license to you, and may grant exclusive or non-exclusive
licenses to others. You may contact the following by email to discuss commercial use: vtippatents@vtip.org
Limitation of Liability IN NO EVENT WILL VIRGINIA TECH, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO
LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE
OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF VIRGINIA TECH OR OTHER PARTY HAS BEEN ADVISED
OF THE POSSIBILITY OF SUCH DAMAGES.
For full terms and conditions, please visit https://bitbucket.org/bemoss/bemoss_os.
Address all correspondence regarding this license to Virginia Tech’s electronic mail address: vtippatents@vtip.org
__author__ = "Aditya Nugur"
__credits__ = ""
__version__ = "3.5"
__maintainer__ = "Aditya Nugur""
__email__ = "aditya32@vt.edu"
__website__ = ""
__status__ = "Prototype"
__created__ = "2016-10-22 16:12:00"
__lastUpdated__ = "2016-10-25 13:25:00"
'''
from DeviceAPI.BaseAPI_Magnum import baseAPI_Magnum
from bemoss_lib.utils.BEMOSS_ONTOLOGY import BEMOSS_ONTOLOGY
debug = True
| 54.92 | 206 | 0.734523 | # -*- coding: utf-8 -*-
from __future__ import division
'''
Copyright © 2014 by Virginia Polytechnic Institute and State University
All rights reserved
Virginia Polytechnic Institute and State University (Virginia Tech) owns the copyright for the BEMOSS software and its
associated documentation (“Software”) and retains rights to grant research rights under patents related to
the BEMOSS software to other academic institutions or non-profit research institutions.
You should carefully read the following terms and conditions before using this software.
Your use of this Software indicates your acceptance of this license agreement and all terms and conditions.
You are hereby licensed to use the Software for Non-Commercial Purpose only. Non-Commercial Purpose means the
use of the Software solely for research. Non-Commercial Purpose excludes, without limitation, any use of
the Software, as part of, or in any way in connection with a product or service which is sold, offered for sale,
licensed, leased, loaned, or rented. Permission to use, copy, modify, and distribute this compilation
for Non-Commercial Purpose to other academic institutions or non-profit research institutions is hereby granted
without fee, subject to the following terms of this license.
Commercial Use If you desire to use the software for profit-making or commercial purposes,
you agree to negotiate in good faith a license with Virginia Tech prior to such profit-making or commercial use.
Virginia Tech shall have no obligation to grant such license to you, and may grant exclusive or non-exclusive
licenses to others. You may contact the following by email to discuss commercial use: vtippatents@vtip.org
Limitation of Liability IN NO EVENT WILL VIRGINIA TECH, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO
LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE
OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF VIRGINIA TECH OR OTHER PARTY HAS BEEN ADVISED
OF THE POSSIBILITY OF SUCH DAMAGES.
For full terms and conditions, please visit https://bitbucket.org/bemoss/bemoss_os.
Address all correspondence regarding this license to Virginia Tech’s electronic mail address: vtippatents@vtip.org
__author__ = "Aditya Nugur"
__credits__ = ""
__version__ = "3.5"
__maintainer__ = "Aditya Nugur""
__email__ = "aditya32@vt.edu"
__website__ = ""
__status__ = "Prototype"
__created__ = "2016-10-22 16:12:00"
__lastUpdated__ = "2016-10-25 13:25:00"
'''
from DeviceAPI.BaseAPI_Magnum import baseAPI_Magnum
from bemoss_lib.utils.BEMOSS_ONTOLOGY import BEMOSS_ONTOLOGY
debug = True
class API(baseAPI_Magnum):
    """Device API definition for the Magnum M9TS1 BACnet thermostat."""
    def API_info(self):
        """Return the static registration metadata for this device/agent."""
        return [{'device_model': 'M9TS1 Thermostat', 'vendor_name': 'eBox BACnet/IP', 'communication': 'BACnet',
                 'device_type_id': 1, 'api_name': 'API_MagnumThermostat', 'html_template': 'thermostat/magnum_thermostat.html',
                 'agent_type': 'BasicAgent', 'identifiable': False, 'authorizable': False, 'is_cloud_device': False,
                 'schedule_weekday_period': 4, 'schedule_weekend_period': 4, 'allow_schedule_period_delete': True,
                 'chart_template': 'charts/charts_thermostat.html'},
                ]
    def dashboard_view(self):
        """Describe the dashboard widget layout; hide the setpoint when OFF."""
        view = {"top": BEMOSS_ONTOLOGY.THERMOSTAT_MODE.NAME,
                "center": {"type": "number", "value": BEMOSS_ONTOLOGY.TEMPERATURE.NAME},
                "bottom": BEMOSS_ONTOLOGY.SETPOINT.NAME}
        mode = self.get_variable(BEMOSS_ONTOLOGY.THERMOSTAT_MODE.NAME)
        if mode == BEMOSS_ONTOLOGY.THERMOSTAT_MODE.POSSIBLE_VALUES.OFF:
            view["bottom"] = None
        return view
    def ontology(self):
        """Map BACnet object names to BEMOSS ontology entries."""
        return {
            "0x01A157DF [0] (14) Temperature (linear)": BEMOSS_ONTOLOGY.TEMPERATURE,
            "0x01A157DF [0] (13) Turn-switch for fan": BEMOSS_ONTOLOGY.FAN_MODE,
            "0x01A157DF [0] (12) Rel. Humidity (linear)": BEMOSS_ONTOLOGY.RELATIVE_HUMIDITY,
            "0x01A157DF [0] (11) Unoccupied Heating Limit": BEMOSS_ONTOLOGY.HEAT_SETPOINT,
            "0x01A157DF [0] (10) Unoccupied Cooling Limit": BEMOSS_ONTOLOGY.COOL_SETPOINT,
            "0x01A157DF [0] (7) Comfort Set point": BEMOSS_ONTOLOGY.SETPOINT,
        }
    # Raw fan-switch value as reported by the device, mapped to ontology values.
    fmode_dict = {0: BEMOSS_ONTOLOGY.FAN_MODE.POSSIBLE_VALUES.AUTO,
                  1: BEMOSS_ONTOLOGY.FAN_MODE.POSSIBLE_VALUES.CIRCULATE,
                  2: BEMOSS_ONTOLOGY.FAN_MODE.POSSIBLE_VALUES.ON}
8f0e40e1e9ad26888f88397edb734ee26570185f | 1,337 | py | Python | profiles/migrations/0004_auto_20180322_2323.py | joatuapp/joatu-django | 5626d03ba89c55650ff5bff2e706ca0883ae3b9c | [
"MIT"
] | 10 | 2018-05-13T18:01:57.000Z | 2018-12-23T17:11:14.000Z | profiles/migrations/0004_auto_20180322_2323.py | moileretour/joatu | 9d18cb58b4280235688e269be6fd2d34b77ccead | [
"MIT"
] | 88 | 2018-05-04T15:33:46.000Z | 2022-03-08T21:09:21.000Z | profiles/migrations/0004_auto_20180322_2323.py | joatuapp/joatu-django | 5626d03ba89c55650ff5bff2e706ca0883ae3b9c | [
"MIT"
] | 7 | 2018-05-08T16:05:06.000Z | 2018-09-13T05:49:05.000Z | # Generated by Django 2.0.3 on 2018-03-23 03:23
from django.db import migrations, models
import django.db.models.deletion
| 29.065217 | 118 | 0.579656 | # Generated by Django 2.0.3 on 2018-03-23 03:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Move address data onto Profile and link geolocation 1:1 to Profile.

    Drops the ProfileAddress model; its city/country/postal_code fields move
    to Profile, and ProfileGeolocation gains a OneToOne link to Profile.
    """
    dependencies = [
        ('profiles', '0003_auto_20180322_2257'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='profilegeolocation',
            name='profileAddress',
        ),
        # default=1 on the new fields is a throwaway value used only to
        # populate existing rows (preserve_default=False removes it afterwards).
        migrations.AddField(
            model_name='profile',
            name='city',
            field=models.CharField(default=1, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='profile',
            name='country',
            field=models.CharField(default=1, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='profile',
            name='postal_code',
            field=models.CharField(default=1, max_length=20),
            preserve_default=False,
        ),
        # NOTE(review): default=1 assumes a Profile with pk=1 exists for every
        # pre-existing ProfileGeolocation row -- confirm on non-empty databases.
        migrations.AddField(
            model_name='profilegeolocation',
            name='profile',
            field=models.OneToOneField(default=1, on_delete=django.db.models.deletion.CASCADE, to='profiles.Profile'),
            preserve_default=False,
        ),
        migrations.DeleteModel(
            name='ProfileAddress',
        ),
    ]
| 0 | 1,190 | 23 |
1e89d3726c6c30924f01c404b413fb86e1e9b799 | 1,370 | py | Python | chapter7/stylegan2_pytorch/mapping_network.py | tms-byte/gan_sample | 1ff723cf37af902b400dbb68777a52e6e3dfcc89 | [
"MIT"
] | 57 | 2021-02-11T12:25:30.000Z | 2022-03-16T11:47:21.000Z | chapter7/stylegan2_pytorch/mapping_network.py | tms-byte/gan_sample | 1ff723cf37af902b400dbb68777a52e6e3dfcc89 | [
"MIT"
] | 8 | 2021-02-22T01:38:36.000Z | 2021-06-29T15:55:04.000Z | chapter7/stylegan2_pytorch/mapping_network.py | tms-byte/gan_sample | 1ff723cf37af902b400dbb68777a52e6e3dfcc89 | [
"MIT"
] | 11 | 2021-02-11T14:49:08.000Z | 2022-01-26T04:18:11.000Z | import numpy as np
import torch.nn as nn
import torch
from dense_layer import DenseLayer
from fused_bias_activation import FusedBiasActivation
from base_layer import BaseLayer
from tensorboard_logger import TensorboardLogger
| 35.128205 | 112 | 0.669343 | import numpy as np
import torch.nn as nn
import torch
from dense_layer import DenseLayer
from fused_bias_activation import FusedBiasActivation
from base_layer import BaseLayer
from tensorboard_logger import TensorboardLogger
class MappingNetwork(BaseLayer):
    """StyleGAN2 mapping network: turns a latent z into a per-layer dlatent."""
    def __init__(self, dlaten_size, opt):
        super(MappingNetwork, self).__init__()
        self.mapping_layers = 8
        self.out_feature = 512
        resolution_log2 = int(np.log2(opt.resolution))
        # Number of synthesis layers the mapped latent is broadcast to.
        self.num_layers = resolution_log2 * 2 - 2
        self.dense_layers = nn.ModuleDict()
        self.fused_bias_acts = nn.ModuleDict()
        for idx in range(self.mapping_layers):
            key = str(idx)
            self.dense_layers[key] = DenseLayer(dlaten_size, self.out_feature, lmul=0.01)
            self.fused_bias_acts[key] = FusedBiasActivation(dlaten_size, lrmul=0.01, act='LeakyRelu')
    def forward(self, z):
        w = self.normalize(z)
        for idx in range(self.mapping_layers):
            key = str(idx)
            w = self.dense_layers[key](w)
            w = self.fused_bias_acts[key](w)
        # Broadcast the single mapped latent across every synthesis layer.
        return w.unsqueeze(1).repeat([1, self.num_layers, 1])
    def normalize(self, x):
        """Pixel-norm: scale x to unit RMS across the feature dimension."""
        mean_sq = torch.mean(x ** 2, dim=1, keepdim=True)
        return x * torch.rsqrt(mean_sq + 1e-8)
| 1,030 | 11 | 103 |
9ffd4b2fe2dd2b154da114d62a59314623da1cc0 | 1,063 | py | Python | src/genie/libs/parser/iosxr/tests/ShowMplsLdpDiscovery/cli/equal/golden_output_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/iosxr/tests/ShowMplsLdpDiscovery/cli/equal/golden_output_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/iosxr/tests/ShowMplsLdpDiscovery/cli/equal/golden_output_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z |
# Expected parse result for the 'show mpls ldp discovery' golden output:
# one local LDP identifier with a single discovery source interface.
expected_output = {
    'vrf': {
        'default': {
            'local_ldp_identifier': {
                '10.52.31.247:0': {
                    'discovery_sources': {
                        'interfaces': {
                            'TenGigE0/0/0/5.2097': {
                                'ldp_id': {
                                    '10.52.26.121:0': {
                                        'established_date': 'Dec 18 16:49:16.538',
                                        'established_elapsed': '3d00h',
                                        'holdtime_sec': 15,
                                        'proposed_local': 15,
                                        'proposed_peer': 15
                                    }
                                },
                                'recv': True,
                                'transport_ip_addr': '10.52.26.121',
                                'xmit': True
                            }
                        }
                    }
                }
            }
        }
    }
}
| 35.433333 | 82 | 0.238946 |
expected_output = {
'vrf': {
'default': {
'local_ldp_identifier': {
'10.52.31.247:0': {
'discovery_sources': {
'interfaces': {
'TenGigE0/0/0/5.2097': {
'ldp_id': {
'10.52.26.121:0': {
'established_date': 'Dec 18 16:49:16.538',
'established_elapsed': '3d00h',
'holdtime_sec': 15,
'proposed_local': 15,
'proposed_peer': 15
}
},
'recv': True,
'transport_ip_addr': '10.52.26.121',
'xmit': True
}
}
}
}
}
}
}
}
| 0 | 0 | 0 |
793d35dc4008ac42c5b5a7d8858e3efef0efcc65 | 2,887 | py | Python | cad_tickers/sedar/tsx.py | FriendlyUser/cad_tickers | 2f99a4494498419d8decf41fb0fbc77722dbc712 | [
"MIT"
] | 2 | 2022-03-16T02:19:25.000Z | 2022-03-16T02:22:39.000Z | cad_tickers/sedar/tsx.py | FriendlyUser/cad_tickers | 2f99a4494498419d8decf41fb0fbc77722dbc712 | [
"MIT"
] | 57 | 2020-07-30T15:43:43.000Z | 2022-03-28T02:04:13.000Z | cad_tickers/sedar/tsx.py | FriendlyUser/cad_tickers | 2f99a4494498419d8decf41fb0fbc77722dbc712 | [
"MIT"
] | null | null | null | import requests
import json
from datetime import datetime
from cad_tickers.exchanges.tsx.gql_data import GQL
from typing import Union
def get_ticker_filings(
    symbol: str,
    fromDate: str = datetime.today().replace(day=1).strftime("%Y-%m-%d"),
    toDate: str = datetime.today().strftime("%Y-%m-%d"),
    limit: int = 100,
) -> Union[dict, None]:
    """Fetch company filings for a TSX ticker from the TMX GraphQL endpoint.

    Parameters:
        symbol - ticker symbol from tsx, no prefix
        fromDate - start date to grab documents (defaults to the 1st of this month)
        toDate - end date to grab documents (defaults to today)
        limit - max number of documents to retrieve
    Returns:
        dict - :ref:`Quote By Symbol <quote_by_symbol_query>` "data" payload,
        or an empty dict when the request is rejected or malformed.
    """
    # NOTE(review): this mutates the shared GQL payload template in place, so
    # concurrent callers would race on it -- consider deep-copying the template.
    payload = GQL.get_company_filings_payload
    payload["variables"]["symbol"] = symbol
    payload["variables"]["fromDate"] = fromDate
    payload["variables"]["toDate"] = toDate
    payload["variables"]["limit"] = limit
    url = "https://app-money.tmx.com/graphql"
    r = requests.post(
        url,
        data=json.dumps(payload),
        headers={
            "authority": "app-money.tmx.com",
            "referer": f"https://money.tmx.com/en/quote/{symbol.upper()}",
            "locale": "en",
            "Content-Type": "application/json"
        },
    )
    try:
        if r.status_code == 403:
            print(r.text)
            return {}
        else:
            allData = r.json()
            data = allData["data"]
            return data
    except KeyError as _e:
        # Fix: previously fell through and implicitly returned None here
        # (and dumped the whole response via a debug print); return {} to
        # match get_news_and_events' empty-result convention.
        print(_e, symbol)
        return {}
# TODO rename this later
def get_news_and_events(
    symbol: str,
    page: int = 1,
    limit: int = 100,
    locale: str = "en",
) -> Union[dict, None]:
    """Fetch news and events for a TSX ticker via the TMX GraphQL endpoint.

    Parameters:
        symbol - ticker symbol from tsx, no prefix
        page - page of results to fetch
        limit - max number of documents to retrieve
        locale - language
    Returns:
        dict - :ref:`Quote By Symbol <quote_by_symbol_query>` "data" payload,
        or an empty dict on a rejected or malformed response.
    """
    payload = GQL.get_company_news_events_payload
    variables = payload["variables"]
    variables["symbol"] = symbol
    variables["page"] = page
    variables["limit"] = limit
    variables["locale"] = locale
    request_headers = {
        "authority": "app-money.tmx.com",
        "referer": f"https://money.tmx.com/en/quote/{symbol.upper()}",
        "locale": "en",
        "Content-Type": "application/json",
    }
    r = requests.post(
        "https://app-money.tmx.com/graphql",
        data=json.dumps(payload),
        headers=request_headers,
    )
    try:
        if r.status_code == 403:
            print(r.text)
            return {}
        allData = r.json()
        return allData["data"]
    except KeyError:
        return {}
# Ad-hoc smoke test: fetch a page of news for PKK.CN and dump it to stdout.
if __name__ == "__main__":
    art = get_news_and_events(
        "PKK.CN", 1, 108
    )
    print(art)
| 28.584158 | 74 | 0.5646 | import requests
import json
from datetime import datetime
from cad_tickers.exchanges.tsx.gql_data import GQL
from typing import Union
def get_ticker_filings(
    symbol: str,
    fromDate: str = datetime.today().replace(day=1).strftime("%Y-%m-%d"),
    toDate: str = datetime.today().strftime("%Y-%m-%d"),
    limit: int = 100,
) -> Union[dict, None]:
    """Fetch company filings for a TSX ticker from the TMX GraphQL endpoint.

    Parameters:
        symbol - ticker symbol from tsx, no prefix
        fromDate - start date to grab documents (defaults to the 1st of this month)
        toDate - end date to grab documents (defaults to today)
        limit - max number of documents to retrieve
    Returns:
        dict - :ref:`Quote By Symbol <quote_by_symbol_query>` "data" payload,
        or an empty dict when the request is rejected or malformed.
    """
    # NOTE(review): this mutates the shared GQL payload template in place, so
    # concurrent callers would race on it -- consider deep-copying the template.
    payload = GQL.get_company_filings_payload
    payload["variables"]["symbol"] = symbol
    payload["variables"]["fromDate"] = fromDate
    payload["variables"]["toDate"] = toDate
    payload["variables"]["limit"] = limit
    url = "https://app-money.tmx.com/graphql"
    r = requests.post(
        url,
        data=json.dumps(payload),
        headers={
            "authority": "app-money.tmx.com",
            "referer": f"https://money.tmx.com/en/quote/{symbol.upper()}",
            "locale": "en",
            "Content-Type": "application/json"
        },
    )
    try:
        if r.status_code == 403:
            print(r.text)
            return {}
        else:
            allData = r.json()
            data = allData["data"]
            return data
    except KeyError as _e:
        # Fix: previously fell through and implicitly returned None here
        # (and dumped the whole response via a debug print); return {} to
        # match get_news_and_events' empty-result convention.
        print(_e, symbol)
        return {}
# TODO rename this later
def get_news_and_events(
    symbol: str,
    page: int = 1,
    limit: int = 100,
    locale: str = "en",
) -> Union[dict, None]:
    """Fetch news and events for a TSX ticker via the TMX GraphQL endpoint.

    Parameters:
        symbol - ticker symbol from tsx, no prefix
        page - page of results to fetch
        limit - max number of documents to retrieve
        locale - language
    Returns:
        dict - :ref:`Quote By Symbol <quote_by_symbol_query>` "data" payload,
        or an empty dict on a rejected or malformed response.
    """
    payload = GQL.get_company_news_events_payload
    variables = payload["variables"]
    variables["symbol"] = symbol
    variables["page"] = page
    variables["limit"] = limit
    variables["locale"] = locale
    request_headers = {
        "authority": "app-money.tmx.com",
        "referer": f"https://money.tmx.com/en/quote/{symbol.upper()}",
        "locale": "en",
        "Content-Type": "application/json",
    }
    r = requests.post(
        "https://app-money.tmx.com/graphql",
        data=json.dumps(payload),
        headers=request_headers,
    )
    try:
        if r.status_code == 403:
            print(r.text)
            return {}
        allData = r.json()
        return allData["data"]
    except KeyError:
        return {}
# Ad-hoc smoke test: fetch a page of news for PKK.CN and dump it to stdout.
if __name__ == "__main__":
    art = get_news_and_events(
        "PKK.CN", 1, 108
    )
    print(art)
| 0 | 0 | 0 |
6344581f2661ecf2f2d823aec09b3d2b924df53c | 365 | py | Python | ampel/contrib/gamma/channels.py | RuslanKonno/Ampel-contrib-gamma | c552823f754554d784db157eea8ffd612ea2d0df | [
"BSD-3-Clause"
] | null | null | null | ampel/contrib/gamma/channels.py | RuslanKonno/Ampel-contrib-gamma | c552823f754554d784db157eea8ffd612ea2d0df | [
"BSD-3-Clause"
] | null | null | null | ampel/contrib/gamma/channels.py | RuslanKonno/Ampel-contrib-gamma | c552823f754554d784db157eea8ffd612ea2d0df | [
"BSD-3-Clause"
] | null | null | null | from os.path import dirname, join
import json
| 24.333333 | 64 | 0.728767 | from os.path import dirname, join
import json
def load_channels():
    """Load the channel definitions bundled next to this module."""
    config_path = join(dirname(__file__), "channels.json")
    with open(config_path) as config_file:
        return json.load(config_file)
def load_t2_run_configs():
    """Load the T2 run configurations bundled next to this module."""
    config_path = join(dirname(__file__), "t2_run_configs.json")
    with open(config_path) as config_file:
        return json.load(config_file)
def load_t3_jobs():
    """Load the T3 job definitions bundled next to this module."""
    config_path = join(dirname(__file__), "t3_jobs.json")
    with open(config_path) as config_file:
        return json.load(config_file)
| 250 | 0 | 69 |
c5876f0f0b87f80844430381e47a218150e0e357 | 1,012 | py | Python | mentor_helper/questions/models.py | idisblueflash/mentor-helper | 93265a654a0752a21cf87f5569baae02ed03d31e | [
"MIT"
] | null | null | null | mentor_helper/questions/models.py | idisblueflash/mentor-helper | 93265a654a0752a21cf87f5569baae02ed03d31e | [
"MIT"
] | null | null | null | mentor_helper/questions/models.py | idisblueflash/mentor-helper | 93265a654a0752a21cf87f5569baae02ed03d31e | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
| 28.914286 | 70 | 0.725296 | from django.db import models
# Create your models here.
class SkillPoint(models.Model):
    """A named skill that answers can be tagged with (see Answer.skill_points)."""
    name = models.CharField(max_length=200)
    def __str__(self):
        return self.name
class Category(models.Model):
    """A question category (referenced by Question.question_cate)."""
    name = models.CharField(max_length=200)
    def __str__(self):
        return self.name
class Question(models.Model):
    """A mentoring question, optionally linked to an external URL."""
    question_summary = models.CharField(max_length=200)
    question_detail = models.TextField()
    question_url = models.URLField(blank=True)
    # NOTE(review): ForeignKey without on_delete is pre-Django-2.0 style, and
    # default=1 assumes a Category with pk=1 exists -- confirm against fixtures.
    question_cate = models.ForeignKey(Category, blank=True, default=1)
    def __str__(self):
        return self.question_summary
class Answer(models.Model):
    """An answer attached to a Question."""
    # NOTE(review): ForeignKey without on_delete relies on the pre-Django-2.0
    # CASCADE default -- confirm against the project's Django version.
    question = models.ForeignKey(Question)
    answer_detail = models.TextField()
    # Presumably flags whether this answer resolved the question -- confirm.
    answer_solved = models.BooleanField(default=False)
    # Presumably distinguishes mentor answers from mentee answers -- confirm.
    is_mentor = models.BooleanField(default=True)
    # NOTE(review): free-text tags with a literal 'deep learning' default looks
    # like a placeholder; verify intent.
    tags = models.TextField(blank=True, default='deep learning')
    skill_points = models.ManyToManyField(SkillPoint)
    def __str__(self):
        # Truncate long answers for display.
        return self.answer_detail[:200]
| 115 | 749 | 91 |
783b2e782983edf1245746b1a07c08b6bc41a100 | 5,415 | py | Python | rogue/challenge/challenge.py | cypher-me/HAS-Qualifier-Challenges | bb795303716155dad4a930880a58fecb5d9b50c5 | [
"MIT"
] | 75 | 2020-07-20T20:54:00.000Z | 2022-03-09T09:18:37.000Z | rogue/challenge/challenge.py | cypher-me/HAS-Qualifier-Challenges | bb795303716155dad4a930880a58fecb5d9b50c5 | [
"MIT"
] | 3 | 2020-09-13T00:46:49.000Z | 2021-07-06T16:18:22.000Z | rogue/challenge/challenge.py | cypher-me/HAS-Qualifier-Challenges | bb795303716155dad4a930880a58fecb5d9b50c5 | [
"MIT"
] | 14 | 2020-07-22T16:34:51.000Z | 2021-09-13T12:19:59.000Z | import os, sys
import numpy as np
import time
import math
from scipy.spatial.transform import Rotation as R
from skyfield.api import load,Topos
from skyfield.earthlib import terra, reverse_terra
from timeout import timeout, TimeoutError
# Challenge timeout in seconds (override with the TIMEOUT env var).
timeout_time = int(os.getenv("TIMEOUT",60))
speed_of_light_km_ns = 0.000299792
au_to_km = 149598000
geo_orbit_km = 42164
sealevel_km = 6371
# Number of satellites to generate (override with NUM_SATS).
num_sats = int(os.getenv("NUM_SATS", 8))
# Allowed angular separation (radians) between a satellite and the ground vector.
minA = np.deg2rad(15)
maxA = np.deg2rad(35)
# Transmitter-to-ground-station distance bounds, km (see Transmitter()).
minDist = 5
maxDist = 15
def haversine_np(lon1, lat1, lon2, lat2):
    """
    Great-circle distance in km between two points given in decimal degrees.
    Vectorised: accepts scalars or numpy arrays.
    Reference:
    https://stackoverflow.com/a/29546836/7657658
    https://gist.github.com/mazzma12/6dbcc71ab3b579c08d66a968ff509901
    """
    lon1_r, lat1_r, lon2_r, lat2_r = (np.radians(v) for v in (lon1, lat1, lon2, lat2))
    half_dlat = (lat2_r - lat1_r) / 2.0
    half_dlon = (lon2_r - lon1_r) / 2.0
    hav = np.sin(half_dlat) ** 2 + np.cos(lat1_r) * np.cos(lat2_r) * np.sin(half_dlon) ** 2
    central_angle = 2 * np.arcsin(np.sqrt(hav))
    # Mean Earth radius of 6371 km.
    return 6371 * central_angle
# Only rotate around the Z axis, changes Lon only
@timeout(timeout_time)
if __name__ == "__main__":
    ts = load.timescale(builtin=True)
    t = ts.utc(0)
    # Deterministic layout derived from SEED ...
    SEED = int(os.getenv("SEED", 0)) & 0xFFFFFFFF
    np.random.seed(SEED)
    sys.stderr.write("SEED: {}\n".format(SEED))
    Gll, Vg = GroundStation(t)
    _, Vt = Transmitter(Gll, Vg, t)
    _, Vs = Satellites(Gll, Vt)
    # Time to randomize!!
    # ... then a time-salted Z rotation hides the true longitudes from players.
    np.random.seed( ( SEED + int(time.time()) ) & 0xFFFFFFFF )
    r = randomRotation()
    Gll = rotateCoords(r, Vg)
    Tll = rotateCoords(r, Vt)
    Slls = map(lambda V: rotateCoords(r, V), Vs)
    sys.stderr.write("Rogue @ {}\n".format(Tll))
    # Print out the details
    sys.stdout.write("Ground Antenna (lat,lon):\n")
    sys.stdout.write("\t{}, {}\n".format(Gll[0], Gll[1]))
    sys.stdout.write("Satellites (#,lat,lon):\n")
    ii = 1
    for (lat,lon) in Slls:
        sys.stdout.write("{},\t{},\t{}\n".format(ii,lat,lon))
        ii += 1
    sys.stdout.flush()
    try:
        doChallenge(Tll)
    except TimeoutError:
        sys.stdout.write("Timeout, Bye\n")
        sys.stdout.flush()
| 27.211055 | 83 | 0.570452 | import os, sys
import numpy as np
import time
import math
from scipy.spatial.transform import Rotation as R
from skyfield.api import load,Topos
from skyfield.earthlib import terra, reverse_terra
from timeout import timeout, TimeoutError
# Challenge timeout in seconds (override with the TIMEOUT env var).
timeout_time = int(os.getenv("TIMEOUT",60))
speed_of_light_km_ns = 0.000299792
au_to_km = 149598000
geo_orbit_km = 42164
sealevel_km = 6371
# Number of satellites to generate (override with NUM_SATS).
num_sats = int(os.getenv("NUM_SATS", 8))
# Allowed angular separation (radians) between a satellite and the ground vector.
minA = np.deg2rad(15)
maxA = np.deg2rad(35)
# Transmitter-to-ground-station distance bounds, km (see Transmitter()).
minDist = 5
maxDist = 15
def get_cart_xyz(lat, lon, alt = sealevel_km):
    """Convert lat/lon in degrees at radius ``alt`` km to a Cartesian XYZ array."""
    lat_rad = np.deg2rad(lat)
    lon_rad = np.deg2rad(lon)
    x = alt * np.cos(lat_rad) * np.cos(lon_rad)
    y = alt * np.cos(lat_rad) * np.sin(lon_rad)
    z = alt * np.sin(lat_rad)
    return np.array([x, y, z])
def get_lat_lon(V):
    """Return (lat, lon) in degrees for a Cartesian vector ``V``."""
    radius = np.linalg.norm(V)
    lat_rad = np.arcsin(V[2] / radius)
    lon_rad = np.arctan2(V[1], V[0])
    return np.rad2deg(lat_rad), np.rad2deg(lon_rad)
def haversine_np(lon1, lat1, lon2, lat2):
    """
    Great-circle distance in km between two points given in decimal degrees.
    Vectorised: accepts scalars or numpy arrays.
    Reference:
    https://stackoverflow.com/a/29546836/7657658
    https://gist.github.com/mazzma12/6dbcc71ab3b579c08d66a968ff509901
    """
    lon1_r, lat1_r, lon2_r, lat2_r = (np.radians(v) for v in (lon1, lat1, lon2, lat2))
    half_dlat = (lat2_r - lat1_r) / 2.0
    half_dlon = (lon2_r - lon1_r) / 2.0
    hav = np.sin(half_dlat) ** 2 + np.cos(lat1_r) * np.cos(lat2_r) * np.sin(half_dlon) ** 2
    central_angle = 2 * np.arcsin(np.sqrt(hav))
    # Mean Earth radius of 6371 km.
    return 6371 * central_angle
def F(Vi, Vj):
    """Sum over the first three components of (Vi[k]**2 - Vj[k]**2)."""
    return sum(Vi[k] ** 2 - Vj[k] ** 2 for k in range(3))
def GroundStation(t):
    """Pick a random ground-station site; returns ([lat, lon], xyz vector).

    ``t`` is unused here and kept only for interface symmetry with callers.
    """
    # Exactly two draws from the global RNG, in this order:
    # lat in [-25, 25), lon in [-150, 150).
    station_lat = 50 * np.random.rand() - 25.
    station_lon = 300 * np.random.rand() - 150
    Gll = [station_lat, station_lon]
    Vg = get_cart_xyz(Gll[0], Gll[1])
    return Gll, Vg
def Transmitter(Gll, Vg, t):
    """Place the rogue transmitter minDist-maxDist km from the ground station.

    Rejection-samples candidate offsets until the haversine distance falls in
    range. Returns ((lat, lon), xyz vector). ``Vg`` and ``t`` are unused.
    """
    t_lat = 0
    t_lon = 0
    while True:
        # Two RNG draws per candidate, each an offset in [-1, 1) degrees.
        t_lat = Gll[0] + 2 * np.random.rand() - 1
        t_lon = Gll[1] + 2 * np.random.rand() - 1
        dist = haversine_np(Gll[1], Gll[0], t_lon, t_lat)
        if minDist < dist < maxDist:
            break
    Vt = get_cart_xyz(t_lat, t_lon)
    return (t_lat, t_lon), Vt
def Satellites(Gll, Vg):
    """Generate ``num_sats`` GEO satellite positions visible from the station.

    A candidate is kept when its angular separation from the station vector
    lies in [minA, maxA] radians and it sits at least one degree away from
    every previously accepted satellite (keeps the geometry well-conditioned).
    Returns (list of (lat, lon), list of xyz vectors).
    """
    sat_lls = []
    sat_vecs = []
    Vg_norm = Vg / np.linalg.norm(Vg)
    while len(sat_vecs) < num_sats:
        # Two RNG draws per candidate: offsets in [-10, 50) degrees.
        lat = Gll[0] + 60 * np.random.rand() - 10
        lon = Gll[1] + 60 * np.random.rand() - 10
        V = get_cart_xyz(lat, lon, alt=geo_orbit_km)
        V_norm = V / np.linalg.norm(V)
        diff = np.arccos(V_norm.dot(Vg_norm))
        if diff < minA or diff > maxA:
            continue
        # Reject candidates angularly too close to an accepted satellite.
        too_close = False
        for accepted in sat_vecs:
            unit = accepted / np.linalg.norm(accepted)
            if np.arccos(V_norm.dot(unit)) < np.deg2rad(1):
                too_close = True
                break
        if too_close:
            continue
        sat_vecs.append(V)
        sat_lls.append((lat, lon))
    return sat_lls, sat_vecs
# Only rotate around the Z axis, changes Lon only
def randomRotation():
    """Return a random rotation about the Z axis, angle uniform in [0, 2*pi)."""
    theta = np.random.rand() * (2 * np.pi)
    z_axis = np.array([0, 0, 1])
    return R.from_rotvec(theta * z_axis)
def rotateCoords(r, V):
    """Apply rotation ``r`` to vector ``V`` and return the resulting (lat, lon)."""
    rotated = r.apply(V)
    return get_lat_lon(rotated)
@timeout(timeout_time)
def doChallenge(Tll):
    """Run one guess round: read "lat, lon" from stdin, award the flag if close.

    The guess must land within 250 m (haversine) of the true transmitter
    position ``Tll``. Exits the process on malformed input.
    """
    print("Where am I? Lat, Lon in +/-degrees)?")
    flag = os.getenv("FLAG", "FLAG{Placeholder}")
    sys.stdout.flush()
    line = sys.stdin.readline().strip()
    try:
        lat, lon = list(map(float, line.split(",")))
    except ValueError:
        print("Format must be two floats, +/-, Lat, Lon")
        sys.stdout.flush()
        sys.exit(-1)
    # Reject NaN/inf explicitly: float() accepts "nan"/"inf" strings.
    if math.isnan(lat) or math.isnan(lon) or math.isinf(lat) or math.isinf(lon):
        print("Those aren't real coordinates...")
        sys.stdout.flush()
        sys.exit(0)
    # haversine_np returns km; convert to metres for the 250 m threshold.
    diff = 1000 * haversine_np(lon, lat, Tll[1], Tll[0])
    sys.stderr.write("Guess Error: %d\n" % diff)
    if 250 > diff:
        print("You found it!")
        print(flag)
        sys.stderr.write("Awarded Flag: " + flag)
    else :
        print("Nothing here...");
        print("Try looking harder?")
    sys.stdout.flush()
    sys.stderr.flush()
if __name__ == "__main__":
    ts = load.timescale(builtin=True)
    t = ts.utc(0)
    # Deterministic layout derived from SEED ...
    SEED = int(os.getenv("SEED", 0)) & 0xFFFFFFFF
    np.random.seed(SEED)
    sys.stderr.write("SEED: {}\n".format(SEED))
    Gll, Vg = GroundStation(t)
    _, Vt = Transmitter(Gll, Vg, t)
    _, Vs = Satellites(Gll, Vt)
    # Time to randomize!!
    # ... then a time-salted Z rotation hides the true longitudes from players.
    np.random.seed( ( SEED + int(time.time()) ) & 0xFFFFFFFF )
    r = randomRotation()
    Gll = rotateCoords(r, Vg)
    Tll = rotateCoords(r, Vt)
    Slls = map(lambda V: rotateCoords(r, V), Vs)
    sys.stderr.write("Rogue @ {}\n".format(Tll))
    # Print out the details
    sys.stdout.write("Ground Antenna (lat,lon):\n")
    sys.stdout.write("\t{}, {}\n".format(Gll[0], Gll[1]))
    sys.stdout.write("Satellites (#,lat,lon):\n")
    ii = 1
    for (lat,lon) in Slls:
        sys.stdout.write("{},\t{},\t{}\n".format(ii,lat,lon))
        ii += 1
    sys.stdout.flush()
    try:
        doChallenge(Tll)
    except TimeoutError:
        sys.stdout.write("Timeout, Bye\n")
        sys.stdout.flush()
| 3,001 | 0 | 205 |
e304abf6a01e3079ded5e6bdfcc34f40a13a42b7 | 403 | py | Python | config.py | uk-gov-mirror/LandRegistry.search-api-alpha | da9760e6563299518ba4fccb3c958e64a4f1c763 | [
"MIT"
] | null | null | null | config.py | uk-gov-mirror/LandRegistry.search-api-alpha | da9760e6563299518ba4fccb3c958e64a4f1c763 | [
"MIT"
] | 1 | 2021-06-01T22:00:40.000Z | 2021-06-01T22:00:40.000Z | config.py | uk-gov-mirror/LandRegistry.search-api-alpha | da9760e6563299518ba4fccb3c958e64a4f1c763 | [
"MIT"
] | 1 | 2021-04-11T05:22:35.000Z | 2021-04-11T05:22:35.000Z | import os
| 26.866667 | 65 | 0.761787 | import os
class Config(object):
DEBUG = False
ELASTICSEARCH_HOST = os.environ['ELASTICSEARCH_HOST']
ELASTICSEARCH_PORT = os.environ['ELASTICSEARCH_PORT']
ELASTICSEARCH_USESSL = os.environ['ELASTICSEARCH_USESSL']
ELASTICSEARCH_USERPASS = os.environ['ELASTICSEARCH_USERPASS']
class DevelopmentConfig(Config):
DEBUG = True
class TestConfig(DevelopmentConfig):
TESTING = True
| 0 | 324 | 69 |
38fb95037a513aa50d7261f6b8818c317e7f9a8d | 7,217 | py | Python | j5/backends/hardware/sr/v4/power_board.py | udhayacommits/j5 | 2b516af67ccf5aa31e20489d479c48075e737f4d | [
"MIT"
] | null | null | null | j5/backends/hardware/sr/v4/power_board.py | udhayacommits/j5 | 2b516af67ccf5aa31e20489d479c48075e737f4d | [
"MIT"
] | null | null | null | j5/backends/hardware/sr/v4/power_board.py | udhayacommits/j5 | 2b516af67ccf5aa31e20489d479c48075e737f4d | [
"MIT"
] | null | null | null | """Hardware Backend for the SR V4 power board."""
import struct
from datetime import timedelta
from time import sleep
from typing import Callable, Dict, Mapping, Set, cast
import usb
from j5.backends.hardware.env import NotSupportedByHardwareError
from j5.backends.hardware.j5.raw_usb import (
RawUSBHardwareBackend,
ReadCommand,
WriteCommand,
handle_usb_error,
)
from j5.boards import Board
from j5.boards.sr.v4.power_board import PowerBoard, PowerOutputPosition
from j5.components import (
BatterySensorInterface,
ButtonInterface,
LEDInterface,
PiezoInterface,
PowerOutputInterface,
)
# The names and codes of these commands match the definitions in usb.h in the firmware
# source.
# Read commands: one per power output, plus rail/battery/button/firmware-version
# queries. The second ReadCommand argument is presumably the response length in
# bytes -- confirm against raw_usb / the firmware source.
CMD_READ_OUTPUT: Mapping[int, ReadCommand] = {
    output.value: ReadCommand(output.value, 4)
    for output in PowerOutputPosition
}
CMD_READ_5VRAIL = ReadCommand(6, 4)
CMD_READ_BATTERY = ReadCommand(7, 8)
CMD_READ_BUTTON = ReadCommand(8, 4)
CMD_READ_FWVER = ReadCommand(9, 4)
# Write commands: one per power output, plus the two LEDs and the piezo.
CMD_WRITE_OUTPUT: Mapping[int, WriteCommand] = {
    output.value: WriteCommand(output.value)
    for output in PowerOutputPosition
}
CMD_WRITE_RUNLED = WriteCommand(6)
CMD_WRITE_ERRORLED = WriteCommand(7)
CMD_WRITE_PIEZO = WriteCommand(8)
class SRV4PowerBoardHardwareBackend(
PowerOutputInterface,
PiezoInterface,
ButtonInterface,
BatterySensorInterface,
LEDInterface,
RawUSBHardwareBackend,
):
"""The hardware implementation of the SR V4 power board."""
board = PowerBoard
@classmethod
@handle_usb_error
def discover(cls, find: Callable = usb.core.find) -> Set[Board]:
"""Discover boards that this backend can control."""
boards: Set[Board] = set()
device_list = find(idVendor=0x1bda, idProduct=0x0010, find_all=True)
for device in device_list:
backend = cls(device)
board = PowerBoard(backend.serial, backend)
boards.add(cast(Board, board))
return boards
@handle_usb_error
def check_firmware_version_supported(self) -> None:
"""Raises an exception if the firmware version is not supported."""
v = self.firmware_version
if v != "3":
raise NotImplementedError(f"this power board is running firmware "
f"version {v}, but only version 3 is supported")
@property
def firmware_version(self) -> str:
"""The firmware version reported by the board."""
version, = struct.unpack("<I", self._read(CMD_READ_FWVER))
return str(cast(int, version))
def get_power_output_enabled(self, identifier: int) -> bool:
"""Get whether a power output is enabled."""
try:
return self._output_states[identifier]
except KeyError:
raise ValueError(f"Invalid power output identifier {identifier!r}; "
f"valid identifiers are {CMD_WRITE_OUTPUT.keys()}") from None
def set_power_output_enabled(
self, identifier: int, enabled: bool,
) -> None:
"""Set whether a power output is enabled."""
try:
cmd = CMD_WRITE_OUTPUT[identifier]
except KeyError:
raise ValueError(f"Invalid power output identifier {identifier!r}; "
f"valid identifiers are {CMD_WRITE_OUTPUT.keys()}") from None
self._write(cmd, int(enabled))
self._output_states[identifier] = enabled
def get_power_output_current(self, identifier: int) -> float:
"""Get the current being drawn on a power output, in amperes."""
try:
cmd = CMD_READ_OUTPUT[identifier]
except KeyError:
raise ValueError(f"invalid power output identifier {identifier!r}; "
f"valid identifiers are {CMD_READ_OUTPUT.keys()}") from None
current, = struct.unpack("<I", self._read(cmd))
return cast(int, current) / 1000 # convert milliamps to amps
def buzz(self, identifier: int,
duration: timedelta, frequency: float) -> None:
"""Queue a pitch to be played."""
if identifier != 0:
raise ValueError(f"invalid piezo identifier {identifier!r}; "
f"the only valid identifier is 0")
duration_ms = round(duration / timedelta(milliseconds=1))
if duration_ms > 65535:
raise NotSupportedByHardwareError("Maximum piezo duration is 65535ms.")
frequency_int = int(round(frequency))
if frequency_int > 65535:
raise NotSupportedByHardwareError("Maximum piezo frequency is 65535Hz.")
data = struct.pack("<HH", frequency_int, duration_ms)
self._write(CMD_WRITE_PIEZO, data)
def get_button_state(self, identifier: int) -> bool:
"""Get the state of a button."""
if identifier != 0:
raise ValueError(f"invalid button identifier {identifier!r}; "
f"the only valid identifier is 0")
state, = struct.unpack("<I", self._read(CMD_READ_BUTTON))
return cast(int, state) != 0
def wait_until_button_pressed(self, identifier: int) -> None:
"""Halt the program until this button is pushed."""
while not self.get_button_state(identifier):
sleep(0.05)
def get_battery_sensor_voltage(self, identifier: int) -> float:
"""Get the voltage of a battery sensor."""
if identifier != 0:
raise ValueError(f"invalid battery sensor identifier {identifier!r}; "
f"the only valid identifier is 0")
current, voltage = struct.unpack("<II", self._read(CMD_READ_BATTERY))
return cast(int, voltage) / 1000 # convert millivolts to volts
def get_battery_sensor_current(self, identifier: int) -> float:
"""Get the current of a battery sensor."""
if identifier != 0:
raise ValueError(f"invalid battery sensor identifier {identifier!r}; "
f"the only valid identifier is 0")
current, voltage = struct.unpack("<II", self._read(CMD_READ_BATTERY))
return cast(int, current) / 1000 # convert milliamps to amps
def get_led_state(self, identifier: int) -> bool:
"""Get the state of an LED."""
return self._led_states[identifier]
def set_led_state(self, identifier: int, state: bool) -> None:
"""Set the state of an LED."""
cmds = {0: CMD_WRITE_RUNLED, 1: CMD_WRITE_ERRORLED}
try:
cmd = cmds[identifier]
except KeyError:
raise ValueError(f"invalid LED identifier {identifier!r}; valid identifiers "
f"are 0 (run LED) and 1 (error LED)") from None
self._write(cmd, int(state))
self._led_states[identifier] = state
| 38.185185 | 90 | 0.640987 | """Hardware Backend for the SR V4 power board."""
import struct
from datetime import timedelta
from time import sleep
from typing import Callable, Dict, Mapping, Set, cast
import usb
from j5.backends.hardware.env import NotSupportedByHardwareError
from j5.backends.hardware.j5.raw_usb import (
RawUSBHardwareBackend,
ReadCommand,
WriteCommand,
handle_usb_error,
)
from j5.boards import Board
from j5.boards.sr.v4.power_board import PowerBoard, PowerOutputPosition
from j5.components import (
BatterySensorInterface,
ButtonInterface,
LEDInterface,
PiezoInterface,
PowerOutputInterface,
)
# The names and codes of these commands match the definitions in usb.h in the firmware
# source.
CMD_READ_OUTPUT: Mapping[int, ReadCommand] = {
output.value: ReadCommand(output.value, 4)
for output in PowerOutputPosition
}
CMD_READ_5VRAIL = ReadCommand(6, 4)
CMD_READ_BATTERY = ReadCommand(7, 8)
CMD_READ_BUTTON = ReadCommand(8, 4)
CMD_READ_FWVER = ReadCommand(9, 4)
CMD_WRITE_OUTPUT: Mapping[int, WriteCommand] = {
output.value: WriteCommand(output.value)
for output in PowerOutputPosition
}
CMD_WRITE_RUNLED = WriteCommand(6)
CMD_WRITE_ERRORLED = WriteCommand(7)
CMD_WRITE_PIEZO = WriteCommand(8)
class SRV4PowerBoardHardwareBackend(
    PowerOutputInterface,
    PiezoInterface,
    ButtonInterface,
    BatterySensorInterface,
    LEDInterface,
    RawUSBHardwareBackend,
):
    """The hardware implementation of the SR V4 power board."""
    # Board class that this backend drives.
    board = PowerBoard
    @classmethod
    @handle_usb_error
    def discover(cls, find: Callable = usb.core.find) -> Set[Board]:
        """Discover boards that this backend can control."""
        boards: Set[Board] = set()
        # Match the power board by its USB vendor/product ID pair.
        device_list = find(idVendor=0x1bda, idProduct=0x0010, find_all=True)
        for device in device_list:
            backend = cls(device)
            board = PowerBoard(backend.serial, backend)
            boards.add(cast(Board, board))
        return boards
    @handle_usb_error
    def __init__(self, usb_device: usb.core.Device) -> None:
        """Initialise the backend for a single USB device.

        All power outputs and both LEDs start recorded as off; the firmware
        version is checked immediately so unsupported boards fail fast.
        """
        self._usb_device = usb_device
        self._output_states: Dict[int, bool] = {
            output.value: False
            for output in PowerOutputPosition
        }
        self._led_states: Dict[int, bool] = {
            i: False
            for i in range(2)
        }
        self.check_firmware_version_supported()
    def check_firmware_version_supported(self) -> None:
        """Raises an exception if the firmware version is not supported."""
        v = self.firmware_version
        if v != "3":
            raise NotImplementedError(f"this power board is running firmware "
                                      f"version {v}, but only version 3 is supported")
    @property
    def firmware_version(self) -> str:
        """The firmware version reported by the board."""
        # The board reports the version as a little-endian unsigned 32-bit int.
        version, = struct.unpack("<I", self._read(CMD_READ_FWVER))
        return str(cast(int, version))
    def get_power_output_enabled(self, identifier: int) -> bool:
        """Get whether a power output is enabled."""
        try:
            return self._output_states[identifier]
        except KeyError:
            raise ValueError(f"Invalid power output identifier {identifier!r}; "
                             f"valid identifiers are {CMD_WRITE_OUTPUT.keys()}") from None
    def set_power_output_enabled(
        self, identifier: int, enabled: bool,
    ) -> None:
        """Set whether a power output is enabled."""
        try:
            cmd = CMD_WRITE_OUTPUT[identifier]
        except KeyError:
            raise ValueError(f"Invalid power output identifier {identifier!r}; "
                             f"valid identifiers are {CMD_WRITE_OUTPUT.keys()}") from None
        self._write(cmd, int(enabled))
        # Track the new state locally; the board is not queried for it later.
        self._output_states[identifier] = enabled
    def get_power_output_current(self, identifier: int) -> float:
        """Get the current being drawn on a power output, in amperes."""
        try:
            cmd = CMD_READ_OUTPUT[identifier]
        except KeyError:
            raise ValueError(f"invalid power output identifier {identifier!r}; "
                             f"valid identifiers are {CMD_READ_OUTPUT.keys()}") from None
        current, = struct.unpack("<I", self._read(cmd))
        return cast(int, current) / 1000  # convert milliamps to amps
    def buzz(self, identifier: int,
             duration: timedelta, frequency: float) -> None:
        """Queue a pitch to be played."""
        if identifier != 0:
            raise ValueError(f"invalid piezo identifier {identifier!r}; "
                             f"the only valid identifier is 0")
        duration_ms = round(duration / timedelta(milliseconds=1))
        # Duration and frequency are transmitted as unsigned 16-bit fields,
        # hence the 65535 caps below.
        if duration_ms > 65535:
            raise NotSupportedByHardwareError("Maximum piezo duration is 65535ms.")
        frequency_int = int(round(frequency))
        if frequency_int > 65535:
            raise NotSupportedByHardwareError("Maximum piezo frequency is 65535Hz.")
        data = struct.pack("<HH", frequency_int, duration_ms)
        self._write(CMD_WRITE_PIEZO, data)
    def get_button_state(self, identifier: int) -> bool:
        """Get the state of a button."""
        if identifier != 0:
            raise ValueError(f"invalid button identifier {identifier!r}; "
                             f"the only valid identifier is 0")
        state, = struct.unpack("<I", self._read(CMD_READ_BUTTON))
        return cast(int, state) != 0
    def wait_until_button_pressed(self, identifier: int) -> None:
        """Halt the program until this button is pushed."""
        # Poll at 20 Hz rather than busy-waiting on the USB bus.
        while not self.get_button_state(identifier):
            sleep(0.05)
    def get_battery_sensor_voltage(self, identifier: int) -> float:
        """Get the voltage of a battery sensor."""
        if identifier != 0:
            raise ValueError(f"invalid battery sensor identifier {identifier!r}; "
                             f"the only valid identifier is 0")
        # The battery command returns (current, voltage) as two uint32s.
        current, voltage = struct.unpack("<II", self._read(CMD_READ_BATTERY))
        return cast(int, voltage) / 1000  # convert millivolts to volts
    def get_battery_sensor_current(self, identifier: int) -> float:
        """Get the current of a battery sensor."""
        if identifier != 0:
            raise ValueError(f"invalid battery sensor identifier {identifier!r}; "
                             f"the only valid identifier is 0")
        current, voltage = struct.unpack("<II", self._read(CMD_READ_BATTERY))
        return cast(int, current) / 1000  # convert milliamps to amps
    def get_led_state(self, identifier: int) -> bool:
        """Get the state of an LED."""
        return self._led_states[identifier]
    def set_led_state(self, identifier: int, state: bool) -> None:
        """Set the state of an LED."""
        cmds = {0: CMD_WRITE_RUNLED, 1: CMD_WRITE_ERRORLED}
        try:
            cmd = cmds[identifier]
        except KeyError:
            raise ValueError(f"invalid LED identifier {identifier!r}; valid identifiers "
                             f"are 0 (run LED) and 1 (error LED)") from None
        self._write(cmd, int(state))
        self._led_states[identifier] = state
| 366 | 0 | 26 |
bf27a951fe891c22d59e5ab0f3da52fcd0d0ceca | 2,163 | py | Python | intreehooks.py | hroncok/intreehooks | 675b0a9039abe61839c267edcc440ee13331aad3 | [
"MIT"
] | 5 | 2018-08-30T19:04:25.000Z | 2020-05-01T18:51:37.000Z | intreehooks.py | hroncok/intreehooks | 675b0a9039abe61839c267edcc440ee13331aad3 | [
"MIT"
] | 2 | 2019-12-14T11:33:51.000Z | 2020-06-04T15:40:48.000Z | intreehooks.py | hroncok/intreehooks | 675b0a9039abe61839c267edcc440ee13331aad3 | [
"MIT"
] | 1 | 2018-08-30T19:04:29.000Z | 2018-08-30T19:04:29.000Z | """Load a PEP 517 backend from inside the source tree.
"""
from contextlib import contextmanager
import importlib
import os
import pytoml
import sys
__version__ = '1.0'
@contextmanager
loader = HooksLoader(os.path.realpath(os.getcwd()))
| 32.283582 | 75 | 0.678225 | """Load a PEP 517 backend from inside the source tree.
"""
from contextlib import contextmanager
import importlib
import os
import pytoml
import sys
__version__ = '1.0'
@contextmanager
def prepended_to_syspath(directory):
    """Context manager: place *directory* at the front of ``sys.path``.

    The entry is removed again on exit, even when the body raises.
    """
    sys.path.insert(0, directory)
    try:
        yield
    finally:
        del sys.path[0]
class HooksLoader(object):
    """PEP 517 hook wrapper that loads the real backend from the source tree.

    The backend is named by ``tool.intreehooks.build-backend`` in
    ``pyproject.toml`` as ``module`` or ``module:object`` and must live
    inside *directory*.
    """
    def __init__(self, directory):
        self.directory = directory
    def _module_from_dir(self, modname):
        """Import *modname* and verify it was found inside ``self.directory``.

        Raises ImportError when the imported module resolves outside the
        working directory (e.g. to an installed package of the same name).
        """
        # Temporarily put the source tree first on sys.path for the import.
        sys.path.insert(0, self.directory)
        try:
            mod = importlib.import_module(modname)
        finally:
            sys.path.pop(0)
        mod_file = os.path.realpath(mod.__file__)
        if not mod_file.startswith(self.directory):
            # BUG FIX: the message placeholder was never interpolated — a
            # two-argument ImportError('{} …', modname) was raised verbatim.
            raise ImportError(
                '{} not found in working directory'.format(modname))
        return mod
    @property
    def _backend(self):
        """Resolve the backend object named in pyproject.toml."""
        with open(os.path.join(self.directory, 'pyproject.toml')) as f:
            proj = pytoml.load(f)
        ref = proj['tool']['intreehooks']['build-backend']
        modname, separator, qualname = ref.partition(':')
        obj = self._module_from_dir(modname)
        if separator:
            for attr in qualname.split('.'):
                obj = getattr(obj, attr)
        return obj
    # Hook wrappers -----
    def build_wheel(self, wheel_directory, config_settings=None,
                    metadata_directory=None):
        return self._backend.build_wheel(
            wheel_directory, config_settings, metadata_directory)
    def get_requires_for_build_wheel(self, config_settings=None):
        # BUG FIX: previously this delegated to the backend's
        # get_requires_for_build_sdist hook, returning sdist requirements
        # for a wheel build.
        return self._backend.get_requires_for_build_wheel(config_settings)
    def prepare_metadata_for_build_wheel(self, metadata_directory,
                                         config_settings=None):
        return self._backend.prepare_metadata_for_build_wheel(
            metadata_directory, config_settings)
    def build_sdist(self, sdist_directory, config_settings=None):
        return self._backend.build_sdist(sdist_directory, config_settings)
    def get_requires_for_build_sdist(self, config_settings=None):
        return self._backend.get_requires_for_build_sdist(config_settings)
loader = HooksLoader(os.path.realpath(os.getcwd()))
| 1,617 | 261 | 45 |
3aa0f9b4ac917875eedf179635675351921cfa88 | 844 | py | Python | examples/nlp/bert_glue_pytorch/constants.py | gh-determined-ai/determined | 9a1ab33a3a356b69681b3351629fef4ab98ddb56 | [
"Apache-2.0"
] | 1,729 | 2020-04-27T17:36:40.000Z | 2022-03-31T05:48:39.000Z | examples/nlp/bert_glue_pytorch/constants.py | ChrisW09/determined | 5c37bfe9cfcc69174ba29a3f1a115c3e9e3632e0 | [
"Apache-2.0"
] | 1,940 | 2020-04-27T17:34:14.000Z | 2022-03-31T23:02:28.000Z | examples/nlp/bert_glue_pytorch/constants.py | ChrisW09/determined | 5c37bfe9cfcc69174ba29a3f1a115c3e9e3632e0 | [
"Apache-2.0"
] | 214 | 2020-04-27T19:57:28.000Z | 2022-03-29T08:17:16.000Z | from transformers import (
BertConfig,
BertForSequenceClassification,
BertTokenizer,
DistilBertConfig,
DistilBertForSequenceClassification,
DistilBertTokenizer,
RobertaConfig,
RobertaForSequenceClassification,
RobertaTokenizer,
XLMConfig,
XLMForSequenceClassification,
XLMTokenizer,
XLNetConfig,
XLNetForSequenceClassification,
XLNetTokenizer,
)
# Lookup for classes
MODEL_CLASSES = {
"bert": (BertConfig, BertForSequenceClassification, BertTokenizer),
"xlnet": (XLNetConfig, XLNetForSequenceClassification, XLNetTokenizer),
"xlm": (XLMConfig, XLMForSequenceClassification, XLMTokenizer),
"roberta": (RobertaConfig, RobertaForSequenceClassification, RobertaTokenizer),
"distilbert": (DistilBertConfig, DistilBertForSequenceClassification, DistilBertTokenizer),
}
| 31.259259 | 95 | 0.774882 | from transformers import (
BertConfig,
BertForSequenceClassification,
BertTokenizer,
DistilBertConfig,
DistilBertForSequenceClassification,
DistilBertTokenizer,
RobertaConfig,
RobertaForSequenceClassification,
RobertaTokenizer,
XLMConfig,
XLMForSequenceClassification,
XLMTokenizer,
XLNetConfig,
XLNetForSequenceClassification,
XLNetTokenizer,
)
# Lookup table: model-type key -> (config class, sequence-classification
# model class, tokenizer class) triple used to instantiate each transformer.
MODEL_CLASSES = {
    "bert": (BertConfig, BertForSequenceClassification, BertTokenizer),
    "xlnet": (XLNetConfig, XLNetForSequenceClassification, XLNetTokenizer),
    "xlm": (XLMConfig, XLMForSequenceClassification, XLMTokenizer),
    "roberta": (RobertaConfig, RobertaForSequenceClassification, RobertaTokenizer),
    "distilbert": (DistilBertConfig, DistilBertForSequenceClassification, DistilBertTokenizer),
}
| 0 | 0 | 0 |
a8e4d20c8ab80f5b7903457929ef9c281fbe49b2 | 95 | py | Python | flashback/caching/__init__.py | PaulRenvoise/flashback | f9a16f4b0cb12a2180206c7b95d9eb8fb256381d | [
"MIT"
] | 3 | 2021-06-08T11:40:59.000Z | 2022-03-31T16:22:56.000Z | flashback/caching/__init__.py | PaulRenvoise/flashback | f9a16f4b0cb12a2180206c7b95d9eb8fb256381d | [
"MIT"
] | 28 | 2020-04-28T22:36:14.000Z | 2021-06-06T20:32:00.000Z | flashback/caching/__init__.py | PaulRenvoise/flashback | f9a16f4b0cb12a2180206c7b95d9eb8fb256381d | [
"MIT"
] | null | null | null | from .cache import Cache
from .cached import cached
__all__ = (
"Cache",
"cached",
)
| 10.555556 | 26 | 0.631579 | from .cache import Cache
from .cached import cached
__all__ = (
"Cache",
"cached",
)
| 0 | 0 | 0 |
aa43158a1a5030a9fbe2caa413e11432a7cc303b | 1,708 | py | Python | c/lic_eliminar.py | yo-alan/personal | 2f711a9f5dd5a16fbb3ab2a6f9b89069894ce40c | [
"MIT"
] | null | null | null | c/lic_eliminar.py | yo-alan/personal | 2f711a9f5dd5a16fbb3ab2a6f9b89069894ce40c | [
"MIT"
] | 10 | 2015-01-12T12:57:09.000Z | 2015-03-30T13:39:23.000Z | c/lic_eliminar.py | yo-alan/personal | 2f711a9f5dd5a16fbb3ab2a6f9b89069894ce40c | [
"MIT"
] | null | null | null | # coding=utf-8
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from c.error import Error
from v.ui_lic_eliminar import Ui_Lic_Eliminar
| 21.897436 | 103 | 0.697307 | # coding=utf-8
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from c.error import Error
from v.ui_lic_eliminar import Ui_Lic_Eliminar
class Lic_Eliminar(QMessageBox):
    """Confirmation dialog for deleting a licence record (PyQt4, Python 2)."""
    # Licence object selected for deletion (set in mostrar()).
    l = None
    # Error dialog helper (set in __init__).
    error = None
    def __init__(self, principal):
        """Build the warning message box with Delete/Cancel buttons."""
        QDialog.__init__(self, principal)
        self.ui = Ui_Lic_Eliminar()
        self.ui.setupUi(self)
        self.error = Error(self)
        # Runtime strings are user-facing Spanish text; .decode('utf-8') is
        # the Python-2 bytes->unicode conversion.
        self.setText("<b>¿Estás seguro de querer eliminar esta licencia?</b>".decode('utf-8'))
        self.setInformativeText("Esta acción no se puede deshacer.".decode('utf-8'))
        self.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel)
        self.button(QMessageBox.Ok).clicked.connect(lambda : self.accept())
        self.setButtonText(QMessageBox.Ok, "Eliminar")
        self.setButtonText(QMessageBox.Cancel, "Cancelar")
        self.setIcon(QMessageBox.Warning)
    def center(self):
        """Centre the dialog on the available desktop geometry."""
        qr = self.frameGeometry()
        cp = QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())
    def mostrar(self, principal):
        """Show the dialog for the licence selected in the licences table."""
        fila = principal.ui.twLicencias.currentRow()
        # Column 1 holds the licence start date, used as the lookup key.
        item = principal.ui.twLicencias.item(fila, 1)
        if item is None:
            return
        desde = item.text()
        for l in principal.licencias:
            if l.desde == desde:
                self.l = l
                break
        self.setDefaultButton(QMessageBox.Ok)
        self.show()
        self.center()
    def closeEvent(self, event):
        # Intentionally ignore close events; accept()/reject() drive the dialog.
        pass
    def reject(self, ):
        """Close the dialog without deleting."""
        self.done(QDialog.Rejected)
    def accept(self, ):
        """Delete the selected licence; show the error dialog on failure."""
        try:
            self.l.eliminar()
            self.done(QDialog.Accepted)
        except Exception as ex:
            self.error.setText("Ha ocurrido un error mientras intentaba eliminar una licencia.".decode('utf-8'))
            self.error.setDetailedText(str(ex).decode('utf-8'))
            self.error.mostrar()
| 1,354 | 190 | 23 |
7dd9bcfe92839d70d4b8933849b2c5516e4aa2a5 | 882 | py | Python | source/_static/code/linear_models/tsh_hg.py | tuttugu-ryo/lecture-source-py | 9ce84044c2cc421775ea63a004556d7ae3b4e504 | [
"BSD-3-Clause"
] | null | null | null | source/_static/code/linear_models/tsh_hg.py | tuttugu-ryo/lecture-source-py | 9ce84044c2cc421775ea63a004556d7ae3b4e504 | [
"BSD-3-Clause"
] | null | null | null | source/_static/code/linear_models/tsh_hg.py | tuttugu-ryo/lecture-source-py | 9ce84044c2cc421775ea63a004556d7ae3b4e504 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
from quantecon import LinearStateSpace
phi_1, phi_2, phi_3, phi_4 = 0.5, -0.2, 0, 0.5
sigma = 0.1
A = [[phi_1, phi_2, phi_3, phi_4],
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0]]
C = [[sigma], [0], [0], [0]]
G = [1, 0, 0, 0]
T = 30
ar = LinearStateSpace(A, C, G)
ymin, ymax = -0.8, 1.25
fig, ax = plt.subplots(figsize=(8, 4))
ax.set_xlim(ymin, ymax)
ax.set_xlabel(r'$y_t$', fontsize=16)
x, y = ar.replicate(T=T, num_reps=100000)
mu_x, mu_y, Sigma_x, Sigma_y = ar.stationary_distributions()
f_y = norm(loc=float(mu_y), scale=float(np.sqrt(Sigma_y)))
y = y.flatten()
ax.hist(y, bins=50, density=True, alpha=0.4)
ygrid = np.linspace(ymin, ymax, 150)
ax.plot(ygrid, f_y.pdf(ygrid), 'k-', lw=2, alpha=0.8, label='true density')
ax.legend()
plt.show()
| 23.837838 | 75 | 0.608844 | import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
from quantecon import LinearStateSpace
# AR(4) coefficients: y_t = phi_1*y_{t-1} + ... + phi_4*y_{t-4} + sigma*w_t
phi_1, phi_2, phi_3, phi_4 = 0.5, -0.2, 0, 0.5
sigma = 0.1
# Companion-form state transition matrix for the AR(4) process.
A = [[phi_1, phi_2, phi_3, phi_4],
     [1, 0, 0, 0],
     [0, 1, 0, 0],
     [0, 0, 1, 0]]
C = [[sigma], [0], [0], [0]]
G = [1, 0, 0, 0]
T = 30
ar = LinearStateSpace(A, C, G)
ymin, ymax = -0.8, 1.25
fig, ax = plt.subplots(figsize=(8, 4))
ax.set_xlim(ymin, ymax)
ax.set_xlabel(r'$y_t$', fontsize=16)
# Simulate 100,000 replications of length T; presumably y holds the time-T
# cross-section of observations — confirm against quantecon's replicate().
x, y = ar.replicate(T=T, num_reps=100000)
mu_x, mu_y, Sigma_x, Sigma_y = ar.stationary_distributions()
# Normal density implied by the stationary mean and variance of y.
f_y = norm(loc=float(mu_y), scale=float(np.sqrt(Sigma_y)))
y = y.flatten()
# Compare the simulated histogram against the stationary density.
ax.hist(y, bins=50, density=True, alpha=0.4)
ygrid = np.linspace(ymin, ymax, 150)
ax.plot(ygrid, f_y.pdf(ygrid), 'k-', lw=2, alpha=0.8, label='true density')
ax.legend()
plt.show()
| 0 | 0 | 0 |
a6664b1d0580e21b207addf21d9ef99693e8773a | 2,910 | py | Python | calc_main.py | PaprikaX33/stupidly-simple-calculator | d35085baf7ce78d0fc111e3d8a7e8232c270998b | [
"MIT"
] | null | null | null | calc_main.py | PaprikaX33/stupidly-simple-calculator | d35085baf7ce78d0fc111e3d8a7e8232c270998b | [
"MIT"
] | 7 | 2019-10-09T04:13:54.000Z | 2019-10-14T03:10:11.000Z | calc_main.py | PaprikaX33/stupidly-simple-calculator | d35085baf7ce78d0fc111e3d8a7e8232c270998b | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
import math
import parser as pr
while True:
print("\n CALCULATOR MENU")
print("1 for addition :")
print("2 for subtraction :")
print('3 for multiplication :')
print("4 for raise to power:")
print("5 for Division:")
print("6 for floor division:")
print("7 for factorial:")
print("8 for Statement based:")
choice=int(input("enter any choice:"))
if choice==1:
additon()
elif choice==2:
subtract()
elif choice==3:
multiplication()
elif choice==4:
power()
elif choice==5:
divide()
elif choice==6:
floor_division()
elif choice==7:
factorial()
elif choice==8:
statement_wrapper()
else:
print("wrong input")
exit(0)
| 25.752212 | 82 | 0.519244 | #!/usr/bin/env python2
import math
import parser as pr
def add(a, b):
    """Return the sum of ``a`` and ``b``."""
    total = a + b
    return total
def red(a, b):
    """Return ``a`` minus ``b``."""
    difference = a - b
    return difference
def mul(a, b):
    """Return the product of ``a`` and ``b``."""
    product = a * b
    return product
def div(a, b):
    """Return ``a`` divided by ``b``."""
    quotient = a / b
    return quotient
def power(a, b):
    """Return ``a`` raised to the power ``b``."""
    result = a ** b
    return result
def statement_wrapper():
    """Read one expression from the user, evaluate it via the ``pr`` parser,
    and print the result (or ``Undefined`` when the parser returns None)."""
    # Dispatch table mapping operator symbols to the arithmetic helpers.
    operations = {
        '+': add,
        '-': red,
        '*': mul,
        '/': div,
        '^': power,
    }
    statement = raw_input(">>")
    result = pr.apply(statement, darg=operations)
    if result is None:
        print("<< Undefined")
    else:
        # BUG FIX: previously built a tuple ("<< ", str(result)) and printed
        # its repr; now print a single formatted string.
        print("<< " + str(result))
    return
# Interactive menu loop.  NOTE(review): the helper functions below are
# re-defined on every loop iteration, and the nested ``power`` shadows the
# module-level two-argument ``power`` used by statement_wrapper's dispatch
# table after the first iteration — confirm whether that is intended.
# NOTE(review): the shebang declares Python 2 but ``input()`` is used here,
# which eval()s under Python 2 — verify the intended interpreter.
while True:
    print("\n CALCULATOR MENU")
    print("1 for addition :")
    print("2 for subtraction :")
    print('3 for multiplication :')
    print("4 for raise to power:")
    print("5 for Division:")
    print("6 for floor division:")
    print("7 for factorial:")
    print("8 for Statement based:")
    choice=int(input("enter any choice:"))
    # NOTE(review): "additon" is a typo for "addition"; kept because the
    # dispatch below calls it by this exact name.
    def additon():
        a=int(input("enter 1st no to perform addition:")) #a-first input
        b=int(input("enter 2nd no to perform addition:")) #b-second input
        c=add(a, b)
        print("sum is:",c)
    def subtract():
        a = int(input("enter 1st no to perform subtraction:"))
        b = int(input("enter 2nd no to perform subtraction:"))
        c = red(a, b)
        print("subtraction is:", c)
    def multiplication():
        a = int(input("enter 1st no to perform multipication:"))
        b = int(input("enter 2nd no to perform multiplication:"))
        c = mul(a, b)
        print("multiplication is:", c)
    # NOTE(review): this zero-argument ``power`` shadows the module-level
    # power(a, b); its output message says "division is:" — likely a
    # copy-paste error, should read "power is:".
    def power():
        a = int(input("enter base :"))
        b = int(input("enter power :"))
        c = pow(a, b)
        print("division is:", c)
    def divide():
        a = int(input("enter 1st no to perform division:"))
        b = int(input("enter 2nd no to perform division:"))
        c = div(a, b)
        print("division is:", c)
    def floor_division():
        a = int(input("enter 1st no to perform floor division:"))
        b = int(input("enter 2nd no to perform floor division:"))
        c = a // b
        print("floor division is:",c)
    def factorial():
        res = 0
        num = int(input("enter a number: "))
        if num < 0:
            print("Sorry, factorial does not exist for negative numbers")
        elif num == 0:
            print("The factorial of 0 is 1")
        else:
            # Equivalent to math.factorial(num); written as (num-1)! * num.
            res = math.factorial(num-1)*num
            print("The factorial of",num,"is:",res)
    # Dispatch on the menu choice; any unknown choice exits the program.
    if choice==1:
        additon()
    elif choice==2:
        subtract()
    elif choice==3:
        multiplication()
    elif choice==4:
        power()
    elif choice==5:
        divide()
    elif choice==6:
        floor_division()
    elif choice==7:
        factorial()
    elif choice==8:
        statement_wrapper()
    else:
        print("wrong input")
        exit(0)
| 1,740 | 0 | 362 |
dbdd6aecbadab47e637b3959e99f9f1394312c3b | 12,312 | py | Python | dask/dataframe/indexing.py | deHasara/dask | fb544144611b25a6f23d90637038a93f93153f8f | [
"BSD-3-Clause"
] | null | null | null | dask/dataframe/indexing.py | deHasara/dask | fb544144611b25a6f23d90637038a93f93153f8f | [
"BSD-3-Clause"
] | 2 | 2020-03-30T22:18:11.000Z | 2020-04-02T04:13:36.000Z | dask/dataframe/indexing.py | deHasara/dask | fb544144611b25a6f23d90637038a93f93153f8f | [
"BSD-3-Clause"
] | 1 | 2020-04-29T19:28:41.000Z | 2020-04-29T19:28:41.000Z | from datetime import datetime
from collections import defaultdict
import bisect
import numpy as np
import pandas as pd
from .core import new_dd_object, Series
from ..array.core import Array
from .utils import is_index_like, meta_nonempty
from . import methods
from ..base import tokenize
from ..highlevelgraph import HighLevelGraph
class _LocIndexer(_IndexerBase):
""" Helper class for the .loc accessor """
@property
def _loc(self, iindexer, cindexer):
""" Helper function for the .loc accessor """
if isinstance(iindexer, Series):
return self._loc_series(iindexer, cindexer)
elif isinstance(iindexer, Array):
return self._loc_array(iindexer, cindexer)
elif callable(iindexer):
return self._loc(iindexer(self.obj), cindexer)
if self.obj.known_divisions:
iindexer = self._maybe_partial_time_string(iindexer)
if isinstance(iindexer, slice):
return self._loc_slice(iindexer, cindexer)
elif isinstance(iindexer, (list, np.ndarray)):
return self._loc_list(iindexer, cindexer)
else:
# element should raise KeyError
return self._loc_element(iindexer, cindexer)
else:
if isinstance(iindexer, (list, np.ndarray)):
# applying map_pattition to each partitions
# results in duplicated NaN rows
msg = "Cannot index with list against unknown division"
raise KeyError(msg)
elif not isinstance(iindexer, slice):
iindexer = slice(iindexer, iindexer)
meta = self._make_meta(iindexer, cindexer)
return self.obj.map_partitions(
methods.try_loc, iindexer, cindexer, meta=meta
)
def _maybe_partial_time_string(self, iindexer):
"""
Convert index-indexer for partial time string slicing
if obj.index is DatetimeIndex / PeriodIndex
"""
idx = meta_nonempty(self.obj._meta.index)
iindexer = _maybe_partial_time_string(idx, iindexer, kind="loc")
return iindexer
def _partition_of_index_value(divisions, val):
    """Return the number of the partition whose range contains ``val``.

    >>> _partition_of_index_value([0, 5, 10], 3)
    0
    >>> _partition_of_index_value([0, 5, 10], 8)
    1
    >>> _partition_of_index_value([0, 5, 10], 100)
    1
    >>> _partition_of_index_value([0, 5, 10], 5)  # left-inclusive divisions
    1
    """
    if divisions[0] is None:
        raise ValueError("Can not use loc on DataFrame without known divisions")
    needle = _coerce_loc_index(divisions, val)
    position = bisect.bisect_right(divisions, needle) - 1
    last_partition = len(divisions) - 2
    return min(last_partition, max(0, position))
def _partitions_of_index_values(divisions, values):
"""Return defaultdict of division and values pairs
Each key corresponds to the division which values are index values belong
to the division.
>>> sorted(_partitions_of_index_values([0, 5, 10], [3]).items())
[(0, [3])]
>>> sorted(_partitions_of_index_values([0, 5, 10], [3, 8, 5]).items())
[(0, [3]), (1, [8, 5])]
"""
if divisions[0] is None:
msg = "Can not use loc on DataFrame without known divisions"
raise ValueError(msg)
results = defaultdict(list)
values = pd.Index(values, dtype=object)
for val in values:
i = bisect.bisect_right(divisions, val)
div = min(len(divisions) - 2, max(0, i - 1))
results[div].append(val)
return results
def _coerce_loc_index(divisions, o):
"""Transform values to be comparable against divisions
This is particularly valuable to use with pandas datetimes
"""
if divisions and isinstance(divisions[0], datetime):
return pd.Timestamp(o)
if divisions and isinstance(divisions[0], np.datetime64):
return np.datetime64(o).astype(divisions[0].dtype)
return o
def _maybe_partial_time_string(index, indexer, kind):
    """
    Convert indexer for partial string selection
    if data has DatetimeIndex/PeriodIndex
    """
    # do not pass dd.Index
    assert is_index_like(index)
    # Only datetime-like indexes support partial string indexing.
    if not isinstance(index, (pd.DatetimeIndex, pd.PeriodIndex)):
        return indexer
    # NOTE(review): _maybe_cast_slice_bound is a private pandas API whose
    # signature has changed across pandas versions — verify compatibility.
    if isinstance(indexer, slice):
        # Cast only string bounds; numeric/timestamp bounds pass through.
        if isinstance(indexer.start, str):
            start = index._maybe_cast_slice_bound(indexer.start, "left", kind)
        else:
            start = indexer.start
        if isinstance(indexer.stop, str):
            stop = index._maybe_cast_slice_bound(indexer.stop, "right", kind)
        else:
            stop = indexer.stop
        return slice(start, stop)
    elif isinstance(indexer, str):
        # A bare string like "2001-01" selects the whole matching period.
        start = index._maybe_cast_slice_bound(indexer, "left", "loc")
        stop = index._maybe_cast_slice_bound(indexer, "right", "loc")
        return slice(min(start, stop), max(start, stop))
    return indexer
| 33.275676 | 87 | 0.584146 | from datetime import datetime
from collections import defaultdict
import bisect
import numpy as np
import pandas as pd
from .core import new_dd_object, Series
from ..array.core import Array
from .utils import is_index_like, meta_nonempty
from . import methods
from ..base import tokenize
from ..highlevelgraph import HighLevelGraph
class _IndexerBase:
def __init__(self, obj):
self.obj = obj
@property
def _name(self):
return self.obj._name
@property
def _meta_indexer(self):
raise NotImplementedError
def _make_meta(self, iindexer, cindexer):
"""
get metadata
"""
if cindexer is None:
return self.obj
else:
return self._meta_indexer[:, cindexer]
class _iLocIndexer(_IndexerBase):
    """Helper class for the ``.iloc`` accessor (column selection only)."""
    @property
    def _meta_indexer(self):
        return self.obj._meta.iloc
    def __getitem__(self, key):
        # dataframe
        msg = (
            "'DataFrame.iloc' only supports selecting columns. "
            "It must be used like 'df.iloc[:, column_indexer]'."
        )
        # Only the two-dimensional form df.iloc[:, cols] is supported;
        # anything else (row selection) is rejected.
        if not isinstance(key, tuple):
            raise NotImplementedError(msg)
        if len(key) > 2:
            raise ValueError("Too many indexers")
        iindexer, cindexer = key
        if iindexer != slice(None):
            raise NotImplementedError(msg)
        if not self.obj.columns.is_unique:
            # if there are any duplicate column names, do an iloc
            return self._iloc(iindexer, cindexer)
        else:
            # otherwise dispatch to dask.dataframe.core.DataFrame.__getitem__
            col_names = self.obj.columns[cindexer]
            return self.obj.__getitem__(col_names)
    def _iloc(self, iindexer, cindexer):
        # Positional column selection applied partition-wise.
        assert iindexer == slice(None)
        meta = self._make_meta(iindexer, cindexer)
        return self.obj.map_partitions(methods.iloc, cindexer, meta=meta)
class _LocIndexer(_IndexerBase):
""" Helper class for the .loc accessor """
    @property
    def _meta_indexer(self):
        """Label-based indexer over the collection's metadata."""
        return self.obj._meta.loc
    def __getitem__(self, key):
        """Split ``obj.loc[key]`` into index- and column-indexers and dispatch."""
        if isinstance(key, tuple):
            # multi-dimensional selection
            if len(key) > self.obj.ndim:
                # raise from pandas
                msg = "Too many indexers"
                raise pd.core.indexing.IndexingError(msg)
            iindexer = key[0]
            cindexer = key[1]
        else:
            # if self.obj is Series, cindexer is always None
            iindexer = key
            cindexer = None
        return self._loc(iindexer, cindexer)
    def _loc(self, iindexer, cindexer):
        """ Helper function for the .loc accessor """
        # Dispatch on the indexer type: boolean Series/Array masks, callables,
        # then (for known divisions) slices, lists, and scalar labels.
        if isinstance(iindexer, Series):
            return self._loc_series(iindexer, cindexer)
        elif isinstance(iindexer, Array):
            return self._loc_array(iindexer, cindexer)
        elif callable(iindexer):
            # Callable indexers are applied to the object first (pandas-style).
            return self._loc(iindexer(self.obj), cindexer)
        if self.obj.known_divisions:
            iindexer = self._maybe_partial_time_string(iindexer)
            if isinstance(iindexer, slice):
                return self._loc_slice(iindexer, cindexer)
            elif isinstance(iindexer, (list, np.ndarray)):
                return self._loc_list(iindexer, cindexer)
            else:
                # element should raise KeyError
                return self._loc_element(iindexer, cindexer)
        else:
            if isinstance(iindexer, (list, np.ndarray)):
                # applying map_partitions to each partition
                # results in duplicated NaN rows
                msg = "Cannot index with list against unknown division"
                raise KeyError(msg)
            elif not isinstance(iindexer, slice):
                # Normalise a scalar label to a single-element slice.
                iindexer = slice(iindexer, iindexer)
            # Without known divisions, try .loc on every partition.
            meta = self._make_meta(iindexer, cindexer)
            return self.obj.map_partitions(
                methods.try_loc, iindexer, cindexer, meta=meta
            )
    def _maybe_partial_time_string(self, iindexer):
        """
        Convert index-indexer for partial time string slicing
        if obj.index is DatetimeIndex / PeriodIndex
        """
        # meta_nonempty gives a populated dummy index of the right type.
        idx = meta_nonempty(self.obj._meta.index)
        iindexer = _maybe_partial_time_string(idx, iindexer, kind="loc")
        return iindexer
def _loc_series(self, iindexer, cindexer):
meta = self._make_meta(iindexer, cindexer)
return self.obj.map_partitions(
methods.loc, iindexer, cindexer, token="loc-series", meta=meta
)
def _loc_array(self, iindexer, cindexer):
iindexer_series = iindexer.to_dask_dataframe("_", self.obj.index)
return self._loc_series(iindexer_series, cindexer)
def _loc_list(self, iindexer, cindexer):
name = "loc-%s" % tokenize(iindexer, self.obj)
parts = self._get_partitions(iindexer)
meta = self._make_meta(iindexer, cindexer)
if len(iindexer):
dsk = {}
divisions = []
items = sorted(parts.items())
for i, (div, indexer) in enumerate(items):
dsk[name, i] = (methods.loc, (self._name, div), indexer, cindexer)
# append minimum value as division
divisions.append(sorted(indexer)[0])
# append maximum value of the last division
divisions.append(sorted(items[-1][1])[-1])
graph = HighLevelGraph.from_collections(name, dsk, dependencies=[self.obj])
else:
divisions = [None, None]
dsk = {(name, 0): meta.head(0)}
graph = HighLevelGraph.from_collections(name, dsk)
return new_dd_object(graph, name, meta=meta, divisions=divisions)
def _loc_element(self, iindexer, cindexer):
name = "loc-%s" % tokenize(iindexer, self.obj)
part = self._get_partitions(iindexer)
if iindexer < self.obj.divisions[0] or iindexer > self.obj.divisions[-1]:
raise KeyError("the label [%s] is not in the index" % str(iindexer))
dsk = {
(name, 0): (
methods.loc,
(self._name, part),
slice(iindexer, iindexer),
cindexer,
)
}
meta = self._make_meta(iindexer, cindexer)
graph = HighLevelGraph.from_collections(name, dsk, dependencies=[self.obj])
return new_dd_object(graph, name, meta=meta, divisions=[iindexer, iindexer])
def _get_partitions(self, keys):
if isinstance(keys, (list, np.ndarray)):
return _partitions_of_index_values(self.obj.divisions, keys)
else:
# element
return _partition_of_index_value(self.obj.divisions, keys)
def _coerce_loc_index(self, key):
return _coerce_loc_index(self.obj.divisions, key)
def _loc_slice(self, iindexer, cindexer):
name = "loc-%s" % tokenize(iindexer, cindexer, self)
assert isinstance(iindexer, slice)
assert iindexer.step in (None, 1)
if iindexer.start is not None:
start = self._get_partitions(iindexer.start)
else:
start = 0
if iindexer.stop is not None:
stop = self._get_partitions(iindexer.stop)
else:
stop = self.obj.npartitions - 1
if iindexer.start is None and self.obj.known_divisions:
istart = self.obj.divisions[0]
else:
istart = self._coerce_loc_index(iindexer.start)
if iindexer.stop is None and self.obj.known_divisions:
istop = self.obj.divisions[-1]
else:
istop = self._coerce_loc_index(iindexer.stop)
if stop == start:
dsk = {
(name, 0): (
methods.loc,
(self._name, start),
slice(iindexer.start, iindexer.stop),
cindexer,
)
}
divisions = [istart, istop]
else:
dsk = {
(name, 0): (
methods.loc,
(self._name, start),
slice(iindexer.start, None),
cindexer,
)
}
for i in range(1, stop - start):
if cindexer is None:
dsk[name, i] = (self._name, start + i)
else:
dsk[name, i] = (
methods.loc,
(self._name, start + i),
slice(None, None),
cindexer,
)
dsk[name, stop - start] = (
methods.loc,
(self._name, stop),
slice(None, iindexer.stop),
cindexer,
)
if iindexer.start is None:
div_start = self.obj.divisions[0]
else:
div_start = max(istart, self.obj.divisions[start])
if iindexer.stop is None:
div_stop = self.obj.divisions[-1]
else:
div_stop = min(istop, self.obj.divisions[stop + 1])
divisions = (
(div_start,) + self.obj.divisions[start + 1 : stop + 1] + (div_stop,)
)
assert len(divisions) == len(dsk) + 1
meta = self._make_meta(iindexer, cindexer)
graph = HighLevelGraph.from_collections(name, dsk, dependencies=[self.obj])
return new_dd_object(graph, name, meta=meta, divisions=divisions)
def _partition_of_index_value(divisions, val):
    """In which partition does this value lie?

    >>> _partition_of_index_value([0, 5, 10], 3)
    0
    >>> _partition_of_index_value([0, 5, 10], 8)
    1
    >>> _partition_of_index_value([0, 5, 10], 100)
    1
    >>> _partition_of_index_value([0, 5, 10], 5)  # left-inclusive divisions
    1
    """
    if divisions[0] is None:
        raise ValueError("Can not use loc on DataFrame without known divisions")
    # Clamp the bisect position into the valid partition range [0, n_parts - 1].
    position = bisect.bisect_right(divisions, _coerce_loc_index(divisions, val))
    return min(len(divisions) - 2, max(0, position - 1))
def _partitions_of_index_values(divisions, values):
"""Return defaultdict of division and values pairs
Each key corresponds to the division which values are index values belong
to the division.
>>> sorted(_partitions_of_index_values([0, 5, 10], [3]).items())
[(0, [3])]
>>> sorted(_partitions_of_index_values([0, 5, 10], [3, 8, 5]).items())
[(0, [3]), (1, [8, 5])]
"""
if divisions[0] is None:
msg = "Can not use loc on DataFrame without known divisions"
raise ValueError(msg)
results = defaultdict(list)
values = pd.Index(values, dtype=object)
for val in values:
i = bisect.bisect_right(divisions, val)
div = min(len(divisions) - 2, max(0, i - 1))
results[div].append(val)
return results
def _coerce_loc_index(divisions, o):
"""Transform values to be comparable against divisions
This is particularly valuable to use with pandas datetimes
"""
if divisions and isinstance(divisions[0], datetime):
return pd.Timestamp(o)
if divisions and isinstance(divisions[0], np.datetime64):
return np.datetime64(o).astype(divisions[0].dtype)
return o
def _maybe_partial_time_string(index, indexer, kind):
    """
    Convert indexer for partial string selection
    if data has DatetimeIndex/PeriodIndex.

    Args:
        index: a concrete (non-dask) pandas Index.
        indexer: slice, str, or other label indexer.
        kind: slicing kind forwarded to pandas' bound casting (e.g. "loc").

    Returns:
        ``indexer`` with any string bounds cast to concrete index bounds; all
        other indexer types are returned unchanged.
    """
    # do not pass dd.Index
    assert is_index_like(index)
    if not isinstance(index, (pd.DatetimeIndex, pd.PeriodIndex)):
        return indexer
    if isinstance(indexer, slice):
        if isinstance(indexer.start, str):
            start = index._maybe_cast_slice_bound(indexer.start, "left", kind)
        else:
            start = indexer.start
        if isinstance(indexer.stop, str):
            stop = index._maybe_cast_slice_bound(indexer.stop, "right", kind)
        else:
            stop = indexer.stop
        return slice(start, stop)
    elif isinstance(indexer, str):
        # Fix: use the caller-supplied ``kind`` for consistency with the slice
        # branch above (this was previously hard-coded to "loc"; the only
        # caller in this file passes kind="loc", so behavior is unchanged).
        start = index._maybe_cast_slice_bound(indexer, "left", kind)
        stop = index._maybe_cast_slice_bound(indexer, "right", kind)
        return slice(min(start, stop), max(start, stop))
    return indexer
| 6,657 | 426 | 288 |
a3198459c2694fb22edc0499faba3e03bb95e920 | 2,312 | py | Python | src/common/networks/component/pggan.py | pfnet-research/chainer-stylegan | 9bb2f5ac9d68958e594d03662ca791f403a13574 | [
"MIT"
] | 84 | 2019-02-28T12:57:37.000Z | 2021-12-05T16:54:36.000Z | model/common/networks/component/pggan.py | alexander7161/FaceGen | c1697a8bfc3c551a3dc2bc45078e8e4e5ae41368 | [
"MIT"
] | 31 | 2019-12-11T12:29:46.000Z | 2022-03-12T00:20:52.000Z | model/common/networks/component/pggan.py | alexander7161/FaceGen | c1697a8bfc3c551a3dc2bc45078e8e4e5ae41368 | [
"MIT"
] | 23 | 2019-03-01T17:59:19.000Z | 2021-08-12T18:08:36.000Z | import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
| 38.533333 | 106 | 0.636246 | import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
def feature_vector_normalization(x, eps=1e-8):
    """Pixelwise feature-vector normalization.

    For input ``x`` of shape (B, C, H, W), rescales each spatial feature
    vector by the inverse root-mean-square over the channel axis; ``eps``
    guards against division by zero.
    """
    scale = 1.0 / F.sqrt(F.mean(x * x, axis=1, keepdims=True) + eps)
    return F.broadcast_to(scale, x.data.shape) * x
class EqualizedConv2d(chainer.Chain):
    """2D convolution with an equalized learning rate (PGGAN-style).

    Weights are initialized from N(0, (1/lrmul)^2) and the input is rescaled
    at call time by ``inv_c = gain / sqrt(in_ch * ksize**2) * lrmul``, so the
    effective weight scale matches He initialization regardless of fan-in.
    """

    def __init__(self, in_ch, out_ch, ksize, stride, pad, nobias=False, gain=np.sqrt(2), lrmul=1):
        w = chainer.initializers.Normal(1.0/lrmul)  # equalized learning rate
        # Runtime scaling constant; applied to the input in __call__.
        self.inv_c = gain * np.sqrt(1.0 / (in_ch * ksize ** 2))
        self.inv_c = self.inv_c * lrmul
        super(EqualizedConv2d, self).__init__()
        with self.init_scope():
            self.c = L.Convolution2D(in_ch, out_ch, ksize, stride, pad, initialW=w, nobias=nobias)

    def __call__(self, x):
        # Scale the input rather than the weights: mathematically equivalent
        # for the forward pass, but keeps per-layer gradient scales uniform.
        return self.c(self.inv_c * x)
class EqualizedDeconv2d(chainer.Chain):
    """2D deconvolution (transposed convolution) with an equalized learning rate.

    Same scheme as EqualizedConv2d, but the scaling constant uses only the
    input channel count: ``inv_c = gain / sqrt(in_ch) * lrmul``.
    """

    def __init__(self, in_ch, out_ch, ksize, stride, pad, nobias=False, gain=np.sqrt(2), lrmul=1):
        w = chainer.initializers.Normal(1.0/lrmul)  # equalized learning rate
        # Runtime scaling constant; applied to the input in __call__.
        self.inv_c = gain * np.sqrt(1.0 / (in_ch))
        self.inv_c = self.inv_c * lrmul
        super(EqualizedDeconv2d, self).__init__()
        with self.init_scope():
            self.c = L.Deconvolution2D(in_ch, out_ch, ksize, stride, pad, initialW=w, nobias=nobias)

    def __call__(self, x):
        return self.c(self.inv_c * x)
class EqualizedLinear(chainer.Chain):
    """Fully-connected layer with an equalized learning rate.

    Same scheme as EqualizedConv2d with fan-in = in_ch:
    ``inv_c = gain / sqrt(in_ch) * lrmul``.
    """

    def __init__(self, in_ch, out_ch, initial_bias=None, nobias=False, gain=np.sqrt(2), lrmul=1):
        w = chainer.initializers.Normal(1.0/lrmul)  # equalized learning rate
        # Runtime scaling constant; applied to the input in __call__.
        self.inv_c = gain * np.sqrt(1.0 / in_ch)
        self.inv_c = self.inv_c * lrmul
        super(EqualizedLinear, self).__init__()
        with self.init_scope():
            self.c = L.Linear(in_ch, out_ch, initialW=w, initial_bias=initial_bias, nobias=nobias)

    def __call__(self, x):
        return self.c(self.inv_c * x)
def minibatch_std(x):
    """Append a minibatch-standard-deviation feature map to ``x``.

    Computes the mean (over the batch axis) standard deviation of every
    feature, averages it to a single scalar, and concatenates it as one
    extra constant channel of shape (B, 1, H, W).
    """
    batch_mean = F.mean(x, axis=0, keepdims=True)
    centered = x - F.broadcast_to(batch_mean, x.shape)
    variance = F.mean(centered * centered, axis=0, keepdims=True)
    std_scalar = F.mean(F.sqrt(variance + 1e-8), keepdims=True)
    std_channel = F.broadcast_to(std_scalar, (x.shape[0], 1, x.shape[2], x.shape[3]))
    return F.concat([x, std_channel], axis=1)
| 1,867 | 50 | 288 |
7c0783775708e164a7de2a7db1e9ec1cd5c7040a | 182 | py | Python | source/__init__.py | Very1Fake/monitor | bb47352cffebd8b99bafac0a342324b042b3d826 | [
"Apache-2.0",
"MIT"
] | null | null | null | source/__init__.py | Very1Fake/monitor | bb47352cffebd8b99bafac0a342324b042b3d826 | [
"Apache-2.0",
"MIT"
] | null | null | null | source/__init__.py | Very1Fake/monitor | bb47352cffebd8b99bafac0a342324b042b3d826 | [
"Apache-2.0",
"MIT"
] | null | null | null | from packaging.version import Version
# Package metadata attributes.
__credits__ = ["very1fake"]
__license__ = "MIT/Apache-2.0"
__version__ = "1.0.6"
__maintainer__ = "very1fake"
# Parsed PEP 440 Version object for programmatic version comparisons.
version = Version(__version__)
| 18.2 | 37 | 0.752747 | from packaging.version import Version
# Package metadata attributes.
__credits__ = ["very1fake"]
__license__ = "MIT/Apache-2.0"
__version__ = "1.0.6"
__maintainer__ = "very1fake"
# Parsed PEP 440 Version object for programmatic version comparisons.
version = Version(__version__)
| 0 | 0 | 0 |
62e63d0c6b135c8198eca554bf7e6a2e7baac64d | 433 | py | Python | ACM ICPC/String/Top_K_Frequent_Words/top_k_frequent_words.py | shreejitverma/GeeksforGeeks | d7bcb166369fffa9a031a258e925b6aff8d44e6c | [
"MIT"
] | 2 | 2022-02-18T05:14:28.000Z | 2022-03-08T07:00:08.000Z | ACM ICPC/String/Top_K_Frequent_Words/top_k_frequent_words.py | shivaniverma1/Competitive-Programming-1 | d7bcb166369fffa9a031a258e925b6aff8d44e6c | [
"MIT"
] | 6 | 2022-01-13T04:31:04.000Z | 2022-03-12T01:06:16.000Z | ACM ICPC/String/Top_K_Frequent_Words/top_k_frequent_words.py | shivaniverma1/Competitive-Programming-1 | d7bcb166369fffa9a031a258e925b6aff8d44e6c | [
"MIT"
] | 2 | 2022-02-14T19:53:53.000Z | 2022-02-18T05:14:30.000Z | # Returns k number of words sorted on their occurrence
if __name__ == '__main__':
    # Read a space-separated word list and the cut-off k from stdin, then
    # print the k most frequent words.
    inpt = input('Enter space seperated words: ').split()
    k = int(
        input(
            'Enter the amount of words to retreive based on their occurance: ')
    )
    print(top_k_frequent_words(inpt, k))
| 30.928571 | 79 | 0.65127 | # Returns k number of words sorted on their occurrence
def top_k_frequent_words(words, k: int):
    """Return the ``k`` most frequent words, most frequent first.

    Words with equal counts appear in alphabetical order, matching the
    original implementation's tie-breaking.

    Args:
        words: iterable of words (each occurrence counted).
        k: maximum number of words to return.

    Returns:
        List of at most ``k`` unique words sorted by descending frequency.
    """
    from collections import Counter  # local import: keeps module-level deps unchanged

    # Count once in O(n). The original used ``words.count(x)`` inside the
    # sort key, which is O(n) per unique word — accidentally quadratic.
    counts = Counter(words)
    # Alphabetical pre-sort + stable sort by count preserves the original
    # "count descending, then alphabetical" ordering exactly.
    return sorted(sorted(counts), key=lambda w: counts[w], reverse=True)[:k]
if __name__ == '__main__':
    # Read a space-separated word list and the cut-off k from stdin, then
    # print the k most frequent words.
    inpt = input('Enter space seperated words: ').split()
    k = int(
        input(
            'Enter the amount of words to retreive based on their occurance: ')
    )
    print(top_k_frequent_words(inpt, k))
| 114 | 0 | 22 |
d54be79502f4c11601fd7fb259a73d045fb9a1a2 | 415 | py | Python | church/migrations/0018_facebooklink_file_upload.py | khanhpn/florida | 5e83d0561b9f41ff79383a6a2f0a84d6c8459ef0 | [
"Apache-2.0"
] | 1 | 2021-01-22T02:52:33.000Z | 2021-01-22T02:52:33.000Z | church/migrations/0018_facebooklink_file_upload.py | khanhpn/florida | 5e83d0561b9f41ff79383a6a2f0a84d6c8459ef0 | [
"Apache-2.0"
] | null | null | null | church/migrations/0018_facebooklink_file_upload.py | khanhpn/florida | 5e83d0561b9f41ff79383a6a2f0a84d6c8459ef0 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 3.2.5 on 2021-09-03 12:56
from django.db import migrations, models
| 21.842105 | 77 | 0.59759 | # Generated by Django 3.2.5 on 2021-09-03 12:56
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional ``file_upload`` FileField to the FacebookLink model."""

    dependencies = [
        ('church', '0017_linkedchurch'),
    ]

    operations = [
        migrations.AddField(
            model_name='facebooklink',
            name='file_upload',
            # null=True keeps pre-existing rows valid; uploads are bucketed
            # into date-based subdirectories.
            field=models.FileField(null=True, upload_to='uploads/%Y/%m/%d/'),
        ),
    ]
| 0 | 301 | 23 |
36a2775a2ac44b7d929353e95a007696147eb907 | 1,270 | py | Python | crypto_address_validator/__init__.py | null-po1nter/crypto-address-validator | a65239ff86613c1cca744849c3c6910bb4eacf64 | [
"Unlicense"
] | 6 | 2022-01-11T15:25:57.000Z | 2022-02-16T08:05:07.000Z | crypto_address_validator/__init__.py | null-po1nter/crypto-address-validator | a65239ff86613c1cca744849c3c6910bb4eacf64 | [
"Unlicense"
] | null | null | null | crypto_address_validator/__init__.py | null-po1nter/crypto-address-validator | a65239ff86613c1cca744849c3c6910bb4eacf64 | [
"Unlicense"
] | 2 | 2022-01-11T15:26:05.000Z | 2022-03-22T08:05:59.000Z | from crypto_address_validator.validators import default_validator
from crypto_address_validator.validators import atom_validator
from crypto_address_validator.validators import bnb_validator
from crypto_address_validator.validators import aion_validator
from crypto_address_validator.validators import eos_validator
from crypto_address_validator.validators import iost_validator
from crypto_address_validator.validators import miota_validator
# Registry mapping a lowercase currency symbol to the validator module that
# exposes ``is_valid_address`` for that currency ('btc' uses the default
# validator module).
validators = {
    'btc': default_validator,
    'atom': atom_validator,
    'bnb': bnb_validator,
    'aion': aion_validator,
    'eos': eos_validator,
    'iost': iost_validator,
    'miota': miota_validator
}
def validate(symbol: str, address: str) -> bool:
    """Validate ``address`` for the currency identified by ``symbol``.

    Args:
        symbol (str): Currency symbol, e.g. 'btc' or 'atom'.
        address (str): Currency address to validate.

    Returns:
        bool: Result of address validation.
    """
    try:
        address_validator = validators[symbol]
    except (TypeError, KeyError):
        # Unknown (or unhashable) symbol: report it and reject.
        print(f'"{symbol}" currency is not supported.')
        return False
    if not isinstance(address, str):
        return False
    # Delegate to the currency-specific validator module.
    return address_validator.is_valid_address(address)
| 30.238095 | 65 | 0.735433 | from crypto_address_validator.validators import default_validator
from crypto_address_validator.validators import atom_validator
from crypto_address_validator.validators import bnb_validator
from crypto_address_validator.validators import aion_validator
from crypto_address_validator.validators import eos_validator
from crypto_address_validator.validators import iost_validator
from crypto_address_validator.validators import miota_validator
# Registry mapping a lowercase currency symbol to the validator module that
# exposes ``is_valid_address`` for that currency ('btc' uses the default
# validator module).
validators = {
    'btc': default_validator,
    'atom': atom_validator,
    'bnb': bnb_validator,
    'aion': aion_validator,
    'eos': eos_validator,
    'iost': iost_validator,
    'miota': miota_validator
}
def validate(symbol: str, address: str) -> bool:
    """Validate ``address`` for the currency identified by ``symbol``.

    Args:
        symbol (str): Currency symbol, e.g. 'btc' or 'atom'.
        address (str): Currency address to validate.

    Returns:
        bool: Result of address validation.
    """
    try:
        address_validator = validators[symbol]
    except (TypeError, KeyError):
        # Unknown (or unhashable) symbol: report it and reject.
        print(f'"{symbol}" currency is not supported.')
        return False
    if not isinstance(address, str):
        return False
    # Delegate to the currency-specific validator module.
    return address_validator.is_valid_address(address)
| 0 | 0 | 0 |
59f7061bacac1bfdf2d744f13106a5a5eadbd85e | 6,312 | py | Python | src/create_topic_graph.py | medialab/webclim_propagation_graphs_facebook | d2c6345a5045a29c903b7bd355c6c13ba01d0e71 | [
"MIT"
] | 1 | 2020-05-12T14:18:00.000Z | 2020-05-12T14:18:00.000Z | src/create_topic_graph.py | medialab/webclim_analyses | d2c6345a5045a29c903b7bd355c6c13ba01d0e71 | [
"MIT"
] | null | null | null | src/create_topic_graph.py | medialab/webclim_analyses | d2c6345a5045a29c903b7bd355c6c13ba01d0e71 | [
"MIT"
] | null | null | null | """Based on which facebook groups have shared the fake news
published by specific domain names,
this script will generate a bipartite graph
with the facebook groups and the domain names"""
import pandas as pd
import numpy as np
import matplotlib as mpl
import networkx as nx
from networkx.algorithms import bipartite
import ural
import os
import sys
def clean_data(CLEAN_DATA_DIRECTORY, SCIENTIFIC_TOPIC, DATE):
    """Import and prepare the dataframe to be used to build the graphs.

    Reads ``./{CLEAN_DATA_DIRECTORY}/fake_posts_{SCIENTIFIC_TOPIC}_{DATE}.csv``,
    filters it to deduplicated Facebook posts, and builds per-group and
    per-domain aggregates.

    Returns:
        (posts_df, fb_group_df, domain_df): cleaned post-level dataframe plus
        the Facebook-group and domain-name node attribute dataframes.
    """
    posts_path = os.path.join(".", CLEAN_DATA_DIRECTORY,
                              "fake_posts_" + SCIENTIFIC_TOPIC + "_" + DATE + ".csv")
    posts_df = pd.read_csv(posts_path)
    if DATE == "28_04_2020":
        # Remove the url with parameters from the analysis because CT return wrong results for them:
        posts_df['parameter_in_url'] = posts_df['url'].apply(lambda x: '?' in x)
        posts_df = posts_df[posts_df['parameter_in_url']==False]
    posts_df = posts_df[posts_df["platform"] == "Facebook"]
    posts_df = posts_df.dropna(subset=['account_id', 'url'])
    posts_df['account_id'] = posts_df['account_id'].apply(lambda x:int(x))
    # Sometimes a same facebook group can share multiple times the same URL,
    # creating multiple lines in the input CSV. We remove the duplicates here:
    posts_df = posts_df[['url', 'account_name', 'account_id',
                         'account_subscriber_count', 'actual_like_count']]
    posts_df = posts_df.drop_duplicates(subset=['url', 'account_id'], keep='last')
    posts_df['domain_name'] = posts_df['url'].apply(lambda x: ural.get_domain_name(x))
    if DATE == "28_04_2020":
        # Remove the platforms from the analysis:
        platforms = ["facebook.com", "youtube.com", "twitter.com", "wordpress.com", "instagram.com"]
        posts_df = posts_df[~posts_df['domain_name'].isin(platforms)]
    # We remove the facebook groups that have shared only one fake URL:
    vc = posts_df['account_id'].value_counts()
    posts_df = posts_df[posts_df['account_id'].isin(vc[vc > 1].index)]
    # We prepare a dataframe to import the facebook group nodes with specific attributes:
    # - the number of followers
    # - the account name -> label
    # - the fake news URL shared by this group -> node size
    fb_group_df = posts_df[['account_id', 'account_name', 'account_subscriber_count']]\
        .sort_values(by="account_subscriber_count", ascending=True)\
        .drop_duplicates(subset = ['account_id'], keep='last')
    temp = posts_df.groupby('account_id')['url'].apply(list)\
        .to_frame().reset_index()
    fb_group_df = fb_group_df.merge(temp, left_on='account_id', right_on='account_id', how='left')
    fb_group_df['nb_fake_news_shared'] = fb_group_df['url'].apply(lambda x:len(x))
    # We prepare a dataframe to import the domain name nodes with specific attributes:
    # - the fake news URL shared by this domain -> node size
    domain_df = posts_df[['url', 'domain_name']].drop_duplicates()\
        .groupby('domain_name')['url'].apply(list)\
        .to_frame().reset_index()
    domain_df['nb_fake_news_shared'] = domain_df['url'].apply(lambda x:len(x))
    return posts_df, fb_group_df, domain_df
def print_statistics(fb_group_df, domain_df):
    """Print the top-10 Facebook groups and domains by fake URLs shared."""
    tables = [
        ("The top 10 of facebook groups sharing the more fake URLs:\n",
         fb_group_df,
         ["account_name", "nb_fake_news_shared", "account_subscriber_count"]),
        ("The top 10 of domains sharing the more fake URLs:\n",
         domain_df,
         ["domain_name", "nb_fake_news_shared"]),
    ]
    print()
    for title, df, columns in tables:
        top10 = df[columns].sort_values(by='nb_fake_news_shared', ascending=False).head(10)
        print(title)
        print(top10.to_string(index=False))
        print()
def create_graph(posts_df, fb_group_df, domain_df,
                 GRAPH_DIRECTORY, SCIENTIFIC_TOPIC, DATE):
    """Create the bipartite graph with the facebook groups and the domain names.
    The edges represent the fact that this group has shared the URL coming from this domain.

    Writes the graph to ``./{GRAPH_DIRECTORY}/{SCIENTIFIC_TOPIC}_{DATE}.gexf``
    and returns the networkx Graph object.
    """
    bipartite_graph = nx.Graph()
    # Facebook group nodes, keyed by integer account id.
    for _, row in fb_group_df.iterrows():
        bipartite_graph.add_node(int(row['account_id']),
                                 label=row['account_name'],
                                 type="facebook_account_or_page",
                                 nb_fake_news_shared=row['nb_fake_news_shared'],
                                 nb_followers=row['account_subscriber_count'],
                                 )
    # Domain nodes, keyed by domain name string.
    for _, row in domain_df.iterrows():
        bipartite_graph.add_node(row['domain_name'],
                                 type="domain_name",
                                 nb_fake_news_shared=row['nb_fake_news_shared']
                                 )
    # One edge per (domain, group) pair appearing in the cleaned posts.
    bipartite_graph.add_edges_from(list(posts_df[['domain_name', 'account_id']]\
        .itertuples(index=False, name=None)))
    bipartite_graph_path = os.path.join(".", GRAPH_DIRECTORY, SCIENTIFIC_TOPIC + "_" + DATE + ".gexf")
    nx.write_gexf(bipartite_graph, bipartite_graph_path, encoding="utf-8")
    return bipartite_graph
if __name__ == "__main__":
    # Optional argv[1]: scientific topic (restricted to three known values).
    if len(sys.argv) >= 2:
        if sys.argv[1] in ["COVID-19", "health", "climate"]:
            SCIENTIFIC_TOPIC = sys.argv[1]
        else:
            print("Please enter only 'COVID-19', 'health' or 'climate' as argument.")
            exit()
    else:
        SCIENTIFIC_TOPIC = "COVID-19"
        print("The topic 'COVID-19' has been chosen by default.")
    # Optional argv[2]: date suffix of the input CSV.
    if len(sys.argv) >= 3:
        DATE = sys.argv[2]
    else:
        DATE = "02_06_2020"
        print("The date '{}' has been chosen by default.".format(DATE))
    CLEAN_DATA_DIRECTORY = "clean_data"
    GRAPH_DIRECTORY = "graph"
    # Pipeline: load + clean, report statistics, then build and save the graph.
    posts_df, fb_group_df, domain_df = clean_data(CLEAN_DATA_DIRECTORY, SCIENTIFIC_TOPIC, DATE)
    print_statistics(fb_group_df, domain_df)
    bipartite_graph = create_graph(posts_df, fb_group_df, domain_df,
                                   GRAPH_DIRECTORY, SCIENTIFIC_TOPIC, DATE)
    print("The '{}_{}.gexf' graph has been saved in the 'graph' folder.".format(SCIENTIFIC_TOPIC, DATE))
| 42.938776 | 104 | 0.64512 | """Based on which facebook groups have shared the fake news
published by specific domain names,
this script will generate a bipartite graph
with the facebook groups and the domain names"""
import pandas as pd
import numpy as np
import matplotlib as mpl
import networkx as nx
from networkx.algorithms import bipartite
import ural
import os
import sys
def clean_data(CLEAN_DATA_DIRECTORY, SCIENTIFIC_TOPIC, DATE):
    """Import and prepare the dataframe to be used to build the graphs.

    Reads ``./{CLEAN_DATA_DIRECTORY}/fake_posts_{SCIENTIFIC_TOPIC}_{DATE}.csv``,
    filters it to deduplicated Facebook posts, and builds per-group and
    per-domain aggregates.

    Returns:
        (posts_df, fb_group_df, domain_df): cleaned post-level dataframe plus
        the Facebook-group and domain-name node attribute dataframes.
    """
    posts_path = os.path.join(".", CLEAN_DATA_DIRECTORY,
                              "fake_posts_" + SCIENTIFIC_TOPIC + "_" + DATE + ".csv")
    posts_df = pd.read_csv(posts_path)
    if DATE == "28_04_2020":
        # Remove the url with parameters from the analysis because CT return wrong results for them:
        posts_df['parameter_in_url'] = posts_df['url'].apply(lambda x: '?' in x)
        posts_df = posts_df[posts_df['parameter_in_url']==False]
    posts_df = posts_df[posts_df["platform"] == "Facebook"]
    posts_df = posts_df.dropna(subset=['account_id', 'url'])
    posts_df['account_id'] = posts_df['account_id'].apply(lambda x:int(x))
    # Sometimes a same facebook group can share multiple times the same URL,
    # creating multiple lines in the input CSV. We remove the duplicates here:
    posts_df = posts_df[['url', 'account_name', 'account_id',
                         'account_subscriber_count', 'actual_like_count']]
    posts_df = posts_df.drop_duplicates(subset=['url', 'account_id'], keep='last')
    posts_df['domain_name'] = posts_df['url'].apply(lambda x: ural.get_domain_name(x))
    if DATE == "28_04_2020":
        # Remove the platforms from the analysis:
        platforms = ["facebook.com", "youtube.com", "twitter.com", "wordpress.com", "instagram.com"]
        posts_df = posts_df[~posts_df['domain_name'].isin(platforms)]
    # We remove the facebook groups that have shared only one fake URL:
    vc = posts_df['account_id'].value_counts()
    posts_df = posts_df[posts_df['account_id'].isin(vc[vc > 1].index)]
    # We prepare a dataframe to import the facebook group nodes with specific attributes:
    # - the number of followers
    # - the account name -> label
    # - the fake news URL shared by this group -> node size
    fb_group_df = posts_df[['account_id', 'account_name', 'account_subscriber_count']]\
        .sort_values(by="account_subscriber_count", ascending=True)\
        .drop_duplicates(subset = ['account_id'], keep='last')
    temp = posts_df.groupby('account_id')['url'].apply(list)\
        .to_frame().reset_index()
    fb_group_df = fb_group_df.merge(temp, left_on='account_id', right_on='account_id', how='left')
    fb_group_df['nb_fake_news_shared'] = fb_group_df['url'].apply(lambda x:len(x))
    # We prepare a dataframe to import the domain name nodes with specific attributes:
    # - the fake news URL shared by this domain -> node size
    domain_df = posts_df[['url', 'domain_name']].drop_duplicates()\
        .groupby('domain_name')['url'].apply(list)\
        .to_frame().reset_index()
    domain_df['nb_fake_news_shared'] = domain_df['url'].apply(lambda x:len(x))
    return posts_df, fb_group_df, domain_df
def print_statistics(fb_group_df, domain_df):
    """Print the top-10 Facebook groups and domains by fake URLs shared."""
    tables = [
        ("The top 10 of facebook groups sharing the more fake URLs:\n",
         fb_group_df,
         ["account_name", "nb_fake_news_shared", "account_subscriber_count"]),
        ("The top 10 of domains sharing the more fake URLs:\n",
         domain_df,
         ["domain_name", "nb_fake_news_shared"]),
    ]
    print()
    for title, df, columns in tables:
        top10 = df[columns].sort_values(by='nb_fake_news_shared', ascending=False).head(10)
        print(title)
        print(top10.to_string(index=False))
        print()
def create_graph(posts_df, fb_group_df, domain_df,
                 GRAPH_DIRECTORY, SCIENTIFIC_TOPIC, DATE):
    """Create the bipartite graph with the facebook groups and the domain names.
    The edges represent the fact that this group has shared the URL coming from this domain.

    Writes the graph to ``./{GRAPH_DIRECTORY}/{SCIENTIFIC_TOPIC}_{DATE}.gexf``
    and returns the networkx Graph object.
    """
    bipartite_graph = nx.Graph()
    # Facebook group nodes, keyed by integer account id.
    for _, row in fb_group_df.iterrows():
        bipartite_graph.add_node(int(row['account_id']),
                                 label=row['account_name'],
                                 type="facebook_account_or_page",
                                 nb_fake_news_shared=row['nb_fake_news_shared'],
                                 nb_followers=row['account_subscriber_count'],
                                 )
    # Domain nodes, keyed by domain name string.
    for _, row in domain_df.iterrows():
        bipartite_graph.add_node(row['domain_name'],
                                 type="domain_name",
                                 nb_fake_news_shared=row['nb_fake_news_shared']
                                 )
    # One edge per (domain, group) pair appearing in the cleaned posts.
    bipartite_graph.add_edges_from(list(posts_df[['domain_name', 'account_id']]\
        .itertuples(index=False, name=None)))
    bipartite_graph_path = os.path.join(".", GRAPH_DIRECTORY, SCIENTIFIC_TOPIC + "_" + DATE + ".gexf")
    nx.write_gexf(bipartite_graph, bipartite_graph_path, encoding="utf-8")
    return bipartite_graph
if __name__ == "__main__":
    # Optional argv[1]: scientific topic (restricted to three known values).
    if len(sys.argv) >= 2:
        if sys.argv[1] in ["COVID-19", "health", "climate"]:
            SCIENTIFIC_TOPIC = sys.argv[1]
        else:
            print("Please enter only 'COVID-19', 'health' or 'climate' as argument.")
            exit()
    else:
        SCIENTIFIC_TOPIC = "COVID-19"
        print("The topic 'COVID-19' has been chosen by default.")
    # Optional argv[2]: date suffix of the input CSV.
    if len(sys.argv) >= 3:
        DATE = sys.argv[2]
    else:
        DATE = "02_06_2020"
        print("The date '{}' has been chosen by default.".format(DATE))
    CLEAN_DATA_DIRECTORY = "clean_data"
    GRAPH_DIRECTORY = "graph"
    # Pipeline: load + clean, report statistics, then build and save the graph.
    posts_df, fb_group_df, domain_df = clean_data(CLEAN_DATA_DIRECTORY, SCIENTIFIC_TOPIC, DATE)
    print_statistics(fb_group_df, domain_df)
    bipartite_graph = create_graph(posts_df, fb_group_df, domain_df,
                                   GRAPH_DIRECTORY, SCIENTIFIC_TOPIC, DATE)
    print("The '{}_{}.gexf' graph has been saved in the 'graph' folder.".format(SCIENTIFIC_TOPIC, DATE))
| 0 | 0 | 0 |
36dbb5b4b4d979e45050f4692afffb84a7eed0f8 | 9,251 | py | Python | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/firebase/test/tool_results.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | null | null | null | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/firebase/test/tool_results.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | 4 | 2020-07-21T12:51:46.000Z | 2022-01-22T10:29:25.000Z | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/firebase/test/tool_results.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | 1 | 2020-07-25T18:17:57.000Z | 2020-07-25T18:17:57.000Z | # -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A utility library to support interaction with the Tool Results service."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import time
from googlecloudsdk.api_lib.firebase.test import exceptions
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import progress_tracker
from six.moves.urllib import parse
import uritemplate
_STATUS_INTERVAL_SECS = 3
class ToolResultsIds(
    collections.namedtuple('ToolResultsIds', ['history_id', 'execution_id'])):
  """A tuple to hold the history & execution IDs returned from Tool Results.

  Fields:
    history_id: a string with the Tool Results history ID to publish to.
    execution_id: a string with the ID of the Tool Results execution.
  """
  # Pure value type: all behavior is inherited from the namedtuple base.
def CreateToolResultsUiUrl(project_id, tool_results_ids):
  """Create the URL for a test's Tool Results UI in the Firebase App Manager.

  Args:
    project_id: string containing the user's GCE project ID.
    tool_results_ids: a ToolResultsIds object holding history & execution IDs.

  Returns:
    A url to the Tool Results UI.
  """
  # Fall back to the production console when no base URL override is set.
  base_url = (properties.VALUES.test.results_base_url.Get()
              or 'https://console.firebase.google.com')
  relative_path = uritemplate.expand(
      'project/{project}/testlab/histories/{history}/matrices/{execution}', {
          'project': project_id,
          'history': tool_results_ids.history_id,
          'execution': tool_results_ids.execution_id
      })
  return parse.urljoin(base_url, relative_path)
def GetToolResultsIds(matrix,
                      matrix_monitor,
                      status_interval=_STATUS_INTERVAL_SECS):
  """Gets the Tool Results history ID and execution ID for a test matrix.

  Sometimes the IDs are available immediately after a test matrix is created.
  If not, we keep checking the matrix until the Testing and Tool Results
  services have had enough time to create/assign the IDs, giving the user
  continuous feedback using gcloud core's ProgressTracker class.

  Args:
    matrix: a TestMatrix which was just created by the Testing service.
    matrix_monitor: a MatrixMonitor object.
    status_interval: float, number of seconds to sleep between status checks.

  Returns:
    A ToolResultsIds tuple containing the history ID and execution ID, which
    are shared by all TestExecutions in the TestMatrix.

  Raises:
    BadMatrixError: if the matrix finishes without both ToolResults IDs.
  """
  history_id = None
  execution_id = None
  msg = 'Creating individual test executions'
  with progress_tracker.ProgressTracker(msg, autotick=True):
    while True:
      # The IDs may lag matrix creation; poll until both are assigned.
      if matrix.resultStorage.toolResultsExecution:
        history_id = matrix.resultStorage.toolResultsExecution.historyId
        execution_id = matrix.resultStorage.toolResultsExecution.executionId
        if history_id and execution_id:
          break
      # A matrix that reaches a terminal state without both IDs is invalid.
      if matrix.state in matrix_monitor.completed_matrix_states:
        raise exceptions.BadMatrixError(_ErrorFromInvalidMatrix(matrix))
      time.sleep(status_interval)
      # Refresh the matrix snapshot before the next check.
      matrix = matrix_monitor.GetTestMatrixStatus()
  return ToolResultsIds(history_id=history_id, execution_id=execution_id)
def _ErrorFromInvalidMatrix(matrix):
  """Produces a human-readable error message from an invalid matrix."""
  messages = apis.GetMessagesModule('testing', 'v1')
  enum_values = messages.TestMatrix.InvalidMatrixDetailsValueValuesEnum
  # Map each InvalidMatrixDetails enum value to a user-facing explanation.
  error_dict = {
      enum_values.MALFORMED_APK:
          'The app APK is not a valid Android application',
      enum_values.MALFORMED_TEST_APK:
          'The test APK is not a valid Android instrumentation test',
      enum_values.NO_MANIFEST:
          'The app APK is missing the manifest file',
      enum_values.NO_PACKAGE_NAME:
          'The APK manifest file is missing the package name',
      enum_values.TEST_SAME_AS_APP:
          'The test APK has the same package name as the app APK',
      enum_values.NO_INSTRUMENTATION:
          'The test APK declares no instrumentation tags in the manifest',
      enum_values.NO_SIGNATURE:
          'At least one supplied APK file has a missing or invalid signature',
      enum_values.INSTRUMENTATION_ORCHESTRATOR_INCOMPATIBLE:
          ("The test runner class specified by the user or the test APK's "
           'manifest file is not compatible with Android Test Orchestrator. '
           'Please use AndroidJUnitRunner version 1.0 or higher'),
      enum_values.NO_TEST_RUNNER_CLASS:
          ('The test APK does not contain the test runner class specified by '
           'the user or the manifest file. The test runner class name may be '
           'incorrect, or the class may be mislocated in the app APK.'),
      enum_values.NO_LAUNCHER_ACTIVITY:
          'The app APK does not specify a main launcher activity',
      enum_values.FORBIDDEN_PERMISSIONS:
          'The app declares one or more permissions that are not allowed',
      enum_values.INVALID_ROBO_DIRECTIVES:
          'Cannot have multiple robo-directives with the same resource name',
      enum_values.INVALID_DIRECTIVE_ACTION:
          'Robo Directive includes at least one invalid action definition.',
      enum_values.INVALID_RESOURCE_NAME:
          'Robo Directive resource name contains invalid characters: ":" '
          ' (colon) or " " (space)',
      enum_values.TEST_LOOP_INTENT_FILTER_NOT_FOUND:
          'The app does not have a correctly formatted game-loop intent filter',
      enum_values.SCENARIO_LABEL_NOT_DECLARED:
          'A scenario-label was not declared in the manifest file',
      enum_values.SCENARIO_LABEL_MALFORMED:
          'A scenario-label in the manifest includes invalid numbers or ranges',
      enum_values.SCENARIO_NOT_DECLARED:
          'A scenario-number was not declared in the manifest file',
      enum_values.DEVICE_ADMIN_RECEIVER:
          'Device administrator applications are not allowed',
      enum_values.MALFORMED_XC_TEST_ZIP:
          'The XCTest zip file was malformed. The zip did not contain a single '
          '.xctestrun file and the contents of the DerivedData/Build/Products '
          'directory.',
      enum_values.BUILT_FOR_IOS_SIMULATOR:
          'The provided XCTest was built for the iOS simulator rather than for '
          'a physical device',
      enum_values.NO_TESTS_IN_XC_TEST_ZIP:
          'The .xctestrun file did not specify any test targets to run',
      enum_values.USE_DESTINATION_ARTIFACTS:
          'One or more of the test targets defined in the .xctestrun file '
          'specifies "UseDestinationArtifacts", which is not allowed',
      enum_values.TEST_NOT_APP_HOSTED:
          'One or more of the test targets defined in the .xctestrun file '
          'does not have a host binary to run on the physical iOS device, '
          'which may cause errors when running xcodebuild',
      enum_values.NO_CODE_APK:
          '"hasCode" is false in the Manifest. Tested APKs must contain code',
      enum_values.INVALID_INPUT_APK:
          'Either the provided input APK path was malformed, the APK file does '
          'not exist, or the user does not have permission to access the file',
      enum_values.INVALID_APK_PREVIEW_SDK:
          "Your app targets a preview version of the Android SDK that's "
          'incompatible with the selected devices.',
      enum_values.PLIST_CANNOT_BE_PARSED:
          'One or more of the Info.plist files in the zip could not be parsed',
      enum_values.INVALID_PACKAGE_NAME:
          'The APK application ID (aka package name) is invalid. See also '
          'https://developer.android.com/studio/build/application-id',
      enum_values.MALFORMED_IPA:
          'The app IPA is not a valid iOS application',
      enum_values.MISSING_URL_SCHEME:
          'The iOS game loop application does not register the custom URL '
          'scheme',
      enum_values.MALFORMED_APP_BUNDLE:
          'The iOS application bundle (.app) is invalid',
  }
  details_enum = matrix.invalidMatrixDetails
  if details_enum in error_dict:
    return ('\nMatrix [{m}] failed during validation: {e}.'.format(
        m=matrix.testMatrixId, e=error_dict[details_enum]))
  # Use a generic message if the enum is unknown or unspecified/unavailable.
  return (
      '\nMatrix [{m}] unexpectedly reached final status {s} without returning '
      'a URL to any test results in the Firebase console. Please re-check the '
      'validity of your test files and parameters and try again.'.format(
          m=matrix.testMatrixId, s=matrix.state))
| 45.126829 | 80 | 0.720246 | # -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A utility library to support interaction with the Tool Results service."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import time
from googlecloudsdk.api_lib.firebase.test import exceptions
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import progress_tracker
from six.moves.urllib import parse
import uritemplate
_STATUS_INTERVAL_SECS = 3
class ToolResultsIds(
    collections.namedtuple('ToolResultsIds', ['history_id', 'execution_id'])):
  """A tuple to hold the history & execution IDs returned from Tool Results.
  Both IDs are assigned by the Tool Results service and are shared by all
  TestExecutions within one TestMatrix (see GetToolResultsIds).
  Fields:
    history_id: a string with the Tool Results history ID to publish to.
    execution_id: a string with the ID of the Tool Results execution.
  """
def CreateToolResultsUiUrl(project_id, tool_results_ids):
  """Create the URL for a test's Tool Results UI in the Firebase App Manager.
  Args:
    project_id: string containing the user's GCE project ID.
    tool_results_ids: a ToolResultsIds object holding history & execution IDs.
  Returns:
    A url to the Tool Results UI.
  """
  # Fall back to the public console when no base URL override is configured.
  url_base = (properties.VALUES.test.results_base_url.Get() or
              'https://console.firebase.google.com')
  # uritemplate handles percent-encoding of the expanded variables.
  template_vars = {
      'project': project_id,
      'history': tool_results_ids.history_id,
      'execution': tool_results_ids.execution_id,
  }
  url_end = uritemplate.expand(
      'project/{project}/testlab/histories/{history}/matrices/{execution}',
      template_vars)
  return parse.urljoin(url_base, url_end)
def GetToolResultsIds(matrix,
                      matrix_monitor,
                      status_interval=_STATUS_INTERVAL_SECS):
  """Poll a test matrix until its Tool Results history/execution IDs exist.
  The IDs may be assigned at any point after the matrix is created, so the
  matrix status is re-fetched every `status_interval` seconds while a
  ProgressTracker gives the user continuous feedback.
  Args:
    matrix: a TestMatrix which was just created by the Testing service.
    matrix_monitor: a MatrixMonitor object.
    status_interval: float, number of seconds to sleep between status checks.
  Returns:
    A ToolResultsIds tuple containing the history ID and execution ID, which
    are shared by all TestExecutions in the TestMatrix.
  Raises:
    BadMatrixError: if the matrix finishes without both ToolResults IDs.
  """
  history_id = None
  execution_id = None
  tracker_msg = 'Creating individual test executions'
  with progress_tracker.ProgressTracker(tracker_msg, autotick=True):
    while not (history_id and execution_id):
      # Keep the most recent non-empty IDs reported by the service.
      tr_execution = matrix.resultStorage.toolResultsExecution
      if tr_execution:
        history_id = tr_execution.historyId
        execution_id = tr_execution.executionId
      if history_id and execution_id:
        break
      # A terminal matrix without IDs is invalid; report why.
      if matrix.state in matrix_monitor.completed_matrix_states:
        raise exceptions.BadMatrixError(_ErrorFromInvalidMatrix(matrix))
      time.sleep(status_interval)
      matrix = matrix_monitor.GetTestMatrixStatus()
  return ToolResultsIds(history_id=history_id, execution_id=execution_id)
def _ErrorFromInvalidMatrix(matrix):
  """Produces a human-readable error message from an invalid matrix."""
  messages = apis.GetMessagesModule('testing', 'v1')
  enum_values = messages.TestMatrix.InvalidMatrixDetailsValueValuesEnum
  # Map each InvalidMatrixDetails enum value to a user-facing explanation.
  error_dict = {
      enum_values.MALFORMED_APK:
          'The app APK is not a valid Android application',
      enum_values.MALFORMED_TEST_APK:
          'The test APK is not a valid Android instrumentation test',
      enum_values.NO_MANIFEST:
          'The app APK is missing the manifest file',
      enum_values.NO_PACKAGE_NAME:
          'The APK manifest file is missing the package name',
      enum_values.TEST_SAME_AS_APP:
          'The test APK has the same package name as the app APK',
      enum_values.NO_INSTRUMENTATION:
          'The test APK declares no instrumentation tags in the manifest',
      enum_values.NO_SIGNATURE:
          'At least one supplied APK file has a missing or invalid signature',
      enum_values.INSTRUMENTATION_ORCHESTRATOR_INCOMPATIBLE:
          ("The test runner class specified by the user or the test APK's "
           'manifest file is not compatible with Android Test Orchestrator. '
           'Please use AndroidJUnitRunner version 1.0 or higher'),
      enum_values.NO_TEST_RUNNER_CLASS:
          ('The test APK does not contain the test runner class specified by '
           'the user or the manifest file. The test runner class name may be '
           'incorrect, or the class may be mislocated in the app APK.'),
      enum_values.NO_LAUNCHER_ACTIVITY:
          'The app APK does not specify a main launcher activity',
      enum_values.FORBIDDEN_PERMISSIONS:
          'The app declares one or more permissions that are not allowed',
      enum_values.INVALID_ROBO_DIRECTIVES:
          'Cannot have multiple robo-directives with the same resource name',
      enum_values.INVALID_DIRECTIVE_ACTION:
          'Robo Directive includes at least one invalid action definition.',
      enum_values.INVALID_RESOURCE_NAME:
          'Robo Directive resource name contains invalid characters: ":" '
          ' (colon) or " " (space)',
      enum_values.TEST_LOOP_INTENT_FILTER_NOT_FOUND:
          'The app does not have a correctly formatted game-loop intent filter',
      enum_values.SCENARIO_LABEL_NOT_DECLARED:
          'A scenario-label was not declared in the manifest file',
      enum_values.SCENARIO_LABEL_MALFORMED:
          'A scenario-label in the manifest includes invalid numbers or ranges',
      enum_values.SCENARIO_NOT_DECLARED:
          'A scenario-number was not declared in the manifest file',
      enum_values.DEVICE_ADMIN_RECEIVER:
          'Device administrator applications are not allowed',
      enum_values.MALFORMED_XC_TEST_ZIP:
          'The XCTest zip file was malformed. The zip did not contain a single '
          '.xctestrun file and the contents of the DerivedData/Build/Products '
          'directory.',
      enum_values.BUILT_FOR_IOS_SIMULATOR:
          'The provided XCTest was built for the iOS simulator rather than for '
          'a physical device',
      enum_values.NO_TESTS_IN_XC_TEST_ZIP:
          'The .xctestrun file did not specify any test targets to run',
      enum_values.USE_DESTINATION_ARTIFACTS:
          'One or more of the test targets defined in the .xctestrun file '
          'specifies "UseDestinationArtifacts", which is not allowed',
      enum_values.TEST_NOT_APP_HOSTED:
          'One or more of the test targets defined in the .xctestrun file '
          'does not have a host binary to run on the physical iOS device, '
          'which may cause errors when running xcodebuild',
      enum_values.NO_CODE_APK:
          '"hasCode" is false in the Manifest. Tested APKs must contain code',
      enum_values.INVALID_INPUT_APK:
          'Either the provided input APK path was malformed, the APK file does '
          'not exist, or the user does not have permission to access the file',
      enum_values.INVALID_APK_PREVIEW_SDK:
          "Your app targets a preview version of the Android SDK that's "
          'incompatible with the selected devices.',
      enum_values.PLIST_CANNOT_BE_PARSED:
          'One or more of the Info.plist files in the zip could not be parsed',
      enum_values.INVALID_PACKAGE_NAME:
          'The APK application ID (aka package name) is invalid. See also '
          'https://developer.android.com/studio/build/application-id',
      enum_values.MALFORMED_IPA:
          'The app IPA is not a valid iOS application',
      enum_values.MISSING_URL_SCHEME:
          'The iOS game loop application does not register the custom URL '
          'scheme',
      enum_values.MALFORMED_APP_BUNDLE:
          'The iOS application bundle (.app) is invalid',
  }
  details_enum = matrix.invalidMatrixDetails
  if details_enum in error_dict:
    return ('\nMatrix [{m}] failed during validation: {e}.'.format(
        m=matrix.testMatrixId, e=error_dict[details_enum]))
  # Use a generic message if the enum is unknown or unspecified/unavailable.
  return (
      '\nMatrix [{m}] unexpectedly reached final status {s} without returning '
      'a URL to any test results in the Firebase console. Please re-check the '
      'validity of your test files and parameters and try again.'.format(
          m=matrix.testMatrixId, s=matrix.state))
| 0 | 0 | 0 |
f22fa272296f5002435df89407092a61d69e7af3 | 1,705 | py | Python | ServerScript/recievestore.py | wmizzi/tn2capstone | e9855ba6b49e2d05293df74846c64fa0c220a25d | [
"BSD-2-Clause"
] | null | null | null | ServerScript/recievestore.py | wmizzi/tn2capstone | e9855ba6b49e2d05293df74846c64fa0c220a25d | [
"BSD-2-Clause"
] | null | null | null | ServerScript/recievestore.py | wmizzi/tn2capstone | e9855ba6b49e2d05293df74846c64fa0c220a25d | [
"BSD-2-Clause"
] | null | null | null | # created by Angus Clark 9/2/17 updated 27/2/17
# ToDo impliment traceroute function into this
# Perhaps get rid of unnecessary itemediate temp file
import socket
import os
import json
import my_traceroute
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = '130.56.253.43'
#print host
port = 5201 # Change port (must enable security settigns of server)
s.bind((host,port))
s.listen(5)
MAX_HOPS = 30 # max hops for traceroute
# Serve forever: each iteration handles one client upload — a JSON blob that
# is annotated with the sender's IP and a traceroute, then archived to disk.
while True:
    c, addr = s.accept() #accept incoming Connection
    f = open('temp.json','wb') # open blank binary to dump incoming data
    #print addr[0]
    l = c.recv(1024)
    while(l):
        # Dump data into temp file and get next chunk of data
        f.write(l)
        l = c.recv(1024)
    f.close()
    c.close()
    # NOTE(review): this handle is never closed — consider a with-block.
    tempfile = open('temp.json','rb')
    info = json.load(tempfile)
    info["UserInfo"]["ip"] = addr[0] # store ip address of sender
    last_addr = '0.0.0.0' # placeholder for first iteration
    # Probe hop by hop until traceroute fails (-1) or the reported node
    # repeats; record each hop's address and round-trip time.
    for hop in range(1,MAX_HOPS):
        result = my_traceroute.traceroute(hop, info["UserInfo"]["ip"])
        #print result
        if result == -1:
            break
        if result[1] == last_addr:
            break
        info["TRACEROUTE"][str(result[0])] = {}
        info["TRACEROUTE"][str(result[0])].update({'node':result[1], 'rtt':result[2]})
        last_addr = result[1]
    id = info["UserInfo"]["user id"]
    timestamp = info["UserInfo"]["timestamp"]
    # NOTE(review): shell-built mkdir; "user id" comes straight from the
    # uploaded JSON — confirm clients are trusted (shell-injection risk).
    os.system('mkdir /home/ubuntu/data/'+id)
    path = "/home/ubuntu/data/" + id + "/"
    filename = timestamp + '.json'
    savefile = open(path + filename, 'w+')
    savefile.write(json.dumps(info))
    savefile.close() | 30.446429 | 86 | 0.609971 | # created by Angus Clark 9/2/17 updated 27/2/17
# ToDo impliment traceroute function into this
# Perhaps get rid of unnecessary itemediate temp file
import socket
import os
import json
import my_traceroute
# Result-collection server: accepts one TCP connection at a time, spools the
# client's JSON payload to disk, annotates it with the sender's IP plus a
# traceroute back to the sender, then archives it under the user's directory.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = '130.56.253.43'
#print host
port = 5201 # Change port (must enable security settigns of server)
s.bind((host,port))
s.listen(5)
MAX_HOPS = 30 # max hops for traceroute
while True:
    c, addr = s.accept()  # block until a client connects
    # Spool the raw upload to a temp file; the with-block guarantees the
    # handle is flushed and closed before it is re-opened for parsing.
    with open('temp.json', 'wb') as f:
        l = c.recv(1024)
        while l:
            f.write(l)
            l = c.recv(1024)
    c.close()
    # FIX: the temp file was previously opened for reading and never closed.
    with open('temp.json', 'rb') as tempfile:
        info = json.load(tempfile)
    info["UserInfo"]["ip"] = addr[0]  # store ip address of sender
    last_addr = '0.0.0.0'  # placeholder for the first hop comparison
    # Probe hop by hop until traceroute fails (-1) or the reported node
    # repeats; record each hop's address and round-trip time.
    for hop in range(1, MAX_HOPS):
        result = my_traceroute.traceroute(hop, info["UserInfo"]["ip"])
        if result == -1:
            break
        if result[1] == last_addr:
            break
        info["TRACEROUTE"][str(result[0])] = {}
        info["TRACEROUTE"][str(result[0])].update({'node': result[1], 'rtt': result[2]})
        last_addr = result[1]
    user_id = info["UserInfo"]["user id"]
    timestamp = info["UserInfo"]["timestamp"]
    path = "/home/ubuntu/data/" + user_id + "/"
    # FIX: replaces os.system('mkdir ' + id) — no shell involved, so a
    # malicious "user id" in the uploaded JSON can no longer inject commands.
    if not os.path.isdir(path):
        os.makedirs(path)
    with open(path + timestamp + '.json', 'w+') as savefile:
        savefile.write(json.dumps(info))
fd71201325e98fc8968b2ddb64f28721ed2a9d76 | 817 | py | Python | PPAS_agent/knn/knn_model.py | pedMatias/matias_hfo | 6d88e1043a1455f5c1f6cc11b9380869772f4176 | [
"MIT"
] | 1 | 2021-06-03T20:03:50.000Z | 2021-06-03T20:03:50.000Z | multi_agents/knn/knn_model.py | pedMatias/matias_hfo | 6d88e1043a1455f5c1f6cc11b9380869772f4176 | [
"MIT"
] | null | null | null | multi_agents/knn/knn_model.py | pedMatias/matias_hfo | 6d88e1043a1455f5c1f6cc11b9380869772f4176 | [
"MIT"
] | 1 | 2021-03-14T01:22:33.000Z | 2021-03-14T01:22:33.000Z | import numpy as np
import faiss
| 28.172414 | 74 | 0.592411 | import numpy as np
import faiss
class KNeighbors:
    """Approximate nearest-neighbour lookup backed by a faiss IVFFlat index.
    fit() trains and fills the index from X (rows aligned with labels y);
    predict() returns the label of the single closest stored vector to the
    first query row.
    """
    def __init__(self, k=1, nlist=1000, nprobe=1):
        # index and y are populated by fit().
        self.index = None
        self.y = None
        # FIX: was `self.k = 1`, which silently discarded the caller's k.
        self.k = k
        self.nlist = nlist    # number of IVF partitions (clusters)
        self.nprobe = nprobe  # partitions visited per query
    def fit(self, X, y):
        """Train an IVF index on X and store labels y (row-aligned with X)."""
        quantizer = faiss.IndexFlatL2(X.shape[1])  # the other index
        self.index = faiss.IndexIVFFlat(quantizer, X.shape[1], self.nlist)
        # Train:
        assert not self.index.is_trained
        self.index.train(X.astype(np.float32))
        assert self.index.is_trained
        # Fit:
        self.index.add(X.astype(np.float32))
        self.index.nprobe = self.nprobe
        self.y = y
    def predict(self, X):
        """Return the stored label nearest to the first row of X.
        NOTE: searches with k=1 regardless of self.k (unchanged original
        behaviour) and only answers the first query row.
        """
        distances, indices = self.index.search(X.astype(np.float32), k=1)
        index = indices[0][0]
        return self.y[index]
| 685 | -4 | 103 |
402bd0ecd3ad3683810f6e43fdd05ede3dff3a02 | 9,732 | py | Python | src/piebus/api.py | hiway/piebus | 898e084c6065824fdb1dce8fedc1f8fac8499703 | [
"BSD-2-Clause"
] | null | null | null | src/piebus/api.py | hiway/piebus | 898e084c6065824fdb1dce8fedc1f8fac8499703 | [
"BSD-2-Clause"
] | 5 | 2021-03-19T01:09:42.000Z | 2022-01-13T01:17:44.000Z | src/piebus/api.py | hiway/piebus | 898e084c6065824fdb1dce8fedc1f8fac8499703 | [
"BSD-2-Clause"
] | null | null | null | import asyncio
import base64
import datetime
import hashlib
import json
import os
import smopy
import traceback
from json import JSONDecodeError
from uuid import uuid4
import bcrypt
import peewee
from PIL import Image, ImageDraw
from peewee import (
Model,
CharField,
TextField,
IntegerField,
ForeignKeyField,
DateTimeField,
BooleanField,
)
from playhouse.sqlite_ext import (
SqliteExtDatabase,
FTSModel)
from pysyncobj import SyncObj, replicated
from asgiref.sync import sync_to_async
from . import PATH_DATABASE
loop = asyncio.get_event_loop()
pragmas = [
('journal_mode', 'wal'),
('cache_size', -1024 * 32)]
db = SqliteExtDatabase(PATH_DATABASE, pragmas=pragmas)
| 31.597403 | 110 | 0.585491 | import asyncio
import base64
import datetime
import hashlib
import json
import os
import smopy
import traceback
from json import JSONDecodeError
from uuid import uuid4
import bcrypt
import peewee
from PIL import Image, ImageDraw
from peewee import (
Model,
CharField,
TextField,
IntegerField,
ForeignKeyField,
DateTimeField,
BooleanField,
)
from playhouse.sqlite_ext import (
SqliteExtDatabase,
FTSModel)
from pysyncobj import SyncObj, replicated
from asgiref.sync import sync_to_async
from . import PATH_DATABASE
# Module-level asyncio loop handle, created at import time.
loop = asyncio.get_event_loop()
# SQLite tuning: write-ahead logging; negative cache_size is in KiB (32 MiB).
pragmas = [
    ('journal_mode', 'wal'),
    ('cache_size', -1024 * 32)]
# Single shared database handle used by every model via BaseModel.Meta.
db = SqliteExtDatabase(PATH_DATABASE, pragmas=pragmas)
class BaseModel(Model):
    """Common peewee base: every row gets an indexed UTC creation timestamp."""
    timestamp = DateTimeField(default=datetime.datetime.utcnow, index=True)
    class Meta:
        # All models share the module-level SQLite database handle.
        database = db
class User(BaseModel):
    """Registered account; password holds a bcrypt hash, never plaintext."""
    username = CharField(index=True, unique=True)
    password = CharField(index=False)  # bcrypt hash (see PiebusAPI._register)
    note = TextField(index=True, default='')
class Preference(BaseModel):
    """Single key/value setting row; values are stored as strings."""
    key = CharField(index=True, unique=True)
    value = CharField(index=False)
class Frame(BaseModel):
    """One bus frame (event/message/...) plus piebus publishing state."""
    # Zentropi fields:
    uuid = CharField(index=True, unique=True)
    kind = IntegerField(index=True)
    name = TextField(index=True)
    data = TextField(default='')
    meta = TextField(default='')
    # piebus fields:
    publish = BooleanField(default=False, index=True)
    render = CharField(index=True, default='default')
    source = CharField(default='')
    tags = TextField(index=True, default='')
    @property
    def jdata(self):
        """`data` parsed as a dict; {} when empty or unparseable."""
        try:
            if self.data:
                return dict(json.loads(self.data) or {})
        except JSONDecodeError:
            print('cannot decode data', self.data)
            pass
        return {}
    @property
    def jmeta(self):
        """`meta` parsed as a dict; {} when empty or unparseable."""
        try:
            if self.meta:
                return dict(json.loads(self.meta) or {})
        except JSONDecodeError:
            print('cannot decode meta', self.meta)
            pass
        return {}
    async def fetch_map_async(self):
        """Render a low-zoom (z=4, +-1 degree) map PNG for this frame's location.
        Raises KeyError when the frame carries no location data.
        """
        tsurl = """http://c.tile.stamen.com/watercolor/${z}/${x}/${y}.jpg"""
        data = self.jdata
        if 'location' in data:
            lat = data['location'].get('latitude')
            lon = data['location'].get('longitude')
            map = await sync_to_async(smopy.Map)((lat - 1, lat + 1., lon - 1, lon + 1), z=4)
            await sync_to_async(map.save_png)(f'map_{self.uuid}.png', tileserver=tsurl)
        else:
            raise KeyError(f'No location data found in frame: {self}')
    def fetch_map(self):
        """Render a street map with a marker at the frame's location, store the
        file name under data['media_url'], persist the row, and return the
        marked map's file name.
        Raises KeyError when the frame carries no location data.
        """
        data = self.jdata
        if 'location' in data:
            lat = data['location'].get('latitude')
            lon = data['location'].get('longitude')
            map = smopy.Map((lat - 0.006, lon - 0.038, lat + 0.006, lon + 0.038), z=12,
                            tileserver="http://tile.basemaps.cartocdn.com/light_all/{z}/{x}/{y}@2x.png",
                            tilesize=512, maxtiles=16)
            x, y = map.to_pixels(lat, lon)
            x = int(x)
            y = int(y)
            fname = f'map_{self.uuid}.png'
            map.save_png(f'content/{fname}')
            # FIX: was Image.open(open(path, 'rb')), which leaked the raw file
            # handle; the with-block closes image and file deterministically.
            with Image.open(f'content/{fname}') as img:
                draw = ImageDraw.Draw(img)
                draw.ellipse([(x-10, y-10), (x+10, y+10)], fill=128, width=10)
                del draw
                ffname = f'content/loc_{fname}'
                img.save(ffname, "PNG")
            data.update({'media_url': ffname})
            self.data = json.dumps(data)
            self.save()
            return f'loc_{fname}'
        else:
            raise KeyError(f'No location data found in frame: {self}')
class FTSEntry(FTSModel):
    """Full-text index over frames; docid mirrors Frame.id."""
    content = TextField()
    class Meta:
        database = db
    @classmethod
    def index_frame(cls, frame: Frame):
        """(Re)index one frame's searchable text; creates the table on demand."""
        try:
            existing = cls.get_or_none(docid=frame.id)
            # Telegram messages index their text/caption; other frames index
            # name + raw data. Tags are always searchable.
            if frame.name == 'telegram-message':
                content = '\n'.join([frame.jdata.get('text', ''), frame.jdata.get('caption', ''), frame.tags])
            else:
                content = '\n'.join([frame.name, str(frame.data), frame.tags])
            if not content.strip():
                return
            if existing:
                # FIX: `existing.update(...)` only built a peewee query and
                # never executed it, so re-indexing was a silent no-op.
                cls.update(content=content).where(cls.docid == frame.id).execute()
            else:
                cls.create(docid=frame.id, content=content)
        except peewee.OperationalError:
            # Table missing (first run): create it, then retry once.
            cls.create_table()
            cls.index_frame(frame)
class Kind(object):
    """Integer codes stored in Frame.kind (Zentropi frame kinds)."""
    command = 0
    event = 1
    message = 2
    request = 3
    response = 4
    state = 5
    stream = 6
def ensure_db():
    """Create the schema on first run; return True iff tables were created."""
    if os.path.exists(PATH_DATABASE):
        return False
    # Same creation order as before: users, preferences, frames.
    for model in (User, Preference, Frame):
        model.create_table()
    return True
class PiebusAPI(SyncObj):
    """Replicated application API.
    Mutating operations are `_name` methods decorated with pysyncobj's
    @replicated (so they are applied on every cluster node); each has an
    async `name` wrapper that invokes it through sync_to_async with sync=True.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    @replicated
    def _register(self, username, password):
        """Create a user row; the password is sha256'd then bcrypt-hashed."""
        password = base64.b64encode(hashlib.sha256(password.encode('utf-8')).digest())
        hashed = bcrypt.hashpw(password, bcrypt.gensalt())
        try:
            User.create(username=username, password=hashed)
            return True
        except Exception:
            # FIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt. Still best-effort: log and report failure.
            traceback.print_exc()
            return False
    async def register(self, username, password):
        return await sync_to_async(self._register)(username, password, sync=True)
    @replicated
    def _login(self, username, password):
        """Check username/password against the stored bcrypt hash."""
        user = User.get_or_none(username=username)
        if not user:
            return False
        password = base64.b64encode(hashlib.sha256(password.encode('utf-8')).digest())
        hashed = user.password.encode('utf-8')
        if bcrypt.checkpw(password, hashed):
            return True
        return False
    async def login(self, username, password):
        return await sync_to_async(self._login)(username, password, sync=True)
    @replicated
    def _logout(self, username):
        # Stateless stub: nothing to invalidate server-side yet.
        return True
    async def logout(self, username):
        return await sync_to_async(self._logout)(username, sync=True)
    def _get_preference(self, key, default=None):
        """Read-only lookup; `default` for a missing key or a None value."""
        pref = Preference.get_or_none(key=key)
        if pref is None:
            return default
        return pref.value if pref.value is not None else default
    @replicated
    def _set_preference(self, key, value):
        """Upsert one preference row and return the stored value."""
        pref = Preference.get_or_none(key=key)
        if pref is not None:
            q = Preference.update(value=value).where(Preference.key == key)
            q.execute()
        else:
            Preference.create(key=key, value=value)
        return value
    async def preference(self, key, value=None):
        """Get a preference; when `value` is given, set it first."""
        if value is not None:
            await sync_to_async(self._set_preference)(key, value, sync=True)
        return self._get_preference(key)
    async def enable_register(self, value=None):
        """Get/set the 'registration open' flag (persisted as 0/1)."""
        if value is not None:
            await sync_to_async(self._set_preference)('enable_register', 1 if value else 0, sync=True)
        return bool(int(self._get_preference('enable_register', 0)))
    @replicated
    def _create_frame(self, kind, name, data, meta, publish, render, source, tags):
        """Insert a Frame with a fresh uuid; kind '' falls back to event (1)."""
        frame = Frame.create(
            uuid=uuid4().hex,
            kind=int(kind) if kind != '' else 1,
            name=name,
            data=json.dumps(data) or '',
            meta=json.dumps(meta) or '',
            publish=publish or False,
            render=render or 'default',
            source=source or '',
            tags=tags or '',
        )
        return frame
    async def create_frame(self, kind, name, data=None, meta=None, publish=False, render='', tags=''):
        """Create a frame (source taken from meta['source']) and FTS-index it."""
        source = meta.get('source', '') if meta else ''
        frame = await sync_to_async(self._create_frame)(kind=kind, name=name,
                                                        data=data, meta=meta,
                                                        publish=publish, render=render,
                                                        source=source, tags=tags,
                                                        sync=True)
        await sync_to_async(FTSEntry.index_frame)(frame)
        return frame
    async def list_frames(self, limit=10):
        """Newest `limit` frames, published or not."""
        frames = Frame.select().order_by(Frame.timestamp.desc()).limit(limit)
        return frames
    @replicated
    def _index_frames(self):
        """Rebuild the FTS index over every frame."""
        frames = Frame.select().order_by(Frame.timestamp.desc())
        for frame in frames:
            FTSEntry.index_frame(frame)
    async def index_frames(self):
        await sync_to_async(self._index_frames)(sync=True)
        return True
    async def search_frames(self, query):
        """Full-text search over all frames, newest first."""
        frames = Frame \
            .select() \
            .join(FTSEntry, on=(Frame.id == FTSEntry.docid)) \
            .where(FTSEntry.match(query)).order_by(Frame.timestamp.desc())
        return frames
    async def search_public_frames(self, query):
        """Full-text search restricted to published frames."""
        frames = Frame \
            .select() \
            .join(FTSEntry, on=(Frame.id == FTSEntry.docid)) \
            .where((FTSEntry.match(query)) & (Frame.publish == True)).order_by(Frame.timestamp.desc())
        return frames
    async def list_public_frames(self, limit=10):
        """Newest `limit` published frames."""
        frames = Frame.select() \
            .where(Frame.publish == True) \
            .order_by(Frame.timestamp.desc()) \
            .limit(limit)
        return frames
    async def frame_from_uuid(self, uuid):
        """Fetch one frame by uuid (peewee raises DoesNotExist when absent)."""
        frame = Frame.get(Frame.uuid == uuid)
        return frame
    async def publish(self, uuid, status):
        """Set a frame's publish flag and persist it."""
        frame = Frame.get(Frame.uuid == uuid)
        frame.publish = status
        await sync_to_async(frame.save)()
        return frame
| 7,051 | 1,774 | 184 |
6f3c693c479a27919155bf7fe3a2aa2e751a7747 | 2,731 | py | Python | tests/test_attention_gqn.py | rnagumo/gqnlib | 96bd8499f90c00b29817f71e6380bc622ce78479 | [
"MIT"
] | 1 | 2020-08-13T01:54:52.000Z | 2020-08-13T01:54:52.000Z | tests/test_attention_gqn.py | rnagumo/gqnlib | 96bd8499f90c00b29817f71e6380bc622ce78479 | [
"MIT"
] | null | null | null | tests/test_attention_gqn.py | rnagumo/gqnlib | 96bd8499f90c00b29817f71e6380bc622ce78479 | [
"MIT"
] | 1 | 2021-01-03T16:02:55.000Z | 2021-01-03T16:02:55.000Z |
import unittest
import torch
import gqnlib
if __name__ == "__main__":
unittest.main()
| 33.716049 | 72 | 0.58074 |
import unittest
import torch
import gqnlib
class TestAttentionGQN(unittest.TestCase):
    """Shape and loss-positivity checks for gqnlib.AttentionGQN."""
    def setUp(self):
        self.model = gqnlib.AttentionGQN()
    @staticmethod
    def _views(num):
        """Random (images, viewpoints) tensors for a batch of 4, `num` views."""
        return torch.randn(4, num, 3, 64, 64), torch.randn(4, num, 7)
    def _check_losses(self, loss_dict, size=None):
        """Each loss entry matches `size` (when given) and has positive mean."""
        for key in ("loss", "nll_loss", "kl_loss"):
            if size is not None:
                self.assertTupleEqual(loss_dict[key].size(), size)
            self.assertGreater(loss_dict[key].mean(), 0)
    def test_inference(self):
        x_c, v_c = self._views(15)
        x_q, v_q = self._views(2)
        (canvas, key, value, r_stack), loss_dict = self.model.inference(
            x_c, v_c, x_q, v_q)
        self.assertTupleEqual(canvas.size(), (4, 2, 3, 64, 64))
        self.assertTupleEqual(key.size(), (4, 15 * 49, 64, 8, 8))
        self.assertTupleEqual(value.size(), (4, 15 * 49, 76, 8, 8))
        self.assertTupleEqual(r_stack.size(), (4, 2, 76, 8, 8))
        self._check_losses(loss_dict, (4, 2))
    def test_forward(self):
        x_c, v_c = self._views(15)
        x_q, v_q = self._views(2)
        loss_dict = self.model(x_c, v_c, x_q, v_q)
        self._check_losses(loss_dict, (4, 2))
    def test_loss_func(self):
        x_c, v_c = self._views(15)
        x_q, v_q = self._views(1)
        loss_dict = self.model.loss_func(x_c, v_c, x_q, v_q)
        self._check_losses(loss_dict)
    def test_reconstruct(self):
        x_c, v_c = self._views(15)
        x_q, v_q = self._views(2)
        canvas = self.model.reconstruct(x_c, v_c, x_q, v_q)
        self.assertTupleEqual(canvas.size(), (4, 2, 3, 64, 64))
    def test_sample(self):
        x_c, v_c = self._views(15)
        v_q = torch.randn(4, 5, 7)
        canvas = self.model.sample(x_c, v_c, v_q)
        self.assertTupleEqual(canvas.size(), (4, 5, 3, 64, 64))
if __name__ == "__main__":
unittest.main()
| 2,429 | 21 | 185 |
bbece0a7cdc6417247ddc363307eef71f86b1fdd | 562 | py | Python | subdomains.py | kiuru/pyTLScanner | ba4b3b35675ad7854366bf6765678229c74fa77b | [
"MIT"
] | null | null | null | subdomains.py | kiuru/pyTLScanner | ba4b3b35675ad7854366bf6765678229c74fa77b | [
"MIT"
] | null | null | null | subdomains.py | kiuru/pyTLScanner | ba4b3b35675ad7854366bf6765678229c74fa77b | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from get_listed_companies import get_listed_companies_from_cache
import tldextract
if __name__ == '__main__':
run_scan('helsinki', True)
| 26.761905 | 64 | 0.709964 | #!/usr/bin/env python
from get_listed_companies import get_listed_companies_from_cache
import tldextract
def run_scan(market, debug):
    """Print the unique registered domains of the listed companies' websites."""
    companies = get_listed_companies_from_cache(market, debug)
    unique_domains = set()
    for company in companies:
        parts = tldextract.extract(company.website)
        unique_domains.add(parts.domain + "." + parts.suffix)
    # Set iteration order is arbitrary, matching the original list(set(...)).
    for domain in list(unique_domains):
        print(domain)
if __name__ == '__main__':
run_scan('helsinki', True)
| 370 | 0 | 23 |
8512a29537636ae25ad14c6bc16ec03b4ce86019 | 10,146 | py | Python | feapder/network/user_pool/gold_user_pool.py | ibryang/feapder | 14b1c1e9bd0953ea8af102d6d220fed4b79d0a5c | [
"MIT"
] | 876 | 2021-02-09T11:08:04.000Z | 2022-03-31T21:14:11.000Z | feapder/network/user_pool/gold_user_pool.py | ibryang/feapder | 14b1c1e9bd0953ea8af102d6d220fed4b79d0a5c | [
"MIT"
] | 94 | 2021-02-20T07:59:28.000Z | 2022-03-28T09:54:53.000Z | feapder/network/user_pool/gold_user_pool.py | ibryang/feapder | 14b1c1e9bd0953ea8af102d6d220fed4b79d0a5c | [
"MIT"
] | 172 | 2021-02-22T08:24:44.000Z | 2022-03-29T08:15:27.000Z | # -*- coding: utf-8 -*-
"""
Created on 2018/12/27 11:32 AM
---------
@summary: 账号昂贵、限制查询次数及使用时间的用户UserPool
---------
@author: Boris
@email: boris_liu@foxmail.com
"""
import os
import random
import time
from enum import Enum, unique
from typing import Optional, List
from feapder import setting
from feapder.db.redisdb import RedisDB
from feapder.network.user_pool.base_user_pool import GoldUser, UserPoolInterface
from feapder.utils import metrics
from feapder.utils.log import log
from feapder.utils.redis_lock import RedisLock
from feapder.utils.tools import send_msg
@unique
class GoldUserPool(UserPoolInterface):
"""
账号昂贵、限制查询次数的用户的UserPool
"""
    def __init__(
        self,
        redis_key,
        *,
        users: List[GoldUser],
        keep_alive=False,
    ):
        """
        @param redis_key: prefix of the redis key the user pool is stored under
        @param users: the GoldUser account objects managed by this pool
        @param keep_alive: stay resident so users can be replenished as soon as they run short
        """
        self._tab_user_pool = setting.TAB_USER_POOL.format(
            redis_key=redis_key, user_type="gold"
        )
        self.users = users
        self._keep_alive = keep_alive
        self._redisdb = RedisDB()
        self._users_id = []
        if not users:
            raise ValueError("not users")
        # Copy pool-level handles onto the user *class*, so every GoldUser
        # instance shares the same redis connection and key prefix.
        self.users[0].__class__.redisdb = self._redisdb
        self.users[0].__class__.redis_key = self._tab_user_pool
        self.__init_metrics()
        self.__sync_users_base_info()
        # (sic: "sycn" — the method is defined elsewhere with this spelling)
        self.__sycn_users_info()
def login(self, user: GoldUser) -> GoldUser:
"""
登录 生产cookie
"""
raise NotImplementedError
def get_user(
self,
block=True,
username=None,
used_for_spider_name=None,
not_limit_use_interval=False,
) -> Optional[GoldUser]:
"""
@params username: 获取指定的用户
@params used_for_spider_name: 独享式使用,独享爬虫的名字。其他爬虫不可抢占
@params block: 无用户时是否等待
@params not_limit_frequence: 不限制使用频率
@return: GoldUser
"""
while True:
try:
user_id = username or self._get_user_id()
user_str = None
if user_id:
user_str = self._redisdb.hget(self._tab_user_pool, user_id)
if (not user_id or not user_str) and block:
self._keep_alive = False
self.run(username)
continue
# 取到用户
user = GoldUser(**eval(user_str))
# 独占式使用,若为其他爬虫,检查等待使用时间是否超过独占时间,若超过则可以使用
if (
user.get_used_for_spider_name()
and user.get_used_for_spider_name() != used_for_spider_name
):
wait_time = time.time() - user.get_last_use_time()
if wait_time < user.exclusive_time:
log.info(
"用户{} 被 {} 爬虫独占,需等待 {} 秒后才可使用".format(
user.username,
user.get_used_for_spider_name(),
user.exclusive_time - wait_time,
)
)
time.sleep(1)
continue
if not user.is_overwork() and user.is_at_work_time():
if not user.cookies:
log.debug(f"用户 {user.username} 未登录,尝试登录")
self._keep_alive = False
self.run(username)
continue
if not_limit_use_interval or user.is_time_to_use():
user.set_used_for_spider_name(used_for_spider_name)
log.debug("使用用户 {}".format(user.username))
self.record_user_status(user.user_id, GoldUserStatus.USED)
return user
else:
log.debug("{} 用户使用间隔过短 查看下一个用户".format(user.username))
time.sleep(1)
continue
else:
if not user.is_at_work_time():
log.info("用户 {} 不在工作时间 sleep 60s".format(user.username))
if block:
time.sleep(60)
continue
else:
return None
except Exception as e:
log.exception(e)
time.sleep(1)
| 34.746575 | 101 | 0.479401 | # -*- coding: utf-8 -*-
"""
Created on 2018/12/27 11:32 AM
---------
@summary: 账号昂贵、限制查询次数及使用时间的用户UserPool
---------
@author: Boris
@email: boris_liu@foxmail.com
"""
import os
import random
import time
from enum import Enum, unique
from typing import Optional, List
from feapder import setting
from feapder.db.redisdb import RedisDB
from feapder.network.user_pool.base_user_pool import GoldUser, UserPoolInterface
from feapder.utils import metrics
from feapder.utils.log import log
from feapder.utils.redis_lock import RedisLock
from feapder.utils.tools import send_msg
@unique
class GoldUserStatus(Enum):
    """Lifecycle states of a gold user, reported to the metrics backend."""
    # usage states
    USED = "used"
    SUCCESS = "success"
    OVERDUE = "overdue"  # cookie expired
    SLEEP = "sleep"
    EXCEPTION = "exception"
    # login states
    LOGIN_SUCCESS = "login_success"
    LOGIN_FALIED = "login_failed"  # NOTE(review): member name is misspelled but referenced elsewhere; keep for compatibility
class GoldUserPool(UserPoolInterface):
    """
    User pool for "gold" accounts: expensive accounts whose query count and
    usage time are limited. State (cookies, usage bookkeeping) is shared
    through a redis hash table.
    """
    def __init__(
        self,
        redis_key,
        *,
        users: List[GoldUser],
        keep_alive=False,
    ):
        """
        @param redis_key: prefix of the redis key under which users are stored
        @param users: the account definitions for this pool
        @param keep_alive: whether run() stays resident so users can be replenished as soon as the pool runs dry
        """
        self._tab_user_pool = setting.TAB_USER_POOL.format(
            redis_key=redis_key, user_type="gold"
        )
        self.users = users
        self._keep_alive = keep_alive
        self._redisdb = RedisDB()
        self._users_id = []
        if not users:
            raise ValueError("not users")
        # Assign the shared redis handle/key as *class* attributes so every
        # GoldUser instance persists itself into the same redis table.
        self.users[0].__class__.redisdb = self._redisdb
        self.users[0].__class__.redis_key = self._tab_user_pool
        self.__init_metrics()
        self.__sync_users_base_info()
        self.__sycn_users_info()
    def __init_metrics(self):
        # Initialize the metrics backend used by record_user_status().
        metrics.init(**setting.METRICS_OTHER_ARGS)
    def __sync_users_base_info(self):
        # Push local base info to redis. NOTE: only safe to call from __init__.
        for user in self.users:
            cache_user = self.get_user_by_id(user.user_id)
            if cache_user:
                # Copy every public attribute onto the cached copy, then persist it.
                for key, value in user.to_dict().items():
                    if not key.startswith("_"):
                        setattr(cache_user, key, value)
                cache_user.sycn_to_redis()
    def __sycn_users_info(self):
        # Pull login info (cookies etc.) from redis back into the local list.
        for index, user in enumerate(self.users):
            cache_user = self.get_user_by_id(user.user_id)
            if cache_user:
                self.users[index] = cache_user
    def _load_users_id(self):
        # Reload all user ids from redis and shuffle so users are handed out
        # in random order.
        self._users_id = self._redisdb.hkeys(self._tab_user_pool)
        if self._users_id:
            random.shuffle(self._users_id)
    def _get_user_id(self):
        # Pop the next user id, refilling the local id cache when it runs dry.
        # Returns None when redis holds no users at all.
        if not self._users_id:
            self._load_users_id()
        if self._users_id:
            return self._users_id.pop()
    def login(self, user: GoldUser) -> GoldUser:
        """
        Log in and produce cookies for *user*. Subclasses must implement this.
        """
        raise NotImplementedError
    def get_user_by_id(self, user_id: str) -> GoldUser:
        # Fetch and deserialize one user from redis; returns None when unknown.
        user_str = self._redisdb.hget(self._tab_user_pool, user_id)
        if user_str:
            user = GoldUser(**eval(user_str))
            return user
    def get_user(
        self,
        block=True,
        username=None,
        used_for_spider_name=None,
        not_limit_use_interval=False,
    ) -> Optional[GoldUser]:
        """
        @params username: fetch this specific user instead of a random one
        @params used_for_spider_name: exclusive mode - name of the spider that owns the user; other spiders cannot preempt it
        @params block: whether to wait (and trigger logins) when no user is available
        @params not_limit_use_interval: skip the minimum-interval-between-uses check
        @return: GoldUser, or None when non-blocking and no user is usable
        """
        while True:
            try:
                user_id = username or self._get_user_id()
                user_str = None
                if user_id:
                    user_str = self._redisdb.hget(self._tab_user_pool, user_id)
                # No usable user yet: force a (blocking) login pass, then retry.
                if (not user_id or not user_str) and block:
                    self._keep_alive = False
                    self.run(username)
                    continue
                # Got a user record.
                user = GoldUser(**eval(user_str))
                # Exclusive mode: if the user is held by another spider it may
                # only be taken once the wait exceeds the exclusive period.
                if (
                    user.get_used_for_spider_name()
                    and user.get_used_for_spider_name() != used_for_spider_name
                ):
                    wait_time = time.time() - user.get_last_use_time()
                    if wait_time < user.exclusive_time:
                        log.info(
                            "用户{} 被 {} 爬虫独占,需等待 {} 秒后才可使用".format(
                                user.username,
                                user.get_used_for_spider_name(),
                                user.exclusive_time - wait_time,
                            )
                        )
                        time.sleep(1)
                        continue
                if not user.is_overwork() and user.is_at_work_time():
                    # Not logged in yet: run a login pass and retry.
                    if not user.cookies:
                        log.debug(f"用户 {user.username} 未登录,尝试登录")
                        self._keep_alive = False
                        self.run(username)
                        continue
                    if not_limit_use_interval or user.is_time_to_use():
                        user.set_used_for_spider_name(used_for_spider_name)
                        log.debug("使用用户 {}".format(user.username))
                        self.record_user_status(user.user_id, GoldUserStatus.USED)
                        return user
                    else:
                        # Used too recently - move on to the next candidate.
                        log.debug("{} 用户使用间隔过短 查看下一个用户".format(user.username))
                        time.sleep(1)
                        continue
                else:
                    if not user.is_at_work_time():
                        log.info("用户 {} 不在工作时间 sleep 60s".format(user.username))
                    if block:
                        time.sleep(60)
                        continue
                    else:
                        return None
            except Exception as e:
                log.exception(e)
                time.sleep(1)
    def del_user(self, user_id: str):
        # "Delete" = invalidate the cookie; the record itself stays in redis.
        user = self.get_user_by_id(user_id)
        if user:
            user.set_cookies(None)
            self.record_user_status(user.user_id, GoldUserStatus.OVERDUE)
    def add_user(self, user: GoldUser):
        # Persist the user (including fresh cookies) to redis.
        user.sycn_to_redis()
    def delay_use(self, user_id: str, delay_seconds: int):
        # Put a user to sleep for *delay_seconds* before it can be handed out again.
        user = self.get_user_by_id(user_id)
        if user:
            user.set_delay_use(delay_seconds)
            self.record_user_status(user_id, GoldUserStatus.SLEEP)
    def record_success_user(self, user_id: str):
        # Convenience wrapper: mark one successful use of the user.
        self.record_user_status(user_id, GoldUserStatus.SUCCESS)
    def record_exception_user(self, user_id: str):
        # Convenience wrapper: mark one failed use of the user.
        self.record_user_status(user_id, GoldUserStatus.EXCEPTION)
    def run(self, username=None):
        """Login loop: log in every user that has no cookie yet.

        Guarded by a redis lock so only one process performs logins at a time.
        When keep_alive is set the loop re-checks every 10 seconds; otherwise
        it performs a single pass.
        """
        while True:
            try:
                with RedisLock(
                    key=self._tab_user_pool, lock_timeout=3600, wait_timeout=0
                ) as _lock:
                    if _lock.locked:
                        self.__sycn_users_info()
                        online_user = 0
                        for user in self.users:
                            if username and username != user.username:
                                continue
                            try:
                                # Already logged in.
                                if user.cookies:
                                    online_user += 1
                                    continue
                                # Pre-check: respect the minimum re-login interval.
                                if not user.is_time_to_login():
                                    log.info(
                                        "账号{}与上次登录时间间隔过短,暂不登录: 将在{}登录使用".format(
                                            user.username, user.next_login_time()
                                        )
                                    )
                                    continue
                                user = self.login(user)
                                if user.cookies:
                                    # Persist the fresh cookie.
                                    user.set_login_time()
                                    self.add_user(user)
                                    self.record_user_status(
                                        user.user_id, GoldUserStatus.LOGIN_SUCCESS
                                    )
                                    log.debug("登录成功 {}".format(user.username))
                                    online_user += 1
                                else:
                                    log.info("登录失败 {}".format(user.username))
                                    self.record_user_status(
                                        user.user_id, GoldUserStatus.LOGIN_FALIED
                                    )
                            except NotImplementedError:
                                # login() was not overridden - this pool cannot work.
                                log.error(
                                    f"{self.__class__.__name__} must be implementation login method!"
                                )
                                os._exit(0)
                            except Exception as e:
                                # Login failure: record it and alert via send_msg.
                                log.exception(e)
                                msg = f"{user.username} 账号登陆失败 exception: {str(e)}"
                                log.info(msg)
                                self.record_user_status(
                                    user.user_id, GoldUserStatus.LOGIN_FALIED
                                )
                                send_msg(
                                    msg=msg,
                                    level="error",
                                    message_prefix=f"{user.username} 账号登陆失败",
                                )
                        log.info("当前在线user数为 {}".format(online_user))
                if self._keep_alive:
                    time.sleep(10)
                else:
                    break
            except Exception as e:
                log.exception(e)
                time.sleep(1)
    def record_user_status(self, user_id: str, status: GoldUserStatus):
        # Emit one counter tick per (user, status) to the metrics backend.
        metrics.emit_counter(user_id, 1, classify=f"users_{status.value}")
| 5,197 | 245 | 373 |
91d239c28c854fd8c9982789a6d5ce7d11f7e345 | 1,509 | py | Python | tests/unit/test_core/test_events.py | MarvinTorres/kytos | 2c72f45a76cf61f0e2e62f6703bf794db617e8a9 | [
"MIT"
] | 43 | 2017-03-27T14:30:20.000Z | 2022-02-04T12:42:10.000Z | tests/unit/test_core/test_events.py | MarvinTorres/kytos | 2c72f45a76cf61f0e2e62f6703bf794db617e8a9 | [
"MIT"
] | 612 | 2017-03-09T19:22:16.000Z | 2021-05-31T21:48:52.000Z | tests/unit/test_core/test_events.py | MarvinTorres/kytos | 2c72f45a76cf61f0e2e62f6703bf794db617e8a9 | [
"MIT"
] | 54 | 2017-03-03T19:11:26.000Z | 2022-02-16T15:31:49.000Z | """Test kytos.core.events module."""
from unittest import TestCase
from kytos.core.events import KytosEvent
class TestKytosEvent(TestCase):
    """KytosEvent tests."""
    def setUp(self):
        """Instantiate a KytosEvent."""
        self.event = KytosEvent('kytos/core.any')
    def test__str__(self):
        """Test __str__ method."""
        self.assertEqual(str(self.event), 'kytos/core.any')
    def test__repr__(self):
        """Test __repr__ method."""
        self.event.content = {"destination": "dest",
                              "source": "src",
                              "message": "msg"}
        # repr shows the event name followed by its content dict.
        expected = "KytosEvent('kytos/core.any', {'destination': 'dest', " + \
                   "'source': 'src', 'message': 'msg'})"
        self.assertEqual(repr(self.event), expected)
    def test_destination(self):
        """Test destination property and set_destination method."""
        self.assertEqual(self.event.destination, None)
        self.event.set_destination('dest')
        self.assertEqual(self.event.destination, 'dest')
    def test_source(self):
        """Test source property and set_source method."""
        self.assertEqual(self.event.source, None)
        self.event.set_source('src')
        self.assertEqual(self.event.source, 'src')
    def test_message(self):
        """Test message property."""
        # message is read from the event's content dict.
        self.assertEqual(self.event.message, None)
        self.event.content = {"message": "msg"}
        self.assertEqual(self.event.message, 'msg')
| 31.4375 | 78 | 0.60106 | """Test kytos.core.events module."""
from unittest import TestCase
from kytos.core.events import KytosEvent
class TestKytosEvent(TestCase):
    """Unit tests for the KytosEvent class."""

    def setUp(self):
        """Create the event instance shared by every test."""
        self.event = KytosEvent('kytos/core.any')

    def test__str__(self):
        """str() of an event must be its name."""
        self.assertEqual(str(self.event), 'kytos/core.any')

    def test__repr__(self):
        """repr() must show the event name followed by its content dict."""
        content = {"destination": "dest", "source": "src", "message": "msg"}
        self.event.content = content
        expected = ("KytosEvent('kytos/core.any', {'destination': 'dest', "
                    "'source': 'src', 'message': 'msg'})")
        self.assertEqual(repr(self.event), expected)

    def test_destination(self):
        """set_destination() must update the destination property."""
        self.assertEqual(self.event.destination, None)
        self.event.set_destination('dest')
        self.assertEqual(self.event.destination, 'dest')

    def test_source(self):
        """set_source() must update the source property."""
        self.assertEqual(self.event.source, None)
        self.event.set_source('src')
        self.assertEqual(self.event.source, 'src')

    def test_message(self):
        """The message property must read content['message']."""
        self.assertEqual(self.event.message, None)
        self.event.content = {"message": "msg"}
        self.assertEqual(self.event.message, 'msg')
| 0 | 0 | 0 |
383b5d4a3508e1677c06bc8c513a3d82750bc6c2 | 793 | py | Python | project7-------Pig Latin Translator.py | Omkar-Atugade/Python-Projects | a6e82aced415bff78ff0a2d14a8a4213ca7d09be | [
"MIT"
] | null | null | null | project7-------Pig Latin Translator.py | Omkar-Atugade/Python-Projects | a6e82aced415bff78ff0a2d14a8a4213ca7d09be | [
"MIT"
] | null | null | null | project7-------Pig Latin Translator.py | Omkar-Atugade/Python-Projects | a6e82aced415bff78ff0a2d14a8a4213ca7d09be | [
"MIT"
] | null | null | null | #Get sentence from user
# Pig Latin translator: vowel-initial words get "yay" appended; otherwise the
# leading consonant cluster moves to the end followed by "ay".
sentence = input('Enter the sentence you want to translate : ').strip().lower()
# Splitting the sentence into words
words = sentence.split()
# Converting words to pig latin
latin_words = []
for word in words :
    if word[0] in "aeiou" :
        latin_word = word + 'yay'
        latin_words.append(latin_word)
    else:
        # Count leading consonants until the first vowel (or the word's end).
        vowel_pos = 0
        for letter in word :
            if letter not in "aeiou" :
                vowel_pos = vowel_pos +1
            else:
                break
        cons = word[:vowel_pos]
        rest = word[vowel_pos:]
        latin_word = rest + cons + 'ay'
        latin_words.append(latin_word)
# Stick the words back together
output = " ".join(latin_words)
# Print the final output
print(output)
| 23.323529 | 80 | 0.580076 | #Get sentence from user
# Pig Latin translator: vowel-initial words get "yay" appended; otherwise the
# leading consonant cluster moves to the end followed by "ay".
sentence = input('Enter the sentence you want to translate : ').strip().lower()

# Break the sentence into individual words.
words = sentence.split()

# Translate each word.
latin_words = []
for word in words:
    if word[0] in "aeiou":
        latin_words.append(word + 'yay')
        continue
    # Locate the first vowel; default to the word length when there is none.
    split_at = len(word)
    for position, letter in enumerate(word):
        if letter in "aeiou":
            split_at = position
            break
    latin_words.append(word[split_at:] + word[:split_at] + 'ay')

# Reassemble and show the translated sentence.
output = " ".join(latin_words)
print(output)
| 0 | 0 | 0 |
4a9e0c3844fe2098fee7bb70d4bee89a6d62f2d9 | 2,352 | py | Python | cloud/endagaweb/tests/test_users.py | pcarivbts/CommunityCellularManager | aaeca413f7e6326d16c9e4587a83aa93dd5a0666 | [
"BSD-3-Clause"
] | 1 | 2018-04-27T17:55:53.000Z | 2018-04-27T17:55:53.000Z | cloud/endagaweb/tests/test_users.py | pcarivbts/CommunityCellularManager | aaeca413f7e6326d16c9e4587a83aa93dd5a0666 | [
"BSD-3-Clause"
] | 14 | 2017-12-12T08:49:41.000Z | 2018-08-23T20:57:01.000Z | cloud/endagaweb/tests/test_users.py | pcarivbts/CommunityCellularManager | aaeca413f7e6326d16c9e4587a83aa93dd5a0666 | [
"BSD-3-Clause"
] | 1 | 2018-07-04T00:53:38.000Z | 2018-07-04T00:53:38.000Z | """Tests for models.Users.
Copyright (c) 2016-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime
from random import randrange
import uuid
import pytz
from django.test import TestCase
from ccm.common import crdt
from endagaweb import models
class UserTests(TestBase):
"""
We can manage subscriber balances.
"""
def test_sub_get_balance(self):
""" Test the balance property. """
bal = randrange(1, 1000)
sub = self.add_sub(self.gen_imsi(),
balance=bal)
self.assertEqual(sub.balance, bal)
| 28.337349 | 75 | 0.642432 | """Tests for models.Users.
Copyright (c) 2016-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime
from random import randrange
import uuid
import pytz
from django.test import TestCase
from ccm.common import crdt
from endagaweb import models
class TestBase(TestCase):
@classmethod
def setUpTestData(cls):
user = models.User(username="km", email="k@m.com")
user.save()
user_profile = models.UserProfile.objects.get(user=user)
cls.network = user_profile.network
@classmethod
def add_sub(cls, imsi,
ev_kind=None, ev_reason=None, ev_date=None,
balance=0):
sub = models.Subscriber.objects.create(
imsi=imsi, network=cls.network, balance=balance)
if ev_kind:
if ev_date is None:
ev_date = datetime.now(pytz.utc)
ev = models.UsageEvent(
subscriber=sub, network=cls.network, date=ev_date,
kind=ev_kind, reason=ev_reason)
ev.save()
return sub
@staticmethod
def gen_crdt(delta):
# CRDT updates with the same UUID are merged - the max values of
# the P and N counters are taken - so we need to ensure the UUID
# of each update is distinct.
c = crdt.PNCounter(str(uuid.uuid4()))
if delta > 0:
c.increment(delta)
elif delta < 0:
c.decrement(-delta)
return c
@staticmethod
def gen_imsi():
return 'IMSI0%014d' % (randrange(1, 1e10), )
@staticmethod
def get_sub(imsi):
return models.Subscriber.objects.get(imsi=imsi)
class UserTests(TestBase):
"""
We can manage subscriber balances.
"""
def test_sub_get_balance(self):
""" Test the balance property. """
bal = randrange(1, 1000)
sub = self.add_sub(self.gen_imsi(),
balance=bal)
self.assertEqual(sub.balance, bal)
| 1,135 | 226 | 23 |
d78996f2c44c9ac476a065b14b21e830ef580e30 | 304 | py | Python | Python/OS Module/importing.py | themohitpapneja/Code_Dump | ec72144e66d12cba2ce719c37292517588490b42 | [
"Apache-2.0"
] | null | null | null | Python/OS Module/importing.py | themohitpapneja/Code_Dump | ec72144e66d12cba2ce719c37292517588490b42 | [
"Apache-2.0"
] | null | null | null | Python/OS Module/importing.py | themohitpapneja/Code_Dump | ec72144e66d12cba2ce719c37292517588490b42 | [
"Apache-2.0"
] | null | null | null | """
import os
print(os.getcwd())
"""
#OR#
# NOTE(review): wildcard imports pull every public name from `time` and `os`
# into this namespace; explicit imports would be safer.
from time import *
from os import *
print(getcwd())
print(name) ## OS name
print(path.abspath('.'))
print(listdir('.'))
mkdir("india") ## making directory
sleep(2)
rename("india","india2") ## renaming directory
sleep(2)
rmdir("india2") ## removing directory
| 17.882353 | 46 | 0.664474 | """
import os
print(os.getcwd())
"""
#OR#
# NOTE(review): wildcard imports pull every public name from `time` and `os`
# into this namespace; explicit imports would be safer.
from time import *
from os import *
print(getcwd())
print(name) ## OS name
print(path.abspath('.'))
print(listdir('.'))
mkdir("india") ## making directory
sleep(2)
rename("india","india2") ## renaming directory
sleep(2)
rmdir("india2") ## removing directory
| 0 | 0 | 0 |
d2929f6e3b5f0622d33214a597d49836b5363ae9 | 655 | py | Python | scripts/resume_parse.py | vishaljangid1729/Tej-WhatsApp | d346c3ef51a7f9502b027aaec3429ccfbda1b7ca | [
"MIT"
] | null | null | null | scripts/resume_parse.py | vishaljangid1729/Tej-WhatsApp | d346c3ef51a7f9502b027aaec3429ccfbda1b7ca | [
"MIT"
] | null | null | null | scripts/resume_parse.py | vishaljangid1729/Tej-WhatsApp | d346c3ef51a7f9502b027aaec3429ccfbda1b7ca | [
"MIT"
] | null | null | null |
import time
from pyresparser import ResumeParser
import os
from scripts.whatsapp import WhatsApp
# spacy.load('en_core_web_sm')
| 20.46875 | 74 | 0.696183 |
import time
from pyresparser import ResumeParser
import os
from scripts.whatsapp import WhatsApp
# spacy.load('en_core_web_sm')
def Hello():
    """Parse the bundled resume, print its phone number, then send a canned
    WhatsApp message to a hard-coded number.

    Assumes the resume lives at <project root>/Resume/vishal_jangid.pdf.
    """
    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    resume_path = f'{project_root}/Resume/vishal_jangid.pdf'
    parsed = ResumeParser(resume_path).get_extracted_data()
    print(parsed['mobile_number'])
    messenger = WhatsApp()
    messenger.find_user("919663600101")
    # Debug traces left by the original author.
    print("hewrwefrfs")
    messenger.send_message("This is the bot")
    print("fsdafasdfasfasfasf")
86b37d705857d108cdba720a1c9b283be9d61e61 | 9,593 | py | Python | jesse/modes/optimize_mode/__init__.py | jcsaaddupuy/jesse | 58c64c57dfb6aa31d2547415b97bba3e5de53548 | [
"MIT"
] | 1 | 2022-01-12T22:43:23.000Z | 2022-01-12T22:43:23.000Z | jesse/modes/optimize_mode/__init__.py | jcsaaddupuy/jesse | 58c64c57dfb6aa31d2547415b97bba3e5de53548 | [
"MIT"
] | 10 | 2021-12-08T10:32:12.000Z | 2022-03-29T10:30:10.000Z | jesse/modes/optimize_mode/__init__.py | b1nhm1nh/jesse-cli | b463bdc491cedd8042f1a9215e039efa4f701325 | [
"MIT"
] | null | null | null | import os
from math import log10
from multiprocessing import cpu_count
from typing import Dict, Any, Tuple, Union
import arrow
import click
from numpy import ndarray
import jesse.helpers as jh
import jesse.services.required_candles as required_candles
from jesse import exceptions
from jesse.config import config
from jesse.modes.backtest_mode import load_candles, simulator
from jesse.routes import router
from jesse.services import metrics as stats
from jesse.services.validators import validate_routes
from jesse.store import store
from .Genetics import Genetics
os.environ['NUMEXPR_MAX_THREADS'] = str(cpu_count())
| 42.259912 | 194 | 0.629834 | import os
from math import log10
from multiprocessing import cpu_count
from typing import Dict, Any, Tuple, Union
import arrow
import click
from numpy import ndarray
import jesse.helpers as jh
import jesse.services.required_candles as required_candles
from jesse import exceptions
from jesse.config import config
from jesse.modes.backtest_mode import load_candles, simulator
from jesse.routes import router
from jesse.services import metrics as stats
from jesse.services.validators import validate_routes
from jesse.store import store
from .Genetics import Genetics
os.environ['NUMEXPR_MAX_THREADS'] = str(cpu_count())
class Optimizer(Genetics):
    """Genetic-algorithm optimizer for a single-route strategy.

    Scores each DNA by backtesting on training candles, then validates the
    surviving candidates on held-out testing candles.
    """
    def __init__(self, training_candles: ndarray, testing_candles: ndarray, optimal_total: int, cpu_cores: int, csv: bool,
                 json: bool, start_date: str, finish_date: str) -> None:
        """Configure the GA from the single configured route and the candle sets.

        Raises NotImplementedError for multi-route setups, InvalidStrategy when
        the strategy declares no hyperparameters, and ValueError for an invalid
        cpu_cores count.
        """
        if len(router.routes) != 1:
            raise NotImplementedError('optimize_mode mode only supports one route at the moment')
        self.strategy_name = router.routes[0].strategy_name
        self.optimal_total = optimal_total
        self.exchange = router.routes[0].exchange
        self.symbol = router.routes[0].symbol
        self.timeframe = router.routes[0].timeframe
        StrategyClass = jh.get_strategy_class(self.strategy_name)
        self.strategy_hp = StrategyClass.hyperparameters(None)
        solution_len = len(self.strategy_hp)
        if solution_len == 0:
            raise exceptions.InvalidStrategy('Targeted strategy does not implement a valid hyperparameters() method.')
        # GA sizes scale with the number of hyperparameters (the DNA length).
        super().__init__(
            iterations=2000 * solution_len,
            population_size=solution_len * 100,
            solution_len=solution_len,
            options={
                'strategy_name': self.strategy_name,
                'exchange': self.exchange,
                'symbol': self.symbol,
                'timeframe': self.timeframe,
                'strategy_hp': self.strategy_hp,
                'csv': csv,
                'json': json,
                'start_date': start_date,
                'finish_date': finish_date,
            }
        )
        # cpu_cores == 0 means "use every available core".
        if cpu_cores > cpu_count():
            raise ValueError(f'Entered cpu cores number is more than available on this machine which is {cpu_count()}')
        elif cpu_cores == 0:
            self.cpu_cores = cpu_count()
        else:
            self.cpu_cores = cpu_cores
        self.training_candles = training_candles
        self.testing_candles = testing_candles
        # Derive the date ranges (YYYY-MM-DD) from the first/last candle timestamps.
        key = jh.key(self.exchange, self.symbol)
        training_candles_start_date = jh.timestamp_to_time(self.training_candles[key]['candles'][0][0]).split('T')[0]
        training_candles_finish_date = jh.timestamp_to_time(self.training_candles[key]['candles'][-1][0]).split('T')[0]
        testing_candles_start_date = jh.timestamp_to_time(self.testing_candles[key]['candles'][0][0]).split('T')[0]
        testing_candles_finish_date = jh.timestamp_to_time(self.testing_candles[key]['candles'][-1][0]).split('T')[0]
        # Preload the warm-up candles required before each simulation run.
        self.training_initial_candles = []
        self.testing_initial_candles = []
        for c in config['app']['considering_candles']:
            self.training_initial_candles.append(
                required_candles.load_required_candles(c[0], c[1], training_candles_start_date,
                                                       training_candles_finish_date))
            self.testing_initial_candles.append(
                required_candles.load_required_candles(c[0], c[1], testing_candles_start_date,
                                                       testing_candles_finish_date))
    def fitness(self, dna: str) -> tuple:
        """Backtest *dna* and return (score, training_metrics, testing_metrics).

        The score combines a trade-count factor with the configured risk ratio;
        candidates with fewer than 6 trades or a negative ratio get a floor
        score of 0.0001.
        """
        hp = jh.dna_to_hp(self.strategy_hp, dna)
        # init candle store
        store.candles.init_storage(5000)
        # inject required TRAINING candles to the candle store
        for num, c in enumerate(config['app']['considering_candles']):
            required_candles.inject_required_candles_to_store(
                self.training_initial_candles[num],
                c[0],
                c[1]
            )
        # run backtest simulation
        simulator(self.training_candles, hp)
        training_data = {'win_rate': None, 'total': None,
                         'net_profit_percentage': None}
        testing_data = {'win_rate': None, 'total': None,
                        'net_profit_percentage': None}
        # TODO: some of these have to be dynamic based on how many days it's trading for like for example "total"
        # I'm guessing we should accept "optimal" total from command line
        if store.completed_trades.count > 5:
            training_data = stats.trades(store.completed_trades.trades, store.app.daily_balance)
            # log-scaled trade count relative to the requested optimal total, capped at 1
            total_effect_rate = log10(training_data['total']) / log10(self.optimal_total)
            total_effect_rate = min(total_effect_rate, 1)
            ratio_config = jh.get_config('env.optimization.ratio', 'sharpe')
            # Normalize the chosen ratio into [0, 1]; bounds differ per metric.
            if ratio_config == 'sharpe':
                ratio = training_data['sharpe_ratio']
                ratio_normalized = jh.normalize(ratio, -.5, 5)
            elif ratio_config == 'calmar':
                ratio = training_data['calmar_ratio']
                ratio_normalized = jh.normalize(ratio, -.5, 30)
            elif ratio_config == 'sortino':
                ratio = training_data['sortino_ratio']
                ratio_normalized = jh.normalize(ratio, -.5, 15)
            elif ratio_config == 'omega':
                ratio = training_data['omega_ratio']
                ratio_normalized = jh.normalize(ratio, -.5, 5)
            elif ratio_config == 'serenity':
                ratio = training_data['serenity_index']
                ratio_normalized = jh.normalize(ratio, -.5, 15)
            elif ratio_config == 'smart sharpe':
                ratio = training_data['smart_sharpe']
                ratio_normalized = jh.normalize(ratio, -.5, 5)
            elif ratio_config == 'smart sortino':
                ratio = training_data['smart_sortino']
                ratio_normalized = jh.normalize(ratio, -.5, 15)
            else:
                raise ValueError(
                    f'The entered ratio configuration `{ratio_config}` for the optimization is unknown. Choose between sharpe, calmar, sortino, serenity, smart shapre, smart sortino and omega.')
            if ratio < 0:
                score = 0.0001
                # reset store
                store.reset()
                return score, training_data, testing_data
            score = total_effect_rate * ratio_normalized
            # perform backtest with testing data. this is using data
            # model hasn't trained for. if it works well, there is
            # high change it will do good with future data too.
            store.reset()
            store.candles.init_storage(5000)
            # inject required TESTING candles to the candle store
            for num, c in enumerate(config['app']['considering_candles']):
                required_candles.inject_required_candles_to_store(
                    self.testing_initial_candles[num],
                    c[0],
                    c[1]
                )
            # run backtest simulation
            simulator(self.testing_candles, hp)
            # log for debugging/monitoring
            if store.completed_trades.count > 0:
                testing_data = stats.trades(store.completed_trades.trades, store.app.daily_balance)
        else:
            score = 0.0001
        # reset store
        store.reset()
        return score, training_data, testing_data
def optimize_mode(start_date: str, finish_date: str, optimal_total: int, cpu_cores: int, csv: bool, json: bool) -> None:
    """Entry point for optimize mode: prepare the candle data and run the GA."""
    click.clear()
    print('loading candles...')

    # make sure the configured routes are usable before doing any heavy work
    validate_routes(router)

    # split the historical candles: 85% for training, 15% for testing
    training_candles, testing_candles = get_training_and_testing_candles(start_date, finish_date)

    click.clear()

    Optimizer(
        training_candles, testing_candles, optimal_total, cpu_cores, csv, json, start_date, finish_date
    ).run()
    # TODO: store hyper parameters into each strategies folder per each Exchange-symbol-timeframe
def get_training_and_testing_candles(start_date_str: str, finish_date_str: str) -> Tuple[
    Dict[str, Dict[str, Union[Union[str, ndarray], Any]]], Dict[str, Dict[str, Union[Union[str, ndarray], Any]]]]:
    """Split the cached candles into an 85% training set and a 15% testing set."""
    start_date = jh.arrow_to_timestamp(arrow.get(start_date_str, 'YYYY-MM-DD'))
    finish_date = jh.arrow_to_timestamp(arrow.get(finish_date_str, 'YYYY-MM-DD')) - 60000

    # Load candles (first try cache, then database)
    candles = load_candles(start_date_str, finish_date_str)

    # 85% of the calendar days, expressed in 1-minute candles, go to training.
    days_diff = jh.date_diff_in_days(jh.timestamp_to_arrow(start_date), jh.timestamp_to_arrow(finish_date))
    divider_index = int(days_diff * 0.85) * 1440

    training_candles = {
        key: {
            'exchange': candle['exchange'],
            'symbol': candle['symbol'],
            'candles': candle['candles'][:divider_index],
        }
        for key, candle in candles.items()
    }
    testing_candles = {
        key: {
            'exchange': candle['exchange'],
            'symbol': candle['symbol'],
            'candles': candle['candles'][divider_index:],
        }
        for key, candle in candles.items()
    }
    return training_candles, testing_candles
| 8,841 | 5 | 122 |
903ba598aae64934e655f86467a96927dfbb73c5 | 1,676 | py | Python | test/test_header.py | bugov/http-basic-auth | a6cfad797beb05d022febe0b7dca8b972491a487 | [
"BSD-3-Clause"
] | 3 | 2018-02-20T20:04:43.000Z | 2019-03-12T03:24:30.000Z | test/test_header.py | bugov/http-basic-auth | a6cfad797beb05d022febe0b7dca8b972491a487 | [
"BSD-3-Clause"
] | null | null | null | test/test_header.py | bugov/http-basic-auth | a6cfad797beb05d022febe0b7dca8b972491a487 | [
"BSD-3-Clause"
] | null | null | null | import pytest
from http_basic_auth import parse_header, generate_header, BasicAuthException
@pytest.mark.parametrize("token,expect", [
('Basic dGVzdDpzZWNyZXQ=', ('test', 'secret')),
('BASIC dGVzdDpzZWNyZXQx', ('test', 'secret1')),
('BaSiC dGVzdDpzZWM6cmV0MQ==', ('test', 'sec:ret1')),
('Basic \t bmFtZTp9e3NkYXNkJyI=', ('name', '}{sdasd\'\"')),
('Basic 8J+YgTrQv9Cw0YA6w7bQu9GM', ('😁', 'пар:öль')),
])
@pytest.mark.parametrize("header", [
'',
'BasicdGVzdDpzZWNyZXQ=',
'BASI dGVzdDpzZWNyZXQx',
'dGVzdDpzZWM6cmV0MQ==',
None,
1,
])
@pytest.mark.parametrize("token,login_password", [
('Basic dGVzdDpzZWNyZXQ=', ('test', 'secret')),
('Basic dGVzdDpzZWNyZXQx', ('test', 'secret1')),
('Basic dGVzdDpzZWM6cmV0MQ==', ('test', 'sec:ret1')),
('Basic bmFtZTp9e3NkYXNkJyI=', ('name', '}{sdasd\'\"')),
('Basic 8J+YgTrQv9Cw0YA6w7bQu9GM', ('😁', 'пар:öль')),
])
@pytest.mark.parametrize("token,expect", [
('Basic 8J+YgTrQv9Cw0YA6w7bQu9GM', ('😁', 'пар:öль')),
])
@pytest.mark.parametrize("token,login_password", [
('Basic 8J+YgTrQv9Cw0YA6w7bQu9GM', ('😁', 'пар:öль')),
])
| 31.622642 | 77 | 0.671241 | import pytest
from http_basic_auth import parse_header, generate_header, BasicAuthException
@pytest.mark.parametrize("token,expect", [
    ('Basic dGVzdDpzZWNyZXQ=', ('test', 'secret')),
    ('BASIC dGVzdDpzZWNyZXQx', ('test', 'secret1')),
    ('BaSiC dGVzdDpzZWM6cmV0MQ==', ('test', 'sec:ret1')),
    ('Basic \t bmFtZTp9e3NkYXNkJyI=', ('name', '}{sdasd\'\"')),
    ('Basic 8J+YgTrQv9Cw0YA6w7bQu9GM', ('😁', 'пар:öль')),
])
def test_header_parse(token, expect):
    """Valid headers decode to (login, password); the scheme is case-insensitive."""
    assert parse_header(token, coding='utf-8') == expect
@pytest.mark.parametrize("header", [
    '',
    'BasicdGVzdDpzZWNyZXQ=',
    'BASI dGVzdDpzZWNyZXQx',
    'dGVzdDpzZWM6cmV0MQ==',
    None,
    1,
])
def test_wrong_header_parse(header):
    """Malformed or non-string headers raise BasicAuthException."""
    with pytest.raises(BasicAuthException):
        parse_header(header, coding='utf-8')
@pytest.mark.parametrize("token,login_password", [
    ('Basic dGVzdDpzZWNyZXQ=', ('test', 'secret')),
    ('Basic dGVzdDpzZWNyZXQx', ('test', 'secret1')),
    ('Basic dGVzdDpzZWM6cmV0MQ==', ('test', 'sec:ret1')),
    ('Basic bmFtZTp9e3NkYXNkJyI=', ('name', '}{sdasd\'\"')),
    ('Basic 8J+YgTrQv9Cw0YA6w7bQu9GM', ('😁', 'пар:öль')),
])
def test_header_gen(token, login_password):
    """generate_header builds the canonical 'Basic <base64>' token."""
    assert token == generate_header(*login_password, coding='utf-8')
@pytest.mark.parametrize("token,expect", [
    ('Basic 8J+YgTrQv9Cw0YA6w7bQu9GM', ('😁', 'пар:öль')),
])
def test_header_parse_utf8_default(token, expect):
    """parse_header defaults to utf-8 when no coding is passed."""
    assert parse_header(token) == expect
@pytest.mark.parametrize("token,login_password", [
    ('Basic 8J+YgTrQv9Cw0YA6w7bQu9GM', ('😁', 'пар:öль')),
])
def test_header_gen_utf8_default(token, login_password):
    """generate_header defaults to utf-8 when no coding is passed."""
    assert token == generate_header(*login_password)
| 426 | 0 | 110 |
2e0fb78fa4ac896060bf8099388628c620cb68d7 | 22 | py | Python | dipper/utils/__init__.py | monarch-ci/dipper | abcd4843ec051a47cef3b592fadc1cd7d1616b45 | [
"BSD-3-Clause"
] | 52 | 2015-01-28T21:22:19.000Z | 2022-03-15T09:21:07.000Z | dipper/utils/__init__.py | monarch-ci/dipper | abcd4843ec051a47cef3b592fadc1cd7d1616b45 | [
"BSD-3-Clause"
] | 742 | 2015-01-06T00:21:30.000Z | 2021-08-02T20:57:17.000Z | dipper/utils/__init__.py | monarch-ci/dipper | abcd4843ec051a47cef3b592fadc1cd7d1616b45 | [
"BSD-3-Clause"
] | 24 | 2015-07-28T17:06:30.000Z | 2021-08-18T21:28:53.000Z | __author__ = 'nicole'
| 11 | 21 | 0.727273 | __author__ = 'nicole'
| 0 | 0 | 0 |
aabd88d6d9266e9e9a00b2139c397b70f4290f58 | 2,320 | py | Python | dante.py | dantefurrybot/dantev4 | 7282e9466209e794b1e91234a0eb7aa83fd6e413 | [
"CC-BY-4.0"
] | 1 | 2020-08-17T15:59:40.000Z | 2020-08-17T15:59:40.000Z | dante.py | dantefurrybot/dantev4 | 7282e9466209e794b1e91234a0eb7aa83fd6e413 | [
"CC-BY-4.0"
] | null | null | null | dante.py | dantefurrybot/dantev4 | 7282e9466209e794b1e91234a0eb7aa83fd6e413 | [
"CC-BY-4.0"
] | 2 | 2020-08-08T19:02:27.000Z | 2020-08-11T00:40:35.000Z | import discord
import importlib
from discord.ext import commands
channels = [725776681159098408, 728691486115233804, 738967638964830341, 725566843766571112, 732288159060328529, 739199815627440232, 730887667491012720, 729753696199508088]
client = MyClient()
client.run('YourTokenHere')
| 42.181818 | 188 | 0.607759 | import discord
import importlib
from discord.ext import commands
# Channel ids exempt from the invite-link filter (promo/partner channels)
# — presumably; TODO confirm against the guild's channel layout.
channels = [725776681159098408, 728691486115233804, 738967638964830341, 725566843766571112, 732288159060328529, 739199815627440232, 730887667491012720, 729753696199508088]
class MyClient(discord.Client):
    """Discord client with hard-coded moderation rules for one guild.

    Visible responsibilities: word-pair filtering, invite-link removal
    (with staff/channel exemptions), mass-mention protection, and dispatch
    of '!'-prefixed commands to dynamically imported feature modules.
    """
    async def on_ready(self):
        """Log the bot's identity once the gateway session is ready."""
        print('Logged on as {0}!'.format(self.user))
    async def on_message(self, message):
        """Moderate an incoming message, then hand it to the command modules.

        NOTE(review): mutates message.content to lower case up front, so
        every later filter and the dispatched commands are case-insensitive.
        """
        prefix = "!"
        staff = False
        message.content = message.content.lower()
        # Blanket filter: delete any message containing both words.
        if 'dark' in message.content and 'cute' in message.content:
            await message.delete()
        # Staff detection via a hard-coded role id, only in one guild.
        if message.guild.id == 725201209358549012:
            for role in message.author.roles:
                if role.id == 739727880799518741:
                    staff = True
        # Invite-link filter; staff and whitelisted channels are exempt.
        if 'discord.gg' in message.content:
            if staff == True:
                return
            if message.channel.id in channels:
                return
            await message.channel.send("<@" + str(message.author.id) + "> Please do not promote your server here! \n\nIf you're looking to partner, please check <#729753696199508088>")
            await message.delete()
        # Mass-mention guard: a '!' command with more than 3 mentions is
        # deleted and rejected before any module sees it.
        mentions = message.mentions
        if len(mentions) > 3:
            if not message.author.bot:
                if message.content.startswith("!"):
                    await message.delete()
                    await message.channel.send("<@" + str(message.author.id) + "> Too many mentions!")
                    return
        # Dispatch to the feature modules; each module inspects the message
        # itself. __import__ runs per message but sys.modules caches it.
        fun = __import__("fun")
        await fun.msg(str(message.content), message, prefix, self)
        info = __import__("info")
        await info.msg(str(message.content), message, prefix, self)
        mod = __import__("mod")
        await mod.msg(str(message.content), message, prefix, self)
        christmas = __import__("christmas")
        await christmas.msg(str(message.content), message, prefix, self)
        food = __import__("foodanddrink")
        await food.msg(str(message.content), message, prefix, self)
        nsfw = __import__("nsfw")
        await nsfw.msg(str(message.content), message, prefix, self)
        # Finally remove the command invocation itself from the channel.
        if not message.author.bot:
            if message.content.startswith("!"):
                await message.delete()
# Instantiate and start the bot; run() blocks until the client stops.
client = MyClient()
client.run('YourTokenHere')  # placeholder token — replace before deploying
| 1,946 | 10 | 76 |
31259c906ff2965d051dbbd5b31dab0049ea6a49 | 14,800 | py | Python | L3NEW_TG_B10_graph.py | Drywock/L3N_TDG_Projet | a584307efb47f6e8ea0d23d177112aa0ca540104 | [
"MIT"
] | null | null | null | L3NEW_TG_B10_graph.py | Drywock/L3N_TDG_Projet | a584307efb47f6e8ea0d23d177112aa0ca540104 | [
"MIT"
] | null | null | null | L3NEW_TG_B10_graph.py | Drywock/L3N_TDG_Projet | a584307efb47f6e8ea0d23d177112aa0ca540104 | [
"MIT"
] | null | null | null | """
file : graph.py
author(s) : Thomas LINTANF, Laurent CALYDON
Version : 6.0
Definition de la classe Graph qui permet de stocker un graph orienté et de lui
appliquer différents algorithmes.
"""
import csv
import logging as log
class Graph:
    """
    Graph class: represents a directed graph.

    The graph is stored as a boolean adjacency matrix plus a weight matrix
    ('*' marks a missing arc).  Algorithms provided: circuit detection,
    rank computation, scheduling-graph validation, earliest/latest date
    calendars and slack (margin) computation.
    Version: 5.0
    """
    def __init__(self):
        """
        Graph constructor: builds an empty graph.

        The 'u' ("unknown") sentinel on contient_circuit and
        est_ordonnancement means the corresponding check has not run yet.
        Version: 4.0
        """
        self.nb_sommets = 0              # number of vertices
        self.nb_arcs = 0                 # number of arcs
        self.m_adjacence = []            # adjacency matrix (True = arc present)
        self.m_valeurs = []              # weight matrix ('*' = no arc)
        self.contient_circuit = 'u'      # set by detection_circuit()
        self.rang = []                   # vertex ranks, set by calc_rang()
        self.est_ordonnancement = 'u'    # set by est_graph_ordonnancement()
        self.dates_au_plus_tot = []      # earliest dates
        self.dates_au_plus_tard = []     # latest dates
        self.marges_totales = []         # total slacks
        self.marges_libres = []          # free slacks
    def read_file(self, address):
        """
        Load a graph from a txt file in csv format.

        Expected layout (';'-separated): line 1 = vertex count,
        line 2 = arc count, then one 'start;end;weight' row per arc.
        NOTE(review): the matrices are appended to, never reset — calling
        this twice on the same instance would corrupt the graph.
        version : 1.3
        """
        l_rows = []
        with open(address) as csvfile:
            reader = csv.reader(csvfile, delimiter=';', quoting=csv.QUOTE_NONNUMERIC)
            # temporary storage of the csv rows as lists of ints
            for row in reader:
                l_rows.append([int(i) for i in row])
        log.info('Chargement du fichier : %s', address)
        # extract the number of vertices and arcs
        self.nb_sommets = int(l_rows[0][0])
        log.info('%d sommets', self.nb_sommets)
        self.nb_arcs = int(l_rows[1][0])
        # bug fix: previously logged nb_sommets instead of nb_arcs here
        log.info('%d arcs', self.nb_arcs)
        # initialise the adjacency and weight matrices
        for _ in range(0, self.nb_sommets):
            ligne_adjacence = []
            ligne_valeur = []
            for _ in range(0, self.nb_sommets):
                ligne_adjacence.append(False)
                ligne_valeur.append('*')
            self.m_adjacence.append(ligne_adjacence)
            self.m_valeurs.append(ligne_valeur)
        log.info('Initialisation des matrices')
        # write the arcs into the matrices
        log.info('Chargement des arcs')
        for arc in l_rows[2:]:
            sommet_depart = int(arc[0])
            sommet_arrivee = int(arc[1])
            poid = arc[2]
            self.m_adjacence[sommet_depart][sommet_arrivee] = True
            self.m_valeurs[sommet_depart][sommet_arrivee] = poid
            log.info('%d --> %d = %d', sommet_depart, sommet_arrivee, poid)
    # to do: improve the matrix display
    def __str__(self):
        """
        String representation: sizes plus both matrices as tab-separated grids.

        NOTE(review): the stray backtick after "sommets" in the header line
        is kept as-is for output compatibility.
        Version: 1.1
        """
        repr_str = "Graphe :\n - {0} sommets`\n - {1} arcs\n".format(self.nb_sommets, self.nb_arcs)
        repr_str += "Matrice d'Adjacence :\n\t"
        for sommet in range(0, self.nb_sommets):
            repr_str += "{0}\t".format(sommet)
        repr_str += '\n'
        indice = 0
        for ligne in self.m_adjacence:
            repr_str += "{0}\t".format(indice)
            for case in ligne:
                repr_str += "{0}\t".format('V' if case else 'F')
            repr_str += '\n'
            indice += 1
        repr_str += 'Matrice des Valeurs :\n'
        repr_str += "\t"
        for sommet in range(0, self.nb_sommets):
            repr_str += "{0}\t".format(sommet)
        repr_str += '\n'
        indice = 0
        for ligne in self.m_valeurs:
            repr_str += "{0}\t".format(indice)
            for case in ligne:
                repr_str += "{0}\t".format(case)
            repr_str += '\n'
            indice += 1
        return repr_str
    def detection_circuit(self):
        """
        Check whether the graph contains a circuit (cycle).

        Iteratively strips vertices without predecessors ("entry points");
        any vertex left at the end belongs to a circuit.
        Returns True if the graph contains at least one circuit, False
        otherwise, and stores the result in contient_circuit.
        Version: 1.2
        """
        log.info("Detection de circuit\nMéthode de détection des points d'entrés")
        liste_sommets = list(range(0, self.nb_sommets))
        continuer = True
        while continuer:
            continuer = False
            sommet_a_supr = []
            # look for vertices without a predecessor
            for sommet_arrivee in liste_sommets:
                has_pred = False
                for sommet_depart in liste_sommets:
                    has_pred = has_pred or self.m_adjacence[sommet_depart][sommet_arrivee]
                if not has_pred:
                    sommet_a_supr.append(sommet_arrivee)
            # remove the predecessor-less vertices
            for sommet in sommet_a_supr:
                liste_sommets.remove(sommet)
            # leave the loop once nothing was removed or nothing remains
            continuer = len(sommet_a_supr) > 0 and len(liste_sommets) > 0
            log.info("Points d'entrés :")
            if continuer:
                log.info(sommet_a_supr)
                log.info("Sommets restant :\n%s", liste_sommets)
            else:
                log.info('Aucun')
        # any vertex left implies at least one circuit
        self.contient_circuit = len(liste_sommets) != 0
        if self.contient_circuit:
            log.info('Le graphe contient au moins un circuit')
        else:
            log.info('Le graphe ne contient aucun circuit')
        return self.contient_circuit
    def calc_rang(self):
        """
        Compute the rank of every vertex of the graph.

        Requires detection_circuit() to have been run and the graph to be
        circuit-free; ranks are written to self.rang.
        version: 1.2
        """
        if self.contient_circuit == 'u':
            log.warning("Calcul des rangs impossible : detectionCircuit() doit être lancée avant")
        elif self.contient_circuit:
            log.warning("Impossible de calculer les rangs : présence d'un circuit")
        else:
            # initialise the rank list
            self.rang = [0 for _ in range(0, self.nb_sommets)]
            liste_sommets = list(range(0, self.nb_sommets))
            continuer = True
            rang = 0
            while continuer:
                # look for vertices without a predecessor
                sommet_a_supr = []
                for sommet_arrivee in liste_sommets:
                    has_pred = False
                    for sommet_depart in liste_sommets:
                        has_pred = has_pred or self.m_adjacence[sommet_depart][sommet_arrivee]
                    if not has_pred:
                        sommet_a_supr.append(sommet_arrivee)
                # remove them and assign the current rank
                for sommet in sommet_a_supr:
                    liste_sommets.remove(sommet)
                    self.rang[sommet] = rang
                log.info("Rang courant = %d\nPoints d'entrés :\n%s", rang, sommet_a_supr)
                rang += 1
                continuer = len(liste_sommets) > 0
            log.info("Graphe vide\nRangs calculés")
            log.info("Sommets :\t%s", ''.join(["%d\t" % i for i in range(0, self.nb_sommets)]))
            log.info("Rang :\t\t%s", ''.join(["%d\t" % i for i in self.rang]))
    def est_graph_ordonnancement(self):
        """
        Check whether this is a scheduling graph.

        Conditions: a single entry point, a single exit point, no circuit,
        non-negative and identical weights on every outgoing arc of a given
        vertex, and zero-valued outgoing arcs on the entry point.
        Stores and returns the boolean result (est_ordonnancement).
        Version: 1.2
        """
        log.info("Verification qu'il s'agit d'un graphe d'ordonnancement :")
        # a single entry point (exactly one vertex of rank 0)
        res = self.rang.count(0) == 1
        log.info("A qu'un seul point d'entree : %s", res)
        # a single exit point (exactly one vertex of maximal rank)
        ans = self.rang.count(max(self.rang)) == 1
        log.info("A qu'un seul point de sortie : %s", ans)
        res = res and ans
        # no circuit
        ans = not self.contient_circuit
        log.info("Ne contient pas un circuit: %s", ans)
        res = res and ans
        # identical weights on all outgoing arcs of each vertex
        ans = True
        for ligne in self.m_valeurs:
            # find the first real weight of the row ('*' = no arc)
            i = 0
            while ligne[i] == '*' and i < self.nb_sommets-1:
                i += 1
            # no negative-valued arcs
            is_pos = True
            if ligne[i] != '*':
                is_pos = ligne[i] >= 0
            ans = ans and is_pos
            for case in ligne:
                ans = ans and (case == '*' or case == ligne[i])
        log.info("Arcs incidents extérieurs positifs et égaux pour chaque sommet: %s", ans)
        res = res and ans
        # outgoing arcs of the entry point must have value zero
        i = self.rang.index(0)
        ans = True
        for case in self.m_valeurs[i]:
            ans = ans and (case == '*' or case == 0)
        log.info("Arcs incidents extérieurs du point d'entrée à valeur 0 : %s", ans)
        res = res and ans
        if res:
            log.info("Le graphe est un graphe d'ordonnancement")
        else:
            log.info("Le graphe n'est pas un graphe d'ordonnancement")
        self.est_ordonnancement = res
        return res
    def calc_calend_plus_tot(self):
        """
        Compute the earliest-date calendar, provided the graph is a
        scheduling graph (est_graph_ordonnancement() must have run).
        version: 1.0
        """
        if self.est_ordonnancement == 'u':
            log.error("Le graphe n'as pas été testé pour l'ordonnancement")
        elif self.est_ordonnancement:
            log.info("Calcul du calendrier au plus tôt")
            # build the list of vertices ordered by increasing rank
            sommets = []
            for rang in range(0, max(self.rang)+1):
                for sommet in range(0, self.nb_sommets):
                    if self.rang[sommet] == rang:
                        sommets.append(sommet)
            # initialise the calendar
            for i in range(self.nb_sommets):
                self.dates_au_plus_tot.append('*')
            # start date: the entry point gets date 0
            i = self.rang.index(0)
            self.dates_au_plus_tot[i] = 0
            sommets.remove(i)
            log.info("Sommet 0 date au plus tot : 0")
            for sommet in sommets:
                # build the predecessor list
                liste_pred = []
                for pred in range(0, self.nb_sommets):
                    if self.m_adjacence[pred][sommet]:
                        liste_pred.append(pred)
                # candidate dates, one per predecessor
                dates = []
                for pred in liste_pred:
                    dates.append(self.dates_au_plus_tot[pred] + self.m_valeurs[pred][sommet])
                # earliest date = latest of the candidates
                self.dates_au_plus_tot[sommet] = max(dates)
                log.info("Sommet %d date au plus tot : %d", sommet, self.dates_au_plus_tot[sommet])
            log.info("\nSommets:\t\t\t%s", ''.join('%d\t' % i for i in range(0, self.nb_sommets)))
            log.info("Dates au plus tot:\t%s", ''.join('%s\t' % i for i in self.dates_au_plus_tot))
        else:
            log.error("Le graphe n'est pas un graphe d'ordonnancement")
    def calc_calend_plus_tard(self):
        """
        Compute the latest-date calendar.

        Requires calc_calend_plus_tot() to have been run first.
        version: 1.0
        """
        if len(self.dates_au_plus_tot) > 0:
            log.info("Calcul du calendrier au plus tard :")
            # build the list of vertices ordered by decreasing rank
            sommets = []
            for rang in range(0, max(self.rang)+1):
                for sommet in range(0, self.nb_sommets):
                    if self.rang[sommet] == rang:
                        sommets.insert(0, sommet)
            # initialise the calendar
            self.dates_au_plus_tard = ['*' for _ in range(0, self.nb_sommets)]
            # end date: the exit point copies its earliest date
            fin = self.rang.index(max(self.rang))
            self.dates_au_plus_tard[fin] = self.dates_au_plus_tot[fin]
            sommets.remove(fin)
            log.info("Sommet %d date au plus tard : %d", fin, self.dates_au_plus_tard[fin])
            for sommet in sommets:
                # build the successor list
                liste_succ = []
                for succ in range(0, self.nb_sommets):
                    if self.m_adjacence[sommet][succ]:
                        liste_succ.append(succ)
                # candidate dates, one per successor
                dates = []
                for succ in liste_succ:
                    dates.append(self.dates_au_plus_tard[succ] - self.m_valeurs[sommet][succ])
                # latest date = earliest of the candidates
                self.dates_au_plus_tard[sommet] = min(dates)
                log.info("Sommet %d date au plus tard : %d",
                         sommet, self.dates_au_plus_tard[sommet])
            log.info("\nSommets:\t\t\t%s", ''.join('%d\t' % i for i in range(0, self.nb_sommets)))
            log.info("Dates au plus tard:\t%s",
                     ''.join('%d\t' % i for i in self.dates_au_plus_tard))
        else:
            log.error("Le calendrier au plus tôt n'est pas calculé")
    def calc_marges(self):
        """
        Compute the total and free slacks (margins).

        Requires both calendars to have been computed.
        NOTE(review): the free-slack loop assumes only the last vertex
        (index nb_sommets-1) has no successor; min() would fail on an
        empty successor list for any other vertex — confirm the input.
        version: 1.1
        """
        # total slacks
        log.info("Calcule des marges Totales :")
        for i in range(0, self.nb_sommets):
            self.marges_totales.append(self.dates_au_plus_tard[i] - self.dates_au_plus_tot[i])
            log.info("Sommet %d --> marge totale : %d", i, self.marges_totales[i])
        log.info("\nSommets:\t\t%s", ''.join('%d\t' % i for i in range(0, self.nb_sommets)))
        log.info("Marges Totales:\t%s", ''.join('%d\t' % i for i in self.marges_totales))
        # free slacks
        log.info("Calcul des marges Libres :")
        for sommet in range(0, self.nb_sommets - 1):
            # build the successor list
            liste_succ = []
            for succ in range(0, self.nb_sommets):
                if self.m_adjacence[sommet][succ]:
                    liste_succ.append(succ)
            # free slack per successor
            marges_libres = []
            for succ in liste_succ:
                marges_libres.append(
                    self.dates_au_plus_tot[succ]
                    - self.dates_au_plus_tot[sommet]
                    - self.m_valeurs[sommet][succ])
            self.marges_libres.append(min(marges_libres))
            log.info("Sommet %d --> marge libre %d", sommet, self.marges_libres[sommet])
        # the exit point has no successor: its free slack is 0 by definition
        self.marges_libres.append(0)
        log.info("Sommet %d --> marge libre %d",
                 self.nb_sommets-1, self.marges_libres[self.nb_sommets-1])
        log.info("\nSommets:\t\t%s", ''.join('%d\t' % i for i in range(0, self.nb_sommets)))
        log.info("Marges Libres:\t%s", ''.join('%d\t' % i for i in self.marges_libres))
| 35.835351 | 101 | 0.548243 | """
file : graph.py
author(s) : Thomas LINTANF, Laurent CALYDON
Version : 6.0
Definition de la classe Graph qui permet de stocker un graph orienté et de lui
appliquer différents algorithmes.
"""
import csv
import logging as log
class Graph:
    """
    Graph class: represents a directed graph.

    The graph is stored as a boolean adjacency matrix plus a weight matrix
    ('*' marks a missing arc).  Algorithms provided: circuit detection,
    rank computation, scheduling-graph validation, earliest/latest date
    calendars and slack (margin) computation.
    Version: 5.0
    """
    def __init__(self):
        """
        Graph constructor: builds an empty graph.

        The 'u' ("unknown") sentinel on contient_circuit and
        est_ordonnancement means the corresponding check has not run yet.
        Version: 4.0
        """
        self.nb_sommets = 0              # number of vertices
        self.nb_arcs = 0                 # number of arcs
        self.m_adjacence = []            # adjacency matrix (True = arc present)
        self.m_valeurs = []              # weight matrix ('*' = no arc)
        self.contient_circuit = 'u'      # set by detection_circuit()
        self.rang = []                   # vertex ranks, set by calc_rang()
        self.est_ordonnancement = 'u'    # set by est_graph_ordonnancement()
        self.dates_au_plus_tot = []      # earliest dates
        self.dates_au_plus_tard = []     # latest dates
        self.marges_totales = []         # total slacks
        self.marges_libres = []          # free slacks
    def read_file(self, address):
        """
        Load a graph from a txt file in csv format.

        Expected layout (';'-separated): line 1 = vertex count,
        line 2 = arc count, then one 'start;end;weight' row per arc.
        NOTE(review): the matrices are appended to, never reset — calling
        this twice on the same instance would corrupt the graph.
        version : 1.3
        """
        l_rows = []
        with open(address) as csvfile:
            reader = csv.reader(csvfile, delimiter=';', quoting=csv.QUOTE_NONNUMERIC)
            # temporary storage of the csv rows as lists of ints
            for row in reader:
                l_rows.append([int(i) for i in row])
        log.info('Chargement du fichier : %s', address)
        # extract the number of vertices and arcs
        self.nb_sommets = int(l_rows[0][0])
        log.info('%d sommets', self.nb_sommets)
        self.nb_arcs = int(l_rows[1][0])
        # bug fix: previously logged nb_sommets instead of nb_arcs here
        log.info('%d arcs', self.nb_arcs)
        # initialise the adjacency and weight matrices
        for _ in range(0, self.nb_sommets):
            ligne_adjacence = []
            ligne_valeur = []
            for _ in range(0, self.nb_sommets):
                ligne_adjacence.append(False)
                ligne_valeur.append('*')
            self.m_adjacence.append(ligne_adjacence)
            self.m_valeurs.append(ligne_valeur)
        log.info('Initialisation des matrices')
        # write the arcs into the matrices
        log.info('Chargement des arcs')
        for arc in l_rows[2:]:
            sommet_depart = int(arc[0])
            sommet_arrivee = int(arc[1])
            poid = arc[2]
            self.m_adjacence[sommet_depart][sommet_arrivee] = True
            self.m_valeurs[sommet_depart][sommet_arrivee] = poid
            log.info('%d --> %d = %d', sommet_depart, sommet_arrivee, poid)
    # to do: improve the matrix display
    def __str__(self):
        """
        String representation: sizes plus both matrices as tab-separated grids.

        NOTE(review): the stray backtick after "sommets" in the header line
        is kept as-is for output compatibility.
        Version: 1.1
        """
        repr_str = "Graphe :\n - {0} sommets`\n - {1} arcs\n".format(self.nb_sommets, self.nb_arcs)
        repr_str += "Matrice d'Adjacence :\n\t"
        for sommet in range(0, self.nb_sommets):
            repr_str += "{0}\t".format(sommet)
        repr_str += '\n'
        indice = 0
        for ligne in self.m_adjacence:
            repr_str += "{0}\t".format(indice)
            for case in ligne:
                repr_str += "{0}\t".format('V' if case else 'F')
            repr_str += '\n'
            indice += 1
        repr_str += 'Matrice des Valeurs :\n'
        repr_str += "\t"
        for sommet in range(0, self.nb_sommets):
            repr_str += "{0}\t".format(sommet)
        repr_str += '\n'
        indice = 0
        for ligne in self.m_valeurs:
            repr_str += "{0}\t".format(indice)
            for case in ligne:
                repr_str += "{0}\t".format(case)
            repr_str += '\n'
            indice += 1
        return repr_str
    def detection_circuit(self):
        """
        Check whether the graph contains a circuit (cycle).

        Iteratively strips vertices without predecessors ("entry points");
        any vertex left at the end belongs to a circuit.
        Returns True if the graph contains at least one circuit, False
        otherwise, and stores the result in contient_circuit.
        Version: 1.2
        """
        log.info("Detection de circuit\nMéthode de détection des points d'entrés")
        liste_sommets = list(range(0, self.nb_sommets))
        continuer = True
        while continuer:
            continuer = False
            sommet_a_supr = []
            # look for vertices without a predecessor
            for sommet_arrivee in liste_sommets:
                has_pred = False
                for sommet_depart in liste_sommets:
                    has_pred = has_pred or self.m_adjacence[sommet_depart][sommet_arrivee]
                if not has_pred:
                    sommet_a_supr.append(sommet_arrivee)
            # remove the predecessor-less vertices
            for sommet in sommet_a_supr:
                liste_sommets.remove(sommet)
            # leave the loop once nothing was removed or nothing remains
            continuer = len(sommet_a_supr) > 0 and len(liste_sommets) > 0
            log.info("Points d'entrés :")
            if continuer:
                log.info(sommet_a_supr)
                log.info("Sommets restant :\n%s", liste_sommets)
            else:
                log.info('Aucun')
        # any vertex left implies at least one circuit
        self.contient_circuit = len(liste_sommets) != 0
        if self.contient_circuit:
            log.info('Le graphe contient au moins un circuit')
        else:
            log.info('Le graphe ne contient aucun circuit')
        return self.contient_circuit
    def calc_rang(self):
        """
        Compute the rank of every vertex of the graph.

        Requires detection_circuit() to have been run and the graph to be
        circuit-free; ranks are written to self.rang.
        version: 1.2
        """
        if self.contient_circuit == 'u':
            log.warning("Calcul des rangs impossible : detectionCircuit() doit être lancée avant")
        elif self.contient_circuit:
            log.warning("Impossible de calculer les rangs : présence d'un circuit")
        else:
            # initialise the rank list
            self.rang = [0 for _ in range(0, self.nb_sommets)]
            liste_sommets = list(range(0, self.nb_sommets))
            continuer = True
            rang = 0
            while continuer:
                # look for vertices without a predecessor
                sommet_a_supr = []
                for sommet_arrivee in liste_sommets:
                    has_pred = False
                    for sommet_depart in liste_sommets:
                        has_pred = has_pred or self.m_adjacence[sommet_depart][sommet_arrivee]
                    if not has_pred:
                        sommet_a_supr.append(sommet_arrivee)
                # remove them and assign the current rank
                for sommet in sommet_a_supr:
                    liste_sommets.remove(sommet)
                    self.rang[sommet] = rang
                log.info("Rang courant = %d\nPoints d'entrés :\n%s", rang, sommet_a_supr)
                rang += 1
                continuer = len(liste_sommets) > 0
            log.info("Graphe vide\nRangs calculés")
            log.info("Sommets :\t%s", ''.join(["%d\t" % i for i in range(0, self.nb_sommets)]))
            log.info("Rang :\t\t%s", ''.join(["%d\t" % i for i in self.rang]))
    def est_graph_ordonnancement(self):
        """
        Check whether this is a scheduling graph.

        Conditions: a single entry point, a single exit point, no circuit,
        non-negative and identical weights on every outgoing arc of a given
        vertex, and zero-valued outgoing arcs on the entry point.
        Stores and returns the boolean result (est_ordonnancement).
        Version: 1.2
        """
        log.info("Verification qu'il s'agit d'un graphe d'ordonnancement :")
        # a single entry point (exactly one vertex of rank 0)
        res = self.rang.count(0) == 1
        log.info("A qu'un seul point d'entree : %s", res)
        # a single exit point (exactly one vertex of maximal rank)
        ans = self.rang.count(max(self.rang)) == 1
        log.info("A qu'un seul point de sortie : %s", ans)
        res = res and ans
        # no circuit
        ans = not self.contient_circuit
        log.info("Ne contient pas un circuit: %s", ans)
        res = res and ans
        # identical weights on all outgoing arcs of each vertex
        ans = True
        for ligne in self.m_valeurs:
            # find the first real weight of the row ('*' = no arc)
            i = 0
            while ligne[i] == '*' and i < self.nb_sommets-1:
                i += 1
            # no negative-valued arcs
            is_pos = True
            if ligne[i] != '*':
                is_pos = ligne[i] >= 0
            ans = ans and is_pos
            for case in ligne:
                ans = ans and (case == '*' or case == ligne[i])
        log.info("Arcs incidents extérieurs positifs et égaux pour chaque sommet: %s", ans)
        res = res and ans
        # outgoing arcs of the entry point must have value zero
        i = self.rang.index(0)
        ans = True
        for case in self.m_valeurs[i]:
            ans = ans and (case == '*' or case == 0)
        log.info("Arcs incidents extérieurs du point d'entrée à valeur 0 : %s", ans)
        res = res and ans
        if res:
            log.info("Le graphe est un graphe d'ordonnancement")
        else:
            log.info("Le graphe n'est pas un graphe d'ordonnancement")
        self.est_ordonnancement = res
        return res
    def calc_calend_plus_tot(self):
        """
        Compute the earliest-date calendar, provided the graph is a
        scheduling graph (est_graph_ordonnancement() must have run).
        version: 1.0
        """
        if self.est_ordonnancement == 'u':
            log.error("Le graphe n'as pas été testé pour l'ordonnancement")
        elif self.est_ordonnancement:
            log.info("Calcul du calendrier au plus tôt")
            # build the list of vertices ordered by increasing rank
            sommets = []
            for rang in range(0, max(self.rang)+1):
                for sommet in range(0, self.nb_sommets):
                    if self.rang[sommet] == rang:
                        sommets.append(sommet)
            # initialise the calendar
            for i in range(self.nb_sommets):
                self.dates_au_plus_tot.append('*')
            # start date: the entry point gets date 0
            i = self.rang.index(0)
            self.dates_au_plus_tot[i] = 0
            sommets.remove(i)
            log.info("Sommet 0 date au plus tot : 0")
            for sommet in sommets:
                # build the predecessor list
                liste_pred = []
                for pred in range(0, self.nb_sommets):
                    if self.m_adjacence[pred][sommet]:
                        liste_pred.append(pred)
                # candidate dates, one per predecessor
                dates = []
                for pred in liste_pred:
                    dates.append(self.dates_au_plus_tot[pred] + self.m_valeurs[pred][sommet])
                # earliest date = latest of the candidates
                self.dates_au_plus_tot[sommet] = max(dates)
                log.info("Sommet %d date au plus tot : %d", sommet, self.dates_au_plus_tot[sommet])
            log.info("\nSommets:\t\t\t%s", ''.join('%d\t' % i for i in range(0, self.nb_sommets)))
            log.info("Dates au plus tot:\t%s", ''.join('%s\t' % i for i in self.dates_au_plus_tot))
        else:
            log.error("Le graphe n'est pas un graphe d'ordonnancement")
    def calc_calend_plus_tard(self):
        """
        Compute the latest-date calendar.

        Requires calc_calend_plus_tot() to have been run first.
        version: 1.0
        """
        if len(self.dates_au_plus_tot) > 0:
            log.info("Calcul du calendrier au plus tard :")
            # build the list of vertices ordered by decreasing rank
            sommets = []
            for rang in range(0, max(self.rang)+1):
                for sommet in range(0, self.nb_sommets):
                    if self.rang[sommet] == rang:
                        sommets.insert(0, sommet)
            # initialise the calendar
            self.dates_au_plus_tard = ['*' for _ in range(0, self.nb_sommets)]
            # end date: the exit point copies its earliest date
            fin = self.rang.index(max(self.rang))
            self.dates_au_plus_tard[fin] = self.dates_au_plus_tot[fin]
            sommets.remove(fin)
            log.info("Sommet %d date au plus tard : %d", fin, self.dates_au_plus_tard[fin])
            for sommet in sommets:
                # build the successor list
                liste_succ = []
                for succ in range(0, self.nb_sommets):
                    if self.m_adjacence[sommet][succ]:
                        liste_succ.append(succ)
                # candidate dates, one per successor
                dates = []
                for succ in liste_succ:
                    dates.append(self.dates_au_plus_tard[succ] - self.m_valeurs[sommet][succ])
                # latest date = earliest of the candidates
                self.dates_au_plus_tard[sommet] = min(dates)
                log.info("Sommet %d date au plus tard : %d",
                         sommet, self.dates_au_plus_tard[sommet])
            log.info("\nSommets:\t\t\t%s", ''.join('%d\t' % i for i in range(0, self.nb_sommets)))
            log.info("Dates au plus tard:\t%s",
                     ''.join('%d\t' % i for i in self.dates_au_plus_tard))
        else:
            log.error("Le calendrier au plus tôt n'est pas calculé")
    def calc_marges(self):
        """
        Compute the total and free slacks (margins).

        Requires both calendars to have been computed.
        NOTE(review): the free-slack loop assumes only the last vertex
        (index nb_sommets-1) has no successor; min() would fail on an
        empty successor list for any other vertex — confirm the input.
        version: 1.1
        """
        # total slacks
        log.info("Calcule des marges Totales :")
        for i in range(0, self.nb_sommets):
            self.marges_totales.append(self.dates_au_plus_tard[i] - self.dates_au_plus_tot[i])
            log.info("Sommet %d --> marge totale : %d", i, self.marges_totales[i])
        log.info("\nSommets:\t\t%s", ''.join('%d\t' % i for i in range(0, self.nb_sommets)))
        log.info("Marges Totales:\t%s", ''.join('%d\t' % i for i in self.marges_totales))
        # free slacks
        log.info("Calcul des marges Libres :")
        for sommet in range(0, self.nb_sommets - 1):
            # build the successor list
            liste_succ = []
            for succ in range(0, self.nb_sommets):
                if self.m_adjacence[sommet][succ]:
                    liste_succ.append(succ)
            # free slack per successor
            marges_libres = []
            for succ in liste_succ:
                marges_libres.append(
                    self.dates_au_plus_tot[succ]
                    - self.dates_au_plus_tot[sommet]
                    - self.m_valeurs[sommet][succ])
            self.marges_libres.append(min(marges_libres))
            log.info("Sommet %d --> marge libre %d", sommet, self.marges_libres[sommet])
        # the exit point has no successor: its free slack is 0 by definition
        self.marges_libres.append(0)
        log.info("Sommet %d --> marge libre %d",
                 self.nb_sommets-1, self.marges_libres[self.nb_sommets-1])
        log.info("\nSommets:\t\t%s", ''.join('%d\t' % i for i in range(0, self.nb_sommets)))
        log.info("Marges Libres:\t%s", ''.join('%d\t' % i for i in self.marges_libres))
| 0 | 0 | 0 |
84234b9119a4722806e3ea59c33ea0d1f267519b | 565 | py | Python | _basics/nano blink.py | albertoSoto/raspberry-tic-projects | 692762dade2397ba4bedb77b4733a1d5d9829450 | [
"MIT"
] | null | null | null | _basics/nano blink.py | albertoSoto/raspberry-tic-projects | 692762dade2397ba4bedb77b4733a1d5d9829450 | [
"MIT"
] | null | null | null | _basics/nano blink.py | albertoSoto/raspberry-tic-projects | 692762dade2397ba4bedb77b4733a1d5d9829450 | [
"MIT"
] | null | null | null | #EJEMPLO DE BLINKING CON RASPBERRY PI
# Raspberry Pi LED blink example (written by Gl4r3).
import RPi.GPIO as GPIO  # GPIO access library, renamed to "GPIO"
import time  # needed for the blink delays

LED_PIN = 17
BLINK_DELAY = 0.05

# Use the BCM numbering scheme and configure the LED pin as an output.
GPIO.setmode(GPIO.BCM)
GPIO.setup(LED_PIN, GPIO.OUT)

# Toggle the LED 200 times: on for 50 ms, then off for 50 ms.
for _ in range(200):
    GPIO.output(LED_PIN, GPIO.HIGH)
    time.sleep(BLINK_DELAY)
    GPIO.output(LED_PIN, GPIO.LOW)
    time.sleep(BLINK_DELAY)

# Return the pins to their initial state.
GPIO.cleanup()
#Escrito por Gl4r3
import RPi.GPIO as GPIO #importamos la libreria y cambiamos su nombre por "GPIO"
import time #necesario para los delays
#establecemos el sistema de numeracion que queramos, en mi caso BCM
GPIO.setmode(GPIO.BCM)
#configuramos el pin GPIO17 como una salida
GPIO.setup(17, GPIO.OUT)
#encendemos y apagamos el led 5 veces
for i in range(0,200):
GPIO.output(17, GPIO.HIGH)
time.sleep(0.05)
GPIO.output(17, GPIO.LOW)
time.sleep(0.05)
GPIO.cleanup() #devuelve los pines a su estado inicial
| 0 | 0 | 0 |
0a5a9577bfb54220f64cd0996cbec96b469bb62e | 11,726 | py | Python | hubspot/cms/blogs/blog_posts/models/styles.py | fakepop/hubspot-api-python | f04103a09f93f5c26c99991b25fa76801074f3d3 | [
"Apache-2.0"
] | null | null | null | hubspot/cms/blogs/blog_posts/models/styles.py | fakepop/hubspot-api-python | f04103a09f93f5c26c99991b25fa76801074f3d3 | [
"Apache-2.0"
] | null | null | null | hubspot/cms/blogs/blog_posts/models/styles.py | fakepop/hubspot-api-python | f04103a09f93f5c26c99991b25fa76801074f3d3 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Blog Post endpoints
\"Use these endpoints for interacting with Blog Posts, Blog Authors, and Blog Tags\" # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.cms.blogs.blog_posts.configuration import Configuration
class Styles(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"vertical_alignment": "str",
"background_color": "RGBAColor",
"background_image": "BackgroundImage",
"background_gradient": "Gradient",
"max_width_section_centering": "int",
"force_full_width_section": "bool",
"flexbox_positioning": "str",
}
attribute_map = {
"vertical_alignment": "verticalAlignment",
"background_color": "backgroundColor",
"background_image": "backgroundImage",
"background_gradient": "backgroundGradient",
"max_width_section_centering": "maxWidthSectionCentering",
"force_full_width_section": "forceFullWidthSection",
"flexbox_positioning": "flexboxPositioning",
}
def __init__(
self,
vertical_alignment=None,
background_color=None,
background_image=None,
background_gradient=None,
max_width_section_centering=None,
force_full_width_section=None,
flexbox_positioning=None,
local_vars_configuration=None,
): # noqa: E501
"""Styles - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._vertical_alignment = None
self._background_color = None
self._background_image = None
self._background_gradient = None
self._max_width_section_centering = None
self._force_full_width_section = None
self._flexbox_positioning = None
self.discriminator = None
self.vertical_alignment = vertical_alignment
self.background_color = background_color
self.background_image = background_image
self.background_gradient = background_gradient
self.max_width_section_centering = max_width_section_centering
self.force_full_width_section = force_full_width_section
self.flexbox_positioning = flexbox_positioning
@property
def vertical_alignment(self):
"""Gets the vertical_alignment of this Styles. # noqa: E501
:return: The vertical_alignment of this Styles. # noqa: E501
:rtype: str
"""
return self._vertical_alignment
@vertical_alignment.setter
def vertical_alignment(self, vertical_alignment):
"""Sets the vertical_alignment of this Styles.
:param vertical_alignment: The vertical_alignment of this Styles. # noqa: E501
:type: str
"""
if (
self.local_vars_configuration.client_side_validation
and vertical_alignment is None
): # noqa: E501
raise ValueError(
"Invalid value for `vertical_alignment`, must not be `None`"
) # noqa: E501
allowed_values = ["TOP", "MIDDLE", "BOTTOM"] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and vertical_alignment not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `vertical_alignment` ({0}), must be one of {1}".format( # noqa: E501
vertical_alignment, allowed_values
)
)
self._vertical_alignment = vertical_alignment
@property
def background_color(self):
"""Gets the background_color of this Styles. # noqa: E501
:return: The background_color of this Styles. # noqa: E501
:rtype: RGBAColor
"""
return self._background_color
@background_color.setter
def background_color(self, background_color):
"""Sets the background_color of this Styles.
:param background_color: The background_color of this Styles. # noqa: E501
:type: RGBAColor
"""
if (
self.local_vars_configuration.client_side_validation
and background_color is None
): # noqa: E501
raise ValueError(
"Invalid value for `background_color`, must not be `None`"
) # noqa: E501
self._background_color = background_color
@property
def background_image(self):
"""Gets the background_image of this Styles. # noqa: E501
:return: The background_image of this Styles. # noqa: E501
:rtype: BackgroundImage
"""
return self._background_image
@background_image.setter
def background_image(self, background_image):
"""Sets the background_image of this Styles.
:param background_image: The background_image of this Styles. # noqa: E501
:type: BackgroundImage
"""
if (
self.local_vars_configuration.client_side_validation
and background_image is None
): # noqa: E501
raise ValueError(
"Invalid value for `background_image`, must not be `None`"
) # noqa: E501
self._background_image = background_image
@property
def background_gradient(self):
"""Gets the background_gradient of this Styles. # noqa: E501
:return: The background_gradient of this Styles. # noqa: E501
:rtype: Gradient
"""
return self._background_gradient
@background_gradient.setter
def background_gradient(self, background_gradient):
"""Sets the background_gradient of this Styles.
:param background_gradient: The background_gradient of this Styles. # noqa: E501
:type: Gradient
"""
if (
self.local_vars_configuration.client_side_validation
and background_gradient is None
): # noqa: E501
raise ValueError(
"Invalid value for `background_gradient`, must not be `None`"
) # noqa: E501
self._background_gradient = background_gradient
@property
def max_width_section_centering(self):
"""Gets the max_width_section_centering of this Styles. # noqa: E501
:return: The max_width_section_centering of this Styles. # noqa: E501
:rtype: int
"""
return self._max_width_section_centering
@max_width_section_centering.setter
def max_width_section_centering(self, max_width_section_centering):
"""Sets the max_width_section_centering of this Styles.
:param max_width_section_centering: The max_width_section_centering of this Styles. # noqa: E501
:type: int
"""
if (
self.local_vars_configuration.client_side_validation
and max_width_section_centering is None
): # noqa: E501
raise ValueError(
"Invalid value for `max_width_section_centering`, must not be `None`"
) # noqa: E501
self._max_width_section_centering = max_width_section_centering
@property
def force_full_width_section(self):
"""Gets the force_full_width_section of this Styles. # noqa: E501
:return: The force_full_width_section of this Styles. # noqa: E501
:rtype: bool
"""
return self._force_full_width_section
@force_full_width_section.setter
def force_full_width_section(self, force_full_width_section):
"""Sets the force_full_width_section of this Styles.
:param force_full_width_section: The force_full_width_section of this Styles. # noqa: E501
:type: bool
"""
if (
self.local_vars_configuration.client_side_validation
and force_full_width_section is None
): # noqa: E501
raise ValueError(
"Invalid value for `force_full_width_section`, must not be `None`"
) # noqa: E501
self._force_full_width_section = force_full_width_section
@property
def flexbox_positioning(self):
"""Gets the flexbox_positioning of this Styles. # noqa: E501
:return: The flexbox_positioning of this Styles. # noqa: E501
:rtype: str
"""
return self._flexbox_positioning
@flexbox_positioning.setter
def flexbox_positioning(self, flexbox_positioning):
"""Sets the flexbox_positioning of this Styles.
:param flexbox_positioning: The flexbox_positioning of this Styles. # noqa: E501
:type: str
"""
if (
self.local_vars_configuration.client_side_validation
and flexbox_positioning is None
): # noqa: E501
raise ValueError(
"Invalid value for `flexbox_positioning`, must not be `None`"
) # noqa: E501
allowed_values = [
"TOP_LEFT",
"TOP_CENTER",
"TOP_RIGHT",
"MIDDLE_LEFT",
"MIDDLE_CENTER",
"MIDDLE_RIGHT",
"BOTTOM_LEFT",
"BOTTOM_CENTER",
"BOTTOM_RIGHT",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and flexbox_positioning not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `flexbox_positioning` ({0}), must be one of {1}".format( # noqa: E501
flexbox_positioning, allowed_values
)
)
self._flexbox_positioning = flexbox_positioning
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Styles):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, Styles):
return True
return self.to_dict() != other.to_dict()
| 32.481994 | 105 | 0.613082 | # coding: utf-8
"""
Blog Post endpoints
\"Use these endpoints for interacting with Blog Posts, Blog Authors, and Blog Tags\" # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.cms.blogs.blog_posts.configuration import Configuration
class Styles(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"vertical_alignment": "str",
"background_color": "RGBAColor",
"background_image": "BackgroundImage",
"background_gradient": "Gradient",
"max_width_section_centering": "int",
"force_full_width_section": "bool",
"flexbox_positioning": "str",
}
attribute_map = {
"vertical_alignment": "verticalAlignment",
"background_color": "backgroundColor",
"background_image": "backgroundImage",
"background_gradient": "backgroundGradient",
"max_width_section_centering": "maxWidthSectionCentering",
"force_full_width_section": "forceFullWidthSection",
"flexbox_positioning": "flexboxPositioning",
}
def __init__(
self,
vertical_alignment=None,
background_color=None,
background_image=None,
background_gradient=None,
max_width_section_centering=None,
force_full_width_section=None,
flexbox_positioning=None,
local_vars_configuration=None,
): # noqa: E501
"""Styles - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._vertical_alignment = None
self._background_color = None
self._background_image = None
self._background_gradient = None
self._max_width_section_centering = None
self._force_full_width_section = None
self._flexbox_positioning = None
self.discriminator = None
self.vertical_alignment = vertical_alignment
self.background_color = background_color
self.background_image = background_image
self.background_gradient = background_gradient
self.max_width_section_centering = max_width_section_centering
self.force_full_width_section = force_full_width_section
self.flexbox_positioning = flexbox_positioning
@property
def vertical_alignment(self):
"""Gets the vertical_alignment of this Styles. # noqa: E501
:return: The vertical_alignment of this Styles. # noqa: E501
:rtype: str
"""
return self._vertical_alignment
@vertical_alignment.setter
def vertical_alignment(self, vertical_alignment):
"""Sets the vertical_alignment of this Styles.
:param vertical_alignment: The vertical_alignment of this Styles. # noqa: E501
:type: str
"""
if (
self.local_vars_configuration.client_side_validation
and vertical_alignment is None
): # noqa: E501
raise ValueError(
"Invalid value for `vertical_alignment`, must not be `None`"
) # noqa: E501
allowed_values = ["TOP", "MIDDLE", "BOTTOM"] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and vertical_alignment not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `vertical_alignment` ({0}), must be one of {1}".format( # noqa: E501
vertical_alignment, allowed_values
)
)
self._vertical_alignment = vertical_alignment
@property
def background_color(self):
"""Gets the background_color of this Styles. # noqa: E501
:return: The background_color of this Styles. # noqa: E501
:rtype: RGBAColor
"""
return self._background_color
@background_color.setter
def background_color(self, background_color):
"""Sets the background_color of this Styles.
:param background_color: The background_color of this Styles. # noqa: E501
:type: RGBAColor
"""
if (
self.local_vars_configuration.client_side_validation
and background_color is None
): # noqa: E501
raise ValueError(
"Invalid value for `background_color`, must not be `None`"
) # noqa: E501
self._background_color = background_color
@property
def background_image(self):
"""Gets the background_image of this Styles. # noqa: E501
:return: The background_image of this Styles. # noqa: E501
:rtype: BackgroundImage
"""
return self._background_image
@background_image.setter
def background_image(self, background_image):
"""Sets the background_image of this Styles.
:param background_image: The background_image of this Styles. # noqa: E501
:type: BackgroundImage
"""
if (
self.local_vars_configuration.client_side_validation
and background_image is None
): # noqa: E501
raise ValueError(
"Invalid value for `background_image`, must not be `None`"
) # noqa: E501
self._background_image = background_image
@property
def background_gradient(self):
"""Gets the background_gradient of this Styles. # noqa: E501
:return: The background_gradient of this Styles. # noqa: E501
:rtype: Gradient
"""
return self._background_gradient
@background_gradient.setter
def background_gradient(self, background_gradient):
"""Sets the background_gradient of this Styles.
:param background_gradient: The background_gradient of this Styles. # noqa: E501
:type: Gradient
"""
if (
self.local_vars_configuration.client_side_validation
and background_gradient is None
): # noqa: E501
raise ValueError(
"Invalid value for `background_gradient`, must not be `None`"
) # noqa: E501
self._background_gradient = background_gradient
@property
def max_width_section_centering(self):
"""Gets the max_width_section_centering of this Styles. # noqa: E501
:return: The max_width_section_centering of this Styles. # noqa: E501
:rtype: int
"""
return self._max_width_section_centering
@max_width_section_centering.setter
def max_width_section_centering(self, max_width_section_centering):
"""Sets the max_width_section_centering of this Styles.
:param max_width_section_centering: The max_width_section_centering of this Styles. # noqa: E501
:type: int
"""
if (
self.local_vars_configuration.client_side_validation
and max_width_section_centering is None
): # noqa: E501
raise ValueError(
"Invalid value for `max_width_section_centering`, must not be `None`"
) # noqa: E501
self._max_width_section_centering = max_width_section_centering
@property
def force_full_width_section(self):
"""Gets the force_full_width_section of this Styles. # noqa: E501
:return: The force_full_width_section of this Styles. # noqa: E501
:rtype: bool
"""
return self._force_full_width_section
@force_full_width_section.setter
def force_full_width_section(self, force_full_width_section):
"""Sets the force_full_width_section of this Styles.
:param force_full_width_section: The force_full_width_section of this Styles. # noqa: E501
:type: bool
"""
if (
self.local_vars_configuration.client_side_validation
and force_full_width_section is None
): # noqa: E501
raise ValueError(
"Invalid value for `force_full_width_section`, must not be `None`"
) # noqa: E501
self._force_full_width_section = force_full_width_section
@property
def flexbox_positioning(self):
"""Gets the flexbox_positioning of this Styles. # noqa: E501
:return: The flexbox_positioning of this Styles. # noqa: E501
:rtype: str
"""
return self._flexbox_positioning
@flexbox_positioning.setter
def flexbox_positioning(self, flexbox_positioning):
"""Sets the flexbox_positioning of this Styles.
:param flexbox_positioning: The flexbox_positioning of this Styles. # noqa: E501
:type: str
"""
if (
self.local_vars_configuration.client_side_validation
and flexbox_positioning is None
): # noqa: E501
raise ValueError(
"Invalid value for `flexbox_positioning`, must not be `None`"
) # noqa: E501
allowed_values = [
"TOP_LEFT",
"TOP_CENTER",
"TOP_RIGHT",
"MIDDLE_LEFT",
"MIDDLE_CENTER",
"MIDDLE_RIGHT",
"BOTTOM_LEFT",
"BOTTOM_CENTER",
"BOTTOM_RIGHT",
] # noqa: E501
if (
self.local_vars_configuration.client_side_validation
and flexbox_positioning not in allowed_values
): # noqa: E501
raise ValueError(
"Invalid value for `flexbox_positioning` ({0}), must be one of {1}".format( # noqa: E501
flexbox_positioning, allowed_values
)
)
self._flexbox_positioning = flexbox_positioning
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Styles):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, Styles):
return True
return self.to_dict() != other.to_dict()
| 0 | 0 | 0 |
9f3deda47c7c529d5518249b5f4b244e3e3d95c1 | 572 | py | Python | image_finder.py | AlexKent3141/PicASCII | 464415d9eb43553720336ae1e5f66a46d9f3eef5 | [
"MIT"
] | null | null | null | image_finder.py | AlexKent3141/PicASCII | 464415d9eb43553720336ae1e5f66a46d9f3eef5 | [
"MIT"
] | null | null | null | image_finder.py | AlexKent3141/PicASCII | 464415d9eb43553720336ae1e5f66a46d9f3eef5 | [
"MIT"
] | null | null | null | import urllib2
# Base class for image finders.
# This method must be implemented in the derived class.
# Download the file at the specified URL and save it.
if __name__ == "__main__":
f = find_image(["mountain", "alps"])
download_image(f, "mount.jpg")
| 31.777778 | 71 | 0.674825 | import urllib2
# Base class for image finders.
class ImageFinder(object):
# This method must be implemented in the derived class.
def find(self, search_terms):
raise NotImplementedError("ImageFinder::find not implemented!")
# Download the file at the specified URL and save it.
def download(self, url, file_name):
download = urllib2.urlopen(url)
with open(file_name, 'wb') as target:
target.write(download.read())
if __name__ == "__main__":
f = find_image(["mountain", "alps"])
download_image(f, "mount.jpg")
| 222 | 5 | 74 |
8954f8f15acd02243dd17db5d5e2bbba5c12f1d4 | 118 | py | Python | calculator/util/__init__.py | kamilcieslik/test_house_price_lib | 98a9c9ada05b7cac1e9b835cc15031619cfa8e13 | [
"MIT"
] | null | null | null | calculator/util/__init__.py | kamilcieslik/test_house_price_lib | 98a9c9ada05b7cac1e9b835cc15031619cfa8e13 | [
"MIT"
] | null | null | null | calculator/util/__init__.py | kamilcieslik/test_house_price_lib | 98a9c9ada05b7cac1e9b835cc15031619cfa8e13 | [
"MIT"
] | null | null | null | from .address import Address
from .calculator_result import CalculatorResult
from .reference_city import ReferenceCity | 39.333333 | 47 | 0.881356 | from .address import Address
from .calculator_result import CalculatorResult
from .reference_city import ReferenceCity | 0 | 0 | 0 |
6adc04cb703a03f9e3146952968df89ed80db336 | 1,665 | py | Python | nanodet/util/misc.py | Sean-hku/nanodet | f62a3a1e311fb446afabb3512a5ebedc81105778 | [
"Apache-2.0"
] | 8 | 2021-05-01T14:11:19.000Z | 2022-01-11T01:08:35.000Z | nanodet/util/misc.py | Sean-hku/nanodet | f62a3a1e311fb446afabb3512a5ebedc81105778 | [
"Apache-2.0"
] | 1 | 2022-02-17T14:20:11.000Z | 2022-02-17T14:20:11.000Z | nanodet/util/misc.py | Sean-hku/nanodet | f62a3a1e311fb446afabb3512a5ebedc81105778 | [
"Apache-2.0"
] | null | null | null | # Modification 2020 RangiLyu
# Copyright 2018-2019 Open-MMLab.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
from functools import partial
def images_to_levels(target, num_level_anchors):
"""Convert targets by image to targets by feature level.
[target_img0, target_img1] -> [target_level0, target_level1, ...]
"""
target = torch.stack(target, 0)
level_targets = []
start = 0
for n in num_level_anchors:
end = start + n
level_targets.append(target[:, start:end].squeeze(0))
start = end
return level_targets
def unmap(data, count, inds, fill=0):
""" Unmap a subset of item (data) back to the original set of items (of
size count) """
if data.dim() == 1:
ret = data.new_full((count, ), fill)
ret[inds.type(torch.bool)] = data
else:
new_size = (count, ) + data.size()[1:]
ret = data.new_full(new_size, fill)
ret[inds.type(torch.bool), :] = data
return ret | 32.647059 | 75 | 0.678679 | # Modification 2020 RangiLyu
# Copyright 2018-2019 Open-MMLab.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
from functools import partial
def multi_apply(func, *args, **kwargs):
pfunc = partial(func, **kwargs) if kwargs else func
map_results = map(pfunc, *args)
return tuple(map(list, zip(*map_results)))
def images_to_levels(target, num_level_anchors):
"""Convert targets by image to targets by feature level.
[target_img0, target_img1] -> [target_level0, target_level1, ...]
"""
target = torch.stack(target, 0)
level_targets = []
start = 0
for n in num_level_anchors:
end = start + n
level_targets.append(target[:, start:end].squeeze(0))
start = end
return level_targets
def unmap(data, count, inds, fill=0):
""" Unmap a subset of item (data) back to the original set of items (of
size count) """
if data.dim() == 1:
ret = data.new_full((count, ), fill)
ret[inds.type(torch.bool)] = data
else:
new_size = (count, ) + data.size()[1:]
ret = data.new_full(new_size, fill)
ret[inds.type(torch.bool), :] = data
return ret | 157 | 0 | 23 |
e2cd8925067691083a53320bbc702cff13ae910f | 11,031 | py | Python | tests/test_mcts_player.py | donkirkby/zero-play | 15e3afa950037cfd1f373ee4943cd8b42d4c82c9 | [
"MIT"
] | 7 | 2020-04-30T15:44:56.000Z | 2021-04-07T18:37:21.000Z | tests/test_mcts_player.py | donkirkby/zero-play | 15e3afa950037cfd1f373ee4943cd8b42d4c82c9 | [
"MIT"
] | 84 | 2019-05-07T04:37:10.000Z | 2022-03-04T18:17:57.000Z | tests/test_mcts_player.py | donkirkby/zero-play | 15e3afa950037cfd1f373ee4943cd8b42d4c82c9 | [
"MIT"
] | 1 | 2021-04-07T18:37:25.000Z | 2021-04-07T18:37:25.000Z | import typing
from collections import Counter
import numpy as np
from pytest import approx
from zero_play.connect4.game import Connect4State
from zero_play.game_state import GameState
from zero_play.heuristic import Heuristic
from zero_play.mcts_player import SearchNode, MctsPlayer, SearchManager
from zero_play.playout import Playout
from zero_play.tictactoe.state import TicTacToeState
class EarlyChoiceHeuristic(FirstChoiceHeuristic):
""" Thinks each move is 90% as good as the previous option. """
def test_choose_moves_at_random():
""" Early moves are chosen from a weighted random population. """
np.random.seed(0)
start_state = TicTacToeState()
state1 = TicTacToeState("""\
...
...
X..
""")
player = MctsPlayer(start_state,
iteration_count=80,
heuristic=EarlyChoiceHeuristic())
moves = set()
for _ in range(10):
move = player.choose_move(state1)
moves.add(move)
player.search_manager.reset()
assert 1 < len(moves)
def test_search_manager_with_opponent():
""" Like when opponent is not sharing the SearchManager. """
start_state = TicTacToeState()
manager = SearchManager(start_state, Playout())
manager.search(start_state, iterations=10)
node = manager.current_node.children[0] # Didn't call get_best_move().
move = 0
state2 = start_state.make_move(move)
first_value_count = node.value_count
manager.search(state2, iterations=10)
second_value_count = node.value_count
assert first_value_count > 0
assert first_value_count + 10 == second_value_count
def test_win_scores_one():
""" Expose bug where search continues after a game-ending position. """
state1 = TicTacToeState("""\
..X
XX.
OO.
""")
player = MctsPlayer(TicTacToeState(), state1.X_PLAYER, iteration_count=100)
move = player.choose_move(state1)
search_node1 = player.search_manager.current_node.parent
for child_node in search_node1.children:
if child_node.move == 8:
assert child_node.average_value == 1.0
assert move == 8
| 24.900677 | 98 | 0.658327 | import typing
from collections import Counter
import numpy as np
from pytest import approx
from zero_play.connect4.game import Connect4State
from zero_play.game_state import GameState
from zero_play.heuristic import Heuristic
from zero_play.mcts_player import SearchNode, MctsPlayer, SearchManager
from zero_play.playout import Playout
from zero_play.tictactoe.state import TicTacToeState
class FirstChoiceHeuristic(Heuristic):
def get_summary(self) -> typing.Sequence[str]:
return 'first choice',
def analyse(self, board: GameState) -> typing.Tuple[float, np.ndarray]:
policy = self.get_policy(board)
player = board.get_active_player()
if board.is_win(player):
value = 1.0
elif board.is_win(-player):
value = -1.0
else:
value = 0.0
return value, policy
def get_policy(self, board: GameState):
valid_moves = board.get_valid_moves()
if valid_moves.any():
first_valid = np.nonzero(valid_moves)[0][0]
else:
first_valid = 0
policy = np.zeros_like(valid_moves)
policy[first_valid] = 1.0
return policy
class EarlyChoiceHeuristic(FirstChoiceHeuristic):
""" Thinks each move is 90% as good as the previous option. """
def get_summary(self) -> typing.Sequence[str]:
return 'early choice',
def get_policy(self, board: GameState):
valid_moves = board.get_valid_moves()
if not valid_moves.any():
valid_moves = (valid_moves == 0)
raw_policy = np.multiply(valid_moves, 0.9 ** np.arange(len(valid_moves)))
policy = raw_policy / raw_policy.sum()
return policy
def test_repr():
board_text = """\
.O.
.X.
...
"""
board = TicTacToeState(board_text)
expected_repr = "SearchNode(TicTacToeState(spaces=array([[0, -1, 0], [0, 1, 0], [0, 0, 0]])))"
node = SearchNode(board)
node_repr = repr(node)
assert node_repr == expected_repr
def test_eq():
board1 = TicTacToeState()
board2 = TicTacToeState()
board3 = TicTacToeState("""\
...
.X.
...
""")
node1 = SearchNode(board1)
node2 = SearchNode(board2)
node3 = SearchNode(board3)
assert node1 == node2
assert node1 != node3
assert node1 != 42
def test_default_board():
expected_board = TicTacToeState()
expected_node = SearchNode(expected_board)
node = SearchNode(expected_board)
assert expected_node == node
def test_select_leaf_self():
game = TicTacToeState()
node = SearchNode(game)
expected_leaf = node
leaf = node.select_leaf()
assert expected_leaf == leaf
def test_select_first_child():
start_state = TicTacToeState()
expected_leaf_board = start_state.make_move(0)
expected_leaf = SearchNode(expected_leaf_board)
node = SearchNode(start_state)
node.record_value(1)
leaf = node.select_leaf()
assert leaf == expected_leaf
assert node.average_value == -1.0
def test_select_second_child():
start_state = TicTacToeState()
expected_leaf_board = start_state.make_move(1)
expected_leaf = SearchNode(expected_leaf_board)
node = SearchNode(start_state)
node.select_leaf().record_value(0)
node.select_leaf().record_value(0)
leaf = node.select_leaf()
assert leaf == expected_leaf
assert node.average_value == 0
def test_select_grandchild():
start_state = TicTacToeState()
expected_leaf_board = TicTacToeState("""\
XO.
...
...
""")
expected_leaf = SearchNode(expected_leaf_board)
node = SearchNode(start_state)
for _ in range(10):
node.select_leaf().record_value(0)
leaf = node.select_leaf()
assert leaf == expected_leaf
def test_select_good_grandchild():
start_state = TicTacToeState()
node = SearchNode(start_state)
node.select_leaf().record_value(0) # Root node returns itself.
node.select_leaf().record_value(0) # Move 0 AT 1A, value is a tie.
node.select_leaf().record_value(-1) # Move 1 AT 1B, value is a win.
# Expect it to exploit the win at 1B, and try the first grandchild at 1A.
expected_leaf_board = TicTacToeState("""\
ABC
1 OX.
2 ...
3 ...
""")
expected_leaf = SearchNode(expected_leaf_board)
leaf = node.select_leaf()
assert leaf == expected_leaf
def test_select_no_children():
start_board = TicTacToeState("""\
XOX
OOX
.XO
""")
expected_leaf_board = TicTacToeState("""\
XOX
OOX
XXO
""")
expected_leaf = SearchNode(expected_leaf_board)
start_node = SearchNode(start_board)
leaf1 = start_node.select_leaf()
leaf1.record_value(1)
leaf2 = start_node.select_leaf()
leaf2.record_value(1)
leaf3 = start_node.select_leaf()
assert leaf1 == start_node
assert leaf2 == expected_leaf
assert leaf3 == expected_leaf
def test_choose_move():
np.random.seed(0)
start_state = Connect4State()
state1 = Connect4State("""\
.......
.......
.......
...XX..
OXOXO..
XOXOXOO
""")
expected_display = """\
.......
.......
.......
..XXX..
OXOXO..
XOXOXOO
"""
player = MctsPlayer(start_state, iteration_count=200)
move = player.choose_move(state1)
state2 = state1.make_move(move)
display = state2.display()
assert display == expected_display
def test_choose_move_in_pool():
start_state = Connect4State()
state1 = Connect4State("""\
.......
.......
.......
...XX..
OXOXO..
XOXOXOO
""")
player = MctsPlayer(start_state, iteration_count=200, process_count=2)
valid_moves = start_state.get_valid_moves()
move = player.choose_move(state1)
# Can't rely on which move, because other process has separate random seed.
assert valid_moves[move]
def test_choose_moves_at_random():
""" Early moves are chosen from a weighted random population. """
np.random.seed(0)
start_state = TicTacToeState()
state1 = TicTacToeState("""\
...
...
X..
""")
player = MctsPlayer(start_state,
iteration_count=80,
heuristic=EarlyChoiceHeuristic())
moves = set()
for _ in range(10):
move = player.choose_move(state1)
moves.add(move)
player.search_manager.reset()
assert 1 < len(moves)
def test_choose_move_no_iterations():
np.random.seed(0)
start_state = Connect4State()
state1 = Connect4State("""\
.......
.......
.......
...XX..
OXOXO..
XOXOXOO
""")
test_count = 400
expected_count = test_count/7
expected_low = expected_count * 0.9
expected_high = expected_count * 1.1
move_counts = Counter()
for _ in range(test_count):
player = MctsPlayer(start_state, iteration_count=0)
move = player.choose_move(state1)
move_counts[move] += 1
assert expected_low < move_counts[2] < expected_high
def test_analyse_finished_game():
board = TicTacToeState("""\
OXO
XXO
XOX
""")
heuristic = Playout()
expected_value = 0 # A tie
expected_policy = [1/9] * 9
value, policy = heuristic.analyse(board)
assert expected_value == value
assert expected_policy == policy.tolist()
def test_search_manager_reuses_node():
start_state = TicTacToeState()
manager = SearchManager(start_state, Playout())
manager.search(start_state, iterations=10)
move = manager.get_best_move()
state2 = start_state.make_move(move)
node = manager.current_node
first_value_count = node.value_count
manager.search(state2, iterations=10)
second_value_count = node.value_count
assert first_value_count > 0
assert first_value_count + 10 == second_value_count
def test_search_manager_with_opponent():
""" Like when opponent is not sharing the SearchManager. """
start_state = TicTacToeState()
manager = SearchManager(start_state, Playout())
manager.search(start_state, iterations=10)
node = manager.current_node.children[0] # Didn't call get_best_move().
move = 0
state2 = start_state.make_move(move)
first_value_count = node.value_count
manager.search(state2, iterations=10)
second_value_count = node.value_count
assert first_value_count > 0
assert first_value_count + 10 == second_value_count
def test_annotate():
start_state = TicTacToeState()
player = MctsPlayer(start_state,
iteration_count=10,
heuristic=FirstChoiceHeuristic())
player.choose_move(start_state)
move_probabilities = player.get_move_probabilities(start_state)
best_move, best_probability, best_count, best_value = move_probabilities[0]
assert best_move == '1A'
assert best_probability == approx(0.999013)
assert best_count == 9
assert best_value == approx(2/9)
def test_create_training_data():
start_state = TicTacToeState()
manager = SearchManager(start_state, FirstChoiceHeuristic())
expected_boards, expected_outputs = zip(*[
[start_state.get_spaces(),
np.array([1., 0., 0., 0., 0., 0., 0., 0., 0., -1.])],
[TicTacToeState("""\
X..
...
...
""").get_spaces(), np.array([0., 1., 0., 0., 0., 0., 0., 0., 0., 1.])],
[TicTacToeState("""\
XO.
...
...
""").get_spaces(), np.array([0., 0., 1., 0., 0., 0., 0., 0., 0., -1.])],
[TicTacToeState("""\
XOX
...
...
""").get_spaces(), np.array([0., 0., 0., 1., 0., 0., 0., 0., 0., 1.])],
[TicTacToeState("""\
XOX
O..
...
""").get_spaces(), np.array([0., 0., 0., 0., 1., 0., 0., 0., 0., -1.])],
[TicTacToeState("""\
XOX
OX.
...
""").get_spaces(), np.array([0., 0., 0., 0., 0., 1., 0., 0., 0., 1.])],
[TicTacToeState("""\
XOX
OXO
...
""").get_spaces(), np.array([0., 0., 0., 0., 0., 0., 1., 0., 0., -1.])]])
expected_boards = np.stack(expected_boards)
expected_outputs = np.stack(expected_outputs)
boards, outputs = manager.create_training_data(iterations=1, data_size=7)
assert repr(boards) == repr(expected_boards)
assert repr(outputs) == repr(expected_outputs)
def test_win_scores_one():
""" Expose bug where search continues after a game-ending position. """
state1 = TicTacToeState("""\
..X
XX.
OO.
""")
player = MctsPlayer(TicTacToeState(), state1.X_PLAYER, iteration_count=100)
move = player.choose_move(state1)
search_node1 = player.search_manager.current_node.parent
for child_node in search_node1.children:
if child_node.move == 8:
assert child_node.average_value == 1.0
assert move == 8
def test_choose_move_sets_current_node():
np.random.seed(0)
start_state = Connect4State()
state1 = Connect4State("""\
.......
.......
.......
.......
OXOXOXO
XOXOXOX
""")
player = MctsPlayer(start_state, iteration_count=20)
move1 = player.choose_move(state1)
current_node1 = player.search_manager.current_node
state2 = state1.make_move(move1)
move2 = player.choose_move(state2)
current_node2 = player.search_manager.current_node
state3 = state2.make_move(move2)
assert current_node1.game_state == state2
assert current_node2.game_state == state3
| 8,336 | 17 | 547 |
c20fdadff12c2cc3041ee50f60f7664f620c36ab | 1,436 | py | Python | test/test_base.py | Xabab/moddb | 22e912e4e93c663727e8f3459eaccadbcc3df1f8 | [
"MIT"
] | 7 | 2019-01-05T19:36:37.000Z | 2021-09-20T20:28:01.000Z | test/test_base.py | Xabab/moddb | 22e912e4e93c663727e8f3459eaccadbcc3df1f8 | [
"MIT"
] | 4 | 2020-01-18T14:28:51.000Z | 2021-09-20T00:45:46.000Z | test/test_base.py | Xabab/moddb | 22e912e4e93c663727e8f3459eaccadbcc3df1f8 | [
"MIT"
] | 1 | 2021-09-20T22:36:32.000Z | 2021-09-20T22:36:32.000Z | import unittest
import moddb
from test.test_config import username, password
| 25.642857 | 94 | 0.643454 | import unittest
import moddb
from test.test_config import username, password
class TestFrontPage(unittest.TestCase):
def setUp(self):
self.fp = moddb.front_page()
def get_articles(self):
for article in self.fp.articles:
article.parse()
def get_games(self):
for game in self.fp.games:
game.parse()
def get_files(self):
for file in self.fp.files:
file.parse()
class TestSearch(unittest.TestCase):
def setUp(self):
cat = getattr(self, "category", moddb.SearchCategory.mods)
self.search= moddb.search(cat)
def test_resort(self):
results = self.search.results
search2 = self.search.resort(("visitstotal", "asc"))
self.assertNotEqual(results, search2.results)
def test_next_page(self):
self.search.next_page()
def test_previous_pages(self):
search = self.search.next_page()
search.previous_page()
class TestParse(unittest.TestCase):
    """Smoke-test moddb.parse() on a known mod page."""

    def setUp(self):
        # Subclasses may point `self.url` at a different page to parse.
        target = getattr(self, "url", "https://www.moddb.com/mods/edain-mod")
        self.model = moddb.parse(target)

    def test_check(self):
        # The parse in setUp is the actual check; nothing further to assert.
        pass
class TestLogin(unittest.TestCase):
    """Check that login succeeds with real credentials and rejects fake ones."""

    def test_login(self):
        # Credentials come from test.test_config; raises on failure.
        moddb.login(username, password)

    def test_fake_login(self):
        # Bogus credentials must be rejected with a ValueError.
        with self.assertRaises(ValueError):
            moddb.login("tico", "ticoisgod")

    def tearDown(self):
        # Close the shared session so tests do not leak connections.
        moddb.SESSION.close()
| 858 | 61 | 439 |
b3dc77bb14f38cb12956556bf85829eb1755a3ee | 1,218 | py | Python | 11/star1.py | nfitzen/advent-of-code-2020 | 774b7db35aaf31b0e72a569b3441343d50f4d079 | [
"CC0-1.0",
"MIT"
] | null | null | null | 11/star1.py | nfitzen/advent-of-code-2020 | 774b7db35aaf31b0e72a569b3441343d50f4d079 | [
"CC0-1.0",
"MIT"
] | null | null | null | 11/star1.py | nfitzen/advent-of-code-2020 | 774b7db35aaf31b0e72a569b3441343d50f4d079 | [
"CC0-1.0",
"MIT"
] | null | null | null | #!/usr/bin/env python3
# SPDX-FileCopyrightText: 2020 Nathaniel Fitzenrider <https://github.com/nfitzen>
#
# SPDX-License-Identifier: CC0-1.0
import itertools
from typing import List
from copy import deepcopy
# Read the seat grid: one list of single-character cells per row
# ('L' empty seat, '#' occupied seat, '.' floor).
with open('input.txt') as f:
    data = list(list(s.strip()) for s in f.readlines())
def update(state: List[List[str]]) -> List[List[str]]:
    '''Return the next seating state (AoC 2020 day 11, part 1 rules).

    An empty seat 'L' with no occupied neighbor becomes occupied '#';
    an occupied seat '#' with 4+ occupied neighbors is vacated;
    floor '.' never changes.  The input grid is not modified.
    '''
    newState = deepcopy(state)
    rows = len(state)
    for i, row in enumerate(state):
        for j, v in enumerate(row):
            if v == '.':
                # Floor never changes; its neighbor count is irrelevant.
                continue
            # Count occupied seats among the (up to) 8 neighbors using
            # explicit bounds checks instead of a bare try/except, which
            # silently swallowed *every* exception, not just IndexError.
            numAdj = sum(
                1
                for k, l in itertools.product(range(-1, 2), repeat=2)
                if (k, l) != (0, 0)
                and 0 <= i + k < rows
                and 0 <= j + l < len(state[i + k])
                and state[i + k][j + l] == '#'
            )
            if v == 'L' and numAdj == 0:
                newState[i][j] = '#'
            elif v == '#' and numAdj >= 4:
                newState[i][j] = 'L'
    return newState
# Step the simulation until the layout reaches a fixed point, then report
# the number of occupied seats.
previous = None
current = deepcopy(data)
while current != previous:
    previous = deepcopy(current)
    current = update(previous)
print(sum(row.count('#') for row in current))
| 29.707317 | 81 | 0.526273 | #!/usr/bin/env python3
# SPDX-FileCopyrightText: 2020 Nathaniel Fitzenrider <https://github.com/nfitzen>
#
# SPDX-License-Identifier: CC0-1.0
import itertools
from typing import List
from copy import deepcopy
# Read the seat grid: one list of single-character cells per row
# ('L' empty seat, '#' occupied seat, '.' floor).
with open('input.txt') as f:
    data = list(list(s.strip()) for s in f.readlines())
def update(state: List[List[str]]) -> List[List[str]]:
    '''Return the next seating state (AoC 2020 day 11, part 1 rules).

    An empty seat 'L' with no occupied neighbor becomes occupied '#';
    an occupied seat '#' with 4+ occupied neighbors is vacated;
    floor '.' never changes.  The input grid is not modified.
    '''
    newState = deepcopy(state)
    rows = len(state)
    for i, row in enumerate(state):
        for j, v in enumerate(row):
            if v == '.':
                # Floor never changes; its neighbor count is irrelevant.
                continue
            # Count occupied seats among the (up to) 8 neighbors using
            # explicit bounds checks instead of a bare try/except, which
            # silently swallowed *every* exception, not just IndexError.
            numAdj = sum(
                1
                for k, l in itertools.product(range(-1, 2), repeat=2)
                if (k, l) != (0, 0)
                and 0 <= i + k < rows
                and 0 <= j + l < len(state[i + k])
                and state[i + k][j + l] == '#'
            )
            if v == 'L' and numAdj == 0:
                newState[i][j] = '#'
            elif v == '#' and numAdj >= 4:
                newState[i][j] = 'L'
    return newState
# Step the simulation until the layout reaches a fixed point, then report
# the number of occupied seats.
previous = None
current = deepcopy(data)
while current != previous:
    previous = deepcopy(current)
    current = update(previous)
print(sum(row.count('#') for row in current))
| 0 | 0 | 0 |
dd174a854aefb599200038952f83310dd642ca25 | 4,985 | py | Python | src/models/MultVAE/MultVAE_training_helper.py | EricHe98/sad_final_project | 4b2b57e44f939840eede6f134493c5f8d809b1a7 | [
"MIT"
] | 3 | 2020-10-22T05:04:30.000Z | 2021-02-03T01:24:55.000Z | src/models/MultVAE/MultVAE_training_helper.py | EricHe98/sad_final_project | 4b2b57e44f939840eede6f134493c5f8d809b1a7 | [
"MIT"
] | null | null | null | src/models/MultVAE/MultVAE_training_helper.py | EricHe98/sad_final_project | 4b2b57e44f939840eede6f134493c5f8d809b1a7 | [
"MIT"
] | null | null | null | from MultVAE_Dataset import *
from MultVAE_model import *
from torch import nn
from torch.utils.data import DataLoader
from datetime import datetime
import argparse
from scipy import sparse
import numpy as np
import mlflow.pytorch
| 34.143836 | 140 | 0.623671 | from MultVAE_Dataset import *
from MultVAE_model import *
from torch import nn
from torch.utils.data import DataLoader
from datetime import datetime
import argparse
from scipy import sparse
import numpy as np
import mlflow.pytorch
def make_dataloader(data_path = None, hotel_path = None, batch_size = 256):
    """Wrap the hotel dataset in a DataLoader.

    Returns a (DataLoader, hotel_length) pair, where hotel_length is the
    length reported by the BasicHotelDataset instance.
    """
    dataset = BasicHotelDataset(data_path, hotel_path)
    loader = DataLoader(dataset, batch_size=batch_size)
    return loader, dataset.hotel_length
def train(model,
          beta,
          train_loader,
          optimizer,
          device):
    """Run one training epoch.

    Returns the per-sample mean of (total loss, BCE term, KLD term),
    each averaged over len(train_loader.dataset).
    """
    epoch_loss = 0.0
    epoch_bce = 0.0
    epoch_kld = 0.0
    model.train()
    for batch in train_loader:
        # Move the input and its observed-mask to the target device.
        x, observed = batch[0].to(device), batch[1].to(device)
        # Forward pass through the VAE.
        x_hat, mu, logvar = model(x)
        # Clear stale gradients before computing new ones.
        optimizer.zero_grad()
        # Beta-weighted ELBO loss and its components.
        loss, bce, kld = VAE_loss_function(x_hat, x, observed, mu, logvar, beta)
        # Backpropagate and take a gradient step.
        loss.backward()
        optimizer.step()
        # Accumulate batch totals for epoch-level averages.
        epoch_loss += loss.item()
        epoch_bce += bce.item()
        epoch_kld += kld.item()
    n_samples = len(train_loader.dataset)
    train_loss = epoch_loss / n_samples
    train_bce = epoch_bce / n_samples
    train_kld = epoch_kld / n_samples
    print('Train Loss: {:.6f}'.format(train_loss))
    return train_loss, train_bce, train_kld
def validate(model,
             beta,
             valid_loader,
             best_val_loss,
             device,
             save_path='src/models/MultVAE/checkpoints/multvae_basic_model.pth'):
    """Evaluate `model` on `valid_loader` and checkpoint it on improvement.

    Args:
        model: MultVAE model producing (x_hat, mu, logvar).
        beta: KL-divergence weight passed to VAE_loss_function.
        valid_loader: DataLoader yielding (input, observed-mask) batches.
        best_val_loss: best per-sample validation loss seen so far.
        device: torch device the batches are moved to.
        save_path: where the state_dict is saved when the loss improves.

    Returns:
        (val_loss, best_val_loss, val_bce, val_kld) — per-sample means;
        best_val_loss is updated if this epoch improved on it.
    """
    model.eval()
    loss_per_epoch = 0.0
    bce_per_epoch = 0.0
    kld_per_epoch = 0.0
    # No gradients needed during evaluation.
    with torch.no_grad():
        for data in valid_loader:
            x, observed = data[0].to(device), data[1].to(device)
            x_hat, mu, logvar = model(x)
            loss, bce, kld = VAE_loss_function(x_hat, x, observed, mu, logvar, beta)
            loss_per_epoch += loss.item()
            bce_per_epoch += bce.item()
            kld_per_epoch += kld.item()
    n_samples = len(valid_loader.dataset)
    val_loss = loss_per_epoch / n_samples
    val_bce = bce_per_epoch / n_samples
    val_kld = kld_per_epoch / n_samples
    print('Validation Loss: {:.6f}'.format(val_loss))
    # TODO: something here for validation ndcg@100?
    if val_loss < best_val_loss:
        best_val_loss = val_loss
        torch.save(model.state_dict(), save_path)
        print('Saved best model in the checkpoint directory\n')
    return val_loss, best_val_loss, val_bce, val_kld
def train_and_validate(model,
                       train_loader,
                       valid_loader,
                       device,
                       start_beta = 0.0,
                       max_beta = 1.0,
                       num_epoch = 100,
                       learning_rate = 1e-4,
                       log_interval = 1,
                       max_patience = 5,
                       run_id = None,
                       save_path = '/scratch/work/js11133/sad_data/models/multVAE/'
                       ):
    """Train the MultVAE for `num_epoch` epochs with KL (beta) annealing.

    Each epoch runs train() then validate(), anneals beta from `start_beta`
    toward `max_beta` in steps of max_beta/200, and saves the model via
    mlflow after every epoch.

    Returns:
        (metrics, final_epoch) where metrics is a 6-tuple of per-epoch lists:
        (train_loss, train_bce, train_kld, val_loss, val_bce, val_kld).

    NOTE(review): the early-stopping/patience logic is commented out below,
    so `max_patience` and `log_interval` are currently unused and
    `best_val_loss` is never updated from validate()'s return value —
    confirm whether this is intentional.
    """
    #Initialize stuff
    # patience_counter = 0
    optimizer = torch.optim.Adam(
        model.parameters(), lr=learning_rate)
    train_loss_history = []
    train_bce_history = []
    train_kld_history = []
    val_loss_history = []
    val_bce_history = []
    val_kld_history = []
    best_val_loss = 10e7  # effectively +inf sentinel so the first epoch checkpoints
    final_epoch = 0
    # Beta is annealed linearly over 200 steps regardless of num_epoch.
    beta_incrementer = max_beta/200.0
    beta = start_beta
    for epoch_ii in range(num_epoch):
        print("Epoch {}".format(epoch_ii + 1,))
        #Train
        train_loss,train_bce,train_kld = train(model,beta,train_loader,optimizer, device)
        train_loss_history.append(train_loss)
        train_bce_history.append(train_bce)
        train_kld_history.append(train_kld)
        # Validate
        current_val_loss, new_best_val_loss,val_bce,val_kld = validate(model,beta,valid_loader, best_val_loss, device)
        val_loss_history.append(current_val_loss)
        val_bce_history.append(val_bce)
        val_kld_history.append(val_kld)
        # Anneal the KL weight until it reaches max_beta.
        if beta < max_beta:
            beta += beta_incrementer
        # if current_val_loss >= best_val_loss:
        #     patience_counter+=1
        # else:
        #     patience_counter=0
        #     best_val_loss=new_best_val_loss
        # print('patience',patience_counter)
        # if patience_counter>max_patience:
        #     break
        # Save a checkpoint every epoch.  The "200+epoch_ii" offset suggests
        # resumption of a previous 200-epoch run — TODO confirm.
        mlflow.pytorch.save_model(pytorch_model = model, path = save_path + 'multvae_{}_annealed_epoch_{}.uri'.format(run_id, 200+epoch_ii))
        final_epoch = epoch_ii
    metrics= (train_loss_history,train_bce_history,train_kld_history, val_loss_history,val_bce_history,val_kld_history)
    return metrics, final_epoch
| 4,661 | 0 | 92 |
0c57458a5e82bfdc85a8d11ffd2f10db47dcbdb0 | 451 | py | Python | lona/unique_ids.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 230 | 2021-08-15T20:46:24.000Z | 2022-03-30T10:17:43.000Z | lona/unique_ids.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 176 | 2021-08-18T08:19:37.000Z | 2022-03-29T16:45:06.000Z | lona/unique_ids.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 13 | 2021-08-20T10:35:04.000Z | 2022-01-17T15:49:40.000Z | from threading import Lock
_name_spaces = {
'': UniqueIDGenerator(),
'nodes': UniqueIDGenerator(),
'view_runtimes': UniqueIDGenerator(),
}
| 18.04 | 41 | 0.625277 | from threading import Lock
class UniqueIDGenerator:
    """Thread-safe counter that hands out unique ids as strings ('1', '2', ...)."""

    def __init__(self):
        self._lock = Lock()
        self._value = 0

    def __call__(self):
        # Serialize increments so concurrent callers never receive the same id.
        with self._lock:
            self._value += 1

            return str(self._value)
# Registry of independent id generators, keyed by namespace.  The '' key is
# the default namespace used by generate_unique_id() when none is given.
_name_spaces = {
    '': UniqueIDGenerator(),
    'nodes': UniqueIDGenerator(),
    'view_runtimes': UniqueIDGenerator(),
}
def generate_unique_id(name_space=''):
    """Return the next unique id string for the given namespace."""
    generator = _name_spaces[name_space]
    return generator()
| 194 | 3 | 99 |