Dataset schema (one row per source file):

| column | type | values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 2.06M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 248 |
| max_stars_repo_name | string | length 5 to 125 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 (nullable) | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string (nullable) | length 24 |
| max_stars_repo_stars_event_max_datetime | string (nullable) | length 24 |
| max_issues_repo_path | string | length 3 to 248 |
| max_issues_repo_name | string | length 5 to 125 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 (nullable) | 1 to 67k |
| max_issues_repo_issues_event_min_datetime | string (nullable) | length 24 |
| max_issues_repo_issues_event_max_datetime | string (nullable) | length 24 |
| max_forks_repo_path | string | length 3 to 248 |
| max_forks_repo_name | string | length 5 to 125 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 (nullable) | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string (nullable) | length 24 |
| max_forks_repo_forks_event_max_datetime | string (nullable) | length 24 |
| content | string | length 5 to 2.06M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
| count_classes | int64 | 0 to 1.6M |
| score_classes | float64 | 0 to 1 |
| count_generators | int64 | 0 to 651k |
| score_generators | float64 | 0 to 1 |
| count_decorators | int64 | 0 to 990k |
| score_decorators | float64 | 0 to 1 |
| count_async_functions | int64 | 0 to 235k |
| score_async_functions | float64 | 0 to 1 |
| count_documentation | int64 | 0 to 1.04M |
| score_documentation | float64 | 0 to 1 |
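Each row pairs a file's full source (`content`) with repository metadata and derived statistics. As a minimal sketch of working with such a dump, assuming it has been exported to a JSONL file named `python_files.jsonl` (a hypothetical path), rows can be filtered with pandas:

```python
import pandas as pd

# Load the dump; each JSON line is one row of the table above.
df = pd.read_json("python_files.jsonl", lines=True)

# Keep well-documented files with reasonable line lengths.
# score_documentation and max_line_length are columns from the schema.
docs = df[(df["score_documentation"] >= 0.5) & (df["max_line_length"] <= 120)]
print(docs[["max_stars_repo_name", "max_stars_repo_path", "size"]].head())
```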
hexsha: 18645f94ba67063154674ceff77d5989d4dbd944 | size: 8,550 | ext: py | lang: Python
max_stars: secureaws/secureaws.py @ paliwalvimal/aws-secure-account (78447720a17176cc539d62775817026609e67339) | licenses: ["MIT"] | count: 1 | 2021-02-11T17:15:18.000Z to 2021-02-11T17:15:18.000Z
max_issues: secureaws/secureaws.py @ paliwalvimal/aws-secure-account (78447720a17176cc539d62775817026609e67339) | licenses: ["MIT"] | count: null | dates: null
max_forks: secureaws/secureaws.py @ paliwalvimal/aws-secure-account (78447720a17176cc539d62775817026609e67339) | licenses: ["MIT"] | count: 1 | 2019-12-12T09:01:59.000Z to 2019-12-12T09:01:59.000Z

"""
AUTHOR = Vimal Paliwal <hello@vimalpaliwal.com>
"""
import sys
import boto3
import click
import threading
from botocore.exceptions import ClientError
from secureaws import checkaws
from secureaws import setupaws
from secureaws import rsautil
# Important Variables - DO NOT change the values
REGION = {
"N_VIRGINIA": "us-east-1",
"OHIO": "us-east-2",
"N_CALIFORNIA": "us-west-1",
"OREGON": "us-west-2",
"MUMBAI": "ap-south-1",
"SEOUL": "ap-northeast-2",
"SINGAPORE": "ap-southeast-1",
"SYDNEY": "ap-southeast-2",
"TOKYO": "ap-northeast-1",
"CANADA": "ca-central-1",
"FRANKFURT": "eu-central-1",
"IRELAND": "eu-west-1",
"LONDON": "eu-west-2",
"PARIS": "eu-west-3",
"SAO_PAULO": "sa-east-1",
"BAHRAIN": "me-south-1",
"STOCKHOLM": "eu-north-1",
"HONG_KONG": "ap-east-1"
}
class secureaws:
region = ""
session = None
def __init__(self, access_key="", secret_key="", profile="", region=""):
self.region = region
try:
if access_key == "" and secret_key == "" and profile == "":
self.session = boto3.Session(region_name=region)
elif profile != "":
self.session = boto3.Session(profile_name=profile, region_name=region)
elif access_key != "" and secret_key != "":
self.session = boto3.Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=region)
except Exception as e:
print("Error: {}".format(e))
            sys.exit(1)
def getSession(self):
return self.session
# Managing CLI
@click.group()
def chk_group():
pass
@chk_group.command()
@click.option('--access-key', help='AWS IAM User Access Key')
@click.option('--secret-key', help='AWS IAM User Secret Key')
@click.option('--profile', help='AWS CLI profile')
@click.option('--region', default='us-east-1', help='AWS region identifier. Default: us-east-1')
def check(access_key, secret_key, profile, region):
'''
This command will scan your AWS account to identify whether basic security services are enabled or not.
\b
IAM Policy:
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"cloudtrail:DescribeTrails",
"config:DescribeConfigurationRecorderStatus",
"ec2:DescribeFlowLogs",
"iam:GetAccountSummary",
"iam:GetAccountPasswordPolicy",
"macie:ListMemberAccounts",
"guardduty:ListDetectors",
"s3:ListAllMyBuckets",
"s3:GetEncryptionConfiguration",
"ec2:DescribeVolumes"
],
"Resource": "*"
}
]
}
\b
Usage:
- Scan AWS account using profile:
secureaws check --profile xxx --region xxx
- Scan AWS account using keys:
secureaws check --access-key xxx --secret-key xxx --region xxx
'''
secureaws_obj = secureaws(access_key, secret_key, profile, region)
checkaws.check_account(secureaws_obj.getSession())
@click.group()
def setup_group():
pass
@setup_group.command()
@click.option('--menu', is_flag=True, help='Display interactive menu to setup security services')
@click.option('--access-key', help='AWS IAM User Access Key')
@click.option('--secret-key', help='AWS IAM User Secret Key')
@click.option('--profile', help='AWS CLI profile')
@click.option('--region', default='us-east-1', help='AWS region identifier. Default: us-east-1')
@click.option('--yes', '-y', 'non_interactive', is_flag=True, help='Non-interactive mode')
@click.option('--service', '-s', 'svc', multiple=True, help='Specific service name to setup')
@click.option('--bucket-name', multiple=True, help='Bucket name to encrypt. Only applicable for s3-sse')
@click.option('--instance-id', multiple=True, help='Instance ID (Required only for ebs-sse)')
@click.option('--volume-id', multiple=True, help='Volume ID (Required only for ebs-sse)')
@click.option('--kms-id', help='Supports both KMS Key ID or Alias. Only supported for s3-sse and ebs-sse')
def setup(menu, access_key, secret_key, profile, region, non_interactive, svc, bucket_name, instance_id, volume_id, kms_id):
'''
\b
This command supports securing following services on your AWS account:
- CloudTrail
- Config
- Flow Logs
- MFA (Default User: root)
- S3 SSE (Default: AES256)
- EBS SSE (Default: aws/ebs)
- Password Policy
\b
    It is recommended to restrict the policy further to match your needs.
IAM Policy:
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:CreateBucket",
"s3:PutEncryptionConfiguration",
"s3:ListAllMyBuckets",
"s3:PutBucketPolicy",
"s3:HeadBucket",
"cloudtrail:StartLogging",
"cloudtrail:CreateTrail",
"iam:CreateRole",
"iam:PassRole",
"iam:AttachRolePolicy",
"iam:CreatePolicy",
"iam:UpdateAccountPasswordPolicy",
"iam:CreateVirtualMFADevice",
"iam:EnableMFADevice",
"iam:GetUser",
"iam:ListMFADevices",
"config:StartConfigurationRecorder",
"config:PutDeliveryChannel",
"config:PutConfigurationRecorder",
"logs:CreateLogGroup",
"logs:DescribeLogGroups",
"ec2:CreateFlowLogs",
"ec2:DescribeVpcs",
"ec2:StopInstances",
"ec2:StartInstances",
"ec2:CreateSnapshot",
"ec2:CopySnapshot",
"ec2:CreateVolume",
"ec2:AttachVolume",
"ec2:DeleteVolume",
"ec2:DeleteSnapshot"
],
"Resource": "*"
}
]
}
\b
Service Names:
- cloudtrail
- config
- flowlogs
- mfa
- s3-sse
- ebs-sse
- password-policy
\b
Usage:
- Setup all services using AWS profile:
secureaws setup --profile xxx --region xxx
- Setup all services using AWS keys in non-interactive mode (except ebs-sse):
secureaws setup --access-key xxx --secret-key xxx --region xxx -y
- Setup specific service(s):
secureaws setup --profile xxx --service cloudtrail -s flowlogs -s mfa --region xxx
    - Setup MFA for the root user:
secureaws setup --profile xxx -s mfa
- Setup MFA for an IAM user:
secureaws setup --profile xxx -s mfa=username
- Encrypt all S3 buckets using KMS Key ID:
secureaws setup --profile xxx --region xxx -s s3-sse --kms-id xxx
- Encrypt specific S3 buckets using default encryption:
secureaws setup --profile xxx --region xxx -s s3-sse --bucket-name xxx --bucket-name xxx
- Encrypt EBS Volumes using Instance ID(s):
secureaws setup --profile xxx -s ebs-sse --instance-id xxx --region xxx
- Encrypt EBS Volumes using Volume ID(s) and KMS Alias:
secureaws setup --profile xxx -s ebs-sse --volume-id xxx --volume-id xxx --kms-id alias/xxx --region xxx
'''
secureaws_obj = secureaws(access_key, secret_key, profile, region)
if menu:
setupaws.secure_account_menu(secureaws_obj.getSession())
else:
setupaws.secure_account(secureaws_obj.getSession(), svc, buckets=bucket_name, instance_id=instance_id, volume_id=volume_id, kms_id=kms_id, non_interactive=non_interactive)
@click.group()
def rsa_group():
pass
@rsa_group.command()
@click.option('--file-name', help='File name for private and public key')
@click.option('--key-size', default=4096, help='Key size (Default: 4096)')
def genrsa(file_name, key_size):
'''
This will generate RSA key pair
'''
rsautil.create_rsa_key_pair(file_name, key_size)
# Map all click groups
sa = click.CommandCollection(sources=[chk_group,setup_group,rsa_group])
def main():
sa()
if __name__ == '__main__':
sa()

avg_line_length: 34.615385 | max_line_length: 179 | alphanum_fraction: 0.582456
count_classes: 759 (score 0.088772) | count_generators: 0 (score 0) | count_decorators: 6,620 (score 0.774269) | count_async_functions: 0 (score 0) | count_documentation: 5,881 (score 0.687836)
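The trailing statistics are derived from `content`. A sketch of plausible definitions for the first three columns (the dataset's exact formulas are an assumption here):

```python
def line_stats(content: str):
    """Recompute avg_line_length, max_line_length, and alphanum_fraction
    from a file's source text (assumed definitions, non-empty content)."""
    lines = content.splitlines()
    avg_line_length = sum(len(line) for line in lines) / len(lines)
    max_line_length = max(len(line) for line in lines)
    # Share of characters that are letters or digits.
    alphanum_fraction = sum(ch.isalnum() for ch in content) / len(content)
    return avg_line_length, max_line_length, alphanum_fraction
```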
hexsha: 1864d86874c3d8b77ca9978c07a999b3a352d135 | size: 888 | ext: py | lang: Python
max_stars: python/examples/find_similar.py @ yupbank/euclidesdb (c4210b68a79aab20e6911c78940b909b8bede557) | licenses: ["Apache-2.0"] | count: null | dates: null
max_issues: python/examples/find_similar.py @ yupbank/euclidesdb (c4210b68a79aab20e6911c78940b909b8bede557) | licenses: ["Apache-2.0"] | count: null | dates: null
max_forks: python/examples/find_similar.py @ yupbank/euclidesdb (c4210b68a79aab20e6911c78940b909b8bede557) | licenses: ["Apache-2.0"] | count: null | dates: null

import sys
import argparse
import euclides
from PIL import Image
import numpy as np
from torchvision.transforms import functional as F
def run_main():
parser = argparse.ArgumentParser(description='Find similar images in EuclidesDB.')
parser.add_argument('--topk', dest='topk', type=int, required=True,
help='Find top k results.')
parser.add_argument('--file', dest='filename', type=str, required=True,
help='Image file name.')
args = parser.parse_args()
image = Image.open(args.filename)
image.thumbnail((300, 300), Image.ANTIALIAS)
image = F.center_crop(image, 224)
with euclides.Channel("localhost", 50000) as channel:
db = euclides.EuclidesDB(channel)
ret_similar = db.find_similar(image, ["resnet18"], args.topk)
print(ret_similar)
if __name__ == "__main__":
run_main()

avg_line_length: 26.909091 | max_line_length: 86 | alphanum_fraction: 0.667793
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 138 (score 0.155405)
hexsha: 18662f52c2055666297ec86901f3368b3430ce9a | size: 868 | ext: py | lang: Python
max_stars: gunpowder/nodes/renumber_connected_components.py @ trivoldus28/gunpowder (97e9e64709fb616e2c47567b22d5f11a9234fe48) | licenses: ["MIT"] | count: 43 | 2017-05-03T22:27:11.000Z to 2022-02-11T19:07:28.000Z
max_issues: gunpowder/nodes/renumber_connected_components.py @ trivoldus28/gunpowder (97e9e64709fb616e2c47567b22d5f11a9234fe48) | licenses: ["MIT"] | count: 102 | 2017-06-09T10:11:06.000Z to 2022-03-29T13:56:37.000Z
max_forks: gunpowder/nodes/renumber_connected_components.py @ trivoldus28/gunpowder (97e9e64709fb616e2c47567b22d5f11a9234fe48) | licenses: ["MIT"] | count: 43 | 2017-04-25T20:25:17.000Z to 2022-02-11T19:07:34.000Z

from .batch_filter import BatchFilter
from gunpowder.ext import malis
class RenumberConnectedComponents(BatchFilter):
'''Find connected components of the same value, and replace each component
with a new label.
Args:
labels (:class:`ArrayKey`):
The label array to modify.
'''
def __init__(self, labels):
self.labels = labels
def process(self, batch, request):
components = batch.arrays[self.labels].data
dtype = components.dtype
simple_neighborhood = malis.mknhood3d()
affinities_from_components = malis.seg_to_affgraph(
components,
simple_neighborhood)
components, _ = malis.connected_components_affgraph(
affinities_from_components,
simple_neighborhood)
batch.arrays[self.labels].data = components.astype(dtype)

avg_line_length: 29.931034 | max_line_length: 78 | alphanum_fraction: 0.670507
count_classes: 796 (score 0.917051) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 192 (score 0.221198)
hexsha: 18664b760b4ae7d4a23d616670b3152102c11769 | size: 401 | ext: py | lang: Python
max_stars: project/main/migrations/0003_auto_20200504_1852.py @ Leeoku/MovieCrud (fb9e364895684f0cb1e3c1bc68971f0d4a7df1fc) | licenses: ["MIT"] | count: null | dates: null
max_issues: project/main/migrations/0003_auto_20200504_1852.py @ Leeoku/MovieCrud (fb9e364895684f0cb1e3c1bc68971f0d4a7df1fc) | licenses: ["MIT"] | count: 6 | 2021-03-19T02:52:05.000Z to 2021-09-22T18:58:44.000Z
max_forks: project/main/migrations/0003_auto_20200504_1852.py @ Leeoku/MovieCrud (fb9e364895684f0cb1e3c1bc68971f0d4a7df1fc) | licenses: ["MIT"] | count: null | dates: null

# Generated by Django 3.0.4 on 2020-05-04 18:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20200323_0327'),
]
operations = [
migrations.AlterField(
model_name='movieentry',
name='date_watched',
field=models.DateField(blank=True, null=True),
),
]

avg_line_length: 21.105263 | max_line_length: 58 | alphanum_fraction: 0.605985
count_classes: 308 (score 0.76808) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 104 (score 0.259352)
hexsha: 1868e8987c751a0abe91a5dd69173ea001090442 | size: 3,534 | ext: py | lang: Python
max_stars: LR/lr/model/resource_data_monitor/incoming_copy_handler.py @ LearningRegistry/LearningRegistry (d9f0a8117a4adb8fcf6bf101d3d58d799463a2e2) | licenses: ["Apache-2.0"] | count: 26 | 2015-04-14T03:11:58.000Z to 2022-01-06T14:31:07.000Z
max_issues: LR/lr/model/resource_data_monitor/incoming_copy_handler.py @ LearningRegistry/LearningRegistry (d9f0a8117a4adb8fcf6bf101d3d58d799463a2e2) | licenses: ["Apache-2.0"] | count: 11 | 2015-04-03T21:54:03.000Z to 2017-05-02T17:20:03.000Z
max_forks: LR/lr/model/resource_data_monitor/incoming_copy_handler.py @ LearningRegistry/LearningRegistry (d9f0a8117a4adb8fcf6bf101d3d58d799463a2e2) | licenses: ["Apache-2.0"] | count: 16 | 2015-02-11T09:30:18.000Z to 2020-11-20T02:06:24.000Z

import logging
import time  # used by _handle below to throttle worker threads
import couchdb
from collections import deque
from threading import Thread
from pylons import config
from lr.lib import SpecValidationException, helpers as h
from lr.lib.couch_change_monitor import BaseChangeHandler
from lr.model import ResourceDataModel
from couchdb import ResourceConflict
from lr.lib.replacement_helper import ResourceDataReplacement
from lr.lib.schema_helper import ResourceDataModelValidator
log = logging.getLogger(__name__)
# this doesn't need to be done... should be handled by pylons.config
# scriptPath = os.path.dirname(os.path.abspath(__file__))
# _PYLONS_CONFIG = os.path.join(scriptPath, '..', '..', '..', 'development.ini')
# _config = ConfigParser.ConfigParser()
# _config.read(_PYLONS_CONFIG)
_RESOURCE_DISTRIBUTABLE_TYPE = "resource_data_distributable"
_RESOURCE_TYPE = "resource_data"
_DOC_TYPE = "doc_type"
_DOC = "doc"
_ID = "id"
_DOCUMENT_UPDATE_THRESHOLD = 100
class IncomingCopyHandler(BaseChangeHandler):
def __init__(self):
self._serverUrl = config["couchdb.url.dbadmin"]
self._targetName = config["couchdb.db.resourcedata"]
self.documents = deque()
s = couchdb.Server(self._serverUrl)
self._db = s[self._targetName]
self.repl_helper = ResourceDataReplacement()
self.threads = {}
self.max_threads = 50
def _canHandle(self, change, database):
if ((_DOC in change) and \
(change[_DOC].get(_DOC_TYPE) == _RESOURCE_DISTRIBUTABLE_TYPE or \
change[_DOC].get(_DOC_TYPE) == _RESOURCE_TYPE)):
return True
return False
def _handle(self, change, database):
def threadName(doc):
return "T-"+doc["_id"]
def handleDocument(newDoc):
should_delete = True
try:
# newDoc['node_timestamp'] = h.nowToISO8601Zformat()
ResourceDataModelValidator.set_timestamps(newDoc)
del newDoc["_rev"]
self.repl_helper.handle(newDoc)
# rd = ResourceDataModel(newDoc)
# rd.save(log_exceptions=False)
except SpecValidationException as e:
log.error("SpecValidationException: %s, %s",newDoc['_id'],str(e))
except couchdb.ResourceConflict as rc:
log.error("Document conflicts", exc_info=1)
except Exception as ex:
                should_delete = False  # don't delete if something unexpected happened
log.error("Unable to save %s", newDoc['_id'], exc_info=ex)
if should_delete:
try:
del database[newDoc['_id']]
except Exception as ex:
log.error("Error when deleting", exc_info=ex)
try:
del self.threads[threadName(newDoc)]
except:
pass
self.documents.append(change[_DOC])
if len(self.documents) >= _DOCUMENT_UPDATE_THRESHOLD or len(self.documents) >= database.info()['doc_count']:
while len(self.documents) > 0:
doc = self.documents.popleft()
tname = threadName(doc)
t = Thread(target=handleDocument, name=tname, args=(doc,))
self.threads[tname] = t
t.start()
while len(self.threads) > self.max_threads:
time.sleep(.1)
def isRunning(self):
return len(self.threads) > 0
def threadCount(self):
return len(self.threads)

avg_line_length: 36.061224 | max_line_length: 116 | alphanum_fraction: 0.621958
count_classes: 2,612 (score 0.739106) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 676 (score 0.191285)
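IncomingCopyHandler above throttles its worker threads by polling `self.threads` and sleeping. For comparison, a sketch of the same bounded-concurrency idea using the standard library's ThreadPoolExecutor (not the project's actual API):

```python
from concurrent.futures import ThreadPoolExecutor, wait

def process_batch(documents, handle_document, max_workers=50):
    # The pool bounds concurrency declaratively instead of polling a
    # dict of live threads; handle_document is the per-document
    # callable, as in the handler above.
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        futures = [pool.submit(handle_document, doc) for doc in documents]
        wait(futures)  # block until every document has been handled
```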
hexsha: 186a448cd375a10732fb3690423f8d8f87976e4a | size: 1,432 | ext: py | lang: Python
max_stars: proxyclient/m1n1/fw/asc/base.py @ EricRabil/m1n1 (0a1a9348c32e2e44374720cd9d68cbe81cf696df) | licenses: ["MIT"] | count: 1 | 2022-02-19T17:47:58.000Z to 2022-02-19T17:47:58.000Z
max_issues: proxyclient/m1n1/fw/asc/base.py @ EricRabil/m1n1 (0a1a9348c32e2e44374720cd9d68cbe81cf696df) | licenses: ["MIT"] | count: null | dates: null
max_forks: proxyclient/m1n1/fw/asc/base.py @ EricRabil/m1n1 (0a1a9348c32e2e44374720cd9d68cbe81cf696df) | licenses: ["MIT"] | count: 2 | 2022-02-01T18:33:16.000Z to 2022-02-19T17:50:25.000Z

# SPDX-License-Identifier: MIT
from ...utils import *
# System endpoints
def msg_handler(message, regtype=None):
def f(x):
x.is_message = True
x.message = message
x.regtype = regtype
return x
return f
class ASCMessage1(Register64):
EP = 7, 0
class ASCBaseEndpoint:
BASE_MESSAGE = Register64
SHORT = None
def __init__(self, asc, epnum, name=None):
self.asc = asc
self.epnum = epnum
self.name = name or self.SHORT or f"{type(self).__name__}@{epnum:#x}"
self.msghandler = {}
self.msgtypes = {}
for name in dir(self):
i = getattr(self, name)
if not callable(i):
continue
if not getattr(i, "is_message", False):
continue
self.msghandler[i.message] = i
self.msgtypes[i.message] = i.regtype if i.regtype else self.BASE_MESSAGE
def handle_msg(self, msg0, msg1):
msg0 = self.BASE_MESSAGE(msg0)
handler = self.msghandler.get(msg0.TYPE, None)
regtype = self.msgtypes.get(msg0.TYPE, self.BASE_MESSAGE)
if handler is None:
return False
return handler(regtype(msg0.value))
def send(self, msg):
self.asc.send(msg, ASCMessage1(EP=self.epnum))
def start(self):
pass
def stop(self):
pass
def log(self, msg):
print(f"[{self.name}] {msg}")

avg_line_length: 25.122807 | max_line_length: 84 | alphanum_fraction: 0.578212
count_classes: 1,185 (score 0.827514) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 117 (score 0.081704)
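ASCBaseEndpoint discovers handlers at construction time by scanning its methods for the attributes set by the msg_handler decorator. A hypothetical subclass illustrating the pattern (the message numbers and register fields below are invented, not part of m1n1):

```python
class PingMessage(Register64):
    TYPE = 63, 56  # message-type field; handle_msg dispatches on TYPE
    SEQ = 15, 0    # invented sequence-number field

class PingEndpoint(ASCBaseEndpoint):
    BASE_MESSAGE = PingMessage
    SHORT = "ping"

    @msg_handler(0x1, regtype=PingMessage)
    def ping(self, msg):
        # Echo the sequence number back; message type 0x2 is invented.
        self.send(PingMessage(TYPE=0x2, SEQ=msg.SEQ))
        return True
```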
hexsha: 186a5816589e84e463b32b76302f76cecdf63a3d | size: 710 | ext: py | lang: Python
max_stars: misc/redirector.py @ ktan2020/tooling (5a22adc2895f5baa98faad7028061219c545a675) | licenses: ["MIT"] | count: null | dates: null
max_issues: misc/redirector.py @ ktan2020/tooling (5a22adc2895f5baa98faad7028061219c545a675) | licenses: ["MIT"] | count: null | dates: null
max_forks: misc/redirector.py @ ktan2020/tooling (5a22adc2895f5baa98faad7028061219c545a675) | licenses: ["MIT"] | count: null | dates: null

import SimpleHTTPServer
import SocketServer
import sys
from optparse import OptionParser
class myHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
print self.path
self.send_response(301)
new_path = 'http://%s%s'%(o.ip, self.path)
self.send_header('Location', new_path)
self.end_headers()
p = OptionParser()
p.add_option("--ip", dest="ip")
p.add_option("--port", dest="port", type=int, default=8080)
(o,p) = p.parse_args()
if o.ip is None:
print "XXX FATAL : IP address to redirect to is mandatory! XXX"
sys.exit(1)
handler = SocketServer.TCPServer(("", o.port), myHandler)
print "serving at port %s" % o.port
handler.serve_forever()

avg_line_length: 27.307692 | max_line_length: 67 | alphanum_fraction: 0.685915
count_classes: 256 (score 0.360563) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 126 (score 0.177465)
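redirector.py is Python 2 (print statements, the SimpleHTTPServer and SocketServer modules). For reference, a rough Python 3 equivalent of the same redirect handler, as a sketch rather than part of the repository:

```python
import socketserver
from http.server import SimpleHTTPRequestHandler

TARGET_IP = "192.0.2.1"  # placeholder; would come from --ip in the original
PORT = 8080

class RedirectHandler(SimpleHTTPRequestHandler):
    def do_GET(self):
        # Permanently redirect every request to the same path on the target.
        self.send_response(301)
        self.send_header("Location", f"http://{TARGET_IP}{self.path}")
        self.end_headers()

with socketserver.TCPServer(("", PORT), RedirectHandler) as server:
    print(f"serving at port {PORT}")
    server.serve_forever()
```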
hexsha: 186a69b010242e5cd6623bba8225f28d59422edb | size: 943 | ext: py | lang: Python
max_stars: alert/getinfo/model/configdata.py @ xwwwb/genshin_task-resin-expedition_alert (cddaafc2723c5d9eea6fbd1db792ad70427344c8) | licenses: ["MIT"] | count: 2 | 2022-03-01T10:39:30.000Z to 2022-03-29T13:40:37.000Z
max_issues: alert/getinfo/model/configdata.py @ xwwwb/genshin_task-resin-expedition_alert (cddaafc2723c5d9eea6fbd1db792ad70427344c8) | licenses: ["MIT"] | count: null | dates: null
max_forks: alert/getinfo/model/configdata.py @ xwwwb/genshin_task-resin-expedition_alert (cddaafc2723c5d9eea6fbd1db792ad70427344c8) | licenses: ["MIT"] | count: null | dates: null

from typing import List, Literal
import pydantic
class ConfigData(pydantic.BaseModel):
UID: str
    COOKIE: str
    SCKEY: str
    SCTKEY: str
    BARK_KEY: str
    BARK_GROUP: str
    BARK_ICON: str
    BARK_ARCHIVE: str
WW_ID: str
WW_APP_SECRET: str
WW_APP_USERID: str
WW_APP_AGENTID: str
WW_BOT_KEY: str
DD_BOT_TOKEN: str
DD_BOT_SECRET: str
DISCORD_WEBHOOK: str
IGOT_KEY: str
PUSH_PLUS_TOKEN: str
PUSH_PLUS_USER: str
TG_BOT_API: str
TG_BOT_TOKEN: str
TG_USER_ID: str
COOL_PUSH_SKEY: str
COOL_PUSH_MODE: Literal["send","psend","group","pgroup"]
COOL_PUSH_SENDID: str
QMSG_KEY: str
RESIN_ALERT_NUM: int
RECEIVE_RESIN_DATA: Literal["ON","OFF"]
RECEIVE_BOSS_COUNT: Literal["ON","OFF"]
RECEIVE_TASK_NUM: Literal["ON","OFF"]
REVEIVE_EXPEDITION_NUM: Literal["ON","OFF"]
INCOMPLETE_ALERT: str
SLEEP_TIME: int
    ALERT_SUCCESS_SLEEP_TIME: int

avg_line_length: 23 | max_line_length: 60 | alphanum_fraction: 0.688229
count_classes: 888 (score 0.941676) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 64 (score 0.067869)
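ConfigData gives the alert script fail-fast validation: pydantic checks every field's type, and the Literal fields only accept the listed strings. A minimal usage sketch (`config.json` is a hypothetical source of the raw values):

```python
import json
import pydantic

with open("config.json", encoding="utf-8") as fh:
    raw = json.load(fh)

try:
    config = ConfigData(**raw)
except pydantic.ValidationError as err:
    # e.g. COOL_PUSH_MODE must be "send", "psend", "group", or "pgroup"
    print(err)
    raise SystemExit(1)
```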
hexsha: 186ceed8bf38c2d8c4e7809751f03d8df4473f09 | size: 6,479 | ext: py | lang: Python
max_stars: src/cli.py @ stefantaubert/tacotron (9ac37fbf8789b4e7fe1067212a736074181b6fd8) | licenses: ["MIT"] | count: null | dates: null
max_issues: src/cli.py @ stefantaubert/tacotron (9ac37fbf8789b4e7fe1067212a736074181b6fd8) | licenses: ["MIT"] | count: 1 | 2021-11-11T08:50:32.000Z to 2021-11-19T12:39:06.000Z
max_forks: src/cli.py @ stefantaubert/tacotron (9ac37fbf8789b4e7fe1067212a736074181b6fd8) | licenses: ["MIT"] | count: null | dates: null

import os
from argparse import ArgumentParser
from pathlib import Path
from general_utils import split_hparams_string, split_int_set_str
# from tacotron.app.eval_checkpoints import eval_checkpoints
from tacotron.app import (DEFAULT_MAX_DECODER_STEPS, continue_train, infer,
plot_embeddings, train, validate)
from tacotron.app.defaults import (DEFAULT_MCD_NO_OF_COEFFS_PER_FRAME,
DEFAULT_REPETITIONS,
DEFAULT_SAVE_MEL_INFO_COPY_PATH,
DEFAULT_SEED)
BASE_DIR_VAR = "base_dir"
def init_plot_emb_parser(parser) -> None:
parser.add_argument('--train_name', type=str, required=True)
parser.add_argument('--custom_checkpoint', type=int)
return plot_embeddings
# def init_eval_checkpoints_parser(parser):
# parser.add_argument('--train_name', type=str, required=True)
# parser.add_argument('--custom_hparams', type=str)
# parser.add_argument('--select', type=int)
# parser.add_argument('--min_it', type=int)
# parser.add_argument('--max_it', type=int)
# return eval_checkpoints_main_cli
# def eval_checkpoints_main_cli(**args):
#     args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
#     eval_checkpoints(**args)
# def init_restore_parser(parser: ArgumentParser) -> None:
# parser.add_argument('--train_name', type=str, required=True)
# parser.add_argument('--checkpoint_dir', type=Path, required=True)
# return restore_model
def init_train_parser(parser: ArgumentParser) -> None:
parser.add_argument('--ttsp_dir', type=Path, required=True)
parser.add_argument('--train_name', type=str, required=True)
parser.add_argument('--merge_name', type=str, required=True)
parser.add_argument('--prep_name', type=str, required=True)
parser.add_argument('--warm_start_train_name', type=str)
parser.add_argument('--warm_start_checkpoint', type=int)
parser.add_argument('--custom_hparams', type=str)
parser.add_argument('--weights_train_name', type=str)
parser.add_argument('--weights_checkpoint', type=int)
parser.add_argument('--map_from_speaker', type=str)
parser.add_argument('--map_symbol_weights', action='store_true')
parser.add_argument('--use_weights_map', action='store_true')
return train_cli
def train_cli(**args) -> None:
args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
train(**args)
def init_continue_train_parser(parser: ArgumentParser) -> None:
parser.add_argument('--train_name', type=str, required=True)
parser.add_argument('--custom_hparams', type=str)
return continue_train_cli
def continue_train_cli(**args) -> None:
args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
continue_train(**args)
def init_validate_parser(parser: ArgumentParser) -> None:
parser.add_argument('--train_name', type=str, required=True)
parser.add_argument('--entry_ids', type=str, help="Utterance ids or nothing if random")
parser.add_argument('--speaker', type=str, help="ds_name,speaker_name")
parser.add_argument('--ds', type=str, help="Choose if validation- or testset should be taken.",
choices=["val", "test"], default="val")
parser.add_argument('--custom_checkpoints', type=str)
parser.add_argument('--full_run', action='store_true')
parser.add_argument('--max_decoder_steps', type=int, default=DEFAULT_MAX_DECODER_STEPS)
parser.add_argument('--copy_mel_info_to', type=str, default=DEFAULT_SAVE_MEL_INFO_COPY_PATH)
parser.add_argument('--custom_hparams', type=str)
parser.add_argument('--select_best_from', type=str)
parser.add_argument('--mcd_no_of_coeffs_per_frame', type=int,
default=DEFAULT_MCD_NO_OF_COEFFS_PER_FRAME)
parser.add_argument('--fast', action='store_true')
parser.add_argument('--repetitions', type=int, default=DEFAULT_REPETITIONS)
parser.add_argument('--seed', type=int, default=DEFAULT_SEED)
return validate_cli
def validate_cli(**args) -> None:
args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
args["entry_ids"] = split_int_set_str(args["entry_ids"])
args["custom_checkpoints"] = split_int_set_str(args["custom_checkpoints"])
validate(**args)
def init_inference_parser(parser: ArgumentParser) -> None:
parser.add_argument('--train_name', type=str, required=True)
parser.add_argument('--text_name', type=str, required=True)
parser.add_argument('--speaker', type=str, required=True, help="ds_name,speaker_name")
parser.add_argument('--utterance_ids', type=str)
parser.add_argument('--custom_checkpoint', type=int)
parser.add_argument('--custom_hparams', type=str)
parser.add_argument('--full_run', action='store_true')
parser.add_argument('--max_decoder_steps', type=int, default=DEFAULT_MAX_DECODER_STEPS)
parser.add_argument('--seed', type=int, default=DEFAULT_SEED)
parser.add_argument('--copy_mel_info_to', type=str, default=DEFAULT_SAVE_MEL_INFO_COPY_PATH)
return infer_cli
def infer_cli(**args) -> None:
args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
args["utterance_ids"] = split_int_set_str(args["utterance_ids"])
infer(**args)
def add_base_dir(parser: ArgumentParser) -> None:
assert BASE_DIR_VAR in os.environ.keys()
base_dir = Path(os.environ[BASE_DIR_VAR])
parser.set_defaults(base_dir=base_dir)
def _add_parser_to(subparsers, name: str, init_method) -> None:
parser = subparsers.add_parser(name, help=f"{name} help")
invoke_method = init_method(parser)
parser.set_defaults(invoke_handler=invoke_method)
add_base_dir(parser)
return parser
def _init_parser():
result = ArgumentParser()
subparsers = result.add_subparsers(help='sub-command help')
_add_parser_to(subparsers, "train", init_train_parser)
_add_parser_to(subparsers, "continue-train", init_continue_train_parser)
_add_parser_to(subparsers, "validate", init_validate_parser)
_add_parser_to(subparsers, "infer", init_inference_parser)
# _add_parser_to(subparsers, "eval-checkpoints", init_taco_eval_checkpoints_parser)
_add_parser_to(subparsers, "plot-embeddings", init_plot_emb_parser)
#_add_parser_to(subparsers, "restore", init_restore_parser)
return result
def _process_args(args) -> None:
params = vars(args)
invoke_handler = params.pop("invoke_handler")
invoke_handler(**params)
if __name__ == "__main__":
main_parser = _init_parser()
received_args = main_parser.parse_args()
_process_args(received_args)

avg_line_length: 39.03012 | max_line_length: 97 | alphanum_fraction: 0.748572
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 2,107 (score 0.325205)
hexsha: 186d347af5ccfb1407fd9334ac01a2985ccc1dd2 | size: 969 | ext: py | lang: Python
max_stars: apps/hello/uploadHandler.py @ tenqaz/tornado_learning (3ff18039b69c49927452d778098e1a1b7fe7b5da) | licenses: ["MIT"] | count: 11 | 2019-10-08T07:31:06.000Z to 2021-09-27T01:08:40.000Z
max_issues: apps/hello/uploadHandler.py @ tenqaz/tornado_learning (3ff18039b69c49927452d778098e1a1b7fe7b5da) | licenses: ["MIT"] | count: null | dates: null
max_forks: apps/hello/uploadHandler.py @ tenqaz/tornado_learning (3ff18039b69c49927452d778098e1a1b7fe7b5da) | licenses: ["MIT"] | count: 3 | 2020-04-17T06:29:42.000Z to 2021-09-27T01:08:41.000Z

# -*- coding: utf-8 -*-
"""
@author: Jim
@project: tornado_learning
@time: 2019/8/20 14:48
@desc:
    Upload files
"""
from __future__ import annotations
from tornado_learning.handler import BaseHandler
import os
import uuid
import aiofiles
class UploadHandler(BaseHandler):
async def post(self):
ret_data = {}
files_meta = self.request.files.get("front_image", None)
if not files_meta:
self.set_status(400)
            ret_data["front_image"] = "请上传图片"  # "Please upload an image"
else:
for meta in files_meta:
filename = meta["filename"]
new_filename = "{uuid}_{filename}".format(uuid=uuid.uuid1(), filename=filename)
file_path = os.path.join(self.settings["MEDIA_ROOT"], new_filename)
async with aiofiles.open(file_path, "wb") as f:
await f.write(meta["body"])
ret_data['file_path'] = file_path
return self.finish(ret_data)

avg_line_length: 24.225 | max_line_length: 95 | alphanum_fraction: 0.603715
count_classes: 740 (score 0.749747) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 701 (score 0.710233) | count_documentation: 222 (score 0.224924)
hexsha: 18719fea4e335f1ca1128345b7f27750044e6081 | size: 2,906 | ext: py | lang: Python
max_stars: mathgrid_app/main.py @ logiflo/mathgrid (9cfff50b66a45a6598651afd2c785560eed78f27) | licenses: ["BSD-2-Clause"] | count: null | dates: null
max_issues: mathgrid_app/main.py @ logiflo/mathgrid (9cfff50b66a45a6598651afd2c785560eed78f27) | licenses: ["BSD-2-Clause"] | count: null | dates: null
max_forks: mathgrid_app/main.py @ logiflo/mathgrid (9cfff50b66a45a6598651afd2c785560eed78f27) | licenses: ["BSD-2-Clause"] | count: null | dates: null

"""Main module
"""
# Standard library imports
import string
# Third party imports
import numpy as np
import justpy as jp
import pandas as pd
START_INDEX: int = 1
END_INDEX: int = 20
GRID_OPTIONS = """
{
class: 'ag-theme-alpine',
defaultColDef: {
filter: true,
sortable: false,
resizable: true,
headerClass: 'font-bold',
editable: true
},
rowSelection: 'single',
}
"""
def on_input_key(self, msg):
"""On input key event.
Update the clicked cell with the new value from the input field.
Args:
msg (object): Event data object.
"""
if self.last_cell is not None:
self.grid.options['rowData'][self.last_cell['row']
][self.last_cell['col']] = msg.value
def on_cell_clicked(self, msg):
"""On cell clicked event.
Update the cell label value with the coordinates of the cell and set
the value of the cell in the input field.
Args:
msg (object): Event data object.
"""
self.cell_label.value = msg.colId + str(msg.rowIndex)
self.input_field.value = msg.data[msg.colId]
self.input_field.last_cell = {"row": msg.rowIndex, "col": msg.colId}
self.last_row = msg.row
def on_cell_value_changed(self, msg):
"""On input key event.
Update the input field value to match the cell value.
Args:
msg (object): Event data object.
"""
self.input_field.value = msg.data[msg.colId]
def grid_test():
"""Grid test app.
"""
headings = list(string.ascii_uppercase)
index = np.arange(START_INDEX, END_INDEX)
data_frame = pd.DataFrame(index=index, columns=headings)
data_frame = data_frame.fillna('')
# data = np.array([np.arange(10)]*3).T
# css_values = """
# .ag-theme-alpine .ag-ltr .ag-cell {
# border-right: 1px solid #aaa;
# }
# .ag-theme-balham .ag-ltr .ag-cell {
# border-right: 1px solid #aaa;
# }
# """
web_page = jp.WebPage()
root_div = jp.Div(classes='q-pa-md', a=web_page)
in_root_div = jp.Div(classes='q-gutter-md', a=root_div)
cell_label = jp.Input(
a=in_root_div, style='width: 32px; margin-left: 16px', disabled=True)
input_field = jp.Input(classes=jp.Styles.input_classes,
a=in_root_div, width='32px')
input_field.on("input", on_input_key)
input_field.last_cell = None
grid = jp.AgGrid(a=web_page, options=GRID_OPTIONS)
grid.load_pandas_frame(data_frame)
grid.options.pagination = True
grid.options.paginationAutoPageSize = True
grid.cell_label = cell_label
grid.input_field = input_field
grid.on('cellClicked', on_cell_clicked)
grid.on('cellValueChanged', on_cell_value_changed)
input_field.grid = grid
return web_page
def main():
"""Main app.
"""
jp.justpy(grid_test)
if __name__ == "__main__":
main()

avg_line_length: 23.819672 | max_line_length: 77 | alphanum_fraction: 0.631108
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 1,184 (score 0.407433)
hexsha: 1874ca96a1f31b40d52d15b318f020ba7a9562e6 | size: 811 | ext: py | lang: Python
max_stars: tests/test_linked_queue.py @ dataloudlabs/dloud-ads (d0ad3f169c2384292db4097e00ba7858f37a8198) | licenses: ["MIT"] | count: null | dates: null
max_issues: tests/test_linked_queue.py @ dataloudlabs/dloud-ads (d0ad3f169c2384292db4097e00ba7858f37a8198) | licenses: ["MIT"] | count: null | dates: null
max_forks: tests/test_linked_queue.py @ dataloudlabs/dloud-ads (d0ad3f169c2384292db4097e00ba7858f37a8198) | licenses: ["MIT"] | count: null | dates: null

""" Unit tests for linked_queue.LinkedQueue """
from dloud_ads import linked_queue
def test_dummy():
""" Test definition"""
the_queue = linked_queue.LinkedQueue()
assert the_queue.is_empty()
assert not the_queue
the_queue.enqueue(2)
assert not the_queue.is_empty()
assert len(the_queue) == 1
assert the_queue.dequeue() == 2
_ = [the_queue.enqueue(x) for x in range(4)]
assert len(the_queue) == 4
assert [the_queue.dequeue() for x in range(4)] == [0, 1, 2, 3]
assert not the_queue
_ = [the_queue.enqueue(x) for x in range(9)]
assert len(the_queue) == 9
_ = [the_queue.enqueue(x) for x in range(2)]
assert len(the_queue) == 11
expected = [0, 1, 2, 3, 4, 5, 6, 7, 8, 0, 1]
assert [the_queue.dequeue() for x in range(11)] == expected

avg_line_length: 26.16129 | max_line_length: 66 | alphanum_fraction: 0.637485
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 69 (score 0.08508)
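The tests above pin down the LinkedQueue contract: is_empty(), len(), truthiness, enqueue(), and FIFO dequeue(). A minimal singly linked implementation that satisfies them (the real dloud_ads version may differ in detail):

```python
class LinkedQueue:
    """FIFO queue backed by a singly linked list."""

    class _Node:
        __slots__ = ("value", "next")

        def __init__(self, value):
            self.value = value
            self.next = None

    def __init__(self):
        self._head = None  # dequeue end
        self._tail = None  # enqueue end
        self._size = 0

    def __len__(self):
        return self._size  # also makes an empty queue falsy

    def is_empty(self):
        return self._size == 0

    def enqueue(self, value):
        node = self._Node(value)
        if self._tail is None:
            self._head = node
        else:
            self._tail.next = node
        self._tail = node
        self._size += 1

    def dequeue(self):
        if self._head is None:
            raise IndexError("dequeue from empty queue")
        node = self._head
        self._head = node.next
        if self._head is None:
            self._tail = None
        self._size -= 1
        return node.value
```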
hexsha: 1875fb8f105e2c1eaf8a87c9adee8cca7ddd3e65 | size: 1,831 | ext: py | lang: Python
max_stars: setup.py @ AnacletoLAB/grape (5ed0a84b7cedf588715919782f37c9492263bd12) | licenses: ["MIT"] | count: 6 | 2021-09-22T17:40:01.000Z to 2022-03-24T04:28:00.000Z
max_issues: setup.py @ AnacletoLAB/grape (5ed0a84b7cedf588715919782f37c9492263bd12) | licenses: ["MIT"] | count: 5 | 2021-10-14T10:48:27.000Z to 2022-03-23T11:03:05.000Z
max_forks: setup.py @ AnacletoLAB/grape (5ed0a84b7cedf588715919782f37c9492263bd12) | licenses: ["MIT"] | count: 2 | 2021-09-13T16:24:08.000Z to 2021-09-24T16:23:35.000Z

import os
import re
# To use a consistent encoding
from codecs import open as copen
from os import path
from setuptools import find_packages, setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with copen(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
def read(*parts):
with copen(os.path.join(here, *parts), 'r') as fp:
return fp.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
__version__ = find_version("grape", "__version__.py")
test_deps = []
# TODO: Authors add your emails!!!
authors = {
"Luca Cappelletti": "luca.cappelletti1@unimi.it",
"Tommaso Fontana": "tommaso.fontana@mail.polimi.it",
"Vida Ravanmehr": "vida.ravanmehr@jax.org",
"Peter Robinson": "peter.robinson@jax.org",
}
setup(
name='grape',
version=__version__,
description="Rust/Python for high performance Graph Processing and Embedding.",
long_description=long_description,
url="https://github.com/AnacletoLAB/grape",
author=", ".join(list(authors.keys())),
author_email=", ".join(list(authors.values())),
# Choose your license
license='MIT',
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3'
],
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
tests_require=test_deps,
install_requires=[
"ensmallen==0.7.0.dev6",
"embiggen==0.10.0.dev2",
]
)

avg_line_length: 27.328358 | max_line_length: 83 | alphanum_fraction: 0.653195
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 726 (score 0.396505)
hexsha: 187614996f13120eae23f5d092c2a9efde0e80bf | size: 76,079 | ext: py | lang: Python
max_stars: pyLMS7002Soapy/LMS7002_BIAS.py @ Surfndez/pyLMS7002Soapy (ea230dcb12048007300477e1e2e4decc5414f954) | licenses: ["Apache-2.0"] | count: 46 | 2016-11-29T05:10:36.000Z to 2021-10-31T19:27:46.000Z
max_issues: pyLMS7002M/LMS7002_BIAS.py @ myriadrf/pyLMS7002M (b866deea1f05dba44c9ed1a1a4666352b811b66b) | licenses: ["Apache-2.0"] | count: 2 | 2017-04-15T21:36:01.000Z to 2017-06-08T09:44:26.000Z
max_forks: pyLMS7002Soapy/LMS7002_BIAS.py @ Surfndez/pyLMS7002Soapy (ea230dcb12048007300477e1e2e4decc5414f954) | licenses: ["Apache-2.0"] | count: 16 | 2016-11-28T20:47:55.000Z to 2021-04-07T01:48:20.000Z

#***************************************************************
#* Name: LMS7002_BIAS.py
#* Purpose: Class implementing LMS7002 BIAS functions
#* Author: Lime Microsystems ()
#* Created: 2016-11-14
#* Copyright: Lime Microsystems (limemicro.com)
#* License:
#**************************************************************
from LMS7002_base import *
class LMS7002_BIAS(LMS7002_base):
__slots__ = ['allowLDO'] # Used to generate error on typos
def __init__(self, chip):
self.chip = chip
self.channel = None
self.prefix = "BIAS_"
self.allowLDO = False # Do not allow changing of LDO voltage
#
# BIAS_CFG (0x0084)
#
# MUX_BIAS_OUT<1:0>
@property
def MUX_BIAS_OUT(self):
"""
Get the value of MUX_BIAS_OUT<1:0>
"""
return self._readReg('CFG', 'MUX_BIAS_OUT<1:0>')
@MUX_BIAS_OUT.setter
def MUX_BIAS_OUT(self, value):
"""
Set the value of MUX_BIAS_OUT<1:0>
"""
if not(0 <= value <= 3):
raise ValueError("Value must be [0..3]")
self._writeReg('CFG', 'MUX_BIAS_OUT<1:0>', value)
# RP_CALIB_BIAS<4:0>
@property
def RP_CALIB_BIAS(self):
"""
Get the value of RP_CALIB_BIAS<4:0>
"""
return self._readReg('CFG', 'RP_CALIB_BIAS<4:0>')
@RP_CALIB_BIAS.setter
def RP_CALIB_BIAS(self, value):
"""
Set the value of RP_CALIB_BIAS<4:0>
"""
if not(0 <= value <= 31):
raise ValueError("Value must be [0..31]")
self._writeReg('CFG', 'RP_CALIB_BIAS<4:0>', value)
# PD_FRP_BIAS
@property
def PD_FRP_BIAS(self):
"""
Get the value of PD_FRP_BIAS
"""
return self._readReg('CFG', 'PD_FRP_BIAS')
@PD_FRP_BIAS.setter
def PD_FRP_BIAS(self, value):
"""
Set the value of PD_FRP_BIAS
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_FRP_BIAS', value)
# PD_F_BIAS
@property
def PD_F_BIAS(self):
"""
Get the value of PD_F_BIAS
"""
return self._readReg('CFG', 'PD_F_BIAS')
@PD_F_BIAS.setter
def PD_F_BIAS(self, value):
"""
Set the value of PD_F_BIAS
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_F_BIAS', value)
# PD_PTRP_BIAS
@property
def PD_PTRP_BIAS(self):
"""
Get the value of PD_PTRP_BIAS
"""
return self._readReg('CFG', 'PD_PTRP_BIAS')
@PD_PTRP_BIAS.setter
def PD_PTRP_BIAS(self, value):
"""
Set the value of PD_PTRP_BIAS
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_PTRP_BIAS', value)
# PD_PT_BIAS
@property
def PD_PT_BIAS(self):
"""
Get the value of PD_PT_BIAS
"""
return self._readReg('CFG', 'PD_PT_BIAS')
@PD_PT_BIAS.setter
def PD_PT_BIAS(self, value):
"""
Set the value of PD_PT_BIAS
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_PT_BIAS', value)
# PD_BIAS_MASTER
@property
def PD_BIAS_MASTER(self):
"""
Get the value of PD_BIAS_MASTER
"""
return self._readReg('CFG', 'PD_BIAS_MASTER')
@PD_BIAS_MASTER.setter
def PD_BIAS_MASTER(self, value):
"""
Set the value of PD_BIAS_MASTER
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_BIAS_MASTER', value)
#
# BIAS_CFG0 (0x0092)
#
# EN_LDO_DIG
@property
def EN_LDO_DIG(self):
"""
Get the value of EN_LDO_DIG
"""
return self._readReg('CFG0', 'EN_LDO_DIG')
@EN_LDO_DIG.setter
def EN_LDO_DIG(self, value):
"""
Set the value of EN_LDO_DIG
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_DIG', value)
# EN_LDO_DIGGN
@property
def EN_LDO_DIGGN(self):
"""
Get the value of EN_LDO_DIGGN
"""
return self._readReg('CFG0', 'EN_LDO_DIGGN')
@EN_LDO_DIGGN.setter
def EN_LDO_DIGGN(self, value):
"""
Set the value of EN_LDO_DIGGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_DIGGN', value)
# EN_LDO_DIGSXR
@property
def EN_LDO_DIGSXR(self):
"""
Get the value of EN_LDO_DIGSXR
"""
return self._readReg('CFG0', 'EN_LDO_DIGSXR')
@EN_LDO_DIGSXR.setter
def EN_LDO_DIGSXR(self, value):
"""
Set the value of EN_LDO_DIGSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_DIGSXR', value)
# EN_LDO_DIGSXT
@property
def EN_LDO_DIGSXT(self):
"""
Get the value of EN_LDO_DIGSXT
"""
return self._readReg('CFG0', 'EN_LDO_DIGSXT')
@EN_LDO_DIGSXT.setter
def EN_LDO_DIGSXT(self, value):
"""
Set the value of EN_LDO_DIGSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_DIGSXT', value)
# EN_LDO_DIVGN
@property
def EN_LDO_DIVGN(self):
"""
Get the value of EN_LDO_DIVGN
"""
return self._readReg('CFG0', 'EN_LDO_DIVGN')
@EN_LDO_DIVGN.setter
def EN_LDO_DIVGN(self, value):
"""
Set the value of EN_LDO_DIVGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_DIVGN', value)
# EN_LDO_DIVSXR
@property
def EN_LDO_DIVSXR(self):
"""
Get the value of EN_LDO_DIVSXR
"""
return self._readReg('CFG0', 'EN_LDO_DIVSXR')
@EN_LDO_DIVSXR.setter
def EN_LDO_DIVSXR(self, value):
"""
Set the value of EN_LDO_DIVSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_DIVSXR', value)
# EN_LDO_DIVSXT
@property
def EN_LDO_DIVSXT(self):
"""
Get the value of EN_LDO_DIVSXT
"""
return self._readReg('CFG0', 'EN_LDO_DIVSXT')
@EN_LDO_DIVSXT.setter
def EN_LDO_DIVSXT(self, value):
"""
Set the value of EN_LDO_DIVSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_DIVSXT', value)
# EN_LDO_LNA12
@property
def EN_LDO_LNA12(self):
"""
Get the value of EN_LDO_LNA12
"""
return self._readReg('CFG0', 'EN_LDO_LNA12')
@EN_LDO_LNA12.setter
def EN_LDO_LNA12(self, value):
"""
Set the value of EN_LDO_LNA12
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_LNA12', value)
# EN_LDO_LNA14
@property
def EN_LDO_LNA14(self):
"""
Get the value of EN_LDO_LNA14
"""
return self._readReg('CFG0', 'EN_LDO_LNA14')
@EN_LDO_LNA14.setter
def EN_LDO_LNA14(self, value):
"""
Set the value of EN_LDO_LNA14
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_LNA14', value)
# EN_LDO_MXRFE
@property
def EN_LDO_MXRFE(self):
"""
Get the value of EN_LDO_MXRFE
"""
return self._readReg('CFG0', 'EN_LDO_MXRFE')
@EN_LDO_MXRFE.setter
def EN_LDO_MXRFE(self, value):
"""
Set the value of EN_LDO_MXRFE
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_MXRFE', value)
# EN_LDO_RBB
@property
def EN_LDO_RBB(self):
"""
Get the value of EN_LDO_RBB
"""
return self._readReg('CFG0', 'EN_LDO_RBB')
@EN_LDO_RBB.setter
def EN_LDO_RBB(self, value):
"""
Set the value of EN_LDO_RBB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_RBB', value)
# EN_LDO_RXBUF
@property
def EN_LDO_RXBUF(self):
"""
Get the value of EN_LDO_RXBUF
"""
return self._readReg('CFG0', 'EN_LDO_RXBUF')
@EN_LDO_RXBUF.setter
def EN_LDO_RXBUF(self, value):
"""
Set the value of EN_LDO_RXBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_RXBUF', value)
# EN_LDO_TBB
@property
def EN_LDO_TBB(self):
"""
Get the value of EN_LDO_TBB
"""
return self._readReg('CFG0', 'EN_LDO_TBB')
@EN_LDO_TBB.setter
def EN_LDO_TBB(self, value):
"""
Set the value of EN_LDO_TBB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_TBB', value)
# EN_LDO_TIA12
@property
def EN_LDO_TIA12(self):
"""
Get the value of EN_LDO_TIA12
"""
return self._readReg('CFG0', 'EN_LDO_TIA12')
@EN_LDO_TIA12.setter
def EN_LDO_TIA12(self, value):
"""
Set the value of EN_LDO_TIA12
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_TIA12', value)
# EN_LDO_TIA14
@property
def EN_LDO_TIA14(self):
"""
Get the value of EN_LDO_TIA14
"""
return self._readReg('CFG0', 'EN_LDO_TIA14')
@EN_LDO_TIA14.setter
def EN_LDO_TIA14(self, value):
"""
Set the value of EN_LDO_TIA14
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_LDO_TIA14', value)
# EN_G_LDO
@property
def EN_G_LDO(self):
"""
Get the value of EN_G_LDO
"""
return self._readReg('CFG0', 'EN_G_LDO')
@EN_G_LDO.setter
def EN_G_LDO(self, value):
"""
Set the value of EN_G_LDO
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG0', 'EN_G_LDO', value)
#
# BIAS_CFG1 (0x0093)
#
# EN_LOADIMP_LDO_TLOB
@property
def EN_LOADIMP_LDO_TLOB(self):
"""
Get the value of EN_LOADIMP_LDO_TLOB
"""
return self._readReg('CFG1', 'EN_LOADIMP_LDO_TLOB')
@EN_LOADIMP_LDO_TLOB.setter
def EN_LOADIMP_LDO_TLOB(self, value):
"""
Set the value of EN_LOADIMP_LDO_TLOB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LOADIMP_LDO_TLOB', value)
# EN_LOADIMP_LDO_TPAD
@property
def EN_LOADIMP_LDO_TPAD(self):
"""
Get the value of EN_LOADIMP_LDO_TPAD
"""
return self._readReg('CFG1', 'EN_LOADIMP_LDO_TPAD')
@EN_LOADIMP_LDO_TPAD.setter
def EN_LOADIMP_LDO_TPAD(self, value):
"""
Set the value of EN_LOADIMP_LDO_TPAD
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LOADIMP_LDO_TPAD', value)
# EN_LOADIMP_LDO_TXBUF
@property
def EN_LOADIMP_LDO_TXBUF(self):
"""
Get the value of EN_LOADIMP_LDO_TXBUF
"""
return self._readReg('CFG1', 'EN_LOADIMP_LDO_TXBUF')
@EN_LOADIMP_LDO_TXBUF.setter
def EN_LOADIMP_LDO_TXBUF(self, value):
"""
Set the value of EN_LOADIMP_LDO_TXBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LOADIMP_LDO_TXBUF', value)
# EN_LOADIMP_LDO_VCOGN
@property
def EN_LOADIMP_LDO_VCOGN(self):
"""
Get the value of EN_LOADIMP_LDO_VCOGN
"""
return self._readReg('CFG1', 'EN_LOADIMP_LDO_VCOGN')
@EN_LOADIMP_LDO_VCOGN.setter
def EN_LOADIMP_LDO_VCOGN(self, value):
"""
Set the value of EN_LOADIMP_LDO_VCOGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LOADIMP_LDO_VCOGN', value)
# EN_LOADIMP_LDO_VCOSXR
@property
def EN_LOADIMP_LDO_VCOSXR(self):
"""
Get the value of EN_LOADIMP_LDO_VCOSXR
"""
return self._readReg('CFG1', 'EN_LOADIMP_LDO_VCOSXR')
@EN_LOADIMP_LDO_VCOSXR.setter
def EN_LOADIMP_LDO_VCOSXR(self, value):
"""
Set the value of EN_LOADIMP_LDO_VCOSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LOADIMP_LDO_VCOSXR', value)
# EN_LOADIMP_LDO_VCOSXT
@property
def EN_LOADIMP_LDO_VCOSXT(self):
"""
Get the value of EN_LOADIMP_LDO_VCOSXT
"""
return self._readReg('CFG1', 'EN_LOADIMP_LDO_VCOSXT')
@EN_LOADIMP_LDO_VCOSXT.setter
def EN_LOADIMP_LDO_VCOSXT(self, value):
"""
Set the value of EN_LOADIMP_LDO_VCOSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LOADIMP_LDO_VCOSXT', value)
# EN_LDO_AFE
@property
def EN_LDO_AFE(self):
"""
Get the value of EN_LDO_AFE
"""
return self._readReg('CFG1', 'EN_LDO_AFE')
@EN_LDO_AFE.setter
def EN_LDO_AFE(self, value):
"""
Set the value of EN_LDO_AFE
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_AFE', value)
# EN_LDO_CPGN
@property
def EN_LDO_CPGN(self):
"""
Get the value of EN_LDO_CPGN
"""
return self._readReg('CFG1', 'EN_LDO_CPGN')
@EN_LDO_CPGN.setter
def EN_LDO_CPGN(self, value):
"""
Set the value of EN_LDO_CPGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_CPGN', value)
# EN_LDO_CPSXR
@property
def EN_LDO_CPSXR(self):
"""
Get the value of EN_LDO_CPSXR
"""
return self._readReg('CFG1', 'EN_LDO_CPSXR')
@EN_LDO_CPSXR.setter
def EN_LDO_CPSXR(self, value):
"""
Set the value of EN_LDO_CPSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_CPSXR', value)
# EN_LDO_TLOB
@property
def EN_LDO_TLOB(self):
"""
Get the value of EN_LDO_TLOB
"""
return self._readReg('CFG1', 'EN_LDO_TLOB')
@EN_LDO_TLOB.setter
def EN_LDO_TLOB(self, value):
"""
Set the value of EN_LDO_TLOB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_TLOB', value)
# EN_LDO_TPAD
@property
def EN_LDO_TPAD(self):
"""
Get the value of EN_LDO_TPAD
"""
return self._readReg('CFG1', 'EN_LDO_TPAD')
@EN_LDO_TPAD.setter
def EN_LDO_TPAD(self, value):
"""
Set the value of EN_LDO_TPAD
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_TPAD', value)
# EN_LDO_TXBUF
@property
def EN_LDO_TXBUF(self):
"""
Get the value of EN_LDO_TXBUF
"""
return self._readReg('CFG1', 'EN_LDO_TXBUF')
@EN_LDO_TXBUF.setter
def EN_LDO_TXBUF(self, value):
"""
Set the value of EN_LDO_TXBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_TXBUF', value)
# EN_LDO_VCOGN
@property
def EN_LDO_VCOGN(self):
"""
Get the value of EN_LDO_VCOGN
"""
return self._readReg('CFG1', 'EN_LDO_VCOGN')
@EN_LDO_VCOGN.setter
def EN_LDO_VCOGN(self, value):
"""
Set the value of EN_LDO_VCOGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_VCOGN', value)
# EN_LDO_VCOSXR
@property
def EN_LDO_VCOSXR(self):
"""
Get the value of EN_LDO_VCOSXR
"""
return self._readReg('CFG1', 'EN_LDO_VCOSXR')
@EN_LDO_VCOSXR.setter
def EN_LDO_VCOSXR(self, value):
"""
Set the value of EN_LDO_VCOSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_VCOSXR', value)
# EN_LDO_VCOSXT
@property
def EN_LDO_VCOSXT(self):
"""
Get the value of EN_LDO_VCOSXT
"""
return self._readReg('CFG1', 'EN_LDO_VCOSXT')
@EN_LDO_VCOSXT.setter
def EN_LDO_VCOSXT(self, value):
"""
Set the value of EN_LDO_VCOSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_VCOSXT', value)
# EN_LDO_CPSXT
@property
def EN_LDO_CPSXT(self):
"""
Get the value of EN_LDO_CPSXT
"""
return self._readReg('CFG1', 'EN_LDO_CPSXT')
@EN_LDO_CPSXT.setter
def EN_LDO_CPSXT(self, value):
"""
Set the value of EN_LDO_CPSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG1', 'EN_LDO_CPSXT', value)
#
# BIAS_CFG2 (0x0094)
#
# EN_LOADIMP_LDO_CPSXT
@property
def EN_LOADIMP_LDO_CPSXT(self):
"""
Get the value of EN_LOADIMP_LDO_CPSXT
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_CPSXT')
@EN_LOADIMP_LDO_CPSXT.setter
def EN_LOADIMP_LDO_CPSXT(self, value):
"""
Set the value of EN_LOADIMP_LDO_CPSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_CPSXT', value)
# EN_LOADIMP_LDO_DIG
@property
def EN_LOADIMP_LDO_DIG(self):
"""
Get the value of EN_LOADIMP_LDO_DIG
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_DIG')
@EN_LOADIMP_LDO_DIG.setter
def EN_LOADIMP_LDO_DIG(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIG
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_DIG', value)
# EN_LOADIMP_LDO_DIGGN
@property
def EN_LOADIMP_LDO_DIGGN(self):
"""
Get the value of EN_LOADIMP_LDO_DIGGN
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_DIGGN')
@EN_LOADIMP_LDO_DIGGN.setter
def EN_LOADIMP_LDO_DIGGN(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIGGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_DIGGN', value)
# EN_LOADIMP_LDO_DIGSXR
@property
def EN_LOADIMP_LDO_DIGSXR(self):
"""
Get the value of EN_LOADIMP_LDO_DIGSXR
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_DIGSXR')
@EN_LOADIMP_LDO_DIGSXR.setter
def EN_LOADIMP_LDO_DIGSXR(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIGSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_DIGSXR', value)
# EN_LOADIMP_LDO_DIGSXT
@property
def EN_LOADIMP_LDO_DIGSXT(self):
"""
Get the value of EN_LOADIMP_LDO_DIGSXT
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_DIGSXT')
@EN_LOADIMP_LDO_DIGSXT.setter
def EN_LOADIMP_LDO_DIGSXT(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIGSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_DIGSXT', value)
# EN_LOADIMP_LDO_DIVGN
@property
def EN_LOADIMP_LDO_DIVGN(self):
"""
Get the value of EN_LOADIMP_LDO_DIVGN
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_DIVGN')
@EN_LOADIMP_LDO_DIVGN.setter
def EN_LOADIMP_LDO_DIVGN(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIVGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_DIVGN', value)
# EN_LOADIMP_LDO_DIVSXR
@property
def EN_LOADIMP_LDO_DIVSXR(self):
"""
Get the value of EN_LOADIMP_LDO_DIVSXR
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_DIVSXR')
@EN_LOADIMP_LDO_DIVSXR.setter
def EN_LOADIMP_LDO_DIVSXR(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIVSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_DIVSXR', value)
# EN_LOADIMP_LDO_DIVSXT
@property
def EN_LOADIMP_LDO_DIVSXT(self):
"""
Get the value of EN_LOADIMP_LDO_DIVSXT
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_DIVSXT')
@EN_LOADIMP_LDO_DIVSXT.setter
def EN_LOADIMP_LDO_DIVSXT(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIVSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_DIVSXT', value)
# EN_LOADIMP_LDO_LNA12
@property
def EN_LOADIMP_LDO_LNA12(self):
"""
Get the value of EN_LOADIMP_LDO_LNA12
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_LNA12')
@EN_LOADIMP_LDO_LNA12.setter
def EN_LOADIMP_LDO_LNA12(self, value):
"""
Set the value of EN_LOADIMP_LDO_LNA12
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_LNA12', value)
# EN_LOADIMP_LDO_LNA14
@property
def EN_LOADIMP_LDO_LNA14(self):
"""
Get the value of EN_LOADIMP_LDO_LNA14
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_LNA14')
@EN_LOADIMP_LDO_LNA14.setter
def EN_LOADIMP_LDO_LNA14(self, value):
"""
Set the value of EN_LOADIMP_LDO_LNA14
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_LNA14', value)
# EN_LOADIMP_LDO_MXRFE
@property
def EN_LOADIMP_LDO_MXRFE(self):
"""
Get the value of EN_LOADIMP_LDO_MXRFE
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_MXRFE')
@EN_LOADIMP_LDO_MXRFE.setter
def EN_LOADIMP_LDO_MXRFE(self, value):
"""
Set the value of EN_LOADIMP_LDO_MXRFE
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_MXRFE', value)
# EN_LOADIMP_LDO_RBB
@property
def EN_LOADIMP_LDO_RBB(self):
"""
Get the value of EN_LOADIMP_LDO_RBB
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_RBB')
@EN_LOADIMP_LDO_RBB.setter
def EN_LOADIMP_LDO_RBB(self, value):
"""
Set the value of EN_LOADIMP_LDO_RBB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_RBB', value)
# EN_LOADIMP_LDO_RXBUF
@property
def EN_LOADIMP_LDO_RXBUF(self):
"""
Get the value of EN_LOADIMP_LDO_RXBUF
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_RXBUF')
@EN_LOADIMP_LDO_RXBUF.setter
def EN_LOADIMP_LDO_RXBUF(self, value):
"""
Set the value of EN_LOADIMP_LDO_RXBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_RXBUF', value)
# EN_LOADIMP_LDO_TBB
@property
def EN_LOADIMP_LDO_TBB(self):
"""
Get the value of EN_LOADIMP_LDO_TBB
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_TBB')
@EN_LOADIMP_LDO_TBB.setter
def EN_LOADIMP_LDO_TBB(self, value):
"""
Set the value of EN_LOADIMP_LDO_TBB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_TBB', value)
# EN_LOADIMP_LDO_TIA12
@property
def EN_LOADIMP_LDO_TIA12(self):
"""
Get the value of EN_LOADIMP_LDO_TIA12
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_TIA12')
@EN_LOADIMP_LDO_TIA12.setter
def EN_LOADIMP_LDO_TIA12(self, value):
"""
Set the value of EN_LOADIMP_LDO_TIA12
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_TIA12', value)
# EN_LOADIMP_LDO_TIA14
@property
def EN_LOADIMP_LDO_TIA14(self):
"""
Get the value of EN_LOADIMP_LDO_TIA14
"""
return self._readReg('CFG2', 'EN_LOADIMP_LDO_TIA14')
@EN_LOADIMP_LDO_TIA14.setter
def EN_LOADIMP_LDO_TIA14(self, value):
"""
Set the value of EN_LOADIMP_LDO_TIA14
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG2', 'EN_LOADIMP_LDO_TIA14', value)
#
# BIAS_CFG3 (0x0095)
#
# BYP_LDO_TBB
@property
def BYP_LDO_TBB(self):
"""
Get the value of BYP_LDO_TBB
"""
return self._readReg('CFG3', 'BYP_LDO_TBB')
@BYP_LDO_TBB.setter
def BYP_LDO_TBB(self, value):
"""
Set the value of BYP_LDO_TBB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_TBB', value)
# BYP_LDO_TIA12
@property
def BYP_LDO_TIA12(self):
"""
Get the value of BYP_LDO_TIA12
"""
return self._readReg('CFG3', 'BYP_LDO_TIA12')
@BYP_LDO_TIA12.setter
def BYP_LDO_TIA12(self, value):
"""
Set the value of BYP_LDO_TIA12
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_TIA12', value)
# BYP_LDO_TIA14
@property
def BYP_LDO_TIA14(self):
"""
Get the value of BYP_LDO_TIA14
"""
return self._readReg('CFG3', 'BYP_LDO_TIA14')
@BYP_LDO_TIA14.setter
def BYP_LDO_TIA14(self, value):
"""
Set the value of BYP_LDO_TIA14
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_TIA14', value)
# BYP_LDO_TLOB
@property
def BYP_LDO_TLOB(self):
"""
Get the value of BYP_LDO_TLOB
"""
return self._readReg('CFG3', 'BYP_LDO_TLOB')
@BYP_LDO_TLOB.setter
def BYP_LDO_TLOB(self, value):
"""
Set the value of BYP_LDO_TLOB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_TLOB', value)
# BYP_LDO_TPAD
@property
def BYP_LDO_TPAD(self):
"""
Get the value of BYP_LDO_TPAD
"""
return self._readReg('CFG3', 'BYP_LDO_TPAD')
@BYP_LDO_TPAD.setter
def BYP_LDO_TPAD(self, value):
"""
Set the value of BYP_LDO_TPAD
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_TPAD', value)
# BYP_LDO_TXBUF
@property
def BYP_LDO_TXBUF(self):
"""
Get the value of BYP_LDO_TXBUF
"""
return self._readReg('CFG3', 'BYP_LDO_TXBUF')
@BYP_LDO_TXBUF.setter
def BYP_LDO_TXBUF(self, value):
"""
Set the value of BYP_LDO_TXBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_TXBUF', value)
# BYP_LDO_VCOGN
@property
def BYP_LDO_VCOGN(self):
"""
Get the value of BYP_LDO_VCOGN
"""
return self._readReg('CFG3', 'BYP_LDO_VCOGN')
@BYP_LDO_VCOGN.setter
def BYP_LDO_VCOGN(self, value):
"""
Set the value of BYP_LDO_VCOGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_VCOGN', value)
# BYP_LDO_VCOSXR
@property
def BYP_LDO_VCOSXR(self):
"""
Get the value of BYP_LDO_VCOSXR
"""
return self._readReg('CFG3', 'BYP_LDO_VCOSXR')
@BYP_LDO_VCOSXR.setter
def BYP_LDO_VCOSXR(self, value):
"""
Set the value of BYP_LDO_VCOSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_VCOSXR', value)
# BYP_LDO_VCOSXT
@property
def BYP_LDO_VCOSXT(self):
"""
Get the value of BYP_LDO_VCOSXT
"""
return self._readReg('CFG3', 'BYP_LDO_VCOSXT')
@BYP_LDO_VCOSXT.setter
def BYP_LDO_VCOSXT(self, value):
"""
Set the value of BYP_LDO_VCOSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'BYP_LDO_VCOSXT', value)
# EN_LOADIMP_LDO_AFE
@property
def EN_LOADIMP_LDO_AFE(self):
"""
Get the value of EN_LOADIMP_LDO_AFE
"""
return self._readReg('CFG3', 'EN_LOADIMP_LDO_AFE')
@EN_LOADIMP_LDO_AFE.setter
def EN_LOADIMP_LDO_AFE(self, value):
"""
Set the value of EN_LOADIMP_LDO_AFE
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'EN_LOADIMP_LDO_AFE', value)
# EN_LOADIMP_LDO_CPGN
@property
def EN_LOADIMP_LDO_CPGN(self):
"""
Get the value of EN_LOADIMP_LDO_CPGN
"""
return self._readReg('CFG3', 'EN_LOADIMP_LDO_CPGN')
@EN_LOADIMP_LDO_CPGN.setter
def EN_LOADIMP_LDO_CPGN(self, value):
"""
Set the value of EN_LOADIMP_LDO_CPGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'EN_LOADIMP_LDO_CPGN', value)
# EN_LOADIMP_LDO_CPSXR
@property
def EN_LOADIMP_LDO_CPSXR(self):
"""
Get the value of EN_LOADIMP_LDO_CPSXR
"""
return self._readReg('CFG3', 'EN_LOADIMP_LDO_CPSXR')
@EN_LOADIMP_LDO_CPSXR.setter
def EN_LOADIMP_LDO_CPSXR(self, value):
"""
Set the value of EN_LOADIMP_LDO_CPSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG3', 'EN_LOADIMP_LDO_CPSXR', value)
#
# BIAS_CFG4 (0x0096)
#
# BYP_LDO_AFE
@property
def BYP_LDO_AFE(self):
"""
Get the value of BYP_LDO_AFE
"""
return self._readReg('CFG4', 'BYP_LDO_AFE')
@BYP_LDO_AFE.setter
def BYP_LDO_AFE(self, value):
"""
Set the value of BYP_LDO_AFE
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_AFE', value)
# BYP_LDO_CPGN
@property
def BYP_LDO_CPGN(self):
"""
Get the value of BYP_LDO_CPGN
"""
return self._readReg('CFG4', 'BYP_LDO_CPGN')
@BYP_LDO_CPGN.setter
def BYP_LDO_CPGN(self, value):
"""
Set the value of BYP_LDO_CPGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_CPGN', value)
# BYP_LDO_CPSXR
@property
def BYP_LDO_CPSXR(self):
"""
Get the value of BYP_LDO_CPSXR
"""
return self._readReg('CFG4', 'BYP_LDO_CPSXR')
@BYP_LDO_CPSXR.setter
def BYP_LDO_CPSXR(self, value):
"""
Set the value of BYP_LDO_CPSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_CPSXR', value)
# BYP_LDO_CPSXT
@property
def BYP_LDO_CPSXT(self):
"""
Get the value of BYP_LDO_CPSXT
"""
return self._readReg('CFG4', 'BYP_LDO_CPSXT')
@BYP_LDO_CPSXT.setter
def BYP_LDO_CPSXT(self, value):
"""
Set the value of BYP_LDO_CPSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_CPSXT', value)
# BYP_LDO_DIG
@property
def BYP_LDO_DIG(self):
"""
Get the value of BYP_LDO_DIG
"""
return self._readReg('CFG4', 'BYP_LDO_DIG')
@BYP_LDO_DIG.setter
def BYP_LDO_DIG(self, value):
"""
Set the value of BYP_LDO_DIG
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_DIG', value)
# BYP_LDO_DIGGN
@property
def BYP_LDO_DIGGN(self):
"""
Get the value of BYP_LDO_DIGGN
"""
return self._readReg('CFG4', 'BYP_LDO_DIGGN')
@BYP_LDO_DIGGN.setter
def BYP_LDO_DIGGN(self, value):
"""
Set the value of BYP_LDO_DIGGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_DIGGN', value)
# BYP_LDO_DIGSXR
@property
def BYP_LDO_DIGSXR(self):
"""
Get the value of BYP_LDO_DIGSXR
"""
return self._readReg('CFG4', 'BYP_LDO_DIGSXR')
@BYP_LDO_DIGSXR.setter
def BYP_LDO_DIGSXR(self, value):
"""
Set the value of BYP_LDO_DIGSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_DIGSXR', value)
# BYP_LDO_DIGSXT
@property
def BYP_LDO_DIGSXT(self):
"""
Get the value of BYP_LDO_DIGSXT
"""
return self._readReg('CFG4', 'BYP_LDO_DIGSXT')
@BYP_LDO_DIGSXT.setter
def BYP_LDO_DIGSXT(self, value):
"""
Set the value of BYP_LDO_DIGSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_DIGSXT', value)
# BYP_LDO_DIVGN
@property
def BYP_LDO_DIVGN(self):
"""
Get the value of BYP_LDO_DIVGN
"""
return self._readReg('CFG4', 'BYP_LDO_DIVGN')
@BYP_LDO_DIVGN.setter
def BYP_LDO_DIVGN(self, value):
"""
Set the value of BYP_LDO_DIVGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_DIVGN', value)
# BYP_LDO_DIVSXR
@property
def BYP_LDO_DIVSXR(self):
"""
Get the value of BYP_LDO_DIVSXR
"""
return self._readReg('CFG4', 'BYP_LDO_DIVSXR')
@BYP_LDO_DIVSXR.setter
def BYP_LDO_DIVSXR(self, value):
"""
Set the value of BYP_LDO_DIVSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_DIVSXR', value)
# BYP_LDO_DIVSXT
@property
def BYP_LDO_DIVSXT(self):
"""
Get the value of BYP_LDO_DIVSXT
"""
return self._readReg('CFG4', 'BYP_LDO_DIVSXT')
@BYP_LDO_DIVSXT.setter
def BYP_LDO_DIVSXT(self, value):
"""
Set the value of BYP_LDO_DIVSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_DIVSXT', value)
# BYP_LDO_LNA12
@property
def BYP_LDO_LNA12(self):
"""
Get the value of BYP_LDO_LNA12
"""
return self._readReg('CFG4', 'BYP_LDO_LNA12')
@BYP_LDO_LNA12.setter
def BYP_LDO_LNA12(self, value):
"""
Set the value of BYP_LDO_LNA12
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_LNA12', value)
# BYP_LDO_LNA14
@property
def BYP_LDO_LNA14(self):
"""
Get the value of BYP_LDO_LNA14
"""
return self._readReg('CFG4', 'BYP_LDO_LNA14')
@BYP_LDO_LNA14.setter
def BYP_LDO_LNA14(self, value):
"""
Set the value of BYP_LDO_LNA14
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_LNA14', value)
# BYP_LDO_MXRFE
@property
def BYP_LDO_MXRFE(self):
"""
Get the value of BYP_LDO_MXRFE
"""
return self._readReg('CFG4', 'BYP_LDO_MXRFE')
@BYP_LDO_MXRFE.setter
def BYP_LDO_MXRFE(self, value):
"""
Set the value of BYP_LDO_MXRFE
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_MXRFE', value)
# BYP_LDO_RBB
@property
def BYP_LDO_RBB(self):
"""
Get the value of BYP_LDO_RBB
"""
return self._readReg('CFG4', 'BYP_LDO_RBB')
@BYP_LDO_RBB.setter
def BYP_LDO_RBB(self, value):
"""
Set the value of BYP_LDO_RBB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_RBB', value)
# BYP_LDO_RXBUF
@property
def BYP_LDO_RXBUF(self):
"""
Get the value of BYP_LDO_RXBUF
"""
return self._readReg('CFG4', 'BYP_LDO_RXBUF')
@BYP_LDO_RXBUF.setter
def BYP_LDO_RXBUF(self, value):
"""
Set the value of BYP_LDO_RXBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG4', 'BYP_LDO_RXBUF', value)
#
# BIAS_CFG5 (0x0097)
#
# SPDUP_LDO_DIVSXR
@property
def SPDUP_LDO_DIVSXR(self):
"""
Get the value of SPDUP_LDO_DIVSXR
"""
return self._readReg('CFG5', 'SPDUP_LDO_DIVSXR')
@SPDUP_LDO_DIVSXR.setter
def SPDUP_LDO_DIVSXR(self, value):
"""
Set the value of SPDUP_LDO_DIVSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_DIVSXR', value)
# SPDUP_LDO_DIVSXT
@property
def SPDUP_LDO_DIVSXT(self):
"""
Get the value of SPDUP_LDO_DIVSXT
"""
return self._readReg('CFG5', 'SPDUP_LDO_DIVSXT')
@SPDUP_LDO_DIVSXT.setter
def SPDUP_LDO_DIVSXT(self, value):
"""
Set the value of SPDUP_LDO_DIVSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_DIVSXT', value)
# SPDUP_LDO_LNA12
@property
def SPDUP_LDO_LNA12(self):
"""
Get the value of SPDUP_LDO_LNA12
"""
return self._readReg('CFG5', 'SPDUP_LDO_LNA12')
@SPDUP_LDO_LNA12.setter
def SPDUP_LDO_LNA12(self, value):
"""
Set the value of SPDUP_LDO_LNA12
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_LNA12', value)
# SPDUP_LDO_LNA14
@property
def SPDUP_LDO_LNA14(self):
"""
Get the value of SPDUP_LDO_LNA14
"""
return self._readReg('CFG5', 'SPDUP_LDO_LNA14')
@SPDUP_LDO_LNA14.setter
def SPDUP_LDO_LNA14(self, value):
"""
Set the value of SPDUP_LDO_LNA14
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_LNA14', value)
# SPDUP_LDO_MXRFE
@property
def SPDUP_LDO_MXRFE(self):
"""
Get the value of SPDUP_LDO_MXRFE
"""
return self._readReg('CFG5', 'SPDUP_LDO_MXRFE')
@SPDUP_LDO_MXRFE.setter
def SPDUP_LDO_MXRFE(self, value):
"""
Set the value of SPDUP_LDO_MXRFE
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_MXRFE', value)
# SPDUP_LDO_RBB
@property
def SPDUP_LDO_RBB(self):
"""
Get the value of SPDUP_LDO_RBB
"""
return self._readReg('CFG5', 'SPDUP_LDO_RBB')
@SPDUP_LDO_RBB.setter
def SPDUP_LDO_RBB(self, value):
"""
Set the value of SPDUP_LDO_RBB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_RBB', value)
# SPDUP_LDO_RXBUF
@property
def SPDUP_LDO_RXBUF(self):
"""
Get the value of SPDUP_LDO_RXBUF
"""
return self._readReg('CFG5', 'SPDUP_LDO_RXBUF')
@SPDUP_LDO_RXBUF.setter
def SPDUP_LDO_RXBUF(self, value):
"""
Set the value of SPDUP_LDO_RXBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_RXBUF', value)
# SPDUP_LDO_TBB
@property
def SPDUP_LDO_TBB(self):
"""
Get the value of SPDUP_LDO_TBB
"""
return self._readReg('CFG5', 'SPDUP_LDO_TBB')
@SPDUP_LDO_TBB.setter
def SPDUP_LDO_TBB(self, value):
"""
Set the value of SPDUP_LDO_TBB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_TBB', value)
# SPDUP_LDO_TIA12
@property
def SPDUP_LDO_TIA12(self):
"""
Get the value of SPDUP_LDO_TIA12
"""
return self._readReg('CFG5', 'SPDUP_LDO_TIA12')
@SPDUP_LDO_TIA12.setter
def SPDUP_LDO_TIA12(self, value):
"""
Set the value of SPDUP_LDO_TIA12
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_TIA12', value)
# SPDUP_LDO_TIA14
@property
def SPDUP_LDO_TIA14(self):
"""
Get the value of SPDUP_LDO_TIA14
"""
return self._readReg('CFG5', 'SPDUP_LDO_TIA14')
@SPDUP_LDO_TIA14.setter
def SPDUP_LDO_TIA14(self, value):
"""
Set the value of SPDUP_LDO_TIA14
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_TIA14', value)
# SPDUP_LDO_TLOB
@property
def SPDUP_LDO_TLOB(self):
"""
Get the value of SPDUP_LDO_TLOB
"""
return self._readReg('CFG5', 'SPDUP_LDO_TLOB')
@SPDUP_LDO_TLOB.setter
def SPDUP_LDO_TLOB(self, value):
"""
Set the value of SPDUP_LDO_TLOB
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_TLOB', value)
# SPDUP_LDO_TPAD
@property
def SPDUP_LDO_TPAD(self):
"""
Get the value of SPDUP_LDO_TPAD
"""
return self._readReg('CFG5', 'SPDUP_LDO_TPAD')
@SPDUP_LDO_TPAD.setter
def SPDUP_LDO_TPAD(self, value):
"""
Set the value of SPDUP_LDO_TPAD
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_TPAD', value)
# SPDUP_LDO_TXBUF
@property
def SPDUP_LDO_TXBUF(self):
"""
Get the value of SPDUP_LDO_TXBUF
"""
return self._readReg('CFG5', 'SPDUP_LDO_TXBUF')
@SPDUP_LDO_TXBUF.setter
def SPDUP_LDO_TXBUF(self, value):
"""
Set the value of SPDUP_LDO_TXBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_TXBUF', value)
# SPDUP_LDO_VCOGN
@property
def SPDUP_LDO_VCOGN(self):
"""
Get the value of SPDUP_LDO_VCOGN
"""
return self._readReg('CFG5', 'SPDUP_LDO_VCOGN')
@SPDUP_LDO_VCOGN.setter
def SPDUP_LDO_VCOGN(self, value):
"""
Set the value of SPDUP_LDO_VCOGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_VCOGN', value)
# SPDUP_LDO_VCOSXR
@property
def SPDUP_LDO_VCOSXR(self):
"""
Get the value of SPDUP_LDO_VCOSXR
"""
return self._readReg('CFG5', 'SPDUP_LDO_VCOSXR')
@SPDUP_LDO_VCOSXR.setter
def SPDUP_LDO_VCOSXR(self, value):
"""
Set the value of SPDUP_LDO_VCOSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_VCOSXR', value)
# SPDUP_LDO_VCOSXT
@property
def SPDUP_LDO_VCOSXT(self):
"""
Get the value of SPDUP_LDO_VCOSXT
"""
return self._readReg('CFG5', 'SPDUP_LDO_VCOSXT')
@SPDUP_LDO_VCOSXT.setter
def SPDUP_LDO_VCOSXT(self, value):
"""
Set the value of SPDUP_LDO_VCOSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG5', 'SPDUP_LDO_VCOSXT', value)
#
# BIAS_CFG6 (0x0098)
#
# SPDUP_LDO_AFE
@property
def SPDUP_LDO_AFE(self):
"""
Get the value of SPDUP_LDO_AFE
"""
return self._readReg('CFG6', 'SPDUP_LDO_AFE')
@SPDUP_LDO_AFE.setter
def SPDUP_LDO_AFE(self, value):
"""
Set the value of SPDUP_LDO_AFE
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_AFE', value)
# SPDUP_LDO_CPGN
@property
def SPDUP_LDO_CPGN(self):
"""
Get the value of SPDUP_LDO_CPGN
"""
return self._readReg('CFG6', 'SPDUP_LDO_CPGN')
@SPDUP_LDO_CPGN.setter
def SPDUP_LDO_CPGN(self, value):
"""
Set the value of SPDUP_LDO_CPGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_CPGN', value)
# SPDUP_LDO_CPSXR
@property
def SPDUP_LDO_CPSXR(self):
"""
Get the value of SPDUP_LDO_CPSXR
"""
return self._readReg('CFG6', 'SPDUP_LDO_CPSXR')
@SPDUP_LDO_CPSXR.setter
def SPDUP_LDO_CPSXR(self, value):
"""
Set the value of SPDUP_LDO_CPSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_CPSXR', value)
# SPDUP_LDO_CPSXT
@property
def SPDUP_LDO_CPSXT(self):
"""
Get the value of SPDUP_LDO_CPSXT
"""
return self._readReg('CFG6', 'SPDUP_LDO_CPSXT')
@SPDUP_LDO_CPSXT.setter
def SPDUP_LDO_CPSXT(self, value):
"""
Set the value of SPDUP_LDO_CPSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_CPSXT', value)
# SPDUP_LDO_DIG
@property
def SPDUP_LDO_DIG(self):
"""
Get the value of SPDUP_LDO_DIG
"""
return self._readReg('CFG6', 'SPDUP_LDO_DIG')
@SPDUP_LDO_DIG.setter
def SPDUP_LDO_DIG(self, value):
"""
Set the value of SPDUP_LDO_DIG
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_DIG', value)
# SPDUP_LDO_DIGGN
@property
def SPDUP_LDO_DIGGN(self):
"""
Get the value of SPDUP_LDO_DIGGN
"""
return self._readReg('CFG6', 'SPDUP_LDO_DIGGN')
@SPDUP_LDO_DIGGN.setter
def SPDUP_LDO_DIGGN(self, value):
"""
Set the value of SPDUP_LDO_DIGGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_DIGGN', value)
# SPDUP_LDO_DIGSXR
@property
def SPDUP_LDO_DIGSXR(self):
"""
Get the value of SPDUP_LDO_DIGSXR
"""
return self._readReg('CFG6', 'SPDUP_LDO_DIGSXR')
@SPDUP_LDO_DIGSXR.setter
def SPDUP_LDO_DIGSXR(self, value):
"""
Set the value of SPDUP_LDO_DIGSXR
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_DIGSXR', value)
# SPDUP_LDO_DIGSXT
@property
def SPDUP_LDO_DIGSXT(self):
"""
Get the value of SPDUP_LDO_DIGSXT
"""
return self._readReg('CFG6', 'SPDUP_LDO_DIGSXT')
@SPDUP_LDO_DIGSXT.setter
def SPDUP_LDO_DIGSXT(self, value):
"""
Set the value of SPDUP_LDO_DIGSXT
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_DIGSXT', value)
# SPDUP_LDO_DIVGN
@property
def SPDUP_LDO_DIVGN(self):
"""
Get the value of SPDUP_LDO_DIVGN
"""
return self._readReg('CFG6', 'SPDUP_LDO_DIVGN')
@SPDUP_LDO_DIVGN.setter
def SPDUP_LDO_DIVGN(self, value):
"""
Set the value of SPDUP_LDO_DIVGN
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG6', 'SPDUP_LDO_DIVGN', value)
#
# BIAS_CFG7 (0x0099)
#
# RDIV_VCOSXR<7:0>
@property
def RDIV_VCOSXR(self):
"""
Get the value of RDIV_VCOSXR<7:0>
"""
return self._readReg('CFG7', 'RDIV_VCOSXR<7:0>')
@RDIV_VCOSXR.setter
def RDIV_VCOSXR(self, value):
"""
Set the value of RDIV_VCOSXR<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG7', 'RDIV_VCOSXR<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_VCOSXT<7:0>
@property
def RDIV_VCOSXT(self):
"""
Get the value of RDIV_VCOSXT<7:0>
"""
return self._readReg('CFG7', 'RDIV_VCOSXT<7:0>')
@RDIV_VCOSXT.setter
def RDIV_VCOSXT(self, value):
"""
Set the value of RDIV_VCOSXT<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG7', 'RDIV_VCOSXT<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG8 (0x009A)
#
# RDIV_TXBUF<7:0>
@property
def RDIV_TXBUF(self):
"""
Get the value of RDIV_TXBUF<7:0>
"""
return self._readReg('CFG8', 'RDIV_TXBUF<7:0>')
@RDIV_TXBUF.setter
def RDIV_TXBUF(self, value):
"""
Set the value of RDIV_TXBUF<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG8', 'RDIV_TXBUF<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_VCOGN<7:0>
@property
def RDIV_VCOGN(self):
"""
Get the value of RDIV_VCOGN<7:0>
"""
return self._readReg('CFG8', 'RDIV_VCOGN<7:0>')
@RDIV_VCOGN.setter
def RDIV_VCOGN(self, value):
"""
Set the value of RDIV_VCOGN<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG8', 'RDIV_VCOGN<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG9 (0x009B)
#
# RDIV_TLOB<7:0>
@property
def RDIV_TLOB(self):
"""
Get the value of RDIV_TLOB<7:0>
"""
return self._readReg('CFG9', 'RDIV_TLOB<7:0>')
@RDIV_TLOB.setter
def RDIV_TLOB(self, value):
"""
Set the value of RDIV_TLOB<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG9', 'RDIV_TLOB<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_TPAD<7:0>
@property
def RDIV_TPAD(self):
"""
Get the value of RDIV_TPAD<7:0>
"""
return self._readReg('CFG9', 'RDIV_TPAD<7:0>')
@RDIV_TPAD.setter
def RDIV_TPAD(self, value):
"""
Set the value of RDIV_TPAD<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG9', 'RDIV_TPAD<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG10 (0x009C)
#
# RDIV_TIA12<7:0>
@property
def RDIV_TIA12(self):
"""
Get the value of RDIV_TIA12<7:0>
"""
return self._readReg('CFG10', 'RDIV_TIA12<7:0>')
@RDIV_TIA12.setter
def RDIV_TIA12(self, value):
"""
Set the value of RDIV_TIA12<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG10', 'RDIV_TIA12<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_TIA14<7:0>
@property
def RDIV_TIA14(self):
"""
Get the value of RDIV_TIA14<7:0>
"""
return self._readReg('CFG10', 'RDIV_TIA14<7:0>')
@RDIV_TIA14.setter
def RDIV_TIA14(self, value):
"""
Set the value of RDIV_TIA14<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG10', 'RDIV_TIA14<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG11 (0x009D)
#
# RDIV_RXBUF<7:0>
@property
def RDIV_RXBUF(self):
"""
Get the value of RDIV_RXBUF<7:0>
"""
return self._readReg('CFG11', 'RDIV_RXBUF<7:0>')
@RDIV_RXBUF.setter
def RDIV_RXBUF(self, value):
"""
Set the value of RDIV_RXBUF<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG11', 'RDIV_RXBUF<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_TBB<7:0>
@property
def RDIV_TBB(self):
"""
Get the value of RDIV_TBB<7:0>
"""
return self._readReg('CFG11', 'RDIV_TBB<7:0>')
@RDIV_TBB.setter
def RDIV_TBB(self, value):
"""
Set the value of RDIV_TBB<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG11', 'RDIV_TBB<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG12 (0x009E)
#
# RDIV_MXRFE<7:0>
@property
def RDIV_MXRFE(self):
"""
Get the value of RDIV_MXRFE<7:0>
"""
return self._readReg('CFG12', 'RDIV_MXRFE<7:0>')
@RDIV_MXRFE.setter
def RDIV_MXRFE(self, value):
"""
Set the value of RDIV_MXRFE<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG12', 'RDIV_MXRFE<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_RBB<7:0>
@property
def RDIV_RBB(self):
"""
Get the value of RDIV_RBB<7:0>
"""
return self._readReg('CFG12', 'RDIV_RBB<7:0>')
@RDIV_RBB.setter
def RDIV_RBB(self, value):
"""
Set the value of RDIV_RBB<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG12', 'RDIV_RBB<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG13 (0x009F)
#
# RDIV_LNA12<7:0>
@property
def RDIV_LNA12(self):
"""
Get the value of RDIV_LNA12<7:0>
"""
return self._readReg('CFG13', 'RDIV_LNA12<7:0>')
@RDIV_LNA12.setter
def RDIV_LNA12(self, value):
"""
Set the value of RDIV_LNA12<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG13', 'RDIV_LNA12<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_LNA14<7:0>
@property
def RDIV_LNA14(self):
"""
Get the value of RDIV_LNA14<7:0>
"""
return self._readReg('CFG13', 'RDIV_LNA14<7:0>')
@RDIV_LNA14.setter
def RDIV_LNA14(self, value):
"""
Set the value of RDIV_LNA14<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG13', 'RDIV_LNA14<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG14 (0x00A0)
#
# RDIV_DIVSXR<7:0>
@property
def RDIV_DIVSXR(self):
"""
Get the value of RDIV_DIVSXR<7:0>
"""
return self._readReg('CFG14', 'RDIV_DIVSXR<7:0>')
@RDIV_DIVSXR.setter
def RDIV_DIVSXR(self, value):
"""
Set the value of RDIV_DIVSXR<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG14', 'RDIV_DIVSXR<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_DIVSXT<7:0>
@property
def RDIV_DIVSXT(self):
"""
Get the value of RDIV_DIVSXT<7:0>
"""
return self._readReg('CFG14', 'RDIV_DIVSXT<7:0>')
@RDIV_DIVSXT.setter
def RDIV_DIVSXT(self, value):
"""
Set the value of RDIV_DIVSXT<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG14', 'RDIV_DIVSXT<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG15 (0x00A1)
#
# RDIV_DIGSXT<7:0>
@property
def RDIV_DIGSXT(self):
"""
Get the value of RDIV_DIGSXT<7:0>
"""
return self._readReg('CFG15', 'RDIV_DIGSXT<7:0>')
@RDIV_DIGSXT.setter
def RDIV_DIGSXT(self, value):
"""
Set the value of RDIV_DIGSXT<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG15', 'RDIV_DIGSXT<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_DIVGN<7:0>
@property
def RDIV_DIVGN(self):
"""
Get the value of RDIV_DIVGN<7:0>
"""
return self._readReg('CFG15', 'RDIV_DIVGN<7:0>')
@RDIV_DIVGN.setter
def RDIV_DIVGN(self, value):
"""
Set the value of RDIV_DIVGN<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG15', 'RDIV_DIVGN<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG16 (0x00A2)
#
# RDIV_DIGGN<7:0>
@property
def RDIV_DIGGN(self):
"""
Get the value of RDIV_DIGGN<7:0>
"""
return self._readReg('CFG16', 'RDIV_DIGGN<7:0>')
@RDIV_DIGGN.setter
def RDIV_DIGGN(self, value):
"""
Set the value of RDIV_DIGGN<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG16', 'RDIV_DIGGN<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_DIGSXR<7:0>
@property
def RDIV_DIGSXR(self):
"""
Get the value of RDIV_DIGSXR<7:0>
"""
return self._readReg('CFG16', 'RDIV_DIGSXR<7:0>')
@RDIV_DIGSXR.setter
def RDIV_DIGSXR(self, value):
"""
Set the value of RDIV_DIGSXR<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG16', 'RDIV_DIGSXR<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG17 (0x00A3)
#
# RDIV_CPSXT<7:0>
@property
def RDIV_CPSXT(self):
"""
Get the value of RDIV_CPSXT<7:0>
"""
return self._readReg('CFG17', 'RDIV_CPSXT<7:0>')
@RDIV_CPSXT.setter
def RDIV_CPSXT(self, value):
"""
Set the value of RDIV_CPSXT<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG17', 'RDIV_CPSXT<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_DIG<7:0>
@property
def RDIV_DIG(self):
"""
Get the value of RDIV_DIG<7:0>
"""
return self._readReg('CFG17', 'RDIV_DIG<7:0>')
@RDIV_DIG.setter
def RDIV_DIG(self, value):
"""
Set the value of RDIV_DIG<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG17', 'RDIV_DIG<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG18 (0x00A4)
#
# RDIV_CPGN<7:0>
@property
def RDIV_CPGN(self):
"""
Get the value of RDIV_CPGN<7:0>
"""
return self._readReg('CFG18', 'RDIV_CPGN<7:0>')
@RDIV_CPGN.setter
def RDIV_CPGN(self, value):
"""
Set the value of RDIV_CPGN<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG18', 'RDIV_CPGN<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_CPSXR<7:0>
@property
def RDIV_CPSXR(self):
"""
Get the value of RDIV_CPSXR<7:0>
"""
return self._readReg('CFG18', 'RDIV_CPSXR<7:0>')
@RDIV_CPSXR.setter
def RDIV_CPSXR(self, value):
"""
Set the value of RDIV_CPSXR<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG18', 'RDIV_CPSXR<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG19 (0x00A5)
#
# RDIV_SPIBUF<7:0>
@property
def RDIV_SPIBUF(self):
"""
Get the value of RDIV_SPIBUF<7:0>
"""
return self._readReg('CFG19', 'RDIV_SPIBUF<7:0>')
@RDIV_SPIBUF.setter
def RDIV_SPIBUF(self, value):
"""
Set the value of RDIV_SPIBUF<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG19', 'RDIV_SPIBUF<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_AFE<7:0>
@property
def RDIV_AFE(self):
"""
Get the value of RDIV_AFE<7:0>
"""
return self._readReg('CFG19', 'RDIV_AFE<7:0>')
@RDIV_AFE.setter
def RDIV_AFE(self, value):
"""
Set the value of RDIV_AFE<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG19', 'RDIV_AFE<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
#
# BIAS_CFG20 (0x00A6)
#
# ISINK_SPIBUFF<2:0>
@property
def ISINK_SPIBUFF(self):
"""
Get the value of ISINK_SPIBUFF<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG20', 'ISINK_SPIBUFF<2:0>')
else:
raise ValueError("Bitfield ISINK_SPIBUFF<2:0> is not supported on chip version "+str(self.chip.chipID))
@ISINK_SPIBUFF.setter
def ISINK_SPIBUFF(self, value):
"""
Set the value of ISINK_SPIBUFF<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
            if value not in range(0, 8):
                raise ValueError("Value must be [0..7]")
self._writeReg('CFG20', 'ISINK_SPIBUFF<2:0>', value)
else:
raise ValueError("Bitfield ISINK_SPIBUFF<2:0> is not supported on chip version "+str(self.chip.chipID))
# SPDUP_LDO_SPIBUF
@property
def SPDUP_LDO_SPIBUF(self):
"""
Get the value of SPDUP_LDO_SPIBUF
"""
return self._readReg('CFG20', 'SPDUP_LDO_SPIBUF')
@SPDUP_LDO_SPIBUF.setter
def SPDUP_LDO_SPIBUF(self, value):
"""
Set the value of SPDUP_LDO_SPIBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'SPDUP_LDO_SPIBUF', value)
# SPDUP_LDO_DIGIp2
@property
def SPDUP_LDO_DIGIp2(self):
"""
Get the value of SPDUP_LDO_DIGIp2
"""
return self._readReg('CFG20', 'SPDUP_LDO_DIGIp2')
@SPDUP_LDO_DIGIp2.setter
def SPDUP_LDO_DIGIp2(self, value):
"""
Set the value of SPDUP_LDO_DIGIp2
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'SPDUP_LDO_DIGIp2', value)
# SPDUP_LDO_DIGIp1
@property
def SPDUP_LDO_DIGIp1(self):
"""
Get the value of SPDUP_LDO_DIGIp1
"""
return self._readReg('CFG20', 'SPDUP_LDO_DIGIp1')
@SPDUP_LDO_DIGIp1.setter
def SPDUP_LDO_DIGIp1(self, value):
"""
Set the value of SPDUP_LDO_DIGIp1
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'SPDUP_LDO_DIGIp1', value)
# BYP_LDO_SPIBUF
@property
def BYP_LDO_SPIBUF(self):
"""
Get the value of BYP_LDO_SPIBUF
"""
return self._readReg('CFG20', 'BYP_LDO_SPIBUF')
@BYP_LDO_SPIBUF.setter
def BYP_LDO_SPIBUF(self, value):
"""
Set the value of BYP_LDO_SPIBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'BYP_LDO_SPIBUF', value)
# BYP_LDO_DIGIp2
@property
def BYP_LDO_DIGIp2(self):
"""
Get the value of BYP_LDO_DIGIp2
"""
return self._readReg('CFG20', 'BYP_LDO_DIGIp2')
@BYP_LDO_DIGIp2.setter
def BYP_LDO_DIGIp2(self, value):
"""
Set the value of BYP_LDO_DIGIp2
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'BYP_LDO_DIGIp2', value)
# BYP_LDO_DIGIp1
@property
def BYP_LDO_DIGIp1(self):
"""
Get the value of BYP_LDO_DIGIp1
"""
return self._readReg('CFG20', 'BYP_LDO_DIGIp1')
@BYP_LDO_DIGIp1.setter
def BYP_LDO_DIGIp1(self, value):
"""
Set the value of BYP_LDO_DIGIp1
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'BYP_LDO_DIGIp1', value)
# EN_LOADIMP_LDO_SPIBUF
@property
def EN_LOADIMP_LDO_SPIBUF(self):
"""
Get the value of EN_LOADIMP_LDO_SPIBUF
"""
return self._readReg('CFG20', 'EN_LOADIMP_LDO_SPIBUF')
@EN_LOADIMP_LDO_SPIBUF.setter
def EN_LOADIMP_LDO_SPIBUF(self, value):
"""
Set the value of EN_LOADIMP_LDO_SPIBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'EN_LOADIMP_LDO_SPIBUF', value)
# EN_LOADIMP_LDO_DIGIp2
@property
def EN_LOADIMP_LDO_DIGIp2(self):
"""
Get the value of EN_LOADIMP_LDO_DIGIp2
"""
return self._readReg('CFG20', 'EN_LOADIMP_LDO_DIGIp2')
@EN_LOADIMP_LDO_DIGIp2.setter
def EN_LOADIMP_LDO_DIGIp2(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIGIp2
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'EN_LOADIMP_LDO_DIGIp2', value)
# EN_LOADIMP_LDO_DIGIp1
@property
def EN_LOADIMP_LDO_DIGIp1(self):
"""
Get the value of EN_LOADIMP_LDO_DIGIp1
"""
return self._readReg('CFG20', 'EN_LOADIMP_LDO_DIGIp1')
@EN_LOADIMP_LDO_DIGIp1.setter
def EN_LOADIMP_LDO_DIGIp1(self, value):
"""
Set the value of EN_LOADIMP_LDO_DIGIp1
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'EN_LOADIMP_LDO_DIGIp1', value)
# PD_LDO_SPIBUF
@property
def PD_LDO_SPIBUF(self):
"""
Get the value of PD_LDO_SPIBUF
"""
return self._readReg('CFG20', 'PD_LDO_SPIBUF')
@PD_LDO_SPIBUF.setter
def PD_LDO_SPIBUF(self, value):
"""
Set the value of PD_LDO_SPIBUF
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'PD_LDO_SPIBUF', value)
# PD_LDO_DIGIp2
@property
def PD_LDO_DIGIp2(self):
"""
Get the value of PD_LDO_DIGIp2
"""
return self._readReg('CFG20', 'PD_LDO_DIGIp2')
@PD_LDO_DIGIp2.setter
def PD_LDO_DIGIp2(self, value):
"""
Set the value of PD_LDO_DIGIp2
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'PD_LDO_DIGIp2', value)
# PD_LDO_DIGIp1
@property
def PD_LDO_DIGIp1(self):
"""
Get the value of PD_LDO_DIGIp1
"""
return self._readReg('CFG20', 'PD_LDO_DIGIp1')
@PD_LDO_DIGIp1.setter
def PD_LDO_DIGIp1(self, value):
"""
Set the value of PD_LDO_DIGIp1
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'PD_LDO_DIGIp1', value)
# EN_G_LDOP
@property
def EN_G_LDOP(self):
"""
Get the value of EN_G_LDOP
"""
return self._readReg('CFG20', 'EN_G_LDOP')
@EN_G_LDOP.setter
def EN_G_LDOP(self, value):
"""
Set the value of EN_G_LDOP
"""
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG20', 'EN_G_LDOP', value)
#
# BIAS_CFG21 (0x00A7)
#
# RDIV_DIGIp2<7:0>
@property
def RDIV_DIGIp2(self):
"""
Get the value of RDIV_DIGIp2<7:0>
"""
return self._readReg('CFG21', 'RDIV_DIGIp2<7:0>')
@RDIV_DIGIp2.setter
def RDIV_DIGIp2(self, value):
"""
Set the value of RDIV_DIGIp2<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG21', 'RDIV_DIGIp2<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# RDIV_DIGIp1<7:0>
@property
def RDIV_DIGIp1(self):
"""
Get the value of RDIV_DIGIp1<7:0>
"""
return self._readReg('CFG21', 'RDIV_DIGIp1<7:0>')
@RDIV_DIGIp1.setter
def RDIV_DIGIp1(self, value):
"""
Set the value of RDIV_DIGIp1<7:0>
"""
if self.allowLDO:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG21', 'RDIV_DIGIp1<7:0>', value)
else:
self.chip.log("Changing LDO voltage is not permitted. To change the LDO voltage, set BIAS.allowLDO=True")
# ===== NorthernLights/shapes/BaseShape.py | jgillick/coffeetable-programs | MIT =====
import time
# Colors
RED = (1,0,0)
YELLOW = (1,1,0)
GREEN = (0,1,0)
CYAN = (0,1,1)
BLUE = (0,0,1)
PURPLE = (1,0,1)
class BaseShape:
# A list of instance attribute names, which are animatable objects
animatable_attrs = []
# The time of the last animation update
last_update = None
# The number of LEDs in the strip
led_count = 0
# The color index we're setting (red: 0, green: 1, blue: 2)
color = 0
    def __init__(self, led_count, color, now):
        self.led_count = led_count
        self.color = color
        self.last_update = now  # `now` avoids shadowing the imported time module
    def update(self, now):
        """ Updates the shape animatable attributes; returns True while animating. """
        elapsed = now - self.last_update
        print(elapsed)  # debug: seconds since the last update
        is_animating = False
        for anim_attr in self.animatable_attrs:
            anim = getattr(self, anim_attr)
            if anim.update(elapsed):
                is_animating = True
        self.last_update = now
        return is_animating
def __len__(self):
return self.led_count
def __getitem__(self, key):
return (0,0,0)
def __setitem__(self, key, value):
""" Cannot set pixel item. """
pass
def __delitem__(self, key):
""" Cannot delete pixel color. """
pass
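# Minimal usage sketch: a hypothetical subclass wires one animatable attribute
# into the update loop (`FakeAnim` and `Dot` are illustrative names only).
if __name__ == "__main__":
    class FakeAnim:
        def update(self, elapsed):
            # A real animation would advance its state by `elapsed` seconds.
            return True  # report "still animating" for the demo
    class Dot(BaseShape):
        animatable_attrs = ["fade"]
        def __init__(self, led_count, color, now):
            super().__init__(led_count, color, now)
            self.fade = FakeAnim()
    shape = Dot(10, 1, time.time())    # 10 LEDs, color index 1 (green)
    print(shape.update(time.time()))   # True while any attribute animates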
# ===== WD/Cwiczenia/rzymskie.py | galursa/UWM | MIT =====
# Roman numeral -> integer mapping ("rzymskie" is Polish for "Roman")
rzymskie = {'I': 1, 'II': 2, 'III': 3, 'IV': 4, 'V': 5, 'VI': 6, 'VII': 7, 'VIII': 8}
print(rzymskie)
print('One dictionary element: \n')
print(rzymskie['I'])
# ===== otoku.py | gitmori/WebTools | MIT =====
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from config.site_url import SiteUrl
from urllib.request import urlopen
from lxml.html import fromstring
from random import randint
from time import sleep
# Strip extra newlines/separators from each item and join into one string
def conv(arg):
    arg = [item.strip().replace(' |', '') for item in arg]
    arg = ''.join(arg)
    return arg
# Scrape pages 1-4 of the blog, pulling information out via XPath
for page in range(1, 5):
    # The blog URL to scrape is read from an external file (already .gitignore'd)
    url = SiteUrl()[4] + str(page) + '/'
    res = urlopen(url)
    dom = fromstring(res.read())
    # The first page lists 22 entries and later pages 20, so set the loop end accordingly
    if page == 1:
        end = 23
    else:
        end = 21
    for row in range(1, end):
        # Fetch the date
        date = dom.xpath('//*[@id="main"]/div[2]/div[' + str(row) + ']/div[2]/div/p/text()[1]')
        date = conv(date)
        # Fetch the entry title
        info = dom.xpath('//*[@id="main"]/div[2]/div[' + str(row) + ']/div[2]/h3/a/text()')
        info = conv(info)
        # Append /@href to the XPath to get the link URL
        link = dom.xpath('//*[@id="main"]/div[2]/div[' + str(row) + ']/div[2]/h3/a/@href')
        link = conv(link)
        # The comment link's XPath index shifts with the number of displayed
        # categories, so probe the likely range (roughly 2-5) with i
        for i in range(2, 6):
            # Append /text() to the XPath to read the comment string from the anchor tag
            cmnt = dom.xpath('//*[@id="main"]/div[2]/div[' + str(row) + ']/div[2]/div/p/a[' + str(i) + ']/text()')
            cmnt = conv(cmnt)
            # Entries whose title contains '【固定】' ("pinned") are ads, so skip them;
            # print only when the probed link text contains 'コメント' ("comments")
            if '【固定】' not in info and 'コメント' in cmnt:
                print(date)
                print(info)
                print(link)
                print(cmnt)
    # Wait 1-3 seconds at random between pages to reduce server load
    if page <= 3:
        time = randint(1, 3)
        sleep(time)
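# Illustrative example of what conv() returns for list input shaped like an
# lxml text() result:
#   conv(['  2020/01/01 |', '\n']) -> '2020/01/01'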
# ===== falcon/util/net.py | jopereira/horus-tracer | MIT =====
import ctypes
import ctypes.util
libc = ctypes.CDLL(ctypes.util.find_library('c'))
# Resolve a network interface index to its device name. Note that libc's
# if_indextoname() yields a C string, so the result is returned as bytes.
def if_indextoname(index):
    if not isinstance(index, int):
        raise TypeError('Index must be an integer.')
    libc.if_indextoname.argtypes = [ctypes.c_uint32, ctypes.c_char_p]
    libc.if_indextoname.restype = ctypes.c_char_p
    ifname = ctypes.create_string_buffer(32)
    ifname = libc.if_indextoname(index, ifname)
    if not ifname:
        raise RuntimeError("Invalid network interface index.")
    return ifname
# Generate a canonical socket id: the endpoint with the lower address (or,
# on an address tie, the lower port) always comes first, so both directions
# of a connection map to the same id.
def to_socket_id(addr1, addr1_str, addr2, addr2_str, port1, port2):
    socket_id = None
    if addr1 < addr2:
        socket_id = "%s:%d-%s:%d" % (addr1_str, port1, addr2_str, port2)
    elif addr2 < addr1:
        socket_id = "%s:%d-%s:%d" % (addr2_str, port2, addr1_str, port1)
    else:
        if port1 < port2:
            socket_id = "%s:%d-%s:%d" % (addr1_str, port1, addr2_str, port2)
        else:
            socket_id = "%s:%d-%s:%d" % (addr2_str, port2, addr1_str, port1)
    return socket_id
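# Usage sketch (illustrative values; interface index 1 is usually loopback on
# Linux, but that is host-dependent, so only to_socket_id is exercised here).
if __name__ == "__main__":
    a = to_socket_id(2130706433, "127.0.0.1", 2130706434, "127.0.0.2", 5000, 6000)
    b = to_socket_id(2130706434, "127.0.0.2", 2130706433, "127.0.0.1", 6000, 5000)
    print(a == b)  # True: both orderings canonicalize to the same id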
# ===== CKY_Parser/BackupGrammer.py | Deekshantiiitd/NLP-2019 | Apache-2.0 =====
import nltk,re,codecs
from nltk.tokenize import word_tokenize,sent_tokenize
from backNode import BackNode
from nltk import Tree
def trace_tree(trace):
if trace.left==None and trace.right==None:
return str(trace.root)+" "+str(trace.word)
return "("+str(trace.root)+"("+str(trace_tree(trace.left))+")"+" "+"("+str(trace_tree(trace.right))+")"+")"
def data_preprocessing():
#fp=codecs.open(f'F:/MTECH1/NLP/Assignment5/Training_set.txt','r',encoding='utf-8',errors='ignore')
#=nltk.data.load("grammars/large_grammars/atis_sentences.txt")
with open('F:/MTECH1/NLP/Assignment5/Training_set.txt') as f:
lines = f.readlines()
for i in range(0,len(lines)):
lines[i]=re.sub(r'\d+\s:\s',"",lines[i])
#print(lines[i])
lines = [line.rstrip('\n') for line in lines]
#print(lines)
#list_sentences=sent_tokenize(s)
"""parser = nltk.parse.BottomUpChartParser(grammer)
for i in list_sentences:
i=word_tokenize(i)
for tree in parser.parse(i):
result=list(tree)
print(result)
for tree in result:
tree.draw()"""
#print(lines)
return lines
lines = data_preprocessing()
def grammer_parse():
grammer=(nltk.data.load("grammars/large_grammars/atis.cfg"))
grammar=grammer.chomsky_normal_form(new_token_padding='#',flexible=False)
grammar_dict={}
for production in grammar.productions():
prod=list(production.rhs())
prod_rhs=" "
for i in prod:
prod_rhs=prod_rhs+" "+str(i)
prod_rhs=prod_rhs.strip()
if prod_rhs in grammar_dict.keys():
temp1=production.lhs()
grammar_dict[prod_rhs].append(temp1)
else:
temp1=production.lhs()
grammar_dict[prod_rhs]=[temp1]
#print(len(grammar_dict))
return grammar_dict
grammar=grammer_parse()
def parse(lines,grammar):
line=[]
line=lines[56].split()
line.insert(0," ")
#x="i need a flight from pittsburgh to newark on monday ."
#line=x.split()
#line.insert(0," ")
length=len(line)
print(line)
tree_set=set()
parse_table=[[ set() for col in range(length+1)] for row in range(length+1)]
back_table=[[ [] for col in range(length+1)] for row in range(length+1)]
#grammer=(nltk.data.load("grammars/large_grammars/atis.cfg"))
#print((grammar))
#grammar=(nltk.data.load("grammars/sample_grammars/toy.cfg"))
#print(type(grammer))
#grammar=grammer.chomsky_normal_form(new_token_padding='#',flexible=False)
#print(grammar)
for k in range(1,len(line)):
if line[k] in grammar.keys():
lhs=grammar[line[k]]
for l in lhs:
parse_table[k][k].add(l)
back_table[k][k].append(BackNode(None,None,l,line[k]))
for w in range(2,length):
#print("*")
for s in range(1,length-w+1):
#print("**")
end=w+s
for m in range(s,end-1):
#print("***")
for p in parse_table[s][m]:
for q in parse_table[m+1][end-1]:
#print(q)
x=str(p)+" "+str(q)
#print(x)
if x in grammar.keys() and (len(x.split())==2):
lhs=grammar[x]
#print(s,m)
for l in lhs:
parse_table[s][end-1].add(l)
prod=x.split()
for r1 in back_table[s][m]:
for r2 in back_table[m+1][end-1]:
#print(s,m)
#print(m+1,end-1)
if(str(r1.root)==prod[0] and str(r2.root)==prod[1]):
back_table[s][end-1].append(BackNode(r1,r2,l,None))
#print(back_table[s][end-1])
#print(back_table)
if ("SIGMA" in str(parse_table[1][length-1])):
#print(back_table)
for pointer in back_table[1][length-1]:
if(str(pointer.root)=="SIGMA"):
value=trace_tree(pointer)
tree_set.add(value)
print(tree_set)
print(len(tree_set))
        for result in tree_set:
            trees = Tree.fromstring(result)  # draw each distinct parse, not just the last trace
            trees.draw()
else:
print("No parse tree exist")
parse(lines,grammar)
# ===== mullvad_python/__init__.py | linusg/mullpy | MIT =====
"""Initialization package."""
from .api import Mullpy
from .banner import banner
__all__ = ['Mullpy', 'banner']
__version__ = '0.3.1'
# ===== tests/init.py | Animenosekai/yuno | MIT =====
import inspect
import pathlib
import sys
import yuno
# CONSTANTS
TEST_OBJECT = {
"a": 1,
"b": 2,
"c": 3,
"test_dict": {
"a": 1,
"b": 2,
"c": 3
},
"float": 1.1,
"int": 1,
"test_list": [1, 2, 3],
"null": None,
"string": "test",
"boolean": True
}
TEST_LIST = [
"string",
1,
1.1,
None,
[1, 2, 3],
TEST_OBJECT,
True
]
TEST_DOCUMENT = {"_id": "test_document", "hello": "world", "test_list": TEST_LIST, "test_dict": TEST_OBJECT,
"boolean": True, "float": 1.1, "int": 1, "null": None, "string": "test"}
KEPT_DATABASES = {'admin', 'local', 'config'}
REALTIME_TIMEOUT = 5
# UTILITY FUNCTIONS
def get_args(func):
return inspect.signature(func).parameters.keys()
STEP = f"CI/Testing - v{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
def log(message):
try:
yuno.utils.logging.log(message, yuno.utils.logging.LogLevels.INFO, step=STEP)
except Exception:
pass
def close(mongo: yuno.MongoDB, client: yuno.YunoClient):
log("Closing the client connection")
client.close()
if mongo.__process__ is not None:
log("Stopping MongoDB")
mongo.close()
# INITIALIZATION FUNCTIONS
f = pathlib.Path("./MONGO_PORT")
if f.is_file():
MONGO_PORT = int(f.read_text().replace(" ", ""))
else:
MONGO_PORT = 27017
def init_mongo():
log("Initializing MongoDB with port: " + str(MONGO_PORT))
mongo = yuno.MongoDB(port=MONGO_PORT, db_path="./test", log_config={"path": "./test.log"})
try:
log("Starting MongoDB")
mongo.start()
except RuntimeError:
log("MongoDB seems to be already running?")
return mongo
def init_client():
mongo = init_mongo()
client = yuno.YunoClient(mongo)
    for database in set(client.database_names()).difference(KEPT_DATABASES):
log(f"Dropping database: {database}")
del client[database]
return mongo, client
def init_database():
mongo, client = init_client()
log("Initializing Database")
database = yuno.YunoDatabase(client, "test")
log("Cleaning up the database")
for collection in database.list_collection_names():
log(f"Dropping collection: {collection}")
del database[collection]
return mongo, client, database
def init_collection():
mongo, client, database = init_database()
log("Initializing collection")
collection = yuno.YunoCollection(database, "test")
log("Cleaning up the collection")
for document in collection.find(include=["_id"]):
log(f"Deleting document: {document.__id__}")
del collection[document.__id__]
return mongo, client, database, collection
def init_document():
mongo, client, database, collection = init_collection()
log("Initializing Document")
collection.test_document = TEST_DOCUMENT
return mongo, client, database, collection, collection.test_document
# DECORATORS
def use_mongo(func):
def wrapper(*args, **kwargs):
mongo = init_mongo()
avail = get_args(func)
if "mongo" in avail:
kwargs["mongo"] = mongo
result = func(*args, **kwargs)
if mongo.__process__ is not None:
log("Stopping MongoDB")
mongo.close()
return result
return wrapper
def use_client(func):
def wrapper(*args, **kwargs):
mongo, client = init_client()
avail = get_args(func)
for arg, value in [("mongo", mongo), ("client", client)]:
if arg in avail:
kwargs[arg] = value
result = func(*args, **kwargs)
close(mongo, client)
return result
return wrapper
def use_database(func):
def wrapper(*args, **kwargs):
mongo, client, database = init_database()
avail = get_args(func)
for arg, value in [("mongo", mongo), ("client", client), ("database", database)]:
if arg in avail:
kwargs[arg] = value
result = func(*args, **kwargs)
close(mongo, client)
return result
return wrapper
def use_collection(func):
def wrapper(*args, **kwargs):
mongo, client, database, collection = init_collection()
avail = get_args(func)
for arg, value in [("mongo", mongo), ("client", client), ("database", database), ("collection", collection)]:
if arg in avail:
kwargs[arg] = value
result = func(*args, **kwargs)
close(mongo, client)
return result
return wrapper
def verification_callback(obj):
log(f"cursor ~ Verifying object {obj}")
return obj
def use_cursor(func):
def wrapper(*args, **kwargs):
mongo, client, database, collection = init_collection()
collection.hello = {'_id': "hello", 'hello': "world"}
cursor = yuno.cursor.Cursor(collection.__collection__.find({"_id": "hello"}), verification=verification_callback)
avail = get_args(func)
for arg, value in [("mongo", mongo), ("client", client), ("database", database), ("collection", collection), ("cursor", cursor)]:
if arg in avail:
kwargs[arg] = value
result = func(*args, **kwargs)
close(mongo, client)
return result
return wrapper
def use_document(func):
def wrapper(*args, **kwargs):
mongo, client, database, collection, document = init_document()
avail = get_args(func)
for arg, value in [("mongo", mongo), ("client", client), ("database", database), ("collection", collection), ("document", document)]:
if arg in avail:
kwargs[arg] = value
result = func(*args, **kwargs)
close(mongo, client)
return result
return wrapper
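# Usage sketch (hypothetical test function; each decorator above injects
# fixtures by parameter name and tears the MongoDB instance down afterwards):
#
#   @use_collection
#   def test_insert(collection):
#       collection.greeting = {"_id": "greeting", "text": "hello"}
#       assert collection.greeting.text == "hello"
#
#   test_insert()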
# ===== {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/cli.py | go-choppy/choppy-cookiecutter-pypackage | BSD-3-Clause =====
#!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
"""
{{cookiecutter.project_slug}}.cli
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
{{ cookiecutter.project_short_description }}
:copyright: © 2019 by the Choppy Team.
:license: AGPLv3+, see LICENSE for more details.
"""
"""Console script for {{cookiecutter.project_slug}}."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for {{cookiecutter.project_slug}}."""
click.echo("Replace this message by putting your code into "
"{{cookiecutter.project_slug}}.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
# ===== test/classes/test_players.py | teamvolik/teamvolik | MIT =====
import unittest
import src.classes.player as player
class PlayersTest(unittest.TestCase):
    def test_to_string(self):
        self.assertEqual(str(player.Player()), "Player(id=-1, name=UNDEFINED, is_adm=False, games=0, pitch=0.0, hold=0.0, passing=0.0, movement=0.0, attacking=0.0, rating=0.0)")
    def test_to_sqlite_table(self):
        empty_player = player.Player()
        self.assertEqual(empty_player.to_sqlite_table(), (-1, "UNDEFINED", False, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0))
    def test_from_sqlite_table(self):
        self.assertEqual(str(player.Player.from_sqlite_table(None)), str(player.Player()))
        player_info = (1, "AF", True, 3, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0)
        self.assertEqual(str(player.Player.from_sqlite_table(player_info)), str(player.Player(1, "AF", True, 3, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0)))
if __name__ == "__main__":
unittest.main()
# ===== src/asphalt/core/concurrent.py | agronholm/asphalt | Apache-2.0 =====
from __future__ import annotations
__all__ = ("executor",)
import inspect
import sys
from asyncio import get_running_loop
from concurrent.futures import Executor
from functools import partial, wraps
from typing import Awaitable, Callable, TypeVar, overload
from asphalt.core import Context
if sys.version_info >= (3, 10):
from typing import Concatenate, ParamSpec
else:
from typing_extensions import Concatenate, ParamSpec
T_Retval = TypeVar("T_Retval")
P = ParamSpec("P")
@overload
def executor(
func_or_executor: Executor | str,
) -> Callable[
[Callable[Concatenate[Context, P], T_Retval]],
Callable[Concatenate[Context, P], T_Retval | Awaitable[T_Retval]],
]:
...
@overload
def executor(
func_or_executor: Callable[Concatenate[Context, P], T_Retval]
) -> Callable[Concatenate[Context, P], T_Retval | Awaitable[T_Retval]]:
...
def executor(
func_or_executor: Executor | str | Callable[Concatenate[Context, P], T_Retval]
) -> (
Callable[
[Callable[Concatenate[Context, P], T_Retval]],
Callable[Concatenate[Context, P], T_Retval | Awaitable[T_Retval]],
]
| Callable[Concatenate[Context, P], T_Retval | Awaitable[T_Retval]]
):
"""
Decorate a function to run in an executor.
If no executor (or ``None``) is given, the current event loop's default executor is
used. Otherwise, the argument must be a PEP 3148 compliant thread pool executor or
the name of an :class:`~concurrent.futures.Executor` instance.
If a decorated callable is called in a worker thread, the executor argument is
ignored and the wrapped function is called directly.
Callables wrapped with this decorator must be used with ``await`` when called in the
event loop thread.
Example use with the default executor (``None``)::
@executor
def this_runs_in_threadpool(ctx):
return do_something_cpu_intensive()
async def request_handler(ctx):
result = await this_runs_in_threadpool(ctx)
With a named :class:`~concurrent.futures.Executor` resource::
@executor('special_ops')
def this_runs_in_threadpool(ctx):
return do_something_cpu_intensive()
async def request_handler(ctx):
result = await this_runs_in_threadpool(ctx)
:param func_or_executor: either a callable (when used as a decorator), an executor
instance or the name of an :class:`~concurrent.futures.Executor` resource
"""
def outer(
func: Callable[Concatenate[Context, P], T_Retval]
) -> Callable[Concatenate[Context, P], T_Retval | Awaitable[T_Retval]]:
def wrapper(
ctx: Context, *args: P.args, **kwargs: P.kwargs
) -> T_Retval | Awaitable[T_Retval]:
try:
loop = get_running_loop()
except RuntimeError:
# Event loop not available -- we're in a worker thread
return func(ctx, *args, **kwargs)
# Resolve the executor resource name to an Executor instance
_executor: Executor | None
if isinstance(executor, str):
_executor = ctx.require_resource(Executor, executor)
else:
_executor = executor
callback = partial(func, ctx, *args, **kwargs)
return loop.run_in_executor(_executor, callback)
assert not inspect.iscoroutinefunction(
func
), "Cannot wrap coroutine functions to be run in an executor"
return wraps(func)(wrapper)
executor: Executor | str | None = None
if isinstance(func_or_executor, (str, Executor)):
executor = func_or_executor
return outer
else:
return outer(func_or_executor)
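A minimal usage sketch for the decorator above, assuming an asphalt 4.x-style Context whose add_resource accepts a types argument; the resource name 'special_ops' and the CPU-bound function are illustrative. (Context and Executor are already imported at the top of this module.)

import asyncio
from concurrent.futures import ThreadPoolExecutor

@executor("special_ops")
def crunch(ctx: Context, n: int) -> int:
    # Runs in the named executor when called from the event loop thread
    return sum(i * i for i in range(n))

async def main() -> None:
    async with Context() as ctx:
        ctx.add_resource(ThreadPoolExecutor(2), "special_ops", types=[Executor])
        print(await crunch(ctx, 10_000))

asyncio.run(main())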
| 31.737288
| 88
| 0.666489
| 0
| 0
| 0
| 0
| 378
| 0.100935
| 0
| 0
| 1,469
| 0.392256
|
18817926b7a114ee1828bddf7e74ff4c0f734e43
| 2,309
|
py
|
Python
|
src/templates/rsc/year_test.py
|
bradunov/shkola
|
6ef057f5bd483318bf5763392972d48de481d0fb
|
[
"MIT"
] | 2
|
2019-08-25T09:37:27.000Z
|
2021-01-25T20:22:30.000Z
|
src/templates/rsc/year_test.py
|
bradunov/shkola
|
6ef057f5bd483318bf5763392972d48de481d0fb
|
[
"MIT"
] | 28
|
2019-07-04T19:53:36.000Z
|
2020-10-24T13:27:56.000Z
|
src/templates/rsc/year_test.py
|
bradunov/shkola
|
6ef057f5bd483318bf5763392972d48de481d0fb
|
[
"MIT"
] | null | null | null |
import jinja2
page = {}
page['title'] = 'Shkola'
page['item_path'] = '../src/'
page['google_signin_client_id'] = ""
page['google_site_verification'] = ""
page['button'] = {
'width' : '137px',
'height' : '140px',
'font_size' : '111px',
'margin' : '10px',
'choices' : []
}
page['button']['choices'].append({
'title' : '1',
'obj_type' : 'A',
'front_color' : '#ff6956',
'back_color' : '#f9f9f9',
'link' : 'href="1"'
})
page['button']['choices'].append({
'title' : '2',
'obj_type' : 'A',
'front_color' : '#489cba',
'back_color' : '#f9f9f9',
'link' : 'href="2"'
})
page['button']['choices'].append({
'title' : '3',
'obj_type' : 'A',
'front_color' : '#ff6956',
'back_color' : '#f9f9f9',
'link' : 'href="1"'
})
page['button']['choices'].append({
'title' : '4',
'obj_type' : 'A',
'front_color' : '#489cba',
'back_color' : '#f9f9f9',
'link' : 'href="2"'
})
page['menu'] = [
{
'name' : 'Zadaci',
'submenu' : {
'id' : 'zadaci',
'options' : [
{
'name' : 'Cetvrti',
'link' : 'C',
'submenu' : {
'id' : 'cetvrti',
'options' : [
{ 'name' : 'Brojevi', 'link' : '1'},
{ 'name' : 'Geometrija', 'link' : '2'},
{ 'name' : 'Razlomci', 'link' : '3'}
]
}
},
{
'name' : 'Treci',
'link' : 'T',
'submenu' : {
'id' : 'treci',
'options' : [
{ 'name' : 'Brojevi', 'link' : '1'},
{ 'name' : 'Geometrija', 'link' : '2'},
{ 'name' : 'Razlomci', 'link' : '3'}
]
}
}
]
}
},
{
'name' : 'Rezultati',
'link' : 'R'
}
]
file_loader = jinja2.FileSystemLoader("..")
env = jinja2.Environment(loader=file_loader)
template = env.get_template("rsc/year.html.j2")
print(template.render(template_params=page))
| 22.201923
| 67
| 0.374188
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 920
| 0.398441
|
1882943906f0dcab9b6d642fa9c4ad632eb884ac
| 19,771
|
py
|
Python
|
merganser/conflict_prediction.py
|
ualberta-smr/merganser
|
9ce9acc2a187d165c923f4a6461bd82165cda764
|
[
"MIT"
] | 6
|
2019-12-04T06:29:52.000Z
|
2020-09-28T01:27:17.000Z
|
merganser/conflict_prediction.py
|
ualberta-smr/merganser
|
9ce9acc2a187d165c923f4a6461bd82165cda764
|
[
"MIT"
] | null | null | null |
merganser/conflict_prediction.py
|
ualberta-smr/merganser
|
9ce9acc2a187d165c923f4a6461bd82165cda764
|
[
"MIT"
] | 4
|
2019-04-25T21:07:20.000Z
|
2021-11-22T15:04:04.000Z
|
import logging
import json
import glob
import pandas as pd
import multiprocessing
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
from sklearn.model_selection import GridSearchCV, KFold
from sklearn.model_selection import cross_val_predict
from sklearn.decomposition import IncrementalPCA
from scipy.stats import spearmanr
import config
from util import *
np.random.seed(config.RANDOM_SEED)
repo_lang = Repository_language()
def store_classification_result(model_name, language, model_classification_report, classification_results):
"""
Stores the result of the classifier
:param model_name: the classification type
:param language: programming language
    :param model_classification_report: the plain-text classification report
    :param classification_results: the results dict, serialized as JSON
"""
    with open('{}classification_result_raw_{}_{}.txt'.format(config.PREDICTION_RESULT_PATH, model_name, language), 'w') as f:
        f.write(model_classification_report)
    with open('{}classification_result_json_{}_{}.json'.format(config.PREDICTION_RESULT_PATH, model_name, language), 'w') as f:
        f.write(json.dumps(classification_results))
def data_classification_wo_cv(language, repo, data_train, label_train, data_test, label_test, random_seed=config.RANDOM_SEED, job_num=multiprocessing.cpu_count()):
"""
    Trains and evaluates the classifiers on a chronological train/test split
    :param language: programming language
    :param repo: repository name
    :param data_train: training data
    :param label_train: training labels
    :param data_test: test data
    :param label_test: test labels
    :param random_seed: the random seed
    :param job_num: the number of cores to use
"""
# CV
inner_cv = KFold(n_splits=config.FOLD_NUM, shuffle=True, random_state=random_seed)
outer_cv = KFold(n_splits=config.FOLD_NUM, shuffle=True, random_state=random_seed)
# Hyper-parameters
tree_param = {'min_samples_leaf': config.MIN_SAMPLE_LEAVES, 'min_samples_split': config.MIN_SAMPLE_SPLIT,
'max_depth': config.TREE_MAX_DEPTH}
forest_param = {'n_estimators': config.ESTIMATOR_NUM, 'min_samples_leaf': config.MIN_SAMPLE_LEAVES,
'min_samples_split': config.MIN_SAMPLE_SPLIT}
boosting_param = {'n_estimators': config.ESTIMATOR_NUM, 'learning_rate': config.LEARNING_RATE}
# Grid search definition
grid_searches = [
GridSearchCV(DecisionTreeClassifier(class_weight='balanced', random_state = random_seed),
tree_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION)
, GridSearchCV(RandomForestClassifier(class_weight='balanced', n_jobs=job_num, random_state=random_seed),
forest_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION)
, GridSearchCV(ExtraTreesClassifier(n_jobs=job_num, class_weight='balanced', random_state=random_seed),
forest_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION),
GridSearchCV(AdaBoostClassifier(base_estimator=DecisionTreeClassifier(class_weight = 'balanced',
random_state=random_seed,
max_depth=2),
algorithm='SAMME.R', random_state=random_seed),
boosting_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION)
]
# Fitting the classifiers
classification_results = {}
res = []
for model in grid_searches:
# Model training/testing
model.score_sample_weight = True
model.fit(data_train, label_train)
        model_name = type(model.best_estimator_).__name__
model_best_param = model.best_params_
predicted_label = model.best_estimator_.predict(data_test)
t = get_metrics(label_test, predicted_label)
t['model_name'] = model_name
t['language'] = language
t['repository'] = repo
res.append(t)
return res
def data_classification(language, data, label, random_seed=config.RANDOM_SEED, job_num=multiprocessing.cpu_count()):
"""
Trains the classifier
:param language: programming language
:param data: input data
:param label: input labels
    :param random_seed: the random seed
:param job_num: the number of cores to use
"""
# CV
inner_cv = KFold(n_splits=config.FOLD_NUM, shuffle=True, random_state=random_seed)
outer_cv = KFold(n_splits=config.FOLD_NUM, shuffle=True, random_state=random_seed)
# Hyper-parameters
tree_param = {'min_samples_leaf': config.MIN_SAMPLE_LEAVES, 'min_samples_split': config.MIN_SAMPLE_SPLIT,
'max_depth': config.TREE_MAX_DEPTH}
forest_param = {'n_estimators': config.ESTIMATOR_NUM, 'min_samples_leaf': config.MIN_SAMPLE_LEAVES,
'min_samples_split': config.MIN_SAMPLE_SPLIT}
boosting_param = {'n_estimators': config.ESTIMATOR_NUM, 'learning_rate': config.LEARNING_RATE}
# Grid search definition
grid_searches = [
GridSearchCV(DecisionTreeClassifier(class_weight='balanced', random_state = random_seed),
tree_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION),
GridSearchCV(RandomForestClassifier(class_weight='balanced', n_jobs=job_num, random_state = random_seed),
forest_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION),
GridSearchCV(ExtraTreesClassifier(n_jobs=job_num, class_weight='balanced', random_state = random_seed),
forest_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION),
GridSearchCV(AdaBoostClassifier(base_estimator=DecisionTreeClassifier(class_weight = 'balanced',
random_state = random_seed,
max_depth=2),
algorithm='SAMME.R', random_state=random_seed),
boosting_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION)
]
# Fitting the classifiers
classification_results = {}
for model in grid_searches:
# Model training/testing
model.score_sample_weight = True
model.fit(data, label)
        model_name = type(model.best_estimator_).__name__
model_best_param = model.best_params_
predicted_label = cross_val_predict(model.best_estimator_, X=data, y=label, cv=outer_cv, n_jobs=job_num)
model_accuracy = accuracy_score(label, predicted_label)
model_confusion_matrix = confusion_matrix(label, predicted_label)
model_classification_report = classification_report(label, predicted_label)
classification_results[model_name] = {}
classification_results[model_name]['best_params'] = model_best_param
classification_results[model_name]['accuracy'] = model_accuracy
classification_results[model_name]['confusion_matrix'] = model_confusion_matrix.tolist()
classification_results[model_name]['classification_report'] = model_classification_report
print(model_classification_report)
## Save the classification result
#store_classification_result(model_name, language, model_classification_report, classification_results)
def get_best_decision_tree(data, label, random_seed=config.RANDOM_SEED, job_num=multiprocessing.cpu_count()):
"""
Trains the best decision tree
:param data: the data
:param label: the labels
:param random_seed: the random seed
    :param job_num: the number of cores to use
    :return: the best decision tree estimator
"""
# CV
inner_cv = KFold(n_splits=config.FOLD_NUM, shuffle=True, random_state=random_seed)
# Train/test
tree_param = {'min_samples_leaf': config.MIN_SAMPLE_LEAVES, 'min_samples_split': config.MIN_SAMPLE_SPLIT,
'max_depth': config.TREE_MAX_DEPTH}
grid_search = GridSearchCV(DecisionTreeClassifier(class_weight='balanced', random_state=random_seed),
tree_param, cv=inner_cv, n_jobs=job_num, scoring=config.SCORING_FUNCTION)
grid_search.score_sample_weight = True
grid_search.fit(data, label)
return grid_search.best_estimator_
def get_feature_importance_by_model(model):
"""
Returns the features importance of a model
:param model: the classifier
:return: The list of feature importance
"""
return model.feature_importances_
def get_feature_set(data):
"""
Returns the feature sets separately
:param data: The input data
"""
# Data separation of feature sets
parallel_changes = data[:, 0].reshape(-1, 1)
commit_num = data[:, 1].reshape(-1, 1)
commit_density = data[:, 2].reshape(-1, 1)
file_edits = IncrementalPCA(n_components=1).fit_transform(data[:, 3:8])
line_edits = IncrementalPCA(n_components=1).fit_transform(data[:, 8:10])
dev_num = data[:, 10].reshape(-1, 1)
keywords = IncrementalPCA(n_components=1).fit_transform(data[:, 11:23])
message = IncrementalPCA(n_components=1).fit_transform(data[:, 23:27])
duration = data[:, 27].reshape(-1, 1)
feature_sets = ['prl_changes', 'commit_num', 'commit_density', 'file_edits', 'line_edits', 'dev_num',
'keywords', 'message', 'duration']
return feature_sets, parallel_changes, commit_num, commit_density, file_edits, line_edits, dev_num, keywords\
, message, duration
def save_feature_correlation(language, data, label):
"""
Store the feature correlation of the data with the label
:param language: the programming language
:param data: the data
:param label: the label
"""
feature_sets, parallel_changes, commit_num, commit_density, file_edits, line_edits, dev_num, keywords, message\
, duration = get_feature_set(data)
features = [parallel_changes, commit_num, commit_density, file_edits, line_edits, dev_num, keywords, message
, duration]
for i, feature in enumerate(features):
corr, p_value = spearmanr(feature, label)
        with open('{}feature_correlation_{}.txt'.format(config.PREDICTION_RESULT_PATH, language), 'a') as f:
            f.write('{}:\t\t{} \t {}\n'.format(feature_sets[i], round(corr, 2), round(p_value, 2)))
def save_feature_correlation_dict(data, label):
"""
Store the feature correlation of the data with the label
:param data: the data
:param label: the label
"""
feature_sets, parallel_changes, commit_num, commit_density, file_edits, line_edits, dev_num, keywords, message\
, duration = get_feature_set(data)
features = [parallel_changes, commit_num, commit_density, file_edits, line_edits, dev_num, keywords, message
, duration]
correlation = {}
try:
for i, feature in enumerate(features):
corr, p_value = spearmanr(feature, label)
correlation[feature_sets[i] + '_corr'] = corr
correlation[feature_sets[i] + '_p_value'] = p_value
    except Exception:
        # Ignore correlation failures (e.g. constant features) and return
        # whatever was computed so far.
        pass
    return correlation
def save_feature_importance(repo_name, data, label):
"""
    Computes the feature importance for one repository
    :param repo_name: the repository name
:param data: the data
:param label: the label
"""
data = data.values
feature_sets, parallel_changes, commit_num, commit_density, file_edits, line_edits, dev_num, keywords, message, duration \
= get_feature_set(data)
feature_data = np.concatenate((parallel_changes, commit_num, commit_density, file_edits, line_edits,
dev_num, keywords, message, duration), axis=1)
return get_feature_importance_by_model(get_best_decision_tree(feature_data, label))
def baseline_classification(language, data, label):
"""
Classify the baseline data (parallel changed files)
:param language: The programming language
:param data: The data
:param label: The labels
"""
feature_sets, parallel_changes, commit_num, commit_density, file_edits, line_edits, dev_num, keywords, message \
, duration = get_feature_set(data)
language = language + '__baseline'
data_classification(language, parallel_changes, label)
############################################
############################################
from sklearn import metrics
import autosklearn.classification
from sklearn.svm import SVC
def get_metrics(label_test, predicted_labels):
result = {}
result['roc_curve'] = metrics.roc_curve(label_test, predicted_labels)
result['confusion_matrix'] = metrics.confusion_matrix(label_test, predicted_labels)
result['classification_report'] = metrics.classification_report(label_test, predicted_labels)
result['accuracy_score'] = metrics.accuracy_score(label_test, predicted_labels)
result['roc_auc_score'] = metrics.roc_auc_score(label_test, predicted_labels)
result['precision_score_conflict'] = metrics.precision_score(label_test, predicted_labels)
result['precision_score_not_conflict'] = metrics.precision_score(label_test, predicted_labels,pos_label=0)
result['precision_score_average'] = metrics.precision_score(label_test, predicted_labels, average='weighted')
result['recall_score_conflict'] = metrics.recall_score(label_test, predicted_labels)
result['recall_score_not_conflict'] = metrics.recall_score(label_test, predicted_labels,pos_label=0)
result['recall_score_average'] = metrics.recall_score(label_test, predicted_labels, average='weighted')
result['f1_score_conflict'] = metrics.f1_score(label_test, predicted_labels)
result['f1_score_not_conflict'] = metrics.f1_score(label_test, predicted_labels,pos_label=0)
result['f1_score_average'] = metrics.f1_score(label_test, predicted_labels, average='weighted')
result['conflict_rate'] = len([i for i in label_test if i == 1]) / len(label_test)
return result
def get_decision_tree_result(data_train, label_train, data_test, label_test):
clf = DecisionTreeClassifier(class_weight='balanced').fit(data_train, label_train)
predicted_labels = clf.predict(data_test)
return get_metrics(label_test, predicted_labels)
def get_random_forest_result(data_train, label_train, data_test, label_test):
clf = RandomForestClassifier(class_weight='balanced').fit(data_train, label_train)
predicted_labels = clf.predict(data_test)
return get_metrics(label_test, predicted_labels)
def get_svm_result(data_train, label_train, data_test, label_test):
clf = SVC(C=1.0, kernel='linear', class_weight='balanced').fit(data_train, label_train)
predicted_labels = clf.predict(data_test)
return get_metrics(label_test, predicted_labels)
def get_auto_scikit_result(data_train, label_train, data_test, label_test):
automl = autosklearn.classification.AutoSklearnClassifier(
time_left_for_this_task= 60 * 60,
per_run_time_limit=300,
tmp_folder='/tmp/autosklearn_sequential_example_tmp1111',
output_folder='/tmp/autosklearn_sequential_example_out1111',
)
automl.fit(data_train, label_train, metric=autosklearn.metrics.roc_auc)
predicted_labels = automl.predict(data_test)
result = get_metrics(label_test, predicted_labels)
result['show_models'] = automl.show_models()
result['sprint_statistics'] = automl.sprint_statistics()
return result
if __name__ == "__main__":
# Logging
logging.basicConfig(level=logging.INFO,
format='%(levelname)s in %(threadName)s - %(asctime)s by %(name)-12s : %(message)s',
datefmt='%y-%m-%d %H:%M:%S')
logging.info('Train/test of merge conflict prediction')
# Data classification
data_files = glob.glob(config.PREDICTION_CSV_PATH + 'data_*')
label_files = glob.glob(config.PREDICTION_CSV_PATH + 'label_*')
repos_set = [files.split('/')[-1].split('_')[3].replace('.csv', '') for files in data_files]
classification_result = []
feature_importance = []
languages = []
corr = []
for ind, data_path in enumerate(data_files):
data_tmp = pd.read_csv(data_path).sort_values(by=['merge_commit_date'])
label_tmp = pd.read_csv(data_path.replace('data_prediction', 'label_prediction')).sort_values(by=['merge_commit_date'])
data_tmp = data_tmp.drop('merge_commit_date', axis=1)
label_tmp = label_tmp.drop('merge_commit_date', axis=1)
# Correlation
try:
tmp_corr = save_feature_correlation_dict(data_tmp.to_numpy(), label_tmp.to_numpy())
if len(tmp_corr) > 0:
                tmp_corr['language'] = repo_lang.get_lang(repos_set[ind].lower())
tmp_corr['repository'] = repos_set[ind]
corr.append(tmp_corr)
except:
pass
        continue  # NOTE: short-circuits the loop, so only correlations are computed below
train_ind = int(data_tmp.shape[0] * config.TRAIN_RATE)
data_train = data_tmp.iloc[0:train_ind, :]
data_test = data_tmp.iloc[train_ind:-1, :]
label_train = label_tmp.iloc[0:train_ind, :]['is_conflict'].tolist()
label_test = label_tmp.iloc[train_ind:-1, :]['is_conflict'].tolist()
if len(label_test) != data_test.shape[0]:
print('Inconsistent data: {}'.format(repos_set[ind]))
continue
if data_test.shape[0] < 50:
print('Not enough merge scenarios: {}'.format(repos_set[ind]))
continue
if len(set(label_test)) != 2 or len(set(label_train)) != 2:
print('One class is missed: {}'.format(repos_set[ind]))
continue
if len([i for i in label_test if i == 1]) < 10:
            print('Not enough conflicting merges in the test batch for evaluation: {}'.format(repos_set[ind]))
continue
# k = k + data_tmp.shape[0]
try:
res = data_classification_wo_cv(repo_lang.get_lang(repos_set[ind].lower()), repos_set[ind] ,data_train, label_train, data_test, label_test)
classification_result = classification_result + res
feature_importance.append(save_feature_importance(repos_set[ind], data_train, label_train))
languages.append(repo_lang.get_lang(repos_set[ind].lower()))
except Exception as e:
print('Error - {}'.format(e))
continue
corr_df = pd.DataFrame(corr)
corr_df.to_csv(f'corr_{config.RANDOM_SEED}.csv')
    exit()  # NOTE: stops here, so the feature-importance and classification outputs below are never written
# Feature importance
feature_importance = pd.DataFrame(feature_importance, columns=['prl_changes', 'commit_num', 'commit_density', 'file_edits', 'line_edits', 'dev_num',
'keywords', 'message', 'duration'])
feature_importance['language'] = pd.Series(languages)
feature_importance['repository'] = pd.Series(repos_set)
    feature_importance = feature_importance.dropna()
feature_importance.to_csv(f'feature_importance_{config.RANDOM_SEED}.csv')
    feature_importance_summary = feature_importance.drop('repository', axis=1).groupby('language').agg('median')
    feature_importance_summary.to_csv(f'feature_importance_summary_{config.RANDOM_SEED}.csv')
# Classification result
classification_result_df = pd.DataFrame(classification_result)
classification_result_df.to_csv(f'res_{config.RANDOM_SEED}.csv')
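The main block above evaluates each repository on a chronological split rather than shuffled cross-validation, so every test merge occurs after every training merge. A small self-contained sketch of that split, using the same merge_commit_date column and a train rate like config.TRAIN_RATE (the toy function is illustrative, not part of the pipeline):

import pandas as pd

def chronological_split(df: pd.DataFrame, train_rate: float):
    # Sort by merge date so every test merge happens after every training
    # merge -- no look-ahead leakage into the training set.
    df = df.sort_values(by=['merge_commit_date'])
    cut = int(df.shape[0] * train_rate)
    return df.iloc[:cut], df.iloc[cut:]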
| 43.452747
| 163
| 0.694401
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 4,529
| 0.229073
|
188415bc541aaa91a4194a25f98e0ed82bdb2af2
| 25,321
|
py
|
Python
|
lib/wx_lib.py
|
liveonnet/p3_server
|
2dab6eab6e98b3ef0d26093eb461c635f5bc07b4
|
[
"Apache-2.0"
] | null | null | null |
lib/wx_lib.py
|
liveonnet/p3_server
|
2dab6eab6e98b3ef0d26093eb461c635f5bc07b4
|
[
"Apache-2.0"
] | null | null | null |
lib/wx_lib.py
|
liveonnet/p3_server
|
2dab6eab6e98b3ef0d26093eb461c635f5bc07b4
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import time
import json
import random
import string
import asyncio
import aiohttp
from aiohttp.resolver import AsyncResolver
from hashlib import md5
from urllib.parse import quote
#-#from operator import itemgetter
#-#from itertools import chain
#-#from cStringIO import StringIO
if __name__ == '__main__':
import sys
import os
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
from lib.conf_lib import conf
from lib.WXBizMsgCrypt import WXBizMsgCrypt
from lib.tools_lib import pcformat
from lib.tools_lib import parseXml2Dict
from lib.applog import app_log
info, debug, error = app_log.info, app_log.debug, app_log.error
class WXManager(object):
"""微信公众号功能管理类
"""
    # Text message template
TPL_RETURN_TEXT = '''<xml>
<ToUserName><![CDATA[{TOUSER}]]></ToUserName>
<FromUserName><![CDATA[{FROMUSER}]]></FromUserName>
<CreateTime>{TIME}</CreateTime>
<MsgType><![CDATA[text]]></MsgType>
<Content><![CDATA[{CONTENT}]]></Content>
</xml>'''
    # Image message template
TPL_RETURN_IMAGE = '''<xml>
<ToUserName><![CDATA[{TOUSER}]]></ToUserName>
<FromUserName><![CDATA[{FROMUSER}]]></FromUserName>
<CreateTime>{TIME}</CreateTime>
<MsgType><![CDATA[image]]></MsgType>
<Image>
<MediaId><![CDATA[{MEDIA_ID}]]></MediaId>
</Image>
</xml>'''
    # Unified order (payment request) template
    TPL_UNIFIED_ORDER = '''<xml>
<appid><![CDATA[{appid}]]></appid>
<attach><![CDATA[{attach}]]></attach>
<body><![CDATA[{body}]]></body>
<detail><![CDATA[{detail}]]></detail>
<mch_id><![CDATA[{mch_id}]]></mch_id>
<nonce_str><![CDATA[{nonce_str}]]></nonce_str>
<notify_url><![CDATA[{notify_url}]]></notify_url>
<openid><![CDATA[{openid}]]></openid>
<out_trade_no><![CDATA[{out_trade_no}]]></out_trade_no>
<spbill_create_ip><![CDATA[{spbill_create_ip}]]></spbill_create_ip>
<total_fee><![CDATA[{total_fee}]]></total_fee>
<trade_type><![CDATA[{trade_type}]]></trade_type>
<trade_expire><![CDATA[{trade_expire}]]></trade_expire>
<time_start><![CDATA[{time_start}]]></time_start>
<device_info><![CDATA[{device_info}]]></device_info>
<fee_type><![CDATA[{fee_type}]]></fee_type>
<goods_tag><![CDATA[{goods_tag}]]></goods_tag>
<product_id><![CDATA[{product_id}]]></product_id>
<limit_pay><![CDATA[{limit_pay}]]></limit_pay>
<sign><![CDATA[{sign}]]></sign>
</xml>'''
def __init__(self, loop, ch):
#-# self.APPID = conf['wx_appid'].encode('utf8')
#-# self.APPSECRET = conf['wx_appsecret'].encode('utf8')
#-# self.TOKEN = conf['wx_token'].encode('utf8')
#-# self.ENCODINGAESKEY = conf['wx_encodingaeskey'].encode('utf8')
self.APPID = conf['wx_appid']
self.APPSECRET = conf['wx_appsecret']
self.TOKEN = conf['wx_token']
self.ENCODINGAESKEY = conf['wx_encodingaeskey']
resolver = AsyncResolver(nameservers=['8.8.8.8', '8.8.4.4'])
conn = aiohttp.TCPConnector(resolver=resolver, limit=10)
self.loop = loop
if self.loop:
self.sess = aiohttp.ClientSession(connector=conn, headers={'User-Agent': conf['user_agent']}, loop=self.loop)
else:
self.sess = aiohttp.ClientSession(connector=conn, headers={'User-Agent': conf['user_agent']})
        self.ch = ch  # CommonHandler instance, provides db/cache support
async def clean(self):
if self.sess:
await self.sess.close()
def setCommHandler(self, ch):
"""用于延后设置或中途替换
"""
self.ch = ch
async def getAccessToken(self):
"""取access token
"""
access_token = None
r = await self.ch.getCache()
c_k = '_Z_WX_ACCESS_TOKEN'
access_token = await r.getObj(c_k)
if access_token is None:
url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid={APPID}&secret={APPSECRET}'.format(APPID=self.APPID, APPSECRET=self.APPSECRET)
try:
resp = await self.sess.get(url)
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if 'errcode' in j_data:
info('errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
else:
access_token = j_data['access_token']
expires_in = j_data['expires_in']
info('access token[:20]%s expire in %s', access_token[:20], expires_in)
await r.setObj(c_k, access_token, expires_in)
else:
info('cache hit %s', c_k)
return access_token
async def getIpList(self):
"""取微信服务器ip列表
"""
ip_list = []
r = await self.ch.getCache()
c_k = '_Z_WX_IP_LIST'
ip_list = await r.getObj(c_k)
if ip_list is None:
access_token = await self.getAccessToken()
if access_token:
url = 'https://api.weixin.qq.com/cgi-bin/getcallbackip?access_token={ACCESS_TOKEN}'.format(ACCESS_TOKEN=access_token)
try:
resp = await self.sess.get(url)
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if 'errcode' in j_data:
info('errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
                        # on error, do not retry within 5 seconds
await r.setObj(c_k, ip_list, 5)
else:
ip_list = j_data['ip_list']
#-# info('ip_list: %s', ip_list)
await r.setObj(c_k, ip_list, 3600)
else:
info('can\'t get access_token, no ip list returned.')
else:
info('cache hit %s', c_k)
return ip_list
def createText(self, nonce, encrypt_type, from_user, to_user, content):
        u'''Build a text message.
        ``content`` is the text body.
        '''
ret_data = b'success'
to_xml = WXManager.TPL_RETURN_TEXT.format(TOUSER=from_user, FROMUSER=to_user, TIME=int(time.time()), CONTENT=content)
#-# info('to_xml %s', to_xml)
#-# info('encrypt_type %s', encrypt_type)
if encrypt_type == 'aes':
encryp_helper = WXBizMsgCrypt(self.TOKEN, self.ENCODINGAESKEY, self.APPID)
ret, encrypt_xml = encryp_helper.EncryptMsg(to_xml, nonce)
if not ret:
#-# info('encrypt_xml %s', encrypt_xml)
ret_data = encrypt_xml
else:
                info('encryption failed %s %s', ret, encrypt_xml)
return ret_data
def createImage(self, nonce, encrypt_type, from_user, to_user, media_id):
        u'''Build an image message.
        ``media_id`` is the media asset id.
        '''
ret_data = 'success'
to_xml = WXManager.TPL_RETURN_IMAGE.format(TOUSER=from_user, FROMUSER=to_user, TIME=int(time.time()), MEDIA_ID=media_id)
if encrypt_type == 'aes':
encryp_helper = WXBizMsgCrypt(self.TOKEN, self.ENCODINGAESKEY, self.APPID)
ret, encrypt_xml = encryp_helper.EncryptMsg(to_xml, nonce)
if not ret:
ret_data = encrypt_xml
else:
                info('encryption failed %s %s', ret, encrypt_xml)
return ret_data
def extractXml(self, nonce, encrypt_type, msg_sign, timestamp, from_xml):
        u'''Parse a received message and return it as a dict.
        '''
d_data = ''
#-# info('nonc %s encrypt_type %s msg_sign %s timestamp %s', nonce, encrypt_type, msg_sign, timestamp)
#-# info('raw data: %s', from_xml)
if encrypt_type == 'aes':
decrypt_helper = WXBizMsgCrypt(self.TOKEN, self.ENCODINGAESKEY, self.APPID)
ret, decryp_xml = decrypt_helper.DecryptMsg(from_xml, msg_sign, timestamp, nonce)
if not ret:
from_xml = decryp_xml
else:
                info('decryption failed %s %s', ret, decryp_xml)
return d_data
# parse to dict
if from_xml:
d_data = parseXml2Dict(from_xml)
        #-# info('received:\n%s', pcformat(d_data))
return d_data
async def getUserInfo(self, openid):
        u'''Fetch basic user info.
        '''
wx_user = None
access_token = await self.getAccessToken()
if access_token:
url = 'https://api.weixin.qq.com/cgi-bin/user/info?access_token={ACCESS_TOKEN}&openid={OPENID}&lang=zh_CN'.format(ACCESS_TOKEN=access_token, OPENID=openid)
try:
resp = await self.sess.get(url)
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if j_data.get('errcode', None):
                    info('error fetching basic user info: errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
else:
wx_user = j_data
else:
            info('can\'t get access_token, no user info returned.')
return wx_user
async def createSelfMenu(self):
        u'''Create the custom menu.
        * True on success
        * False on failure
        '''
ret_data = False
access_token = await self.getAccessToken()
url = 'https://api.weixin.qq.com/cgi-bin/menu/create?access_token={ACCESS_TOKEN}'.format(ACCESS_TOKEN=access_token)
#-# data = {'button': [{'type': 'view', 'name': u'快速下载', 'url': 'http://www.hongbaosuoping.com/client_share/download/download.html'},
#-# {'type': 'view', 'name': u'自助服务', 'url': 'http://www.hongbaosuoping.com/portal.php?mod=topic&topicid=9'},
#-# {'type': 'click', 'name': u'获取验证码', 'key': 'vcode'},
#-# ]
#-# }
login_cfg = {'APPID': self.APPID,
'REDIRECT_URI': quote('http://liveonnet.f3322.net:7777/'),
'SCOPE': 'snsapi_userinfo',
'STATE': 'login',
}
test_cfg = {'APPID': self.APPID,
'REDIRECT_URI': quote('http://weixin.aa123bb.com/wx_auth'),
'SCOPE': 'snsapi_userinfo',
'STATE': 'test',
}
data = {'button': [{'name': u'菜单',
'sub_button': [{'type': 'view',
'name': '商城',
'url': 'https://open.weixin.qq.com/connect/oauth2/authorize?appid={APPID}&redirect_uri={REDIRECT_URI}&response_type=code&scope={SCOPE}&state={STATE}#wechat_redirect'.format(**login_cfg)
},
{'type': 'view',
'name': 'test',
'url': 'https://open.weixin.qq.com/connect/oauth2/authorize?appid={APPID}&redirect_uri={REDIRECT_URI}&response_type=code&scope={SCOPE}&state={STATE}#wechat_redirect'.format(**test_cfg)
},
{'type': 'click',
'name': '二维码',
'key': 'qc_subscribe'
},
{'type': 'click',
'name': '不存在',
'key': 'not_exist'
},
]
},
{'type': 'view',
'name': u'快速下载',
'url': 'http://cn.bing.com/'
},
{'type': 'view',
'name': u'自助服务',
'url': 'http://m.baidu.com/'
},
]
}
#-# info('url: %s', url) # debug only
#-# info('body: %s', json.dumps(data))
try:
resp = await self.sess.post(url, data=json.dumps(data, ensure_ascii=False))
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if j_data['errcode']:
                info('error creating menu: errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
else:
ret_data = True
return ret_data
async def getSelfMenu(self):
        u'''Fetch the current custom-menu configuration.
        * On success, returns the menu configuration as JSON
        * On failure, returns None
        '''
ret_data = None
access_token = await self.getAccessToken()
url = '''https://api.weixin.qq.com/cgi-bin/get_current_selfmenu_info?access_token={ACCESS_TOKEN}'''.format(ACCESS_TOKEN=access_token)
info('url: %s', url)
try:
resp = await self.sess.get(url)
j_data = await resp.json()
ret_data = j_data
except:
error('', exc_info=True)
return ret_data
async def sendTplMsg(self, tpl_id, openid, url, in_data):
        u'''Send a template message.
        * True on success
        * False on failure
        '''
ret_data = False
access_token = await self.getAccessToken()
url = '''https://api.weixin.qq.com/cgi-bin/message/template/send?access_token={ACCESS_TOKEN}'''.format(ACCESS_TOKEN=access_token)
data = {'touser': openid,
'template_id': tpl_id,
'url': url,
'data': in_data,
}
info('url: %s', url)
info('data: %s', pcformat(data))
try:
resp = await self.sess.post(url, data=json.dumps(data, ensure_ascii=False))
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if j_data['errcode']:
                info('error sending template message: errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
else:
ret_data = True
return ret_data
async def getMediaId(self, media_type, media_data, key):
        u'''Fetch a media id.
        * ``media_type`` media type, one of image/voice/video/thumb
        * ``media_data`` media payload; if ``media_data`` is non-empty and ``key`` misses the cache, the media is uploaded
        * ``key`` explicit cache key; later calls may pass an empty ``media_data`` to fetch the id of already-uploaded media
        Returns the media_id, which can be used to build an image message.
        '''
media_id = None
d_content_type = {'image': 'image/jpg', # bmp/png/jpeg/jpg/gif
'voice': 'voice/mp3', # mp3/wma/wav/amr
'video': 'video/mp4',
'thumb': 'thumb/jpg',
}
if media_type not in d_content_type:
info('unknown media_type %s', media_type)
return media_id
if not key:
if not media_data:
                info('media_data is empty')
return media_id
key = md5(media_data).hexdigest()
c_k = '_Z_WX_M_%s_%s' % (media_type, key)
r = await self.ch.getCache()
media_id = await r.getObj(c_k)
if not media_id:
            if not media_data:  # cache miss: the media must be uploaded first, so media_data must be non-empty
                info('media_data is empty')
return media_id
access_token = await self.getAccessToken()
url = 'https://api.weixin.qq.com/cgi-bin/media/upload?access_token={ACCESS_TOKEN}&type={MEDIA_TYPE}'.format(ACCESS_TOKEN=access_token, MEDIA_TYPE=media_type)
#-# info('url: %s', url) # debug only
nr_try = 1
while 1:
                boundary = ''.join(random.choice(string.digits) for _ in range(32))
if media_data.find(boundary) == -1:
break
nr_try += 1
headers = {'Content-Type': 'multipart/form-data;boundary=%s' % boundary}
form_body = '--%s\r\n' \
'Content-Disposition: form-data; name="media"; filename="upload.%s"\r\n' \
'Content-Type: %s\r\n' \
'FileLength: %s\r\n\r\n' \
'%s\r\n' \
'--%s--\r\n' \
% (boundary, d_content_type[media_type].split('/')[1], d_content_type[media_type], len(media_data), media_data, boundary)
#-# info('form_body(header part):\n%s', form_body[:form_body.find('\r\n\r\n')]) # debug only
try:
resp = await self.sess.post(url, data=form_body, headers=headers)
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if j_data.get('errcode', None):
                        info('error uploading media: errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
else:
media_id = j_data['media_id']
await r.setObj(c_k, media_id, 86400 * 3)
else:
info('cache hit %s', c_k)
return media_id
async def getOAuthAccessTokenOpenId(self, code):
        u'''Exchange an OAuth code for the web-authorization access_token and openid.
        '''
access_token, openid = None, None
r = await self.ch.getCache()
c_k = '_Z_WX_O_ACCESS_TOKEN_%s' % code
c_data = await r.getObj(c_k)
if c_data is None:
url = 'https://api.weixin.qq.com/sns/oauth2/access_token?appid={APPID}&secret={APPSECRET}&code={CODE}&grant_type=authorization_code'.format(APPID=self.APPID, APPSECRET=self.APPSECRET, CODE=code)
try:
resp = await self.sess.get(url)
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if 'errcode' in j_data:
info('errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
else:
access_token = j_data['access_token']
expires_in = j_data['expires_in']
openid = j_data['openid']
#-# scope = j_data['scope']
#-# unionid = j_data.get('unionid', '')
info('access token[:20]%s expire in %s openid %s', access_token[:20], expires_in, openid)
await r.setObj(c_k, (access_token, openid), expires_in)
else:
info('cache hit %s', c_k)
access_token, openid = c_data
return access_token, openid
async def getOAuthUserInfo(self, access_token, openid):
"""取用户信息(需scope为 snsapi_userinfo)
**参数**
* ``access_token``
* ``openid``
**返回**
* ``openid`` 用户的唯一标识
* ``nickname`` 用户昵称
* ``sex`` 用户的性别,值为1时是男性,值为2时是女性,值为0时是未知
* ``province`` 用户个人资料填写的省份
* ``city`` 普通用户个人资料填写的城市
* ``country`` 国家,如中国为CN
* ``headimgurl`` 用户头像,最后一个数值代表正方形头像大小(有0、46、64、96、132数值可选,0代表640*640正方形头像),用户没有头像时该项为空。若用户更换头像,原有头像URL将失效。
* ``privilege`` 用户特权信息,json 数组,如微信沃卡用户为(chinaunicom)
* ``unionid`` 只有在用户将公众号绑定到微信开放平台帐号后,才会出现该字段。详见:获取用户个人信息(UnionID机制)
"""
wx_user = None
url = 'https://api.weixin.qq.com/sns/userinfo?access_token={ACCESS_TOKEN}&openid={OPENID}&lang=zh_CN'.format(ACCESS_TOKEN=access_token, OPENID=openid)
try:
resp = await self.sess.get(url)
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if 'errcode' in j_data:
info('errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
else:
wx_user = j_data
return wx_user
async def getOAuthUserInfoByCode(self, code, ch):
"""据code获取用户基本信息
"""
wx_user = None
        access_token, openid = await self.getOAuthAccessTokenOpenId(code)
if access_token:
wx_user = await self.getOAuthUserInfo(access_token, openid)
return wx_user
async def checkOAuthAccessToken(self, access_token, openid):
        u'''Check access_token validity.
        '''
rtn = False
url = 'https://api.weixin.qq.com/sns/auth?access_token={ACCESS_TOKEN}&openid={OPENID}'.format(ACCESS_TOKEN=access_token, OPENID=openid)
try:
resp = await self.sess.get(url)
j_data = await resp.json()
except:
error('', exc_info=True)
else:
if j_data['errcode']:
info('errcode %s, errmsg: %s', j_data['errcode'], j_data.get('errmsg', ''))
else:
rtn = True
return rtn
async def addKf(self, kf_account, nickname, password):
        u'''Add a customer-service account.
        '''
ret_data = None
access_token = await self.getAccessToken()
url = '''https://api.weixin.qq.com/customservice/kfaccount/add?access_token={ACCESS_TOKEN}'''.format(ACCESS_TOKEN=access_token)
info('url: %s', url)
data = {'kf_account': kf_account,
'nickname': nickname,
'password': password
}
try:
resp = await self.sess.post(url, data=json.dumps(data, ensure_ascii=False))
j_data = await resp.json()
ret_data = j_data
except:
error('', exc_info=True)
return ret_data
async def updKf(self, kf_account, nickname, password):
"""改客服帐号
"""
ret_data = None
access_token = await self.getAccessToken()
url = '''https://api.weixin.qq.com/customservice/kfaccount/update?access_token={ACCESS_TOKEN}'''.format(ACCESS_TOKEN=access_token)
info('url: %s', url)
data = {'kf_account': kf_account,
'nickname': nickname,
'password': password
}
try:
resp = await self.sess.post(url, data=json.dumps(data, ensure_ascii=False))
j_data = await resp.json()
ret_data = j_data
except:
error('', exc_info=True)
return ret_data
async def delKf(self, kf_account, nickname, password):
"""除客服帐号
"""
ret_data = None
access_token = await self.getAccessToken()
url = '''https://api.weixin.qq.com/customservice/kfaccount/del?access_token={ACCESS_TOKEN}'''.format(ACCESS_TOKEN=access_token)
info('url: %s', url)
data = {'kf_account': kf_account,
'nickname': nickname,
'password': password
}
try:
resp = await self.sess.post(url, data=json.dumps(data, ensure_ascii=False))
j_data = await resp.json()
ret_data = j_data
except:
error('', exc_info=True)
return ret_data
async def kfSendMsg(self, msg_data):
"""发消息 不需要直接调用
"""
ret_data = None
access_token = await self.getAccessToken()
url = '''https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token={ACCESS_TOKEN}'''.format(ACCESS_TOKEN=access_token)
info('url: %s', url)
data = msg_data
try:
resp = await self.sess.post(url, data=json.dumps(data, ensure_ascii=False))
j_data = await resp.json()
ret_data = j_data
except:
error('', exc_info=True)
return ret_data
async def kfSendImageMsg(self, openid, media_id):
"""发送图片消息
"""
data = {'touser': openid,
'msgtype': 'image',
'image': {'media_id': media_id},
}
return await self.kfSendMsg(data)
async def kfSendTextMsg(self, openid, content):
"""发送文本消息
"""
data = {'touser': openid,
'msgtype': 'text',
'text': {'content': content},
}
return await self.kfSendMsg(data)
if __name__ == '__main__':
from lib.handler_lib import CommonHandler
async def test_main(loop):
conf['loop'] = loop
ch = CommonHandler(None, None)
mgr = WXManager(loop, ch)
#-# pass
#-# yield WXManager.sendTplMsg(TPL_SEND_VC, 'owD3VszZ1r115U-DVYLMdCWU1AVE', '',
#-# {'first': {'value': u'尊敬的用户'}, 'number': {'value': str(random.randint(1000, 9999)), 'color': '#FF3300'}, 'remark': {'value': u'该验证码有效期30分钟可输入1次,转发无效。'}})
#-# pic_data = yield WXManager.getQrPicBySceneId(1)
#-# open('/tmp/t.jpg', 'wb').write(pic_data)
#-# pic_data = open('/tmp/t.jpg', 'rb').read()
#-# media_id = yield WXManager.getMediaId('image', pic_data, 'test_qr')
#-# image_data, ticket, expire_at = yield WXManager.getQrPicBySceneId(1)
#-# media_id = await mgr.getMediaId('image', None, key='qrcode_subs')
#-# info('media_id %s', media_id)
#-# r = await mgr.kfSendImageMsg('olQcFt_RHZqgL9CyNuDuyy21hhKg', media_id)
#-# info('r: %s', r)
#-# mgr.getAccessToken()
#-# r = await mgr.getIpList()
r = await mgr.createSelfMenu()
info('r: %s', pcformat(r))
await mgr.clean()
info('ch %s', ch)
ch.clean()
#-# mgr.getSelfMenu()
loop = asyncio.get_event_loop()
try:
task = asyncio.ensure_future(test_main(loop))
loop.run_until_complete(task)
except KeyboardInterrupt:
info('cancel on KeyboardInterrupt..')
task.cancel()
loop.run_forever()
task.exception()
finally:
loop.stop()
sys.exit(0)
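The OAuth helpers above are meant to be chained; a hedged sketch of that flow (the mgr/loop wiring follows test_main above, and the code value comes from WeChat's redirect and is illustrative):

async def oauth_demo(mgr, code):
    # Exchange the redirect code for an access token + openid, verify the
    # token, then fetch the user's profile.
    access_token, openid = await mgr.getOAuthAccessTokenOpenId(code)
    if access_token and await mgr.checkOAuthAccessToken(access_token, openid):
        return await mgr.getOAuthUserInfo(access_token, openid)
    return None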
| 39.50234
| 229
| 0.529995
| 24,190
| 0.910117
| 0
| 0
| 0
| 0
| 20,232
| 0.761202
| 10,664
| 0.401219
|
43e657cee1737539636db5f58dee3a853afc6290
| 1,565
|
py
|
Python
|
django_fuzzytest/management/commands/fuzzytest.py
|
creotiv/django-fuzzytest
|
6102ac6e7aee3bf81ff5186fbe5bfb01e688acdc
|
[
"BSD-3-Clause"
] | 8
|
2015-08-23T19:28:52.000Z
|
2021-12-03T06:36:58.000Z
|
django_fuzzytest/management/commands/fuzzytest.py
|
creotiv/django-fuzzytest
|
6102ac6e7aee3bf81ff5186fbe5bfb01e688acdc
|
[
"BSD-3-Clause"
] | null | null | null |
django_fuzzytest/management/commands/fuzzytest.py
|
creotiv/django-fuzzytest
|
6102ac6e7aee3bf81ff5186fbe5bfb01e688acdc
|
[
"BSD-3-Clause"
] | 1
|
2021-12-03T06:37:00.000Z
|
2021-12-03T06:37:00.000Z
|
# coding: utf-8
from __future__ import unicode_literals
import time
import logging
import traceback
from optparse import make_option
import json
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django_fuzzytest.runner import FuzzyRunner
logger = logging.getLogger(__file__)
class Command(BaseCommand):
help = 'Run fuzzytest'
option_list = BaseCommand.option_list + (
make_option("--exclude", "-e", action="append", default=[],
dest="exclude", help="Exclude applications from test"),
make_option("--cache", "-c", action="store_true", default='.fuzzycache',
dest="cache", help="Cache path. Default: .fuzzycache"),
)
params = {}
def _merge_cache(self, path):
        with open(path) as fp:
            for line in fp:
                d = json.loads(line)
                self.params.setdefault(
                    d['path'],
                    {"get": [], "post": [], "files": []}
                )
                self.params[d['path']]['get'] = list(set(
                    self.params[d['path']]['get'] + d['get']))
                self.params[d['path']]['post'] = list(set(
                    self.params[d['path']]['post'] + d['post']))
                self.params[d['path']]['files'] = list(set(
                    self.params[d['path']]['files'] + d['files']))
def handle(self, *args, **options):
exclude = options.get('exclude')
cache_path = options.get('cache')
self._merge_cache(cache_path)
runner = FuzzyRunner(self.params)
runner.run()
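_merge_cache expects the cache file to hold one JSON object per line, keyed by request path; a hedged sketch of producing such a line (the field values are illustrative):

import json

line = json.dumps({'path': '/accounts/login/', 'get': ['next'],
                   'post': ['username', 'password'], 'files': []})
# One such object per line in .fuzzycache; _merge_cache unions the
# get/post/files parameter lists across lines that share the same path.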
| 30.686275
| 81
| 0.578275
| 1,230
| 0.785942
| 0
| 0
| 0
| 0
| 0
| 0
| 300
| 0.191693
|
43e9c5052f55a709d60fa878953b3e380fa1ce96
| 6,727
|
py
|
Python
|
save_sim/maker.py
|
jrbourbeau/composition
|
f8debd81b0467a6094d5ba56a5f0fc6047369d30
|
[
"MIT"
] | null | null | null |
save_sim/maker.py
|
jrbourbeau/composition
|
f8debd81b0467a6094d5ba56a5f0fc6047369d30
|
[
"MIT"
] | null | null | null |
save_sim/maker.py
|
jrbourbeau/composition
|
f8debd81b0467a6094d5ba56a5f0fc6047369d30
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import glob
import re
import os
import argparse
import time
import getpass
import composition.support_functions.paths as paths
import composition.support_functions.simfunctions as simfunctions
from composition.support_functions.checkdir import checkdir
def get_argdict(comp_data_dir, **args):
argdict = dict.fromkeys(args['sim'])
for sim in args['sim']:
arglist = []
# Get config and simulation files
config = simfunctions.sim2cfg(sim)
gcd, files = simfunctions.get_level3_files(sim)
# Default parameters
outdir = '{}/{}_sim/files'.format(comp_data_dir, config)
checkdir(outdir + '/')
if args['test']:
args['n'] = 2
# List of existing files to possibly check against
existing_files = glob.glob('{}/sim_{}_*.hdf5'.format(outdir, sim))
existing_files.sort()
# Split into batches
batches = [files[i:i + args['n']]
for i in range(0, len(files), args['n'])]
if args['test']:
batches = batches[:2]
for bi, batch in enumerate(batches):
# Name output hdf5 file
start_index = batch[0].find('Run') + 3
end_index = batch[0].find('.i3.gz')
start = batch[0][start_index:end_index]
end = batch[-1][start_index:end_index]
out = '{}/sim_{}_part{}-{}.hdf5'.format(outdir, sim, start, end)
# Don't forget to insert GCD file at beginning of FileNameList
batch.insert(0, gcd)
batch = ' '.join(batch)
arg = '--files {} -s {} -o {}'.format(batch, sim, out)
arglist.append(arg)
argdict[sim] = arglist
return argdict
def get_merge_argdict(**args):
# Build arglist for condor submission
merge_argdict = dict.fromkeys(args['sim'])
for sim in args['sim']:
merge_args = '-s {}'.format(sim)
if args['overwrite']:
merge_args += ' --overwrite'
if args['remove']:
merge_args += ' --remove'
merge_argdict[sim] = merge_args
return merge_argdict
def make_submit_script(executable, jobID, script_path, condor_dir):
checkdir(script_path)
lines = ["universe = vanilla\n",
"getenv = true\n",
"executable = {}\n".format(executable),
"arguments = $(ARGS)\n",
"log = {}/logs/{}.log\n".format(condor_dir, jobID),
"output = /data/user/jbourbeau/composition/condor/outs/{}.out\n".format(jobID),
# "output = {}/outs/{}.out\n".format(condor_dir, jobID),
"error = /data/user/jbourbeau/composition/condor/errors/{}.error\n".format(jobID),
# "error = {}/errors/{}.error\n".format(condor_dir, jobID),
"notification = Never\n",
"queue \n"]
condor_script = script_path
with open(condor_script, 'w') as f:
f.writelines(lines)
return
def getjobID(jobID, condor_dir):
jobID += time.strftime('_%Y%m%d')
othersubmits = glob.glob(
'{}/submit_scripts/{}_??.submit'.format(condor_dir, jobID))
jobID += '_{:02d}'.format(len(othersubmits) + 1)
return jobID
if __name__ == "__main__":
# Setup global path names
mypaths = paths.Paths()
checkdir(mypaths.comp_data_dir)
# Set up condor directory
condor_dir = '/scratch/{}/condor_composition'.format(getpass.getuser())
for directory in ['errors', 'logs', 'outs', 'submit_scripts']:
checkdir(condor_dir + '/' + directory + '/')
simoutput = simfunctions.getSimOutput()
default_sim_list = ['7006', '7579', '7241', '7263', '7791',
'7242', '7262', '7851', '7007', '7784']
p = argparse.ArgumentParser(
description='Runs save_sim.py on cluster en masse',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=simoutput)
p.add_argument('-s', '--sim', dest='sim', nargs='*',
choices=default_sim_list,
default=default_sim_list,
help='Simulation to run over')
p.add_argument('-n', '--n', dest='n', type=int,
default=800,
help='Number of files to run per batch')
p.add_argument('--test', dest='test', action='store_true',
default=False,
help='Option for running test off cluster')
p.add_argument('--maxjobs', dest='maxjobs', type=int,
default=3000,
help='Maximum number of jobs to run at a given time.')
p.add_argument('--overwrite', dest='overwrite',
default=False, action='store_true',
help='Overwrite existing merged files')
p.add_argument('--remove', dest='remove',
default=False, action='store_true',
help='Remove unmerged hdf5 files')
args = p.parse_args()
cwd = os.getcwd()
jobID = 'save_sim'
jobID = getjobID(jobID, condor_dir)
cmd = '{}/save_sim.py'.format(cwd)
argdict = get_argdict(mypaths.comp_data_dir, **vars(args))
condor_script = '{}/submit_scripts/{}.submit'.format(condor_dir, jobID)
make_submit_script(cmd, jobID, condor_script, condor_dir)
merge_jobID = 'merge_sim'
merge_jobID = getjobID(merge_jobID, condor_dir)
merge_cmd = '{}/merge.py'.format(cwd)
merge_argdict = get_merge_argdict(**vars(args))
merge_condor_script = '{}/submit_scripts/{}.submit'.format(
condor_dir, merge_jobID)
make_submit_script(merge_cmd, merge_jobID, merge_condor_script, condor_dir)
# Set up dag file
jobID = 'save_sim_merge'
jobID = getjobID(jobID, condor_dir)
dag_file = '{}/submit_scripts/{}.submit'.format(condor_dir, jobID)
checkdir(dag_file)
with open(dag_file, 'w') as dag:
for sim in argdict.keys():
parent_string = 'Parent '
if len(argdict[sim]) < 1:
continue
for i, arg in enumerate(argdict[sim]):
dag.write('JOB sim_{}_p{} '.format(sim, i) +
condor_script + '\n')
dag.write('VARS sim_{}_p{} '.format(sim, i) +
'ARGS="' + arg + '"\n')
parent_string += 'sim_{}_p{} '.format(sim, i)
dag.write('JOB merge_{} '.format(
sim) + merge_condor_script + '\n')
dag.write('VARS merge_{} '.format(sim) +
'ARGS="' + merge_argdict[sim] + '"\n')
child_string = 'Child merge_{}'.format(sim)
dag.write(parent_string + child_string + '\n')
# Submit jobs
os.system('condor_submit_dag -maxjobs {} {}'.format(args.maxjobs, dag_file))
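For orientation, the DAG file written above follows HTCondor DAGMan syntax; a hedged excerpt of what it might contain for one simulation with two batches (the sim number, date stamp, and argument strings are illustrative):

JOB sim_7006_p0 /scratch/<user>/condor_composition/submit_scripts/save_sim_20180101_01.submit
VARS sim_7006_p0 ARGS="--files ... -s 7006 -o ..."
JOB sim_7006_p1 /scratch/<user>/condor_composition/submit_scripts/save_sim_20180101_01.submit
VARS sim_7006_p1 ARGS="--files ... -s 7006 -o ..."
JOB merge_7006 /scratch/<user>/condor_composition/submit_scripts/merge_sim_20180101_01.submit
VARS merge_7006 ARGS="-s 7006"
Parent sim_7006_p0 sim_7006_p1 Child merge_7006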
| 36.166667
| 95
| 0.581388
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,843
| 0.273971
|
43e9f411bc2778ec1b8d67dbf67237a43e84adad
| 7,257
|
py
|
Python
|
xlsxwriter_tables/xlsxwriter_tables.py
|
johncmacy/xlsxwriter-tables
|
8e4db55d8d4bbc66209e23f0852d7351f40db587
|
[
"MIT"
] | null | null | null |
xlsxwriter_tables/xlsxwriter_tables.py
|
johncmacy/xlsxwriter-tables
|
8e4db55d8d4bbc66209e23f0852d7351f40db587
|
[
"MIT"
] | null | null | null |
xlsxwriter_tables/xlsxwriter_tables.py
|
johncmacy/xlsxwriter-tables
|
8e4db55d8d4bbc66209e23f0852d7351f40db587
|
[
"MIT"
] | null | null | null |
from typing import Union
class ExcelTable:
def _get_column(self, column_name:str, column_props:Union[dict, str]) -> dict:
'''
Defaults to the title-cased `column_name`:
my_favorite_color: 'My Favorite Color'
For acronyms, abbreviations, provide the correct capitalization in the `column_name`:
NFL_franchise_name: 'NFL Franchise Name'
'''
column = {'header': column_name.replace('_', ' ').title()}
'''
If other attributes were provided, such as `formula`, or `format`, pass them along.
'''
if isinstance(column_props, dict):
column.update(**column_props)
return column
def _get_data(self, item, column_name:str, column_props:Union[None, dict, str, tuple], separator, raise_attribute_errors):
'''
Set the default value to `column_name`:
columns = {
'date_is_tuesday': ...
}
'''
data_accessor = column_name
'''
For each column key, its value `column_props` could be None, or a string, tuple, function, or dictionary:
columns = {
'column_with_none': None,
'column_with_string': 'deeply.nested.property',
'column_with_tuple': ('deeply', 'nested', 'property'),
'column_with_function': lambda item: ...,
'column_with_dict': {
'data_accessor': ...,
},
}
If `column_props` is a dictionary, it may have a `data_accessor` key.
If it does, `data_accessor` could be a string, tuple, or function.
If not, continue to use the `column_name` as the `data_accessor`.
'''
if column_props:
'''
If `column_props` is a dict, look for a `data_accessor` property.
columns = {
'date_is_tuesday': {
'data_accessor': ...
}
}
`data_accessor` could be a function, str, or tuple.
'''
if isinstance(column_props, dict):
if 'data_accessor' in column_props.keys():
data_accessor = column_props['data_accessor']
else:
'''
If `column_props` is a dict, but it doesn't have a
`data_accessor` key, then use the `column_name` as
a string as the `data_accessor`.
'''
pass
else:
'''
If not a dict, it's either a string, tuple, or function.
'''
data_accessor = column_props
'''
If `data_accessor` is a function, call the function and
return the resulting value.
Note: The function should expect a single kwarg, `item`.
Example:
def day_of_week_is_tuesday(item):
return item.start_date.weekday() == 1
columns = {
'date_is_tuesday': {
'data_accessor': day_of_week_is_tuesday,
}
}
Or, as an inline (lambda) function:
columns = {
'date_is_tuesday': {
'data_accessor': lambda item: item.start_date.weekday() == 1
}
}
'''
if callable(data_accessor):
return data_accessor(item)
'''
If we've made it this far, it's either a tuple or a string.
If it's a string, split it using the separator, and convert to a tuple.
For the following examples, assume each item has a data structure like so:
{
'alpha': {
'bravo': {
'charlie': 123,
}
}
}
The default attribute separator is dot ('.'):
alpha.bravo.charlie'
Custom separators can be used. For instance, to resemble Django's ORM, set the separator to '__':
'alpha__bravo__charlie'
'''
if isinstance(data_accessor, str):
data_accessor = tuple(data_accessor.split(separator))
'''
By now, we should have a tuple, which is a list
of nested attributes that point to where the data is.
This code recursively traverses through the tuple of
nested attributes and returns the value that is deeply
nested inside the data structure.
'''
if isinstance(data_accessor, tuple):
# need to deepcopy here?
nested_data = item
for key in data_accessor:
try:
if isinstance(nested_data, dict):
nested_data = nested_data[key]
else:
nested_data = getattr(nested_data, key)
if callable(nested_data):
nested_data = nested_data()
except (KeyError, AttributeError) as e:
if raise_attribute_errors:
return f'{type(e)}: {str(e)}'
else:
return None
except Exception as e:
'''
If an exception other than (KeyError, AttributeError) is encountered, the error message
is returned and displayed in the cell to aid in troubleshooting.
'''
return f'{type(e)}: {str(e)}'
return nested_data
'''
If we reach this point, we don't know how to access data from the item, so raise an error.
'''
raise ValueError(f'''
Unable to detect the `data_accessor`. Please provide a function, string, or tuple.
- column_name={column_name}
- column_props={column_props}
''')
def __init__(self, columns:dict, data:list, separator='.', include_total_row=True, raise_attribute_errors=False):
columns_dict = {
name: self._get_column(name, props)
for name, props
in columns.items()
}
columns_and_headers = {
key: f'[@[{value["header"]}]]'
for key, value
in columns_dict.items()
}
for column in columns_dict.values():
if 'formula' in column.keys():
formula_str:str = column['formula']
column['formula'] = formula_str.format(**columns_and_headers)
        self.columns: tuple[dict, ...] = tuple(columns_dict.values())
self.data:list = [
[
self._get_data(item, column_name, column_props, separator, raise_attribute_errors)
for column_name, column_props
in columns.items()
]
for item
in data
]
        self.top_left = (0, 0)
        self.bottom_right = (
            # last row index: the data rows plus the header row,
            # plus one more row when a total row is included
            len(self.data) + (1 if include_total_row else 0),
            len(self.columns) - 1
        )
self.coordinates = (*self.top_left, *self.bottom_right)
self.include_total_row = include_total_row
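A minimal usage sketch for the class above, exercising the documented accessor kinds; the sample data, column names, and the xlsxwriter wiring in the trailing comment are illustrative:

data = [
    {'name': 'Ada', 'stats': {'wins': 3, 'losses': 1}},
    {'name': 'Grace', 'stats': {'wins': 5, 'losses': 2}},
]
table = ExcelTable(
    columns={
        'name': None,                   # column name doubles as the accessor
        'wins': 'stats.wins',           # dotted-string accessor
        'losses': ('stats', 'losses'),  # tuple accessor
        'win_rate': {                   # callable accessor
            'data_accessor': lambda item: item['stats']['wins']
                / (item['stats']['wins'] + item['stats']['losses']),
        },
    },
    data=data,
    include_total_row=False,
)
# table.coordinates, table.columns and table.data plug into xlsxwriter:
# worksheet.add_table(*table.coordinates,
#                     {'columns': table.columns, 'data': table.data})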
| 30.2375
| 126
| 0.51564
| 7,230
| 0.996279
| 0
| 0
| 0
| 0
| 0
| 0
| 4,120
| 0.567728
|
43eab223999e2604b87fae88107217a209d85e53
| 859
|
py
|
Python
|
teachers_toolkit/grading_system/migrations/0003_auto_20180706_1923.py
|
luiscberrocal/teachers_toolkit
|
078c55c4a9ad9c5a74e1484d80ac34f3b26b69c9
|
[
"MIT"
] | null | null | null |
teachers_toolkit/grading_system/migrations/0003_auto_20180706_1923.py
|
luiscberrocal/teachers_toolkit
|
078c55c4a9ad9c5a74e1484d80ac34f3b26b69c9
|
[
"MIT"
] | null | null | null |
teachers_toolkit/grading_system/migrations/0003_auto_20180706_1923.py
|
luiscberrocal/teachers_toolkit
|
078c55c4a9ad9c5a74e1484d80ac34f3b26b69c9
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.7 on 2018-07-06 19:23
from django.db import migrations, models
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('grading_system', '0002_student_email'),
]
operations = [
migrations.RenameField(
model_name='assignment',
old_name='assingment_date',
new_name='assignment_date',
),
migrations.AddField(
model_name='course',
name='slug',
field=django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=models.CharField(max_length=60)),
),
migrations.AlterField(
model_name='assignmentresult',
name='grade',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=5),
),
]
| 28.633333 | 135 | 0.620489 | 731 | 0.85099 | 0 | 0 | 0 | 0 | 0 | 0 | 168 | 0.195576 |
43ebc0969b2793f79841f3adb90ba457341afae3 | 67,834 | py | Python | sdk/python/pulumi_google_native/vmmigration/v1alpha1/outputs.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | ["Apache-2.0"] | 44 | 2021-04-18T23:00:48.000Z | 2022-02-14T17:43:15.000Z | sdk/python/pulumi_google_native/vmmigration/v1alpha1/outputs.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | ["Apache-2.0"] | 354 | 2021-04-16T16:48:39.000Z | 2022-03-31T17:16:39.000Z | sdk/python/pulumi_google_native/vmmigration/v1alpha1/outputs.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | ["Apache-2.0"] | 8 | 2021-04-24T17:46:51.000Z | 2022-01-05T10:40:21.000Z |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = [
'AppliedLicenseResponse',
'CloneJobResponse',
'ComputeEngineTargetDefaultsResponse',
'ComputeEngineTargetDetailsResponse',
'ComputeSchedulingResponse',
'CutoverJobResponse',
'NetworkInterfaceResponse',
'ReplicationCycleResponse',
'ReplicationSyncResponse',
'SchedulePolicyResponse',
'SchedulingNodeAffinityResponse',
'StatusResponse',
'VmUtilizationInfoResponse',
'VmUtilizationMetricsResponse',
'VmwareSourceDetailsResponse',
'VmwareVmDetailsResponse',
]
@pulumi.output_type
class AppliedLicenseResponse(dict):
"""
AppliedLicense holds the license data returned by adaptation module report.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "osLicense":
suggest = "os_license"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AppliedLicenseResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AppliedLicenseResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AppliedLicenseResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
os_license: str,
type: str):
"""
AppliedLicense holds the license data returned by adaptation module report.
:param str os_license: The OS license returned from the adaptation module's report.
:param str type: The license type that was used in OS adaptation.
"""
pulumi.set(__self__, "os_license", os_license)
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="osLicense")
def os_license(self) -> str:
"""
The OS license returned from the adaptation module's report.
"""
return pulumi.get(self, "os_license")
@property
@pulumi.getter
def type(self) -> str:
"""
The license type that was used in OS adaptation.
"""
return pulumi.get(self, "type")
@pulumi.output_type
class CloneJobResponse(dict):
"""
CloneJob describes the process of creating a clone of a MigratingVM to the requested target based on the latest successful uploaded snapshots. While the migration cycles of a MigratingVm take place, it is possible to verify the uploaded VM can be started in the cloud, by creating a clone. The clone can be created without any downtime, and it is created using the latest snapshots which are already in the cloud. The cloneJob is only responsible for its work, not its products, which means once it is finished, it will never touch the instance it created. It will only delete it in case of the CloneJob being cancelled or upon failure to clone.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "computeEngineTargetDetails":
suggest = "compute_engine_target_details"
elif key == "createTime":
suggest = "create_time"
elif key == "stateTime":
suggest = "state_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CloneJobResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CloneJobResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CloneJobResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
compute_engine_target_details: 'outputs.ComputeEngineTargetDetailsResponse',
create_time: str,
error: 'outputs.StatusResponse',
name: str,
state: str,
state_time: str):
"""
CloneJob describes the process of creating a clone of a MigratingVM to the requested target based on the latest successful uploaded snapshots. While the migration cycles of a MigratingVm take place, it is possible to verify the uploaded VM can be started in the cloud, by creating a clone. The clone can be created without any downtime, and it is created using the latest snapshots which are already in the cloud. The cloneJob is only responsible for its work, not its products, which means once it is finished, it will never touch the instance it created. It will only delete it in case of the CloneJob being cancelled or upon failure to clone.
:param 'ComputeEngineTargetDetailsResponse' compute_engine_target_details: Details of the target VM in Compute Engine.
:param str create_time: The time the clone job was created (as an API call, not when it was actually created in the target).
:param 'StatusResponse' error: Provides details for the errors that led to the Clone Job's state.
:param str name: The name of the clone.
:param str state: State of the clone job.
:param str state_time: The time the state was last updated.
"""
pulumi.set(__self__, "compute_engine_target_details", compute_engine_target_details)
pulumi.set(__self__, "create_time", create_time)
pulumi.set(__self__, "error", error)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "state_time", state_time)
@property
@pulumi.getter(name="computeEngineTargetDetails")
def compute_engine_target_details(self) -> 'outputs.ComputeEngineTargetDetailsResponse':
"""
Details of the target VM in Compute Engine.
"""
return pulumi.get(self, "compute_engine_target_details")
@property
@pulumi.getter(name="createTime")
def create_time(self) -> str:
"""
The time the clone job was created (as an API call, not when it was actually created in the target).
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter
def error(self) -> 'outputs.StatusResponse':
"""
Provides details for the errors that led to the Clone Job's state.
"""
return pulumi.get(self, "error")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the clone.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def state(self) -> str:
"""
State of the clone job.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="stateTime")
def state_time(self) -> str:
"""
The time the state was last updated.
"""
return pulumi.get(self, "state_time")
@pulumi.output_type
class ComputeEngineTargetDefaultsResponse(dict):
"""
ComputeEngineTargetDefaults is a collection of details for creating a VM in a target Compute Engine project.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "additionalLicenses":
suggest = "additional_licenses"
elif key == "appliedLicense":
suggest = "applied_license"
elif key == "bootOption":
suggest = "boot_option"
elif key == "computeScheduling":
suggest = "compute_scheduling"
elif key == "diskType":
suggest = "disk_type"
elif key == "licenseType":
suggest = "license_type"
elif key == "machineType":
suggest = "machine_type"
elif key == "machineTypeSeries":
suggest = "machine_type_series"
elif key == "networkInterfaces":
suggest = "network_interfaces"
elif key == "networkTags":
suggest = "network_tags"
elif key == "secureBoot":
suggest = "secure_boot"
elif key == "serviceAccount":
suggest = "service_account"
elif key == "targetProject":
suggest = "target_project"
elif key == "vmName":
suggest = "vm_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ComputeEngineTargetDefaultsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ComputeEngineTargetDefaultsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ComputeEngineTargetDefaultsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
additional_licenses: Sequence[str],
applied_license: 'outputs.AppliedLicenseResponse',
boot_option: str,
compute_scheduling: 'outputs.ComputeSchedulingResponse',
disk_type: str,
labels: Mapping[str, str],
license_type: str,
machine_type: str,
machine_type_series: str,
metadata: Mapping[str, str],
network_interfaces: Sequence['outputs.NetworkInterfaceResponse'],
network_tags: Sequence[str],
secure_boot: bool,
service_account: str,
target_project: str,
vm_name: str,
zone: str):
"""
ComputeEngineTargetDefaults is a collection of details for creating a VM in a target Compute Engine project.
:param Sequence[str] additional_licenses: Additional licenses to assign to the VM.
:param 'AppliedLicenseResponse' applied_license: The OS license returned from the adaptation module report.
:param str boot_option: The VM Boot Option, as set in the source vm.
:param 'ComputeSchedulingResponse' compute_scheduling: Compute instance scheduling information (if empty default is used).
:param str disk_type: The disk type to use in the VM.
:param Mapping[str, str] labels: A map of labels to associate with the VM.
:param str license_type: The license type to use in OS adaptation.
:param str machine_type: The machine type to create the VM with.
:param str machine_type_series: The machine type series to create the VM with.
:param Mapping[str, str] metadata: The metadata key/value pairs to assign to the VM.
:param Sequence['NetworkInterfaceResponse'] network_interfaces: List of NICs connected to this VM.
:param Sequence[str] network_tags: A map of network tags to associate with the VM.
:param bool secure_boot: Defines whether the instance has Secure Boot enabled. This can be set to true only if the vm boot option is EFI.
:param str service_account: The service account to associate the VM with.
:param str target_project: The full path of the resource of type TargetProject which represents the Compute Engine project in which to create this VM.
:param str vm_name: The name of the VM to create.
:param str zone: The zone in which to create the VM.
"""
pulumi.set(__self__, "additional_licenses", additional_licenses)
pulumi.set(__self__, "applied_license", applied_license)
pulumi.set(__self__, "boot_option", boot_option)
pulumi.set(__self__, "compute_scheduling", compute_scheduling)
pulumi.set(__self__, "disk_type", disk_type)
pulumi.set(__self__, "labels", labels)
pulumi.set(__self__, "license_type", license_type)
pulumi.set(__self__, "machine_type", machine_type)
pulumi.set(__self__, "machine_type_series", machine_type_series)
pulumi.set(__self__, "metadata", metadata)
pulumi.set(__self__, "network_interfaces", network_interfaces)
pulumi.set(__self__, "network_tags", network_tags)
pulumi.set(__self__, "secure_boot", secure_boot)
pulumi.set(__self__, "service_account", service_account)
pulumi.set(__self__, "target_project", target_project)
pulumi.set(__self__, "vm_name", vm_name)
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="additionalLicenses")
def additional_licenses(self) -> Sequence[str]:
"""
Additional licenses to assign to the VM.
"""
return pulumi.get(self, "additional_licenses")
@property
@pulumi.getter(name="appliedLicense")
def applied_license(self) -> 'outputs.AppliedLicenseResponse':
"""
The OS license returned from the adaptation module report.
"""
return pulumi.get(self, "applied_license")
@property
@pulumi.getter(name="bootOption")
def boot_option(self) -> str:
"""
The VM Boot Option, as set in the source vm.
"""
return pulumi.get(self, "boot_option")
@property
@pulumi.getter(name="computeScheduling")
def compute_scheduling(self) -> 'outputs.ComputeSchedulingResponse':
"""
Compute instance scheduling information (if empty default is used).
"""
return pulumi.get(self, "compute_scheduling")
@property
@pulumi.getter(name="diskType")
def disk_type(self) -> str:
"""
The disk type to use in the VM.
"""
return pulumi.get(self, "disk_type")
@property
@pulumi.getter
def labels(self) -> Mapping[str, str]:
"""
A map of labels to associate with the VM.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="licenseType")
def license_type(self) -> str:
"""
The license type to use in OS adaptation.
"""
return pulumi.get(self, "license_type")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> str:
"""
The machine type to create the VM with.
"""
return pulumi.get(self, "machine_type")
@property
@pulumi.getter(name="machineTypeSeries")
def machine_type_series(self) -> str:
"""
The machine type series to create the VM with.
"""
return pulumi.get(self, "machine_type_series")
@property
@pulumi.getter
def metadata(self) -> Mapping[str, str]:
"""
The metadata key/value pairs to assign to the VM.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="networkInterfaces")
def network_interfaces(self) -> Sequence['outputs.NetworkInterfaceResponse']:
"""
List of NICs connected to this VM.
"""
return pulumi.get(self, "network_interfaces")
@property
@pulumi.getter(name="networkTags")
def network_tags(self) -> Sequence[str]:
"""
A map of network tags to associate with the VM.
"""
return pulumi.get(self, "network_tags")
@property
@pulumi.getter(name="secureBoot")
def secure_boot(self) -> bool:
"""
Defines whether the instance has Secure Boot enabled. This can be set to true only if the vm boot option is EFI.
"""
return pulumi.get(self, "secure_boot")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> str:
"""
The service account to associate the VM with.
"""
return pulumi.get(self, "service_account")
@property
@pulumi.getter(name="targetProject")
def target_project(self) -> str:
"""
The full path of the resource of type TargetProject which represents the Compute Engine project in which to create this VM.
"""
return pulumi.get(self, "target_project")
@property
@pulumi.getter(name="vmName")
def vm_name(self) -> str:
"""
The name of the VM to create.
"""
return pulumi.get(self, "vm_name")
@property
@pulumi.getter
def zone(self) -> str:
"""
The zone in which to create the VM.
"""
return pulumi.get(self, "zone")
@pulumi.output_type
class ComputeEngineTargetDetailsResponse(dict):
"""
ComputeEngineTargetDetails is a collection of details for creating a VM in a target Compute Engine project.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "additionalLicenses":
suggest = "additional_licenses"
elif key == "appliedLicense":
suggest = "applied_license"
elif key == "bootOption":
suggest = "boot_option"
elif key == "computeScheduling":
suggest = "compute_scheduling"
elif key == "diskType":
suggest = "disk_type"
elif key == "licenseType":
suggest = "license_type"
elif key == "machineType":
suggest = "machine_type"
elif key == "machineTypeSeries":
suggest = "machine_type_series"
elif key == "networkInterfaces":
suggest = "network_interfaces"
elif key == "networkTags":
suggest = "network_tags"
elif key == "secureBoot":
suggest = "secure_boot"
elif key == "serviceAccount":
suggest = "service_account"
elif key == "vmName":
suggest = "vm_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ComputeEngineTargetDetailsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ComputeEngineTargetDetailsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ComputeEngineTargetDetailsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
additional_licenses: Sequence[str],
applied_license: 'outputs.AppliedLicenseResponse',
boot_option: str,
compute_scheduling: 'outputs.ComputeSchedulingResponse',
disk_type: str,
labels: Mapping[str, str],
license_type: str,
machine_type: str,
machine_type_series: str,
metadata: Mapping[str, str],
network_interfaces: Sequence['outputs.NetworkInterfaceResponse'],
network_tags: Sequence[str],
project: str,
secure_boot: bool,
service_account: str,
vm_name: str,
zone: str):
"""
ComputeEngineTargetDetails is a collection of details for creating a VM in a target Compute Engine project.
:param Sequence[str] additional_licenses: Additional licenses to assign to the VM.
:param 'AppliedLicenseResponse' applied_license: The OS license returned from the adaptation module report.
:param str boot_option: The VM Boot Option, as set in the source vm.
:param 'ComputeSchedulingResponse' compute_scheduling: Compute instance scheduling information (if empty default is used).
:param str disk_type: The disk type to use in the VM.
:param Mapping[str, str] labels: A map of labels to associate with the VM.
:param str license_type: The license type to use in OS adaptation.
:param str machine_type: The machine type to create the VM with.
:param str machine_type_series: The machine type series to create the VM with.
:param Mapping[str, str] metadata: The metadata key/value pairs to assign to the VM.
:param Sequence['NetworkInterfaceResponse'] network_interfaces: List of NICs connected to this VM.
:param Sequence[str] network_tags: A map of network tags to associate with the VM.
:param str project: The GCP target project ID or project name.
:param bool secure_boot: Defines whether the instance has Secure Boot enabled. This can be set to true only if the vm boot option is EFI.
:param str service_account: The service account to associate the VM with.
:param str vm_name: The name of the VM to create.
:param str zone: The zone in which to create the VM.
"""
pulumi.set(__self__, "additional_licenses", additional_licenses)
pulumi.set(__self__, "applied_license", applied_license)
pulumi.set(__self__, "boot_option", boot_option)
pulumi.set(__self__, "compute_scheduling", compute_scheduling)
pulumi.set(__self__, "disk_type", disk_type)
pulumi.set(__self__, "labels", labels)
pulumi.set(__self__, "license_type", license_type)
pulumi.set(__self__, "machine_type", machine_type)
pulumi.set(__self__, "machine_type_series", machine_type_series)
pulumi.set(__self__, "metadata", metadata)
pulumi.set(__self__, "network_interfaces", network_interfaces)
pulumi.set(__self__, "network_tags", network_tags)
pulumi.set(__self__, "project", project)
pulumi.set(__self__, "secure_boot", secure_boot)
pulumi.set(__self__, "service_account", service_account)
pulumi.set(__self__, "vm_name", vm_name)
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="additionalLicenses")
def additional_licenses(self) -> Sequence[str]:
"""
Additional licenses to assign to the VM.
"""
return pulumi.get(self, "additional_licenses")
@property
@pulumi.getter(name="appliedLicense")
def applied_license(self) -> 'outputs.AppliedLicenseResponse':
"""
The OS license returned from the adaptation module report.
"""
return pulumi.get(self, "applied_license")
@property
@pulumi.getter(name="bootOption")
def boot_option(self) -> str:
"""
The VM Boot Option, as set in the source vm.
"""
return pulumi.get(self, "boot_option")
@property
@pulumi.getter(name="computeScheduling")
def compute_scheduling(self) -> 'outputs.ComputeSchedulingResponse':
"""
Compute instance scheduling information (if empty default is used).
"""
return pulumi.get(self, "compute_scheduling")
@property
@pulumi.getter(name="diskType")
def disk_type(self) -> str:
"""
The disk type to use in the VM.
"""
return pulumi.get(self, "disk_type")
@property
@pulumi.getter
def labels(self) -> Mapping[str, str]:
"""
A map of labels to associate with the VM.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="licenseType")
def license_type(self) -> str:
"""
The license type to use in OS adaptation.
"""
return pulumi.get(self, "license_type")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> str:
"""
The machine type to create the VM with.
"""
return pulumi.get(self, "machine_type")
@property
@pulumi.getter(name="machineTypeSeries")
def machine_type_series(self) -> str:
"""
The machine type series to create the VM with.
"""
return pulumi.get(self, "machine_type_series")
@property
@pulumi.getter
def metadata(self) -> Mapping[str, str]:
"""
The metadata key/value pairs to assign to the VM.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="networkInterfaces")
def network_interfaces(self) -> Sequence['outputs.NetworkInterfaceResponse']:
"""
List of NICs connected to this VM.
"""
return pulumi.get(self, "network_interfaces")
@property
@pulumi.getter(name="networkTags")
def network_tags(self) -> Sequence[str]:
"""
A map of network tags to associate with the VM.
"""
return pulumi.get(self, "network_tags")
@property
@pulumi.getter
def project(self) -> str:
"""
The GCP target project ID or project name.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="secureBoot")
def secure_boot(self) -> bool:
"""
Defines whether the instance has Secure Boot enabled. This can be set to true only if the vm boot option is EFI.
"""
return pulumi.get(self, "secure_boot")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> str:
"""
The service account to associate the VM with.
"""
return pulumi.get(self, "service_account")
@property
@pulumi.getter(name="vmName")
def vm_name(self) -> str:
"""
The name of the VM to create.
"""
return pulumi.get(self, "vm_name")
@property
@pulumi.getter
def zone(self) -> str:
"""
The zone in which to create the VM.
"""
return pulumi.get(self, "zone")
@pulumi.output_type
class ComputeSchedulingResponse(dict):
"""
Scheduling information for VM on maintenance/restart behaviour and node allocation in sole tenant nodes.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "automaticRestart":
suggest = "automatic_restart"
elif key == "minNodeCpus":
suggest = "min_node_cpus"
elif key == "nodeAffinities":
suggest = "node_affinities"
elif key == "onHostMaintenance":
suggest = "on_host_maintenance"
elif key == "restartType":
suggest = "restart_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ComputeSchedulingResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ComputeSchedulingResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ComputeSchedulingResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
automatic_restart: bool,
min_node_cpus: int,
node_affinities: Sequence['outputs.SchedulingNodeAffinityResponse'],
on_host_maintenance: str,
restart_type: str):
"""
Scheduling information for VM on maintenance/restart behaviour and node allocation in sole tenant nodes.
        :param int min_node_cpus: The minimum number of virtual CPUs this instance will consume when running on a sole-tenant node. Ignored if no node_affinities are configured.
:param Sequence['SchedulingNodeAffinityResponse'] node_affinities: A set of node affinity and anti-affinity configurations for sole tenant nodes.
:param str on_host_maintenance: How the instance should behave when the host machine undergoes maintenance that may temporarily impact instance performance.
:param str restart_type: Whether the Instance should be automatically restarted whenever it is terminated by Compute Engine (not terminated by user). This configuration is identical to `automaticRestart` field in Compute Engine create instance under scheduling. It was changed to an enum (instead of a boolean) to match the default value in Compute Engine which is automatic restart.
"""
pulumi.set(__self__, "automatic_restart", automatic_restart)
pulumi.set(__self__, "min_node_cpus", min_node_cpus)
pulumi.set(__self__, "node_affinities", node_affinities)
pulumi.set(__self__, "on_host_maintenance", on_host_maintenance)
pulumi.set(__self__, "restart_type", restart_type)
@property
@pulumi.getter(name="automaticRestart")
def automatic_restart(self) -> bool:
return pulumi.get(self, "automatic_restart")
@property
@pulumi.getter(name="minNodeCpus")
def min_node_cpus(self) -> int:
"""
        The minimum number of virtual CPUs this instance will consume when running on a sole-tenant node. Ignored if no node_affinities are configured.
"""
return pulumi.get(self, "min_node_cpus")
@property
@pulumi.getter(name="nodeAffinities")
def node_affinities(self) -> Sequence['outputs.SchedulingNodeAffinityResponse']:
"""
A set of node affinity and anti-affinity configurations for sole tenant nodes.
"""
return pulumi.get(self, "node_affinities")
@property
@pulumi.getter(name="onHostMaintenance")
def on_host_maintenance(self) -> str:
"""
How the instance should behave when the host machine undergoes maintenance that may temporarily impact instance performance.
"""
return pulumi.get(self, "on_host_maintenance")
@property
@pulumi.getter(name="restartType")
def restart_type(self) -> str:
"""
Whether the Instance should be automatically restarted whenever it is terminated by Compute Engine (not terminated by user). This configuration is identical to `automaticRestart` field in Compute Engine create instance under scheduling. It was changed to an enum (instead of a boolean) to match the default value in Compute Engine which is automatic restart.
"""
return pulumi.get(self, "restart_type")
@pulumi.output_type
class CutoverJobResponse(dict):
"""
    CutoverJob message describes a cutover of a migrating VM. The CutoverJob is the operation of shutting down the VM, creating a snapshot and cloning the VM using the replicated snapshot.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "computeEngineTargetDetails":
suggest = "compute_engine_target_details"
elif key == "createTime":
suggest = "create_time"
elif key == "progressPercent":
suggest = "progress_percent"
elif key == "stateMessage":
suggest = "state_message"
elif key == "stateTime":
suggest = "state_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CutoverJobResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CutoverJobResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CutoverJobResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
compute_engine_target_details: 'outputs.ComputeEngineTargetDetailsResponse',
create_time: str,
error: 'outputs.StatusResponse',
name: str,
progress: int,
progress_percent: int,
state: str,
state_message: str,
state_time: str):
"""
        CutoverJob message describes a cutover of a migrating VM. The CutoverJob is the operation of shutting down the VM, creating a snapshot and cloning the VM using the replicated snapshot.
:param 'ComputeEngineTargetDetailsResponse' compute_engine_target_details: Details of the target VM in Compute Engine.
:param str create_time: The time the cutover job was created (as an API call, not when it was actually created in the target).
:param 'StatusResponse' error: Provides details for the errors that led to the Cutover Job's state.
:param str name: The name of the cutover job.
:param int progress: The current progress in percentage of the cutover job.
:param int progress_percent: The current progress in percentage of the cutover job.
:param str state: State of the cutover job.
:param str state_message: A message providing possible extra details about the current state.
:param str state_time: The time the state was last updated.
"""
pulumi.set(__self__, "compute_engine_target_details", compute_engine_target_details)
pulumi.set(__self__, "create_time", create_time)
pulumi.set(__self__, "error", error)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "progress", progress)
pulumi.set(__self__, "progress_percent", progress_percent)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "state_message", state_message)
pulumi.set(__self__, "state_time", state_time)
@property
@pulumi.getter(name="computeEngineTargetDetails")
def compute_engine_target_details(self) -> 'outputs.ComputeEngineTargetDetailsResponse':
"""
Details of the target VM in Compute Engine.
"""
return pulumi.get(self, "compute_engine_target_details")
@property
@pulumi.getter(name="createTime")
def create_time(self) -> str:
"""
The time the cutover job was created (as an API call, not when it was actually created in the target).
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter
def error(self) -> 'outputs.StatusResponse':
"""
Provides details for the errors that led to the Cutover Job's state.
"""
return pulumi.get(self, "error")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the cutover job.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def progress(self) -> int:
"""
The current progress in percentage of the cutover job.
"""
return pulumi.get(self, "progress")
@property
@pulumi.getter(name="progressPercent")
def progress_percent(self) -> int:
"""
The current progress in percentage of the cutover job.
"""
return pulumi.get(self, "progress_percent")
@property
@pulumi.getter
def state(self) -> str:
"""
State of the cutover job.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="stateMessage")
def state_message(self) -> str:
"""
A message providing possible extra details about the current state.
"""
return pulumi.get(self, "state_message")
@property
@pulumi.getter(name="stateTime")
def state_time(self) -> str:
"""
The time the state was last updated.
"""
return pulumi.get(self, "state_time")
@pulumi.output_type
class NetworkInterfaceResponse(dict):
"""
NetworkInterface represents a NIC of a VM.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "externalIp":
suggest = "external_ip"
elif key == "internalIp":
suggest = "internal_ip"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NetworkInterfaceResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NetworkInterfaceResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NetworkInterfaceResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
external_ip: str,
internal_ip: str,
network: str,
subnetwork: str):
"""
NetworkInterface represents a NIC of a VM.
:param str external_ip: The external IP to define in the NIC.
        :param str internal_ip: The internal IP to define in the NIC. The formats accepted are: `ephemeral` / an IPv4 address / a named address resource full path.
:param str network: The network to connect the NIC to.
:param str subnetwork: The subnetwork to connect the NIC to.
"""
pulumi.set(__self__, "external_ip", external_ip)
pulumi.set(__self__, "internal_ip", internal_ip)
pulumi.set(__self__, "network", network)
pulumi.set(__self__, "subnetwork", subnetwork)
@property
@pulumi.getter(name="externalIp")
def external_ip(self) -> str:
"""
The external IP to define in the NIC.
"""
return pulumi.get(self, "external_ip")
@property
@pulumi.getter(name="internalIp")
def internal_ip(self) -> str:
"""
        The internal IP to define in the NIC. The formats accepted are: `ephemeral` / an IPv4 address / a named address resource full path.
"""
return pulumi.get(self, "internal_ip")
@property
@pulumi.getter
def network(self) -> str:
"""
The network to connect the NIC to.
"""
return pulumi.get(self, "network")
@property
@pulumi.getter
def subnetwork(self) -> str:
"""
The subnetwork to connect the NIC to.
"""
return pulumi.get(self, "subnetwork")
@pulumi.output_type
class ReplicationCycleResponse(dict):
"""
ReplicationCycle contains information about the current replication cycle status.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "progressPercent":
suggest = "progress_percent"
elif key == "startTime":
suggest = "start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ReplicationCycleResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ReplicationCycleResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ReplicationCycleResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
progress: int,
progress_percent: int,
start_time: str):
"""
ReplicationCycle contains information about the current replication cycle status.
:param int progress: The current progress in percentage of this cycle.
:param int progress_percent: The current progress in percentage of this cycle.
:param str start_time: The time the replication cycle has started.
"""
pulumi.set(__self__, "progress", progress)
pulumi.set(__self__, "progress_percent", progress_percent)
pulumi.set(__self__, "start_time", start_time)
@property
@pulumi.getter
def progress(self) -> int:
"""
The current progress in percentage of this cycle.
"""
return pulumi.get(self, "progress")
@property
@pulumi.getter(name="progressPercent")
def progress_percent(self) -> int:
"""
The current progress in percentage of this cycle.
"""
return pulumi.get(self, "progress_percent")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
"""
The time the replication cycle has started.
"""
return pulumi.get(self, "start_time")
@pulumi.output_type
class ReplicationSyncResponse(dict):
"""
    ReplicationSync contains information about the last replica sync to the cloud.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "lastSyncTime":
suggest = "last_sync_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ReplicationSyncResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ReplicationSyncResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ReplicationSyncResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
last_sync_time: str):
"""
        ReplicationSync contains information about the last replica sync to the cloud.
:param str last_sync_time: The most updated snapshot created time in the source that finished replication.
"""
pulumi.set(__self__, "last_sync_time", last_sync_time)
@property
@pulumi.getter(name="lastSyncTime")
def last_sync_time(self) -> str:
"""
The most updated snapshot created time in the source that finished replication.
"""
return pulumi.get(self, "last_sync_time")
@pulumi.output_type
class SchedulePolicyResponse(dict):
"""
A policy for scheduling replications.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "idleDuration":
suggest = "idle_duration"
elif key == "skipOsAdaptation":
suggest = "skip_os_adaptation"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SchedulePolicyResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SchedulePolicyResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SchedulePolicyResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
idle_duration: str,
skip_os_adaptation: bool):
"""
A policy for scheduling replications.
:param str idle_duration: The idle duration between replication stages.
:param bool skip_os_adaptation: A flag to indicate whether to skip OS adaptation during the replication sync. OS adaptation is a process where the VM's operating system undergoes changes and adaptations to fully function on Compute Engine.
"""
pulumi.set(__self__, "idle_duration", idle_duration)
pulumi.set(__self__, "skip_os_adaptation", skip_os_adaptation)
@property
@pulumi.getter(name="idleDuration")
def idle_duration(self) -> str:
"""
The idle duration between replication stages.
"""
return pulumi.get(self, "idle_duration")
@property
@pulumi.getter(name="skipOsAdaptation")
def skip_os_adaptation(self) -> bool:
"""
A flag to indicate whether to skip OS adaptation during the replication sync. OS adaptation is a process where the VM's operating system undergoes changes and adaptations to fully function on Compute Engine.
"""
return pulumi.get(self, "skip_os_adaptation")
@pulumi.output_type
class SchedulingNodeAffinityResponse(dict):
"""
Node Affinity: the configuration of desired nodes onto which this Instance could be scheduled. Based on https://cloud.google.com/compute/docs/reference/rest/v1/instances/setScheduling
"""
def __init__(__self__, *,
key: str,
operator: str,
values: Sequence[str]):
"""
Node Affinity: the configuration of desired nodes onto which this Instance could be scheduled. Based on https://cloud.google.com/compute/docs/reference/rest/v1/instances/setScheduling
:param str key: The label key of Node resource to reference.
:param str operator: The operator to use for the node resources specified in the `values` parameter.
:param Sequence[str] values: Corresponds to the label values of Node resource.
"""
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "operator", operator)
pulumi.set(__self__, "values", values)
@property
@pulumi.getter
def key(self) -> str:
"""
The label key of Node resource to reference.
"""
return pulumi.get(self, "key")
@property
@pulumi.getter
def operator(self) -> str:
"""
The operator to use for the node resources specified in the `values` parameter.
"""
return pulumi.get(self, "operator")
@property
@pulumi.getter
def values(self) -> Sequence[str]:
"""
Corresponds to the label values of Node resource.
"""
return pulumi.get(self, "values")
@pulumi.output_type
class StatusResponse(dict):
"""
The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).
"""
def __init__(__self__, *,
code: int,
details: Sequence[Mapping[str, str]],
message: str):
"""
The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).
:param int code: The status code, which should be an enum value of google.rpc.Code.
:param Sequence[Mapping[str, str]] details: A list of messages that carry the error details. There is a common set of message types for APIs to use.
:param str message: A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.
"""
pulumi.set(__self__, "code", code)
pulumi.set(__self__, "details", details)
pulumi.set(__self__, "message", message)
@property
@pulumi.getter
def code(self) -> int:
"""
The status code, which should be an enum value of google.rpc.Code.
"""
return pulumi.get(self, "code")
@property
@pulumi.getter
def details(self) -> Sequence[Mapping[str, str]]:
"""
A list of messages that carry the error details. There is a common set of message types for APIs to use.
"""
return pulumi.get(self, "details")
@property
@pulumi.getter
def message(self) -> str:
"""
A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.
"""
return pulumi.get(self, "message")
@pulumi.output_type
class VmUtilizationInfoResponse(dict):
"""
Utilization information of a single VM.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "vmId":
suggest = "vm_id"
elif key == "vmwareVmDetails":
suggest = "vmware_vm_details"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in VmUtilizationInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
VmUtilizationInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
VmUtilizationInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
utilization: 'outputs.VmUtilizationMetricsResponse',
vm_id: str,
vmware_vm_details: 'outputs.VmwareVmDetailsResponse'):
"""
Utilization information of a single VM.
:param 'VmUtilizationMetricsResponse' utilization: Utilization metrics for this VM.
:param str vm_id: The VM's ID in the source.
:param 'VmwareVmDetailsResponse' vmware_vm_details: The description of the VM in a Source of type Vmware.
"""
pulumi.set(__self__, "utilization", utilization)
pulumi.set(__self__, "vm_id", vm_id)
pulumi.set(__self__, "vmware_vm_details", vmware_vm_details)
@property
@pulumi.getter
def utilization(self) -> 'outputs.VmUtilizationMetricsResponse':
"""
Utilization metrics for this VM.
"""
return pulumi.get(self, "utilization")
@property
@pulumi.getter(name="vmId")
def vm_id(self) -> str:
"""
The VM's ID in the source.
"""
return pulumi.get(self, "vm_id")
@property
@pulumi.getter(name="vmwareVmDetails")
def vmware_vm_details(self) -> 'outputs.VmwareVmDetailsResponse':
"""
The description of the VM in a Source of type Vmware.
"""
return pulumi.get(self, "vmware_vm_details")
@pulumi.output_type
class VmUtilizationMetricsResponse(dict):
"""
Utilization metrics values for a single VM.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cpuAverage":
suggest = "cpu_average"
elif key == "cpuAveragePercent":
suggest = "cpu_average_percent"
elif key == "cpuMax":
suggest = "cpu_max"
elif key == "cpuMaxPercent":
suggest = "cpu_max_percent"
elif key == "diskIoRateAverage":
suggest = "disk_io_rate_average"
elif key == "diskIoRateAverageKbps":
suggest = "disk_io_rate_average_kbps"
elif key == "diskIoRateMax":
suggest = "disk_io_rate_max"
elif key == "diskIoRateMaxKbps":
suggest = "disk_io_rate_max_kbps"
elif key == "memoryAverage":
suggest = "memory_average"
elif key == "memoryAveragePercent":
suggest = "memory_average_percent"
elif key == "memoryMax":
suggest = "memory_max"
elif key == "memoryMaxPercent":
suggest = "memory_max_percent"
elif key == "networkThroughputAverage":
suggest = "network_throughput_average"
elif key == "networkThroughputAverageKbps":
suggest = "network_throughput_average_kbps"
elif key == "networkThroughputMax":
suggest = "network_throughput_max"
elif key == "networkThroughputMaxKbps":
suggest = "network_throughput_max_kbps"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in VmUtilizationMetricsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
VmUtilizationMetricsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
VmUtilizationMetricsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cpu_average: int,
cpu_average_percent: int,
cpu_max: int,
cpu_max_percent: int,
disk_io_rate_average: str,
disk_io_rate_average_kbps: str,
disk_io_rate_max: str,
disk_io_rate_max_kbps: str,
memory_average: int,
memory_average_percent: int,
memory_max: int,
memory_max_percent: int,
network_throughput_average: str,
network_throughput_average_kbps: str,
network_throughput_max: str,
network_throughput_max_kbps: str):
"""
Utilization metrics values for a single VM.
:param int cpu_average: Average CPU usage, percent.
:param int cpu_average_percent: Average CPU usage, percent.
:param int cpu_max: Max CPU usage, percent.
:param int cpu_max_percent: Max CPU usage, percent.
:param str disk_io_rate_average: Average disk IO rate, in kilobytes per second.
:param str disk_io_rate_average_kbps: Average disk IO rate, in kilobytes per second.
:param str disk_io_rate_max: Max disk IO rate, in kilobytes per second.
:param str disk_io_rate_max_kbps: Max disk IO rate, in kilobytes per second.
:param int memory_average: Average memory usage, percent.
:param int memory_average_percent: Average memory usage, percent.
:param int memory_max: Max memory usage, percent.
:param int memory_max_percent: Max memory usage, percent.
:param str network_throughput_average: Average network throughput (combined transmit-rates and receive-rates), in kilobytes per second.
:param str network_throughput_average_kbps: Average network throughput (combined transmit-rates and receive-rates), in kilobytes per second.
:param str network_throughput_max: Max network throughput (combined transmit-rates and receive-rates), in kilobytes per second.
:param str network_throughput_max_kbps: Max network throughput (combined transmit-rates and receive-rates), in kilobytes per second.
"""
pulumi.set(__self__, "cpu_average", cpu_average)
pulumi.set(__self__, "cpu_average_percent", cpu_average_percent)
pulumi.set(__self__, "cpu_max", cpu_max)
pulumi.set(__self__, "cpu_max_percent", cpu_max_percent)
pulumi.set(__self__, "disk_io_rate_average", disk_io_rate_average)
pulumi.set(__self__, "disk_io_rate_average_kbps", disk_io_rate_average_kbps)
pulumi.set(__self__, "disk_io_rate_max", disk_io_rate_max)
pulumi.set(__self__, "disk_io_rate_max_kbps", disk_io_rate_max_kbps)
pulumi.set(__self__, "memory_average", memory_average)
pulumi.set(__self__, "memory_average_percent", memory_average_percent)
pulumi.set(__self__, "memory_max", memory_max)
pulumi.set(__self__, "memory_max_percent", memory_max_percent)
pulumi.set(__self__, "network_throughput_average", network_throughput_average)
pulumi.set(__self__, "network_throughput_average_kbps", network_throughput_average_kbps)
pulumi.set(__self__, "network_throughput_max", network_throughput_max)
pulumi.set(__self__, "network_throughput_max_kbps", network_throughput_max_kbps)
@property
@pulumi.getter(name="cpuAverage")
def cpu_average(self) -> int:
"""
Average CPU usage, percent.
"""
return pulumi.get(self, "cpu_average")
@property
@pulumi.getter(name="cpuAveragePercent")
def cpu_average_percent(self) -> int:
"""
Average CPU usage, percent.
"""
return pulumi.get(self, "cpu_average_percent")
@property
@pulumi.getter(name="cpuMax")
def cpu_max(self) -> int:
"""
Max CPU usage, percent.
"""
return pulumi.get(self, "cpu_max")
@property
@pulumi.getter(name="cpuMaxPercent")
def cpu_max_percent(self) -> int:
"""
Max CPU usage, percent.
"""
return pulumi.get(self, "cpu_max_percent")
@property
@pulumi.getter(name="diskIoRateAverage")
def disk_io_rate_average(self) -> str:
"""
Average disk IO rate, in kilobytes per second.
"""
return pulumi.get(self, "disk_io_rate_average")
@property
@pulumi.getter(name="diskIoRateAverageKbps")
def disk_io_rate_average_kbps(self) -> str:
"""
Average disk IO rate, in kilobytes per second.
"""
return pulumi.get(self, "disk_io_rate_average_kbps")
@property
@pulumi.getter(name="diskIoRateMax")
def disk_io_rate_max(self) -> str:
"""
Max disk IO rate, in kilobytes per second.
"""
return pulumi.get(self, "disk_io_rate_max")
@property
@pulumi.getter(name="diskIoRateMaxKbps")
def disk_io_rate_max_kbps(self) -> str:
"""
Max disk IO rate, in kilobytes per second.
"""
return pulumi.get(self, "disk_io_rate_max_kbps")
@property
@pulumi.getter(name="memoryAverage")
def memory_average(self) -> int:
"""
Average memory usage, percent.
"""
return pulumi.get(self, "memory_average")
@property
@pulumi.getter(name="memoryAveragePercent")
def memory_average_percent(self) -> int:
"""
Average memory usage, percent.
"""
return pulumi.get(self, "memory_average_percent")
@property
@pulumi.getter(name="memoryMax")
def memory_max(self) -> int:
"""
Max memory usage, percent.
"""
return pulumi.get(self, "memory_max")
@property
@pulumi.getter(name="memoryMaxPercent")
def memory_max_percent(self) -> int:
"""
Max memory usage, percent.
"""
return pulumi.get(self, "memory_max_percent")
@property
@pulumi.getter(name="networkThroughputAverage")
def network_throughput_average(self) -> str:
"""
Average network throughput (combined transmit-rates and receive-rates), in kilobytes per second.
"""
return pulumi.get(self, "network_throughput_average")
@property
@pulumi.getter(name="networkThroughputAverageKbps")
def network_throughput_average_kbps(self) -> str:
"""
Average network throughput (combined transmit-rates and receive-rates), in kilobytes per second.
"""
return pulumi.get(self, "network_throughput_average_kbps")
@property
@pulumi.getter(name="networkThroughputMax")
def network_throughput_max(self) -> str:
"""
Max network throughput (combined transmit-rates and receive-rates), in kilobytes per second.
"""
return pulumi.get(self, "network_throughput_max")
@property
@pulumi.getter(name="networkThroughputMaxKbps")
def network_throughput_max_kbps(self) -> str:
"""
Max network throughput (combined transmit-rates and receive-rates), in kilobytes per second.
"""
return pulumi.get(self, "network_throughput_max_kbps")
@pulumi.output_type
class VmwareSourceDetailsResponse(dict):
"""
VmwareSourceDetails message describes a specific source details for the vmware source type.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "vcenterIp":
suggest = "vcenter_ip"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in VmwareSourceDetailsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
VmwareSourceDetailsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
VmwareSourceDetailsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
password: str,
thumbprint: str,
username: str,
vcenter_ip: str):
"""
VmwareSourceDetails message describes a specific source details for the vmware source type.
        :param str password: Input only. The credentials password. This is write-only and cannot be read in a GET operation.
:param str thumbprint: The thumbprint representing the certificate for the vcenter.
:param str username: The credentials username.
:param str vcenter_ip: The ip address of the vcenter this Source represents.
"""
pulumi.set(__self__, "password", password)
pulumi.set(__self__, "thumbprint", thumbprint)
pulumi.set(__self__, "username", username)
pulumi.set(__self__, "vcenter_ip", vcenter_ip)
@property
@pulumi.getter
def password(self) -> str:
"""
        Input only. The credentials password. This is write-only and cannot be read in a GET operation.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def thumbprint(self) -> str:
"""
The thumbprint representing the certificate for the vcenter.
"""
return pulumi.get(self, "thumbprint")
@property
@pulumi.getter
def username(self) -> str:
"""
The credentials username.
"""
return pulumi.get(self, "username")
@property
@pulumi.getter(name="vcenterIp")
def vcenter_ip(self) -> str:
"""
The ip address of the vcenter this Source represents.
"""
return pulumi.get(self, "vcenter_ip")
@pulumi.output_type
class VmwareVmDetailsResponse(dict):
"""
VmwareVmDetails describes a VM in vCenter.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "bootOption":
suggest = "boot_option"
elif key == "committedStorage":
suggest = "committed_storage"
elif key == "committedStorageMb":
suggest = "committed_storage_mb"
elif key == "cpuCount":
suggest = "cpu_count"
elif key == "datacenterDescription":
suggest = "datacenter_description"
elif key == "datacenterId":
suggest = "datacenter_id"
elif key == "diskCount":
suggest = "disk_count"
elif key == "displayName":
suggest = "display_name"
elif key == "guestDescription":
suggest = "guest_description"
elif key == "memoryMb":
suggest = "memory_mb"
elif key == "powerState":
suggest = "power_state"
elif key == "vmId":
suggest = "vm_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in VmwareVmDetailsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
VmwareVmDetailsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
VmwareVmDetailsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
boot_option: str,
committed_storage: str,
committed_storage_mb: str,
cpu_count: int,
datacenter_description: str,
datacenter_id: str,
disk_count: int,
display_name: str,
guest_description: str,
memory_mb: int,
power_state: str,
uuid: str,
vm_id: str):
"""
VmwareVmDetails describes a VM in vCenter.
:param str boot_option: The VM Boot Option.
:param str committed_storage: The total size of the storage allocated to the VM in MB.
:param str committed_storage_mb: The total size of the storage allocated to the VM in MB.
:param int cpu_count: The number of cpus in the VM.
:param str datacenter_description: The descriptive name of the vCenter's datacenter this VM is contained in.
:param str datacenter_id: The id of the vCenter's datacenter this VM is contained in.
:param int disk_count: The number of disks the VM has.
:param str display_name: The display name of the VM. Note that this is not necessarily unique.
:param str guest_description: The VM's OS. See for example https://pubs.vmware.com/vi-sdk/visdk250/ReferenceGuide/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html for types of strings this might hold.
:param int memory_mb: The size of the memory of the VM in MB.
:param str power_state: The power state of the VM at the moment list was taken.
:param str uuid: The unique identifier of the VM in vCenter.
:param str vm_id: The VM's id in the source (note that this is not the MigratingVm's id). This is the moref id of the VM.
"""
pulumi.set(__self__, "boot_option", boot_option)
pulumi.set(__self__, "committed_storage", committed_storage)
pulumi.set(__self__, "committed_storage_mb", committed_storage_mb)
pulumi.set(__self__, "cpu_count", cpu_count)
pulumi.set(__self__, "datacenter_description", datacenter_description)
pulumi.set(__self__, "datacenter_id", datacenter_id)
pulumi.set(__self__, "disk_count", disk_count)
pulumi.set(__self__, "display_name", display_name)
pulumi.set(__self__, "guest_description", guest_description)
pulumi.set(__self__, "memory_mb", memory_mb)
pulumi.set(__self__, "power_state", power_state)
pulumi.set(__self__, "uuid", uuid)
pulumi.set(__self__, "vm_id", vm_id)
@property
@pulumi.getter(name="bootOption")
def boot_option(self) -> str:
"""
The VM Boot Option.
"""
return pulumi.get(self, "boot_option")
@property
@pulumi.getter(name="committedStorage")
def committed_storage(self) -> str:
"""
The total size of the storage allocated to the VM in MB.
"""
return pulumi.get(self, "committed_storage")
@property
@pulumi.getter(name="committedStorageMb")
def committed_storage_mb(self) -> str:
"""
The total size of the storage allocated to the VM in MB.
"""
return pulumi.get(self, "committed_storage_mb")
@property
@pulumi.getter(name="cpuCount")
def cpu_count(self) -> int:
"""
The number of cpus in the VM.
"""
return pulumi.get(self, "cpu_count")
@property
@pulumi.getter(name="datacenterDescription")
def datacenter_description(self) -> str:
"""
The descriptive name of the vCenter's datacenter this VM is contained in.
"""
return pulumi.get(self, "datacenter_description")
@property
@pulumi.getter(name="datacenterId")
def datacenter_id(self) -> str:
"""
The id of the vCenter's datacenter this VM is contained in.
"""
return pulumi.get(self, "datacenter_id")
@property
@pulumi.getter(name="diskCount")
def disk_count(self) -> int:
"""
The number of disks the VM has.
"""
return pulumi.get(self, "disk_count")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> str:
"""
The display name of the VM. Note that this is not necessarily unique.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="guestDescription")
def guest_description(self) -> str:
"""
The VM's OS. See for example https://pubs.vmware.com/vi-sdk/visdk250/ReferenceGuide/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html for types of strings this might hold.
"""
return pulumi.get(self, "guest_description")
@property
@pulumi.getter(name="memoryMb")
def memory_mb(self) -> int:
"""
The size of the memory of the VM in MB.
"""
return pulumi.get(self, "memory_mb")
@property
@pulumi.getter(name="powerState")
def power_state(self) -> str:
"""
The power state of the VM at the moment list was taken.
"""
return pulumi.get(self, "power_state")
@property
@pulumi.getter
def uuid(self) -> str:
"""
The unique identifier of the VM in vCenter.
"""
return pulumi.get(self, "uuid")
@property
@pulumi.getter(name="vmId")
def vm_id(self) -> str:
"""
The VM's id in the source (note that this is not the MigratingVm's id). This is the moref id of the VM.
"""
return pulumi.get(self, "vm_id")
| 38.542045 | 653 | 0.637306 | 66,571 | 0.981381 | 0 | 0 | 66,891 | 0.986098 | 0 | 0 | 34,755 | 0.512354 |
43ed227cd2674901d74eb5739cfb902ec959b334 | 6,300 | py | Python | tests/test_utils.py | yoshikyoto/django-filter-0.14 | b5166e93f4c0fec5f5e8a73b6d1e8e0550b3929b | ["BSD-3-Clause"] | null | null | null | tests/test_utils.py | yoshikyoto/django-filter-0.14 | b5166e93f4c0fec5f5e8a73b6d1e8e0550b3929b | ["BSD-3-Clause"] | 1 | 2016-08-23T18:20:47.000Z | 2016-08-23T19:16:07.000Z | tests/test_utils.py | yoshikyoto/django-filter-0.14 | b5166e93f4c0fec5f5e8a73b6d1e8e0550b3929b | ["BSD-3-Clause"] | null | null | null |
import unittest
import django
from django.test import TestCase
from django.db import models
from django.db.models.constants import LOOKUP_SEP
from django_filters.utils import get_model_field, resolve_field
from django_filters.exceptions import FieldLookupError
from .models import User
from .models import Article
from .models import Book
from .models import HiredWorker
from .models import Business
class GetModelFieldTests(TestCase):
def test_non_existent_field(self):
result = get_model_field(User, 'unknown__name')
self.assertIsNone(result)
def test_related_field(self):
result = get_model_field(Business, 'hiredworker__worker')
self.assertEqual(result, HiredWorker._meta.get_field('worker'))
class ResolveFieldTests(TestCase):
def test_resolve_plain_lookups(self):
"""
Check that the standard query terms can be correctly resolved.
eg, an 'EXACT' lookup on a user's username
"""
model_field = User._meta.get_field('username')
lookups = model_field.class_lookups.keys()
        # This is simple - the final output of an untransformed field is itself.
# The lookups are the default lookups registered to the class.
for term in lookups:
field, lookup = resolve_field(model_field, term)
self.assertIsInstance(field, models.CharField)
self.assertEqual(lookup, term)
def test_resolve_forward_related_lookups(self):
"""
Check that lookups can be resolved for related fields
in the forwards direction.
"""
lookups = ['exact', 'gte', 'gt', 'lte', 'lt', 'in', 'isnull', ]
# ForeignKey
model_field = Article._meta.get_field('author')
for term in lookups:
field, lookup = resolve_field(model_field, term)
self.assertIsInstance(field, models.ForeignKey)
self.assertEqual(lookup, term)
# ManyToManyField
model_field = User._meta.get_field('favorite_books')
for term in lookups:
field, lookup = resolve_field(model_field, term)
self.assertIsInstance(field, models.ManyToManyField)
self.assertEqual(lookup, term)
@unittest.skipIf(django.VERSION < (1, 9), "version does not reverse lookups")
def test_resolve_reverse_related_lookups(self):
"""
Check that lookups can be resolved for related fields
in the reverse direction.
"""
lookups = ['exact', 'gte', 'gt', 'lte', 'lt', 'in', 'isnull', ]
# ManyToOneRel
model_field = User._meta.get_field('article')
for term in lookups:
field, lookup = resolve_field(model_field, term)
self.assertIsInstance(field, models.ManyToOneRel)
self.assertEqual(lookup, term)
# ManyToManyRel
model_field = Book._meta.get_field('lovers')
for term in lookups:
field, lookup = resolve_field(model_field, term)
self.assertIsInstance(field, models.ManyToManyRel)
self.assertEqual(lookup, term)
@unittest.skipIf(django.VERSION < (1, 9), "version does not support transformed lookup expressions")
def test_resolve_transformed_lookups(self):
"""
Check that chained field transforms are correctly resolved.
eg, a 'date__year__gte' lookup on an article's 'published' timestamp.
"""
# Use a DateTimeField, so we can check multiple transforms.
# eg, date__year__gte
model_field = Article._meta.get_field('published')
standard_lookups = [
'exact',
'iexact',
'gte',
'gt',
'lte',
'lt',
]
date_lookups = [
'year',
'month',
'day',
'week_day',
]
datetime_lookups = date_lookups + [
'hour',
'minute',
'second',
]
# ex: 'date__gt'
for lookup in standard_lookups:
field, resolved_lookup = resolve_field(model_field, LOOKUP_SEP.join(['date', lookup]))
self.assertIsInstance(field, models.DateField)
self.assertEqual(resolved_lookup, lookup)
# ex: 'year__iexact'
for part in datetime_lookups:
for lookup in standard_lookups:
field, resolved_lookup = resolve_field(model_field, LOOKUP_SEP.join([part, lookup]))
self.assertIsInstance(field, models.IntegerField)
self.assertEqual(resolved_lookup, lookup)
# ex: 'date__year__lte'
for part in date_lookups:
for lookup in standard_lookups:
field, resolved_lookup = resolve_field(model_field, LOOKUP_SEP.join(['date', part, lookup]))
self.assertIsInstance(field, models.IntegerField)
self.assertEqual(resolved_lookup, lookup)
@unittest.skipIf(django.VERSION < (1, 9), "version does not support transformed lookup expressions")
def test_resolve_implicit_exact_lookup(self):
# Use a DateTimeField, so we can check multiple transforms.
# eg, date__year__gte
model_field = Article._meta.get_field('published')
field, lookup = resolve_field(model_field, 'date')
self.assertIsInstance(field, models.DateField)
self.assertEqual(lookup, 'exact')
field, lookup = resolve_field(model_field, 'date__year')
self.assertIsInstance(field, models.IntegerField)
self.assertEqual(lookup, 'exact')
def test_invalid_lookup_expression(self):
model_field = Article._meta.get_field('published')
with self.assertRaises(FieldLookupError) as context:
resolve_field(model_field, 'invalid_lookup')
exc = str(context.exception)
self.assertIn(str(model_field), exc)
self.assertIn('invalid_lookup', exc)
def test_invalid_transformed_lookup_expression(self):
model_field = Article._meta.get_field('published')
with self.assertRaises(FieldLookupError) as context:
resolve_field(model_field, 'date__invalid_lookup')
exc = str(context.exception)
self.assertIn(str(model_field), exc)
self.assertIn('date__invalid_lookup', exc)
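# Illustrative sketch (hypothetical helper, not part of the suite; requires
# Django >= 1.9 for transformed lookups): resolve_field walks the transforms
# and returns the terminal field plus the final lookup.
def _demo_resolve_field():
    model_field = Article._meta.get_field('published')
    field, lookup = resolve_field(model_field, 'date__year__gte')
    return type(field).__name__, lookup  # ('IntegerField', 'gte')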
| 36
| 108
| 0.644762
| 5,890
| 0.934921
| 0
| 0
| 3,328
| 0.528254
| 0
| 0
| 1,495
| 0.237302
|
43ee04853e52a2ff347eaf6785c0c115ae6ad8aa
| 164
|
py
|
Python
|
agc/agc007/agc007a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | 1
|
2019-08-21T00:49:34.000Z
|
2019-08-21T00:49:34.000Z
|
agc/agc007/agc007a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
agc/agc007/agc007a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
H, W = map(int, input().split())
A = [input() for _ in range(H)]
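# A monotone right/down walk from the top-left to the bottom-right cell visits
# exactly H + W - 1 cells, so such a walk exists iff the grid has that many '#'s.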
if H + W - 1 == sum(a.count('#') for a in A):
print('Possible')
else:
print('Impossible')
| 20.5
| 45
| 0.542683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 25
| 0.152439
|
43f06ebbb7637e1e6c0f53bef04ad021c74daf38
| 2,188
|
py
|
Python
|
relfs/relfs/fuse/mount_root.py
|
matus-chochlik/various
|
2a9f5eddd964213f7d1e1ce8328e2e0b2a8e998b
|
[
"MIT"
] | 1
|
2020-10-25T12:28:50.000Z
|
2020-10-25T12:28:50.000Z
|
relfs/relfs/fuse/mount_root.py
|
matus-chochlik/various
|
2a9f5eddd964213f7d1e1ce8328e2e0b2a8e998b
|
[
"MIT"
] | null | null | null |
relfs/relfs/fuse/mount_root.py
|
matus-chochlik/various
|
2a9f5eddd964213f7d1e1ce8328e2e0b2a8e998b
|
[
"MIT"
] | null | null | null |
# coding=utf-8
#------------------------------------------------------------------------------#
import os
import time
import fuse
import errno
from .item import RelFuseItem
from .static_dir import StaticDirectory
#------------------------------------------------------------------------------#
class MountRoot(RelFuseItem):
# --------------------------------------------------------------------------
def __init__(self):
RelFuseItem.__init__(self)
self._mount_time = time.time()
self._relfs_dir = StaticDirectory()
self._repos_backstage = self._relfs_dir.add("repos", StaticDirectory())
self._repos = dict()
# --------------------------------------------------------------------------
def add_repo_root(self, name, item):
self._repos[name] = item
# --------------------------------------------------------------------------
def repos_backstage(self):
return self._repos_backstage
# --------------------------------------------------------------------------
def find_item(self, split_path):
if not split_path or split_path == ["."]:
return self
if split_path[0] == ".relfs":
return self._relfs_dir.find_item(split_path[1:])
try:
repo = self._repos[split_path[0]]
return repo.find_item(split_path[1:])
except KeyError:
pass
# --------------------------------------------------------------------------
def readdir(self, fh):
yield ".."
yield "."
yield ".relfs"
for name in self._repos:
yield name
# --------------------------------------------------------------------------
def _modify_time(self):
return self._mount_time
# --------------------------------------------------------------------------
def access(self, mode):
if mode & os.X_OK:
return 0
return RelFuseItem.access(self, mode)
# --------------------------------------------------------------------------
def _get_mode(self):
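        # 0o40000 marks a directory; 0o550 grants read and execute (no write)
        # to the owner and group, so the mount root is read-only.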
return 0o40550
#------------------------------------------------------------------------------#
| 35.290323
| 80
| 0.359232
| 1,811
| 0.827697
| 138
| 0.063071
| 0
| 0
| 0
| 0
| 895
| 0.409049
|
43f1172d32150bd985177a2463faa8dd3ab137f9
| 3,935
|
py
|
Python
|
clip_onnx/clip_converter.py
|
EmbarkStudios/CLIP-ONNX
|
52f4ce4d603722cb934d27b570f7523f26f1ef7f
|
[
"MIT"
] | null | null | null |
clip_onnx/clip_converter.py
|
EmbarkStudios/CLIP-ONNX
|
52f4ce4d603722cb934d27b570f7523f26f1ef7f
|
[
"MIT"
] | null | null | null |
clip_onnx/clip_converter.py
|
EmbarkStudios/CLIP-ONNX
|
52f4ce4d603722cb934d27b570f7523f26f1ef7f
|
[
"MIT"
] | null | null | null |
import torch
import onnx
from torch import nn
from onnxruntime.quantization import quantize_dynamic, QuantType
from .utils import Textual, DEFAULT_EXPORT
class clip_converter(nn.Module):
def __init__(self, model, visual_path: str = "clip_visual.onnx",
textual_path: str = "clip_textual.onnx"):
super().__init__()
self.model = model
self.visual_path = visual_path
self.textual_path = textual_path
self.visual_flag = False
self.textual_flag = False
self.logit_scale = self.model.logit_scale.exp()
self.model.eval()
for x in self.model.parameters():
x.requires_grad = False
def quantization(self, mode: str = "dynamic"):
assert mode in ["dynamic"]
if mode == "dynamic":
model_quant_visual = f"{self.visual_path}.quant"
quantize_dynamic(self.visual_path,
model_quant_visual,
weight_type=QuantType.QUInt8)
self.visual_path = model_quant_visual
model_quant_textual = f"{self.textual_path}.quant"
quantize_dynamic(self.textual_path,
model_quant_textual,
weight_type=QuantType.QUInt8)
self.textual_path = model_quant_textual
def torch_export(self, model, dummy_input, path: str, export_params=DEFAULT_EXPORT):
torch.onnx.export(model, dummy_input, path, **export_params)
def onnx_checker(self, path: str):
model = onnx.load(path)
onnx.checker.check_model(model)
del model
def convert_visual(self, dummy_input, wrapper=lambda x: x,
export_params=DEFAULT_EXPORT):
visual = wrapper(self.model.visual)
self.torch_export(visual, dummy_input, self.visual_path,
export_params=export_params)
self.onnx_checker(self.visual_path)
def convert_textual(self, dummy_input, wrapper=Textual,
export_params=DEFAULT_EXPORT):
textual = wrapper(self.model)
self.torch_export(textual, dummy_input, self.textual_path,
export_params=export_params)
self.onnx_checker(self.textual_path)
def convert2onnx(self, visual_input=None, textual_input=None, verbose=True,
visual_wrapper=lambda x: x,
textual_wrapper=Textual,
visual_export_params=DEFAULT_EXPORT,
textual_export_params=DEFAULT_EXPORT):
isinstance_visual_input = isinstance(visual_input, (torch.Tensor))
isinstance_textual_input = isinstance(textual_input, (torch.Tensor))
if (not isinstance_visual_input) and (not isinstance_textual_input):
raise Exception("[CLIP ONNX] Please, choose a dummy input")
elif not isinstance_visual_input:
print("[CLIP ONNX] Convert only textual model")
elif not isinstance_textual_input:
print("[CLIP ONNX] Convert only visual model")
if isinstance_visual_input:
self.visual_flag = True
if verbose:
print("[CLIP ONNX] Start convert visual model")
self.convert_visual(visual_input, visual_wrapper, visual_export_params)
if verbose:
print("[CLIP ONNX] Start check visual model")
self.onnx_checker(self.visual_path)
if isinstance_textual_input:
self.textual_flag = True
if verbose:
print("[CLIP ONNX] Start convert textual model")
self.convert_textual(textual_input, textual_wrapper, textual_export_params)
if verbose:
print("[CLIP ONNX] Start check textual model")
self.onnx_checker(self.textual_path)
if verbose:
print("[CLIP ONNX] Models converts successfully")
| 41.421053
| 88
| 0.629225
| 3,764
| 0.956544
| 0
| 0
| 0
| 0
| 0
| 0
| 440
| 0.111817
|
43f1186dd806bfa7da9c44b01e37a130943f2f23
| 6,493
|
py
|
Python
|
electrum/gui/kivy/uix/dialogs/add_token_dialog.py
|
VIPSTARCOIN-electrum/electrum-vips
|
ebe93c09717ea44c049fcb9c3f366af64dc87b37
|
[
"MIT"
] | 2
|
2019-07-17T23:09:42.000Z
|
2019-10-25T05:44:04.000Z
|
electrum/gui/kivy/uix/dialogs/add_token_dialog.py
|
VIPSTARCOIN-electrum/electrum-vips
|
ebe93c09717ea44c049fcb9c3f366af64dc87b37
|
[
"MIT"
] | null | null | null |
electrum/gui/kivy/uix/dialogs/add_token_dialog.py
|
VIPSTARCOIN-electrum/electrum-vips
|
ebe93c09717ea44c049fcb9c3f366af64dc87b37
|
[
"MIT"
] | 3
|
2019-08-10T15:14:29.000Z
|
2021-05-26T20:02:02.000Z
|
from datetime import datetime
from kivy.app import App
from kivy.factory import Factory
from kivy.lang import Builder
from kivy.clock import Clock
from kivy.uix.button import Button
from electrum.gui.kivy.i18n import _
from electrum.bitcoin import Token
from electrum.util import parse_token_URI, InvalidTokenURI
from .choice_dialog import ChoiceDialog
Builder.load_string('''
#:import partial functools.partial
#:import _ electrum.gui.kivy.i18n._
<AddTokenDialog>
id: popup
title: _('Add Token')
contract_addr: ''
BoxLayout:
orientation: 'vertical'
BoxLabel:
text: _('Contract Address')
SendReceiveBlueBottom:
size_hint: 1, None
height: self.minimum_height
BlueButton:
text: popup.contract_addr
shorten: True
on_release: Clock.schedule_once(lambda dt: app.show_info(_('Copy and paste the contract address using the Paste button, or use the camera to scan a QR code.')))
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
text: _('Paste')
on_release: popup.do_paste()
IconButton:
id: qr
size_hint: 0.6, 1
on_release: Clock.schedule_once(lambda dt: app.scan_qr(on_complete=popup.on_qr))
icon: 'atlas://electrum/gui/kivy/theming/light/camera'
AddTokenItem:
my_addr: app.wallet.get_addresses_sort_by_balance()[0]
title: _('My Address:')
description: str(self.my_addr)
action: partial(root.address_select_dialog, self)
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Button:
text: 'Cancel'
size_hint: 0.5, None
height: '48dp'
on_release: popup.dismiss()
Button:
text: 'OK'
size_hint: 0.5, None
height: '48dp'
on_release:
root.add_token()
popup.dismiss()
''')
class AddTokenDialog(Factory.Popup):
def __init__(self, app):
Factory.Popup.__init__(self)
self.app = app
self.wallet = self.app.wallet
self.addresses = self.wallet.get_addresses_sort_by_balance()
self.my_address = self.wallet.get_addresses_sort_by_balance()[0]
self._address_select_dialog = None
self.contract_addr = ''
def address_select_dialog(self, item, dt):
shorten_addresses = []
for address in self.addresses:
            shorten_address = address[0:7] + '.....' + address[-7:]
shorten_addresses.append(shorten_address)
address_number = self.addresses.index(self.my_address)
if self._address_select_dialog is None:
def cb(addr):
return_number = shorten_addresses.index(addr)
my_address = self.addresses[return_number]
item.my_addr = my_address
self.my_address = my_address
self._address_select_dialog = ChoiceDialog(_('My Address'), shorten_addresses, shorten_addresses[address_number], cb)
self._address_select_dialog.open()
def add_token(self):
contract_addr = self.contract_addr
bind_addr = self.my_address
if contract_addr == '':
self.app.show_info(_("Contract Address is empty"))
return
try:
r = self.app.network.run_from_another_thread(self.app.network.get_token_info(contract_addr))
name = r.get('name')
decimals = r.get('decimals')
symbol = r.get('symbol')
            if not name or not symbol or not isinstance(decimals, int):
self.app.show_info(_("token info not valid: {} {} {}").format(name, symbol, decimals))
return
token = Token(contract_addr, bind_addr, name, symbol, decimals, 0)
self.app.set_token(token)
except BaseException as e:
import traceback, sys
traceback.print_exc(file=sys.stderr)
self.app.show_info(e)
def search_token(self, contract_addr):
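        # Ask the network for token info, retrying once before treating the
        # token as unknown.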
try:
token_data = self.app.network.run_from_another_thread(self.app.network.get_token_info(contract_addr))
        except Exception:
try:
token_data = self.app.network.run_from_another_thread(self.app.network.get_token_info(contract_addr))
            except Exception:
token_data = None
if token_data:
return True
return False
def do_paste(self):
from electrum.bitcoin import base_decode, is_address
data = self.app._clipboard.paste().strip()
if not data:
self.app.show_info(_("Clipboard is empty"))
return
if is_address(data) or data.startswith('vipstarcoin:'):
self.app.show_info(_("QR data is bitcoin URI."))
return
self.set_URI(data)
def set_URI(self, text):
if not self.app.wallet:
self.payment_request_queued = text
return
try:
uri = parse_token_URI(text)
except InvalidTokenURI as e:
self.app.show_error(_("Error parsing URI") + f":\n{e}")
return
address = uri.get('contract_addr', '')
if not self.search_token(address):
self.app.show_error(_("token not found"))
self.contract_addr = ''
return
self.contract_addr = address
def on_qr(self, data):
from electrum.bitcoin import base_decode, is_address
data = data.strip()
if is_address(data) or data.startswith('vipstarcoin:'):
self.app.show_info(_("QR data is bitcoin URI."))
return
if self.search_token(data) or data.startswith('vipstoken:'):
self.set_URI(data)
return
# try to decode transaction
from electrum.transaction import Transaction
from electrum.util import bh2u
try:
text = bh2u(base_decode(data, None, base=43))
tx = Transaction(text)
tx.deserialize()
        except Exception:
tx = None
if tx:
self.app.show_info(_("QR data is transaction."))
return
# show error
self.app.show_error(_("Unable to decode QR data"))
| 36.273743
| 176
| 0.588788
| 4,352
| 0.67026
| 0
| 0
| 0
| 0
| 0
| 0
| 2,130
| 0.328046
|
43f27c688e68efd3839a07cc972cfa2dd88cc2cc
| 17,625
|
py
|
Python
|
statey/syms/encoders.py
|
cfeenstra67/statey
|
6d127ed48265e2e072fbb26486458a4b28a333ec
|
[
"MIT"
] | 4
|
2021-02-16T19:34:38.000Z
|
2022-01-31T16:44:14.000Z
|
statey/syms/encoders.py
|
cfeenstra67/statey
|
6d127ed48265e2e072fbb26486458a4b28a333ec
|
[
"MIT"
] | null | null | null |
statey/syms/encoders.py
|
cfeenstra67/statey
|
6d127ed48265e2e072fbb26486458a4b28a333ec
|
[
"MIT"
] | null | null | null |
import abc
import base64
from datetime import date, datetime
import dataclasses as dc
from typing import Type as PyType, Any, Dict, Optional
import marshmallow as ma
import pickle
import pluggy
import statey as st
from statey.syms import types, utils, Object
class Encoder(abc.ABC):
"""
    An encoder encodes data of some type, possibly containing native values, into some serializable format
"""
type: types.Type
@abc.abstractmethod
def encode(self, value: Any) -> Any:
"""
Given some _non-validated_ value, convert it to a serializable value
"""
raise NotImplementedError
@abc.abstractmethod
def decode(self, value: Any) -> Any:
"""
Given a freshly deserialized dictionary, potentially apply some post-processing or wrap
it in a native type
"""
raise NotImplementedError
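# Note (hypothetical values): a well-behaved Encoder round-trips, i.e.
# encoder.decode(encoder.encode(value)) == value; e.g. a date encoder maps
# date(2020, 1, 1) <-> "2020-01-01".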
class EncoderHooks:
"""
Hooks to wrap encoder functionality
"""
@st.hookspec(firstresult=True)
def encode(self, value: Any) -> Any:
"""
        Optionally apply some logic to encode the given value. Return None if the given value is not handled.
"""
@st.hookspec(firstresult=True)
def decode(self, value: Any) -> Any:
"""
Opposite of the encode() hook
"""
def create_encoder_plugin_manager():
"""
Factory function to create the default plugin manager for encoders
"""
pm = st.create_plugin_manager()
pm.add_hookspecs(EncoderHooks)
return pm
class HookHandlingEncoder(Encoder):
"""
Handles hooks properly in encode() and decode()
"""
def encode(self, value: Any) -> Any:
result = self.pm.hook.encode(value=value)
return value if result is None else result
def decode(self, value: Any) -> Any:
result = self.pm.hook.decode(value=value)
return value if result is None else result
@dc.dataclass(frozen=True)
class DefaultEncoder(HookHandlingEncoder, utils.Cloneable):
"""
The default encoder just handles hooks properly, doesn't do any actual encoding
"""
type: types.Type
pm: pluggy.PluginManager = dc.field(
init=False,
default_factory=create_encoder_plugin_manager,
compare=False,
repr=False,
)
@classmethod
@st.hookimpl
def get_encoder(
cls, type: types.Type, registry: "Registry", serializable: bool
) -> Encoder:
"""
The basic encoder behavior just calls hooks, but we should pass through plugins too.
"""
if serializable:
return None
inst = cls(type)
for plugin in type.meta.get("plugins", []):
inst.pm.register(plugin)
return inst
@dc.dataclass(frozen=True)
class MarshmallowEncoder(HookHandlingEncoder):
"""
Encodeable helper to get all functionality from a field factory
"""
type: types.Type
registry: "Registry"
pm: pluggy.PluginManager = dc.field(
init=False,
compare=False,
repr=False,
default_factory=create_encoder_plugin_manager,
)
@abc.abstractmethod
def base_marshmallow_field(self, encoding: bool) -> ma.fields.Field:
"""
Return the marshmallow field for this type
"""
raise NotImplementedError
def marshmallow_field(self, encoding: bool) -> ma.fields.Field:
kws = self._marshmallow_field_kws(self.type.nullable, self.type.meta)
base = self.base_marshmallow_field(encoding)
return utils.PossiblySymbolicField(base, self.type, self.registry, **kws)
def encode(self, data: Any) -> Any:
# Allow pre-encoding hooks
data = super().encode(data)
field = self.marshmallow_field(True)
with utils.reraise_ma_validation_error():
# This does the validation
data = field.deserialize(data)
# This allows us to leverage marshmallow to do things like encoding
# dates as strings w/ symmetrical encoding/decoding logic
return field.serialize("tmp", {"tmp": data})
def decode(self, data: Any) -> Any:
with utils.reraise_ma_validation_error():
value = self.marshmallow_field(False).deserialize(data)
# Allow post-decoding hooks
return super().decode(value)
@staticmethod
def _marshmallow_field_kws(nullable: bool, meta: Dict[str, Any]) -> Dict[str, Any]:
default = meta.get("default", utils.MISSING)
validate = meta.get("validator", utils.MISSING)
if nullable:
kws = {
"required": False,
"default": None if utils.is_missing(default) else default,
"missing": None if utils.is_missing(default) else default,
"allow_none": True,
}
elif utils.is_missing(default):
kws = {"required": True}
else:
kws = {"missing": default, "default": default}
if not utils.is_missing(validate):
kws["validate"] = validate
return kws
class MarshmallowValueEncoder(MarshmallowEncoder):
"""
Simple marshmallow encoder for value types
"""
base_field: ma.fields.Field
type_cls: PyType[types.Type]
serializable: bool
def base_marshmallow_field(self, encoding: bool) -> ma.fields.Field:
return self.base_field
@classmethod
@st.hookimpl
def get_encoder(
cls, type: types.Type, registry: "Registry", serializable: bool
) -> Encoder:
if serializable and not cls.serializable:
return None
if isinstance(type, cls.type_cls):
instance = cls(type, registry)
for plugin in type.meta.get("plugins", []):
instance.pm.register(plugin)
return instance
return None
@dc.dataclass(frozen=True)
class IntegerEncoder(MarshmallowValueEncoder):
type_cls = types.IntegerType
base_field = ma.fields.Int()
serializable = True
@dc.dataclass(frozen=True)
class FloatEncoder(MarshmallowValueEncoder):
type_cls = types.FloatType
base_field = ma.fields.Float()
serializable = True
@dc.dataclass(frozen=True, repr=False)
class BooleanEncoder(MarshmallowValueEncoder):
type_cls = types.BooleanType
base_field = ma.fields.Bool()
serializable = True
@dc.dataclass(frozen=True, repr=False)
class StringEncoder(MarshmallowValueEncoder):
type_cls = types.StringType
base_field = ma.fields.Str()
serializable = True
class DateLikeFuzzyDeserialize:
""""""
def _deserialize(self, value, attr, data, **kwargs):
error = None
try:
return super()._deserialize(value, attr, data, **kwargs)
except ma.ValidationError as err:
error = err
fmt = self.format
try:
for new_fmt, func in self.DESERIALIZATION_FUNCS.items():
self.format = new_fmt
try:
return super()._deserialize(value, attr, data, **kwargs)
except ma.ValidationError:
pass
finally:
self.format = fmt
raise error
class DateField(DateLikeFuzzyDeserialize, ma.fields.Date):
""""""
def from_date(value: Any) -> date:
if isinstance(value, date):
return value
raise ma.ValidationError("Not a valid date.")
def from_datetime(value: Any) -> date:
if isinstance(value, datetime):
return value.date()
raise ma.ValidationError("Not a valid date.")
DESERIALIZATION_FUNCS = {
**ma.fields.Date.DESERIALIZATION_FUNCS,
"date": from_date,
"datetime": from_datetime,
}
class DateTimeField(DateLikeFuzzyDeserialize, ma.fields.DateTime):
""""""
def from_datetime(value: Any) -> datetime:
if isinstance(value, datetime):
return value
raise ma.ValidationError("Not a valid datetime.")
def from_date(value: Any) -> datetime:
if isinstance(value, date):
return datetime(value.year, value.month, value.day)
raise ma.ValidationError("Not a valid datetime.")
DESERIALIZATION_FUNCS = {
**ma.fields.DateTime.DESERIALIZATION_FUNCS,
"datetime": from_datetime,
"date": from_date,
}
@dc.dataclass(frozen=True, repr=False)
class DateEncoder(MarshmallowValueEncoder):
type_cls = types.DateType
base_field = DateField()
serializable = True
@dc.dataclass(frozen=True, repr=False)
class DateTimeEncoder(MarshmallowValueEncoder):
type_cls = types.DateTimeType
base_field = DateTimeField()
serializable = True
@dc.dataclass(frozen=True, repr=False)
class ArrayEncoder(MarshmallowEncoder):
"""
An array with some element type
"""
element_encoder: Encoder
def base_marshmallow_field(self, encoding: bool) -> ma.fields.Field:
kws = self._marshmallow_field_kws(
self.element_encoder.type.nullable, self.element_encoder.type.meta
)
if encoding:
kws["serialize"] = lambda x: x
kws["deserialize"] = self.element_encoder.encode
else:
kws["serialize"] = lambda x: x
kws["deserialize"] = self.element_encoder.decode
element_field = utils.SingleValueFunction(**kws)
return ma.fields.List(element_field)
@classmethod
@st.hookimpl
def get_encoder(
cls, type: types.Type, registry: "Registry", serializable: bool
) -> Encoder:
if not isinstance(type, types.ArrayType):
return None
element_encoder = registry.get_encoder(type.element_type, serializable)
instance = cls(type, registry, element_encoder)
for plugin in type.meta.get("plugins", []):
instance.pm.register(plugin)
return instance
@dc.dataclass(frozen=True, repr=False)
class StructEncoder(MarshmallowEncoder):
field_encoders: Dict[str, Encoder]
def base_marshmallow_field(self, encoding: bool) -> ma.fields.Field:
return ma.fields.Nested(self.marshmallow_schema(encoding))
def marshmallow_schema(self, encoding: bool) -> ma.Schema:
fields = {}
for name, encoder in self.field_encoders.items():
kws = self._marshmallow_field_kws(encoder.type.nullable, encoder.type.meta)
if encoding:
kws["serialize"] = lambda x: x
kws["deserialize"] = encoder.encode
else:
kws["serialize"] = lambda x: x
kws["deserialize"] = encoder.decode
fields[name] = utils.SingleValueFunction(**kws)
return type("StructSchema", (ma.Schema,), fields)()
@classmethod
@st.hookimpl
def get_encoder(
cls, type: types.Type, registry: "Registry", serializable: bool
) -> Encoder:
if not isinstance(type, types.StructType):
return None
encoders = {}
for field in type.fields:
encoders[field.name] = registry.get_encoder(field.type, serializable)
instance = cls(type, registry, encoders)
for plugin in type.meta.get("plugins", []):
instance.pm.register(plugin)
return instance
@dc.dataclass(frozen=True)
class NativeFunctionEncoder(StructEncoder):
"""
Encoder for native python functions
"""
module: Any = pickle
def encode(self, value: Any) -> Any:
if isinstance(value, Object) or value is None:
return super().encode(value)
serialized_bytes = self.module.dumps(value.func)
converted = {
"serialized": base64.b64encode(serialized_bytes),
"name": value.name,
}
return super().encode(converted)
def decode(self, value: Any) -> Any:
from statey.syms import func
value = super().decode(value)
if isinstance(value, Object) or value is None:
return value
function_ob = self.module.loads(base64.b64decode(value["serialized"]))
return func.NativeFunction(self.type, function_ob, value["name"])
@classmethod
@st.hookimpl
def get_encoder(
cls, type: types.Type, registry: "Registry", serializable: bool
) -> Encoder:
if not isinstance(type, types.NativeFunctionType):
return None
as_struct = types.StructType(type.fields, type.nullable, type.meta)
struct_encoder = registry.get_encoder(as_struct, serializable)
return cls(type, registry, struct_encoder.field_encoders)
@dc.dataclass(frozen=True, repr=False)
class MapEncoder(MarshmallowEncoder):
"""
An array with some element type
"""
key_encoder: Encoder
value_encoder: Encoder
def base_marshmallow_field(self, encoding: bool) -> ma.fields.Field:
key_kws = self._marshmallow_field_kws(
self.key_encoder.type.nullable, self.key_encoder.type.meta
)
if encoding:
key_kws["serialize"] = lambda x: x
key_kws["deserialize"] = self.key_encoder.encode
else:
key_kws["serialize"] = lambda x: x
key_kws["deserialize"] = self.key_encoder.decode
key_field = utils.SingleValueFunction(**key_kws)
value_kws = self._marshmallow_field_kws(
self.value_encoder.type.nullable, self.value_encoder.type.meta
)
if encoding:
value_kws["serialize"] = lambda x: x
value_kws["deserialize"] = self.value_encoder.encode
else:
value_kws["serialize"] = lambda x: x
value_kws["deserialize"] = self.value_encoder.decode
value_field = utils.SingleValueFunction(**value_kws)
return ma.fields.Dict(keys=key_field, values=value_field)
@classmethod
@st.hookimpl
def get_encoder(
cls, type: types.Type, registry: "Registry", serializable: bool
) -> Encoder:
if not isinstance(type, types.MapType):
return None
key_encoder = registry.get_encoder(type.key_type, serializable)
value_encoder = registry.get_encoder(type.value_type, serializable)
instance = cls(type, registry, key_encoder, value_encoder)
for plugin in type.meta.get("plugins", []):
instance.pm.register(plugin)
return instance
@dc.dataclass(frozen=True)
class TypeEncoder(HookHandlingEncoder):
"""
    Encodes a types.TypeType.
"""
type: types.Type
registry: "Registry"
pm: pluggy.PluginManager = dc.field(
init=False,
compare=False,
repr=False,
default_factory=create_encoder_plugin_manager,
)
def encode(self, value: Any) -> Any:
super_encoded = super().encode(value)
if value is None:
if self.type.nullable:
return None
raise st.exc.InputValidationError({"_schema": ["Invalid input type."]})
if isinstance(value, dict):
return value
try:
type_serializer = self.registry.get_type_serializer(value)
except st.exc.NoTypeSerializerFound as err:
raise st.exc.InputValidationError(
{"_schema": ["Unable to find type serializer."]}
            ) from err
return type_serializer.serialize(value)
def decode(self, value: Any) -> Any:
if value is None:
if self.type.nullable:
return super().decode(value)
raise st.exc.InputValidationError({"_schema": ["Invalid input type."]})
if isinstance(value, types.Type):
return super().decode(value)
try:
type_serializer = self.registry.get_type_serializer_from_data(value)
except st.exc.NoTypeSerializerFound as err:
raise st.exc.InputValidationError(
{"_schema": ["Unable to find type serializer."]}
            ) from err
typ = type_serializer.deserialize(value)
return super().decode(typ)
@classmethod
@st.hookimpl
def get_encoder(
cls, type: types.Type, registry: "Registry", serializable: bool
) -> Encoder:
if not isinstance(type, types.TypeType):
return None
instance = cls(type, registry)
for plugin in type.meta.get("plugins", []):
instance.pm.register(plugin)
return instance
ENCODER_CLASSES = [
DefaultEncoder,
IntegerEncoder,
FloatEncoder,
BooleanEncoder,
StringEncoder,
ArrayEncoder,
StructEncoder,
NativeFunctionEncoder,
MapEncoder,
TypeEncoder,
DateEncoder,
DateTimeEncoder,
]
# We'll prefer a better pickling module if we have one.
try:
import dill
except ImportError:
import warnings
warnings.warn("Dill is not installed.", RuntimeWarning)
else:
@dc.dataclass(frozen=True)
class DillFunctionEncoder(NativeFunctionEncoder):
"""
dill-based python function encoder
"""
module: Any = dill
ENCODER_CLASSES.append(DillFunctionEncoder)
try:
import cloudpickle
except ImportError:
import warnings
warnings.warn("Cloudpickle is not installed.", RuntimeWarning)
else:
@dc.dataclass(frozen=True)
class CloudPickleFunctionEncoder(NativeFunctionEncoder):
"""
cloudpickle-based python function encoder
"""
module: Any = cloudpickle
ENCODER_CLASSES.append(CloudPickleFunctionEncoder)
def register(registry: Optional["Registry"] = None) -> None:
"""
Replace default encoder with encoders defined here
"""
if registry is None:
registry = st.registry
for cls in ENCODER_CLASSES:
registry.register(cls)
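# Usage sketch (hypothetical names; assumes statey's default registry):
#   register()                                  # install the encoders above
#   enc = st.registry.get_encoder(some_type, serializable=True)
#   payload = enc.encode(value)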
| 29.228856
| 109
| 0.633816
| 15,638
| 0.887262
| 0
| 0
| 13,400
| 0.760284
| 0
| 0
| 2,571
| 0.145872
|
43f28356d6bbc800add9ebabe90e54e8e11a08d4
| 13,558
|
py
|
Python
|
src/data.py
|
saattrupdan/danish-asr-models
|
967e558d0032d67afbe72b625f3cad0eca65cc2a
|
[
"MIT"
] | 2
|
2022-03-10T10:47:43.000Z
|
2022-03-11T09:24:34.000Z
|
src/data.py
|
saattrupdan/danish-asr-models
|
967e558d0032d67afbe72b625f3cad0eca65cc2a
|
[
"MIT"
] | null | null | null |
src/data.py
|
saattrupdan/danish-asr-models
|
967e558d0032d67afbe72b625f3cad0eca65cc2a
|
[
"MIT"
] | null | null | null |
'''Functions related to the data loading and processing'''
from transformers import (Wav2Vec2CTCTokenizer,
Wav2Vec2FeatureExtractor,
Wav2Vec2Processor)
from datasets import (load_dataset as ds_load_dataset,
Dataset,
DatasetDict,
Audio)
from unicodedata import normalize
from typing import Optional, Tuple
from pathlib import Path
import json
import re
class AudioDataset:
'''A dataset containing audio data.
Args:
dataset_id (str, optional):
The HF dataset id. Defaults to
'mozilla-foundation/common_voice_8_0'.
dataset_subset (str, optional):
The HF dataset subset. Defaults to 'da'.
sampling_rate (int, optional):
The sampling rate of the audio data. Defaults to 16_000.
train_name (str, optional):
The name of the train split. Defaults to 'train'.
validation_name (str or None, optional):
The name of the validation split. If None then the validation set
is created from the train split. Defaults to 'validation'.
test_name (str or None, optional):
The name of the test split. If None then the test set is created
from the validation or train split. Defaults to 'test'.
'''
def __init__(self,
dataset_id: str = 'mozilla-foundation/common_voice_8_0',
dataset_subset: Optional[str] = 'da',
sampling_rate: int = 16_000,
train_name: str = 'train',
validation_name: Optional[str] = 'validation',
test_name: Optional[str] = 'test'):
self.dataset_id = dataset_id
self.dataset_subset = dataset_subset
self.sampling_rate = sampling_rate
self.train_name = train_name
self.validation_name = validation_name
self.test_name = test_name
# Load the dataset
self.train, self.val, self.test = self._load_dataset()
def preprocess(self):
'''Preprocess the dataset'''
# Clean the transcriptions
self.train = self.train.map(self._clean_examples,
keep_in_memory=True,
load_from_cache_file=False,)
self.val = self.val.map(self._clean_examples,
keep_in_memory=True,
load_from_cache_file=False)
self.test = self.test.map(self._clean_examples,
keep_in_memory=True,
load_from_cache_file=False)
# Resample the audio
audio = Audio(sampling_rate=self.sampling_rate)
self.train = self.train.cast_column('audio', audio)
self.val = self.val.cast_column('audio', audio)
self.test = self.test.cast_column('audio', audio)
# Extract and dump the vocabulary from the training dataset
self._dump_vocabulary(self.train)
        # Initialise the preprocessor
self.initialise_preprocessor()
# Tokenize the transcriptions
self.train = self.train.map(self._tokenize_examples,
keep_in_memory=True,
load_from_cache_file=False)
self.val = self.val.map(self._tokenize_examples,
keep_in_memory=True,
load_from_cache_file=False)
self.test = self.test.map(self._tokenize_examples,
keep_in_memory=True,
load_from_cache_file=False)
return self
def initialise_preprocessor(self):
'''Initialise the preprocessor'''
        # Initialise the tokenizer
self.tokenizer = Wav2Vec2CTCTokenizer.from_pretrained(
'./',
unk_token='<unk>',
pad_token='<pad>',
bos_token='<s>',
eos_token='</s>',
word_delimiter_token='|'
)
# Initialise the feature extractor
self.extractor = Wav2Vec2FeatureExtractor(
feature_size=1,
sampling_rate=self.sampling_rate,
padding_value=0.0,
do_normalize=True,
return_attention_mask=True
)
# Initialise the processor, which wraps the tokenizer and the extractor
self.processor = Wav2Vec2Processor(
feature_extractor=self.extractor,
tokenizer=self.tokenizer
)
return self
@staticmethod
def _load_dataset_split(dataset_id: str,
name: Optional[str] = None,
split: str = 'train',
use_auth_token: bool = True) -> Dataset:
'''Load a dataset split.
Args:
dataset_id (str):
The HF dataset id.
name (str or None, optional):
The name of the dataset split. If None then the dataset split
is created from the train split. Defaults to None.
split (str, optional):
The HF dataset split. Defaults to 'train'.
use_auth_token (bool, optional):
Whether to use the auth token. Defaults to True.
Returns:
Dataset:
The loaded dataset split.
'''
try:
return ds_load_dataset(path=dataset_id,
name=name,
split=split,
use_auth_token=use_auth_token)
except ValueError:
return DatasetDict.load_from_disk(dataset_id)[split]
def _load_dataset(self) -> Tuple[Dataset, Dataset, Dataset]:
'''Loads a dataset.
Returns:
tuple:
A triple (train, val, test), containing the three splits of the
dataset.
'''
# Load train dataset
train = self._load_dataset_split(dataset_id=self.dataset_id,
name=self.dataset_subset,
split=self.train_name)
# Load validation and test datasets. If both `validation_name` and
# `test_name` are not None then these are simply loaded. If only
# `test_name` is not None then a validation set is created from the
# train dataset.
if self.test_name is not None:
test = self._load_dataset_split(dataset_id=self.dataset_id,
name=self.dataset_subset,
split=self.test_name)
if self.validation_name is not None:
val = self._load_dataset_split(dataset_id=self.dataset_id,
name=self.dataset_subset,
split=self.validation_name)
else:
split_dict = train.train_test_split(test_size=0.1, seed=4242)
train = split_dict['train']
val = split_dict['test']
# If only `validation_name` is not None then the validation set is used
# as a test set and a new validation set is created from the train
# dataset.
elif self.validation_name is not None:
test = self._load_dataset_split(dataset_id=self.dataset_id,
name=self.dataset_subset,
split=self.validation_name)
split_dict = train.train_test_split(test_size=0.1, seed=4242)
train = split_dict['train']
val = split_dict['test']
# If both `validation_name` and `test_name` are None then validation
# and test sets are created from the train dataset.
else:
# Split train dataset into train and a combined validation and test
# set
split_dict = train.train_test_split(test_size=0.2, seed=4242)
train = split_dict['train']
val_test = split_dict['test']
# Create validation set from the combined validation and test set
split_dict = val_test.train_test_split(test_size=0.5, seed=4242)
val = split_dict['train']
test = split_dict['test']
return train, val, test
@staticmethod
def _clean_examples(examples: dict) -> dict:
'''Cleans the transcription of an example.
Args:
examples (dict):
A dictionary containing the examples to preprocess.
Returns:
dict:
A dictionary containing the cleaned transcription.
'''
# Clean the transcription
examples['sentence'] = clean_transcription(examples['sentence'])
return examples
def _tokenize_examples(self, examples: dict) -> dict:
'''Tokenizes the transcription of an example.
Args:
examples (dict):
A dictionary containing the examples to preprocess.
Returns:
dict:
A dictionary containing the cleaned transcription.
'''
# Preprocess labels
with self.processor.as_target_processor():
examples["labels"] = self.processor(examples["sentence"]).input_ids
# Add input_length column
examples['input_length'] = len(examples['labels'])
return examples
def _preprocess_one(self, example: dict) -> dict:
'''Preprocess the audio of an example.
Args:
examples (dict):
A dictionary containing the examples to preprocess.
Returns:
dict:
A dictionary containing the preprocessed examples.
'''
# Get the dictionary from the examples containing the audio data
audio = example['audio']
# Preprocess the audio
example['input_values'] = (
self.processor(audio['array'],
sampling_rate=audio['sampling_rate'])
.input_values[0]
)
example['input_length'] = len(example['input_values'])
# Preprocess labels
with self.processor.as_target_processor():
example["labels"] = self.processor(example["sentence"]).input_ids
# Return the preprocessed examples
return example
@staticmethod
def _dump_vocabulary(dataset: Dataset):
'''Extracts the vocabulary from the dataset and dumps it to a file.
Args:
dataset (Dataset):
The dataset from which to extract the vocabulary. Needs to
contain a feature named 'sentence'.
'''
# Get all the text in the transcriptions
all_text = '|'.join(dataset['sentence'])
# Get the unique characters in the text
unique_characters = set(all_text)
# Form the vocabulary dictionary
vocab = {char: idx for idx, char in enumerate(unique_characters)}
# Manually add special tokens
vocab['<unk>'] = len(vocab)
vocab['<pad>'] = len(vocab)
vocab['<s>'] = len(vocab)
vocab['</s>'] = len(vocab)
# Dump the vocabulary to a json file
with Path('vocab.json').open('w') as f:
json.dump(vocab, f)
def clean_transcription(doc: str) -> str:
'''Cleans the transcription of a document.
Args:
doc (str):
A document to be cleaned.
Returns:
str:
The cleaned document.
'''
# NFKC normalize the transcriptions
doc = normalize('NFKC', doc)
# Remove punctuation
regex = r'[\[\]\{\}\(\)\,\?\.\!\-\—\–\;\:\"\“\'\’\%\”\�\•\n\r\⁄\’]'
doc = re.sub(regex, '', doc)
# Remove non-vocabulary characters
conversion_dict = {
'aa': 'å',
'ğ': 'g',
'ñ': 'n',
'ń': 'n',
'è': 'e',
'μ': 'mikro',
'§': ' paragraf ',
'‰': ' promille ',
'ú': 'u',
'ş': 's',
'ê': 'e',
'ã': 'a',
'ü': 'ue',
'ë': 'e',
'ć': 'c',
'ä': 'æ',
'í': 'i',
'š': 's',
'î': 'i',
'ě': 'e',
'ð': 'd',
'á': 'a',
'ó': 'o',
'þ': 'th',
'ı': 'i',
'ö': 'ø',
'ç': 'c',
'ș': 's',
        # Group references must be raw strings to survive Python escaping, and
        # the thousand/hundred patterns need a capture group; r'\g<1>0' appends
        # a literal zero to group 1 (e.g. '45' -> '5 og 40').
        '(?<![0-9])(18|19|20)([0-9]{2})(?![0-9])': r'\1 \2',
        '1000': ' tusind ',
        '([2-9])000': r' \1 tusind',
        '100': ' hundrede ',
        '([2-9])00': r' \1 hundrede',
        '(?<![0-9])([0-9])([0-9])(?![0-9])': r'\2 og \g<1>0',
'10': ' ti ',
'20': ' tyve ',
'30': ' tredive ',
'40': ' fyrre ',
'50': ' halvtreds ',
'60': ' treds ',
'70': ' halvfjerds ',
'80': ' firs ',
'90': ' halvfems ',
'0': ' nul ',
'1': ' et ',
'2': ' to ',
'3': ' tre ',
'4': ' fire ',
'5': ' fem ',
'6': ' seks ',
'7': ' syv ',
'8': ' otte ',
'9': ' ni ',
}
for key, value in conversion_dict.items():
doc = re.sub(key, value, doc)
    # Remove stray combining accents and zero-width spaces
doc = re.sub(u'\u0301', ' ', doc)
doc = re.sub(u'\u200b', ' ', doc)
# Replace spaces with a pipe, to emphasise the word boundaries
doc = re.sub(r' +', '|', doc)
# Make the transcription lowercase and strip whitespace
doc = doc.lower().strip().strip('|')
return doc
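# Quick sanity sketch (hypothetical, not part of the pipeline):
#   clean_transcription('Hej, verden!') -> 'hej|verden'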
| 34.411168
| 79
| 0.52906
| 10,943
| 0.804159
| 0
| 0
| 2,549
| 0.187316
| 0
| 0
| 5,673
| 0.416887
|
43f298d87e261cc2cbf422453d37df22dea68372
| 1,604
|
py
|
Python
|
etravel/urls.py
|
zahir1509/project-ap-etravel
|
2113a84ae4340be0e8cfa2676f448878c625e3e3
|
[
"MIT"
] | 1
|
2020-12-06T17:49:11.000Z
|
2020-12-06T17:49:11.000Z
|
etravel/urls.py
|
zahir1509/project-ap-etravel
|
2113a84ae4340be0e8cfa2676f448878c625e3e3
|
[
"MIT"
] | null | null | null |
etravel/urls.py
|
zahir1509/project-ap-etravel
|
2113a84ae4340be0e8cfa2676f448878c625e3e3
|
[
"MIT"
] | 1
|
2020-12-07T14:20:41.000Z
|
2020-12-07T14:20:41.000Z
|
"""etravel URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
from main import views
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.homepage, name = 'home'),
path('login/', views.loginPage, name = 'login'),
path('logout/', views.logoutUser, name = 'logout'),
path('signup/', views.signupPage, name = 'signup'),
path('browsehotel/', views.filterhotel, name = 'browsehotel'),
path('myaccount/', views.accountpage, name='myaccount'),
path('editprofile/', views.edit_profile, name='editprofile'),
path('change-password/', views.change_password, name='editpassword'),
path('hotel_booking/', views.bookhotel, name='bookhotel'),
path('hotel/<int:hotel_id>', views.hotelpage, name='hotelpage'),
path('cancelbooking/', views.cancelbooking, name='cancelbooking'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 42.210526
| 77
| 0.706983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 895
| 0.55798
|
43f37d4e6dabec0097acd8b5f0892f346b8200d5
| 4,447
|
py
|
Python
|
adet/data/video_data/yvos_annot_condinst.py
|
Tanveer81/BoxVOS
|
c30aa319f18f3fbee2a25e0ed25cb006a4598300
|
[
"BSD-2-Clause"
] | 4
|
2022-02-16T02:48:27.000Z
|
2022-03-08T06:54:32.000Z
|
adet/data/video_data/yvos_annot_condinst.py
|
Tanveer81/BoxVOS
|
c30aa319f18f3fbee2a25e0ed25cb006a4598300
|
[
"BSD-2-Clause"
] | null | null | null |
adet/data/video_data/yvos_annot_condinst.py
|
Tanveer81/BoxVOS
|
c30aa319f18f3fbee2a25e0ed25cb006a4598300
|
[
"BSD-2-Clause"
] | null | null | null |
import json
import time
from glob import glob
from pathlib import Path
from adet.data.video_data.util import *
from PIL import Image, ImageFont, ImageDraw
import os
import random
categories = ['airplane', 'ape', 'bear', 'bike', 'bird', 'boat', 'bucket', 'bus', 'camel', 'cat',
'cow', 'crocodile', 'deer', 'dog', 'dolphin', 'duck', 'eagle', 'earless_seal',
'elephant', 'fish', 'fox', 'frisbee', 'frog', 'giant_panda', 'giraffe', 'hand',
'hat', 'hedgehog', 'horse', 'knife', 'leopard', 'lion', 'lizard', 'monkey',
'motorbike', 'mouse', 'others', 'owl', 'paddle', 'parachute', 'parrot', 'penguin',
'person', 'plant', 'rabbit', 'raccoon', 'sedan', 'shark', 'sheep', 'sign',
'skateboard', 'snail', 'snake', 'snowboard', 'squirrel', 'surfboard', 'tennis_racket',
'tiger', 'toilet', 'train', 'truck', 'turtle', 'umbrella', 'whale', 'zebra']
def generate_detectron2_annotations(annot_path, detectron2_annos_path, meta_path, split):
    with open(meta_path) as f:
        data = json.load(f)
video_id_names = list(data.keys())
video_id_names.sort()
detectron2_annos = {}
detectron2_annos['annos'] = []
detectron2_annos['bbox_mode'] = 'BoxMode.XYXY_ABS'
errors = {}
start_time = time.time()
cont = False
for i, video_id in enumerate(video_id_names):
annotations_paths = np.sort(glob(os.path.join(annot_path, video_id, '*.png'))).tolist()
for j in range(0, len(annotations_paths)):
try:
file_name = annotations_paths[j] # path
mask_image = Image.open(file_name)
# Create the annotations
sub_masks = create_sub_masks(mask_image)
annotations = []
for object_id, sub_mask in sub_masks.items():
segmentation = create_sub_mask_annotation(sub_mask)
segmentation = [s for s in segmentation if len(s) >= 6]
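                    # A valid COCO polygon needs at least three points, i.e.
                    # six coordinates, so degenerate segments are dropped above.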
                    if len(segmentation) == 0 and split == 'val':
                        cont = True
                        break
bbox, area = submask_to_box(sub_mask, True) # xyxy format
category_id = categories.index(data[video_id]['objects'][object_id]['category'])
iscrowd = 0 # TODO: calculate this
annotation = {
'iscrowd': iscrowd,
'bbox': bbox,
'category_id': category_id,
'segmentation': segmentation,
'object_id': object_id
}
annotations.append(annotation)
if cont:
cont = False
continue
anno = {
'video_id': video_id,
'frame_id': file_name.split('/')[-1].split('.')[0],
'height': mask_image.height,
'width': mask_image.width,
'image_id': i + j,
'annotations': annotations
}
detectron2_annos['annos'].append(anno)
except Exception as e:
frame = file_name.split('/')[-1].split('.')[0]
try:
errors[video_id].append(frame)
except KeyError:
errors[video_id] = [frame]
print(f'video_id: {video_id}, frame_id: {frame}')
print(f"An exception occurred: {e}")
print(f'{i + 1}/{len(video_id_names)}: {video_id} : {(time.time() - start_time)} seconds')
print(f'Total Time : {(time.time() - start_time)} seconds')
with open(f'{detectron2_annos_path}/detectron2-annotations-{split}-balanced-2.json', 'w') as outfile:
json.dump(detectron2_annos, outfile)
def main():
root = '../data'
# img_path = f'{root}/youtubeVOS/train/JPEGImages/'
annot_path = f'{root}/youtubeVOS/train/Annotations/'
detectron2_annos_path = f'{root}/youtubeVOS/train/'
for split in ['valid', 'train', 'test']:
meta_path = f'{root}/youtubeVOS/train/train-{split}-meta-balanced.json'
generate_detectron2_annotations(annot_path, detectron2_annos_path, meta_path, split)
if __name__ == '__main__':
main()
| 43.174757
| 106
| 0.527097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,275
| 0.28671
|
43f63cbc9ceb8f44b281dc9e30baf482c1545385
| 1,342
|
py
|
Python
|
lookup_table.py
|
yishayv/lyacorr
|
deed114b4cadd4971caec68e2838a5fac39827b1
|
[
"MIT"
] | 2
|
2017-03-21T14:18:35.000Z
|
2020-03-30T20:51:33.000Z
|
lookup_table.py
|
yishayv/lyacorr
|
deed114b4cadd4971caec68e2838a5fac39827b1
|
[
"MIT"
] | null | null | null |
lookup_table.py
|
yishayv/lyacorr
|
deed114b4cadd4971caec68e2838a5fac39827b1
|
[
"MIT"
] | null | null | null |
import numpy as np
def fast_linear_interpolate(f, x):
"""
:param f: array of evenly spaced function values
:param x: array of fractional positions to sample
:type f: np.multiarray.ndarray
:type x: np.multiarray.ndarray
:rtype: np.multiarray.ndarray
"""
x0 = np.floor(x).astype(int)
x1 = np.add(x0, 1)
# limit the range of x1 to prevent out of bounds access
return (x1 - x) * f[x0] + (x - x0) * f[np.clip(x1, a_min=0, a_max=f.size - 1)]
class LinearInterpTable:
def __init__(self, func, x_start, x_end, x_step):
"""
:param func: a function with a 1D array argument
:type x_start: float64
:type x_end: float64
:type x_step: float64
"""
self._x_table = np.arange(x_start, x_end, x_step)
self._func_value_table = func(self._x_table)
self.x_start = x_start
self.x_end = x_end
self.x_step = x_step
def eval(self, ar_x):
"""
:type ar_x: np.multiarray.ndarray
:rtype: np.multiarray.ndarray
"""
assert np.all(ar_x < self.x_end) & np.all(ar_x > self.x_start), "lookup value out of range"
ar_index = self._func_value_table.size * (ar_x - self.x_start) / (self.x_end - self.x_start)
return fast_linear_interpolate(self._func_value_table, ar_index)
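# Usage sketch (hypothetical values):
#   table = LinearInterpTable(np.sin, x_start=0.0, x_end=10.0, x_step=0.01)
#   y = table.eval(np.array([0.5, 2.5]))   # linear interpolation of sin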
| 29.822222
| 100
| 0.622206
| 854
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 564
| 0.420268
|
43f6879757f40989d16e1db4126c95e8352e1759
| 492
|
py
|
Python
|
src/seedwork/domain/rules.py
|
pgorecki/python-ddd
|
0073ccce35c651be263f5d7d3d63f9a49bc0b78a
|
[
"MIT"
] | 10
|
2022-03-16T19:26:51.000Z
|
2022-03-31T23:50:51.000Z
|
src/seedwork/domain/rules.py
|
pgorecki/python-ddd
|
0073ccce35c651be263f5d7d3d63f9a49bc0b78a
|
[
"MIT"
] | null | null | null |
src/seedwork/domain/rules.py
|
pgorecki/python-ddd
|
0073ccce35c651be263f5d7d3d63f9a49bc0b78a
|
[
"MIT"
] | 2
|
2022-03-16T19:26:54.000Z
|
2022-03-27T13:21:02.000Z
|
from pydantic import BaseModel
class BusinessRule(BaseModel):
"""This is a base class for implementing domain rules"""
class Config:
arbitrary_types_allowed = True
    # This is the error message that a broken rule reports back
__message: str = "Business rule is broken"
def get_message(self) -> str:
return self.__message
def is_broken(self) -> bool:
pass
def __str__(self):
return f"{self.__class__.__name__} {super().__str__()}"
| 23.428571
| 63
| 0.666667
| 458
| 0.930894
| 0
| 0
| 0
| 0
| 0
| 0
| 185
| 0.376016
|
43f6f242e391b123212da34e3f976064029b361e
| 627
|
py
|
Python
|
exs/mundo_2/python/067.py
|
QuatroQuatros/exercicios-CeV
|
c9b995b717fe1dd2c2eee3557db0161390bc78b0
|
[
"MIT"
] | 45
|
2021-01-02T18:36:01.000Z
|
2022-03-26T19:46:47.000Z
|
exs/mundo_2/python/067.py
|
QuatroQuatros/exercicios-CeV
|
c9b995b717fe1dd2c2eee3557db0161390bc78b0
|
[
"MIT"
] | 24
|
2020-12-31T17:23:16.000Z
|
2021-03-11T19:44:36.000Z
|
exs/mundo_2/python/067.py
|
QuatroQuatros/exercicios-CeV
|
c9b995b717fe1dd2c2eee3557db0161390bc78b0
|
[
"MIT"
] | 28
|
2020-12-30T15:57:16.000Z
|
2022-03-26T19:46:49.000Z
|
"""
Desafio 067
Problema: Faça um programa que mostre a tabuada de vários números,
um de cada vez, para cada valor digitado pelo usuário.
O programa será interrompido quando o número solicitado
for negativo.
Resolução do problema:
"""
print('-' * 20)
print(f'{" Tabuada v3.0 ":~^20}')
print('-' * 20)
while True:
tabuada = int(input('Tabuada desejada: '))
print('-' * 20)
if tabuada < 0:
break
for cont in range(0, 11):
print(f'{tabuada} x {cont:2} = {tabuada * cont:2}')
print('-' * 20)
print(f'{" TABUADA FINALIZADA ":~^30}\nFOI UM PRAZER AJUDA-LO!!!')
| 23.222222
| 66
| 0.601276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 435
| 0.685039
|
43fbb641614733e9b5376e1fc262a24a13b94350
| 1,492
|
py
|
Python
|
pyexcel_xlsx/__init__.py
|
pyexcel/pyexcel-xlsx
|
3b3639d12270cc10fff32651280d139ec65bb354
|
[
"BSD-3-Clause"
] | 101
|
2016-02-22T03:51:39.000Z
|
2022-03-08T02:21:50.000Z
|
pyexcel_xlsx/__init__.py
|
pyexcel/pyexcel-xlsx
|
3b3639d12270cc10fff32651280d139ec65bb354
|
[
"BSD-3-Clause"
] | 46
|
2016-05-09T14:16:31.000Z
|
2022-02-25T18:40:57.000Z
|
pyexcel_xlsx/__init__.py
|
pyexcel/pyexcel-xlsx
|
3b3639d12270cc10fff32651280d139ec65bb354
|
[
"BSD-3-Clause"
] | 23
|
2016-01-29T12:26:02.000Z
|
2021-12-30T04:32:20.000Z
|
"""
pyexcel_xlsx
~~~~~~~~~~~~~~~~~~~
The lower level xlsx file format handler using openpyxl
:copyright: (c) 2015-2019 by Onni Software Ltd & its contributors
:license: New BSD License
"""
from pyexcel_io.io import get_data as read_data
from pyexcel_io.io import isstream
from pyexcel_io.io import save_data as write_data
from pyexcel_io.plugins import IOPluginInfoChainV2
__FILE_TYPE__ = "xlsx"
IOPluginInfoChainV2(__name__).add_a_reader(
relative_plugin_class_path="xlsxr.XLSXBook",
locations=["file", "memory"],
file_types=[__FILE_TYPE__, "xlsm"],
stream_type="binary",
).add_a_reader(
relative_plugin_class_path="xlsxr.XLSXBookInContent",
locations=["content"],
file_types=[__FILE_TYPE__, "xlsm"],
stream_type="binary",
).add_a_writer(
relative_plugin_class_path="xlsxw.XLSXWriter",
locations=["file", "memory"],
file_types=[__FILE_TYPE__, "xlsm"],
stream_type="binary",
)
def save_data(afile, data, file_type=None, **keywords):
"""standalone module function for writing module supported file type"""
if isstream(afile) and file_type is None:
file_type = __FILE_TYPE__
write_data(afile, data, file_type=file_type, **keywords)
def get_data(afile, file_type=None, **keywords):
"""standalone module function for reading module supported file type"""
if isstream(afile) and file_type is None:
file_type = __FILE_TYPE__
return read_data(afile, file_type=file_type, **keywords)
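# Usage sketch (hypothetical file names):
#   data = get_data("input.xlsx")     # {'Sheet 1': [[...], ...]}
#   save_data("output.xlsx", data)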
| 31.744681
| 75
| 0.72185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 496
| 0.33244
|
43fc77cfe764566289284319cba58cc6a6b81ffc
| 12,775
|
py
|
Python
|
GeneralTools/graph_funcs/generative_model_metric.py
|
frhrdr/MMD-GAN
|
7522093498b658026344541ddd5c248095763fb6
|
[
"Apache-2.0"
] | null | null | null |
GeneralTools/graph_funcs/generative_model_metric.py
|
frhrdr/MMD-GAN
|
7522093498b658026344541ddd5c248095763fb6
|
[
"Apache-2.0"
] | null | null | null |
GeneralTools/graph_funcs/generative_model_metric.py
|
frhrdr/MMD-GAN
|
7522093498b658026344541ddd5c248095763fb6
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import tensorflow as tf
from tensorflow.contrib import gan as tfgan
from GeneralTools.graph_funcs.my_session import MySession
from GeneralTools.math_funcs.graph_func_support import mean_cov_np, trace_sqrt_product_np
from GeneralTools.misc_fun import FLAGS
class GenerativeModelMetric(object):
def __init__(self, image_format=None, model='v1', model_path=None):
""" This class defines several metrics using pre-trained classifier inception v1.
:param image_format:
"""
if model_path is None:
self.model = model
if model == 'v1':
self.inception_graph_def = tfgan.eval.get_graph_def_from_disk(FLAGS.INCEPTION_V1)
elif model == 'v3':
self.inception_graph_def = tfgan.eval.get_graph_def_from_disk(FLAGS.INCEPTION_V3)
elif model in {'swd', 'ms_ssim', 'ssim'}:
pass
else:
raise NotImplementedError('Model {} not implemented.'.format(model))
else:
self.model = 'custom'
self.inception_graph_def = tfgan.eval.get_graph_def_from_disk(model_path)
if image_format is None:
self.image_format = FLAGS.IMAGE_FORMAT
else:
self.image_format = image_format
# preserved for inception v3
self._pool3_v3_ = None
self._logits_v3_ = None
def inception_v1_one_batch(self, image, output_tensor=None):
""" This function runs the inception v1 model on images and give logits output.
Note: if other layers of inception model is needed, change the output_tensor option in tfgan.eval.run_inception
:param image:
:param output_tensor:
:return:
"""
if output_tensor is None:
output_tensor = ['logits:0', 'pool_3:0']
image_size = tfgan.eval.INCEPTION_DEFAULT_IMAGE_SIZE
if self.image_format in {'channels_first', 'NCHW'}:
image = tf.transpose(image, perm=(0, 2, 3, 1))
if image.get_shape().as_list()[1] != image_size:
image = tf.compat.v1.image.resize_bilinear(image, [image_size, image_size])
# inception score uses the logits:0 while FID uses pool_3:0.
return tfgan.eval.run_inception(
image, graph_def=self.inception_graph_def, input_tensor='Mul:0', output_tensor=output_tensor)
def inception_v1(self, images):
""" This function runs the inception v1 model on images and give logits output.
Note: if other layers of inception model is needed, change the output_tensor option in tfgan.eval.run_inception.
Note: for large inputs, e.g. [10000, 64, 64, 3], it is better to run iterations containing this function.
:param images:
:return:
"""
num_images = images.get_shape().as_list()[0]
if num_images > 2500:
raise MemoryError('The input is too big to possibly fit into memory. Consider using multiple runs.')
if num_images >= 400:
print(num_images)
# Note: need to validate the code below
# somehow tfgan.eval.classifier_score does not work properly when splitting the datasets.
# The following code is inspired by:
# https://github.com/tensorflow/tensorflow/blob/r1.7/tensorflow/contrib/gan/python/eval/python/classifier_metrics_impl.py
if num_images % 100 == 0:
generated_images_list = tf.split(images, num_or_size_splits=num_images // 100, axis=0)
logits, pool3 = tf.map_fn(
fn=self.inception_v1_one_batch,
elems=tf.stack(generated_images_list),
dtype=(tf.float32, tf.float32),
parallel_iterations=1,
back_prop=False,
swap_memory=True,
name='RunClassifier')
logits = tf.concat(tf.unstack(logits), 0)
pool3 = tf.concat(tf.unstack(pool3), 0)
else:
generated_images_list = tf.split(
images, num_or_size_splits=[100] * (num_images // 100) + [num_images % 100], axis=0)
# tf.stack requires the dimension of tensor in list to be the same
logits, pool3 = tf.map_fn(
fn=self.inception_v1_one_batch,
elems=tf.stack(generated_images_list[0:-1]),
dtype=(tf.float32, tf.float32),
parallel_iterations=1,
back_prop=False,
swap_memory=True,
name='RunClassifier')
logits_last, pool3_last = self.inception_v1_one_batch(generated_images_list[-1])
logits = tf.concat(tf.unstack(logits) + [logits_last], 0)
pool3 = tf.concat(tf.unstack(pool3) + [pool3_last], 0)
else:
logits, pool3 = self.inception_v1_one_batch(images)
return logits, pool3
@staticmethod
def inception_score_from_logits(logits):
""" This function estimates the inception score from logits output by inception_v1
:param logits:
:return:
"""
if type(logits) == np.ndarray:
logits = tf.constant(logits, dtype=tf.float32)
return tfgan.eval.classifier_score_from_logits(logits)
@staticmethod
def fid_from_pool3(x_pool3, y_pool3):
""" This function estimates Fréchet inception distance from pool3 of inception model
:param x_pool3:
:param y_pool3:
:return:
"""
if type(x_pool3) == np.ndarray:
x_pool3 = tf.constant(x_pool3, dtype=tf.float32)
if type(y_pool3) == np.ndarray:
y_pool3 = tf.constant(y_pool3, dtype=tf.float32)
return tfgan.eval.frechet_classifier_distance_from_activations(x_pool3, y_pool3)
@ staticmethod
def my_fid_from_pool3(x_pool3_np, y_pool3_np):
""" This function estimates Fréchet inception distance from pool3 of inception model.
Different from fid_from_pool3, here pool3_np could be a list [mean, cov]
:param x_pool3_np:
:param y_pool3_np:
:return:
"""
# from scipy.linalg import sqrtm
x_mean, x_cov = x_pool3_np if isinstance(x_pool3_np, (list, tuple)) else mean_cov_np(x_pool3_np)
y_mean, y_cov = y_pool3_np if isinstance(y_pool3_np, (list, tuple)) else mean_cov_np(y_pool3_np)
fid = np.sum((x_mean-y_mean) ** 2)+np.trace(x_cov)+np.trace(y_cov)-2.0*trace_sqrt_product_np(x_cov, y_cov)
return fid
# return np.sum((x_mean - y_mean) ** 2) + np.trace(x_cov + y_cov - 2.0 * sqrtm(np.dot(x_cov, y_cov)))
def inception_score_and_fid_v1(self, x_batch, y_batch, num_batch=10, ckpt_folder=None, ckpt_file=None):
""" This function calculates inception scores and FID based on inception v1.
        Note: batch_size * num_batch needs to be larger than 2048, otherwise the covariance matrix will be
        ill-conditioned.
        According to TensorFlow v1.7 (code link below), this is actually the inception v3 model,
        although the downloaded file labels it v1.
code link: https://github.com/tensorflow/tensorflow/blob/r1.7/tensorflow/contrib \
/gan/python/eval/python/classifier_metrics_impl.py
Steps:
1, the pool3 and logits are calculated for x_batch and y_batch with sess
2, the pool3 and logits are passed to corresponding metrics
:param x_batch: tensor, one batch of x in range [-1, 1]
:param y_batch: tensor, one batch of y in range [-1, 1]
:param num_batch:
:param ckpt_folder: check point folder
:param ckpt_file: in case an older ckpt file is needed, provide it here, e.g. 'cifar.ckpt-6284'
:return:
"""
assert self.model == 'v1', 'GenerativeModelMetric is not initialized with model="v1".'
assert ckpt_folder is not None, 'ckpt_folder must be provided.'
x_logits, x_pool3 = self.inception_v1(x_batch)
y_logits, y_pool3 = self.inception_v1(y_batch)
with MySession(load_ckpt=True) as sess:
inception_outputs = sess.run_m_times(
[x_logits, y_logits, x_pool3, y_pool3],
ckpt_folder=ckpt_folder, ckpt_file=ckpt_file,
max_iter=num_batch, trace=True)
# get logits and pool3
x_logits_np = np.concatenate([inc[0] for inc in inception_outputs], axis=0)
y_logits_np = np.concatenate([inc[1] for inc in inception_outputs], axis=0)
x_pool3_np = np.concatenate([inc[2] for inc in inception_outputs], axis=0)
y_pool3_np = np.concatenate([inc[3] for inc in inception_outputs], axis=0)
FLAGS.print('logits calculated. Shape = {}.'.format(x_logits_np.shape))
FLAGS.print('pool3 calculated. Shape = {}.'.format(x_pool3_np.shape))
# calculate scores
inc_x = self.inception_score_from_logits(x_logits_np)
inc_y = self.inception_score_from_logits(y_logits_np)
xp3_1, xp3_2 = np.split(x_pool3_np, indices_or_sections=2, axis=0)
fid_xx = self.fid_from_pool3(xp3_1, xp3_2)
fid_xy = self.fid_from_pool3(x_pool3_np, y_pool3_np)
with MySession() as sess:
scores = sess.run_once([inc_x, inc_y, fid_xx, fid_xy])
return scores
def sliced_wasserstein_distance(self, x_batch, y_batch, num_batch=128, ckpt_folder=None, ckpt_file=None):
""" This function calculates the sliced wasserstein distance between real and fake images.
This function does not work as expected, swd gives nan
:param x_batch:
:param y_batch:
:param num_batch:
:param ckpt_folder:
:param ckpt_file:
:return:
"""
with MySession(load_ckpt=True) as sess:
batches = sess.run_m_times(
[x_batch, y_batch],
ckpt_folder=ckpt_folder, ckpt_file=ckpt_file,
max_iter=num_batch, trace=True)
# get x_images and y_images
x_images = (tf.constant(np.concatenate([batch[0] for batch in batches], axis=0)) + 1.0) * 128.5
y_images = (tf.constant(np.concatenate([batch[1] for batch in batches], axis=0)) + 1.0) * 128.5
if self.image_format in {'channels_first', 'NCHW'}:
x_images = tf.transpose(x_images, perm=(0, 2, 3, 1))
y_images = tf.transpose(y_images, perm=(0, 2, 3, 1))
print('images obtained, shape: {}'.format(x_images.shape))
# sliced_wasserstein_distance returns a list of tuples (distance_real, distance_fake)
# for each level of the Laplacian pyramid from the highest resolution to the lowest
swd = tfgan.eval.sliced_wasserstein_distance(
x_images, y_images, patches_per_image=64, random_sampling_count=4, use_svd=True)
with MySession() as sess:
swd = sess.run_once(swd)
return swd
def ms_ssim(self, x_batch, y_batch, num_batch=128, ckpt_folder=None, ckpt_file=None, image_size=256):
""" This function calculates the multiscale structural similarity between a pair of images.
        The image is downscaled four times; at each scale, an 11x11 filter is applied to extract patches.
        USE WITH CAUTION !!!
        1. This code was lost once and rewritten; it still needs to be verified on real datasets.
        2. This code could be improved to calculate pairwise ms-ssim using tf.image.ssim; tf.image.ssim_multiscale is just
        tf.image.ssim with pooling-based downsampling.
:param x_batch:
:param y_batch:
:param num_batch:
:param ckpt_folder:
:param ckpt_file:
:param image_size: ssim is defined on images of size at least 176
:return:
"""
# get x_images and y_images
x_images = (x_batch + 1.0) * 128.5
y_images = (y_batch + 1.0) * 128.5
if self.image_format in {'channels_first', 'NCHW'}:
x_images = tf.transpose(x_images, perm=(0, 2, 3, 1))
y_images = tf.transpose(y_images, perm=(0, 2, 3, 1))
if x_images.get_shape().as_list()[1] != 256:
x_images = tf.compat.v1.image.resize_bilinear(x_images, [image_size, image_size])
y_images = tf.compat.v1.image.resize_bilinear(y_images, [image_size, image_size])
scores = tf.image.ssim_multiscale(x_images, y_images, max_val=255) # scores in range [0, 1]
with MySession(load_ckpt=True) as sess:
scores = sess.run_m_times(
scores,
ckpt_folder=ckpt_folder, ckpt_file=ckpt_file,
max_iter=num_batch, trace=True)
ssim_score = np.mean(np.concatenate(scores, axis=0), axis=0)
return ssim_score
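# Worked example: a self-contained NumPy/SciPy sketch of the FID formula used
# in my_fid_from_pool3 above, ||mu_x - mu_y||^2 + Tr(Cx + Cy - 2 (Cx Cy)^{1/2}).
# The random activations are illustrative stand-ins for real pool3 features.
if __name__ == '__main__':
    from scipy.linalg import sqrtm
    rng = np.random.RandomState(0)
    x_act = rng.randn(256, 8)        # fake "pool3" activations for x
    y_act = rng.randn(256, 8) + 0.1  # slightly shifted activations for y
    x_mean, x_cov = np.mean(x_act, axis=0), np.cov(x_act, rowvar=False)
    y_mean, y_cov = np.mean(y_act, axis=0), np.cov(y_act, rowvar=False)
    covmean = sqrtm(x_cov.dot(y_cov)).real  # drop tiny imaginary round-off
    fid = np.sum((x_mean - y_mean) ** 2) + np.trace(x_cov + y_cov - 2.0 * covmean)
    print('toy FID: {:.4f}'.format(fid))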
| 45.301418
| 133
| 0.636947
| 12,498
| 0.978164
| 0
| 0
| 1,701
| 0.13313
| 0
| 0
| 4,682
| 0.36644
|
43fe8ce604f5be764fdbae5dfb8933ec293fcd26
| 187
|
py
|
Python
|
App/softwares_env/wizard/wsd/main.py
|
Wizard-collab/wizard
|
c2ec623fe011626716493c232b895fb0513f68ff
|
[
"MIT"
] | null | null | null |
App/softwares_env/wizard/wsd/main.py
|
Wizard-collab/wizard
|
c2ec623fe011626716493c232b895fb0513f68ff
|
[
"MIT"
] | null | null | null |
App/softwares_env/wizard/wsd/main.py
|
Wizard-collab/wizard
|
c2ec623fe011626716493c232b895fb0513f68ff
|
[
"MIT"
] | null | null | null |
import yaml
class wsd():
    """Writes a software-description dictionary to a YAML file."""
    def __init__(self, file, dict):
        self.file = file  # destination .yaml path
        self.dict = dict  # payload to serialize (the name shadows the built-in dict)
    def write_sd(self):
        # dump the dictionary as YAML, overwriting the target file
        with open(self.file, 'w') as f:
            f.write(yaml.dump(self.dict))
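# Usage sketch: a minimal, hedged example of the class above; the file name
# and payload are illustrative assumptions.
if __name__ == '__main__':
    wsd('software.yaml', {'software': 'example', 'version': 1}).write_sd()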
| 17
| 33
| 0.652406
| 174
| 0.930481
| 0
| 0
| 0
| 0
| 0
| 0
| 3
| 0.016043
|
a1003f2195e718d7338e4e93046ad32eab667f13
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_rengar/na_rengar_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_rengar/na_rengar_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_rengar/na_rengar_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Rengar_Mid_Aatrox(Ratings):
pass
class NA_Rengar_Mid_Ahri(Ratings):
pass
class NA_Rengar_Mid_Akali(Ratings):
pass
class NA_Rengar_Mid_Alistar(Ratings):
pass
class NA_Rengar_Mid_Amumu(Ratings):
pass
class NA_Rengar_Mid_Anivia(Ratings):
pass
class NA_Rengar_Mid_Annie(Ratings):
pass
class NA_Rengar_Mid_Ashe(Ratings):
pass
class NA_Rengar_Mid_AurelionSol(Ratings):
pass
class NA_Rengar_Mid_Azir(Ratings):
pass
class NA_Rengar_Mid_Bard(Ratings):
pass
class NA_Rengar_Mid_Blitzcrank(Ratings):
pass
class NA_Rengar_Mid_Brand(Ratings):
pass
class NA_Rengar_Mid_Braum(Ratings):
pass
class NA_Rengar_Mid_Caitlyn(Ratings):
pass
class NA_Rengar_Mid_Camille(Ratings):
pass
class NA_Rengar_Mid_Cassiopeia(Ratings):
pass
class NA_Rengar_Mid_Chogath(Ratings):
pass
class NA_Rengar_Mid_Corki(Ratings):
pass
class NA_Rengar_Mid_Darius(Ratings):
pass
class NA_Rengar_Mid_Diana(Ratings):
pass
class NA_Rengar_Mid_Draven(Ratings):
pass
class NA_Rengar_Mid_DrMundo(Ratings):
pass
class NA_Rengar_Mid_Ekko(Ratings):
pass
class NA_Rengar_Mid_Elise(Ratings):
pass
class NA_Rengar_Mid_Evelynn(Ratings):
pass
class NA_Rengar_Mid_Ezreal(Ratings):
pass
class NA_Rengar_Mid_Fiddlesticks(Ratings):
pass
class NA_Rengar_Mid_Fiora(Ratings):
pass
class NA_Rengar_Mid_Fizz(Ratings):
pass
class NA_Rengar_Mid_Galio(Ratings):
pass
class NA_Rengar_Mid_Gangplank(Ratings):
pass
class NA_Rengar_Mid_Garen(Ratings):
pass
class NA_Rengar_Mid_Gnar(Ratings):
pass
class NA_Rengar_Mid_Gragas(Ratings):
pass
class NA_Rengar_Mid_Graves(Ratings):
pass
class NA_Rengar_Mid_Hecarim(Ratings):
pass
class NA_Rengar_Mid_Heimerdinger(Ratings):
pass
class NA_Rengar_Mid_Illaoi(Ratings):
pass
class NA_Rengar_Mid_Irelia(Ratings):
pass
class NA_Rengar_Mid_Ivern(Ratings):
pass
class NA_Rengar_Mid_Janna(Ratings):
pass
class NA_Rengar_Mid_JarvanIV(Ratings):
pass
class NA_Rengar_Mid_Jax(Ratings):
pass
class NA_Rengar_Mid_Jayce(Ratings):
pass
class NA_Rengar_Mid_Jhin(Ratings):
pass
class NA_Rengar_Mid_Jinx(Ratings):
pass
class NA_Rengar_Mid_Kalista(Ratings):
pass
class NA_Rengar_Mid_Karma(Ratings):
pass
class NA_Rengar_Mid_Karthus(Ratings):
pass
class NA_Rengar_Mid_Kassadin(Ratings):
pass
class NA_Rengar_Mid_Katarina(Ratings):
pass
class NA_Rengar_Mid_Kayle(Ratings):
pass
class NA_Rengar_Mid_Kayn(Ratings):
pass
class NA_Rengar_Mid_Kennen(Ratings):
pass
class NA_Rengar_Mid_Khazix(Ratings):
pass
class NA_Rengar_Mid_Kindred(Ratings):
pass
class NA_Rengar_Mid_Kled(Ratings):
pass
class NA_Rengar_Mid_KogMaw(Ratings):
pass
class NA_Rengar_Mid_Leblanc(Ratings):
pass
class NA_Rengar_Mid_LeeSin(Ratings):
pass
class NA_Rengar_Mid_Leona(Ratings):
pass
class NA_Rengar_Mid_Lissandra(Ratings):
pass
class NA_Rengar_Mid_Lucian(Ratings):
pass
class NA_Rengar_Mid_Lulu(Ratings):
pass
class NA_Rengar_Mid_Lux(Ratings):
pass
class NA_Rengar_Mid_Malphite(Ratings):
pass
class NA_Rengar_Mid_Malzahar(Ratings):
pass
class NA_Rengar_Mid_Maokai(Ratings):
pass
class NA_Rengar_Mid_MasterYi(Ratings):
pass
class NA_Rengar_Mid_MissFortune(Ratings):
pass
class NA_Rengar_Mid_MonkeyKing(Ratings):
pass
class NA_Rengar_Mid_Mordekaiser(Ratings):
pass
class NA_Rengar_Mid_Morgana(Ratings):
pass
class NA_Rengar_Mid_Nami(Ratings):
pass
class NA_Rengar_Mid_Nasus(Ratings):
pass
class NA_Rengar_Mid_Nautilus(Ratings):
pass
class NA_Rengar_Mid_Nidalee(Ratings):
pass
class NA_Rengar_Mid_Nocturne(Ratings):
pass
class NA_Rengar_Mid_Nunu(Ratings):
pass
class NA_Rengar_Mid_Olaf(Ratings):
pass
class NA_Rengar_Mid_Orianna(Ratings):
pass
class NA_Rengar_Mid_Ornn(Ratings):
pass
class NA_Rengar_Mid_Pantheon(Ratings):
pass
class NA_Rengar_Mid_Poppy(Ratings):
pass
class NA_Rengar_Mid_Quinn(Ratings):
pass
class NA_Rengar_Mid_Rakan(Ratings):
pass
class NA_Rengar_Mid_Rammus(Ratings):
pass
class NA_Rengar_Mid_RekSai(Ratings):
pass
class NA_Rengar_Mid_Renekton(Ratings):
pass
class NA_Rengar_Mid_Rengar(Ratings):
pass
class NA_Rengar_Mid_Riven(Ratings):
pass
class NA_Rengar_Mid_Rumble(Ratings):
pass
class NA_Rengar_Mid_Ryze(Ratings):
pass
class NA_Rengar_Mid_Sejuani(Ratings):
pass
class NA_Rengar_Mid_Shaco(Ratings):
pass
class NA_Rengar_Mid_Shen(Ratings):
pass
class NA_Rengar_Mid_Shyvana(Ratings):
pass
class NA_Rengar_Mid_Singed(Ratings):
pass
class NA_Rengar_Mid_Sion(Ratings):
pass
class NA_Rengar_Mid_Sivir(Ratings):
pass
class NA_Rengar_Mid_Skarner(Ratings):
pass
class NA_Rengar_Mid_Sona(Ratings):
pass
class NA_Rengar_Mid_Soraka(Ratings):
pass
class NA_Rengar_Mid_Swain(Ratings):
pass
class NA_Rengar_Mid_Syndra(Ratings):
pass
class NA_Rengar_Mid_TahmKench(Ratings):
pass
class NA_Rengar_Mid_Taliyah(Ratings):
pass
class NA_Rengar_Mid_Talon(Ratings):
pass
class NA_Rengar_Mid_Taric(Ratings):
pass
class NA_Rengar_Mid_Teemo(Ratings):
pass
class NA_Rengar_Mid_Thresh(Ratings):
pass
class NA_Rengar_Mid_Tristana(Ratings):
pass
class NA_Rengar_Mid_Trundle(Ratings):
pass
class NA_Rengar_Mid_Tryndamere(Ratings):
pass
class NA_Rengar_Mid_TwistedFate(Ratings):
pass
class NA_Rengar_Mid_Twitch(Ratings):
pass
class NA_Rengar_Mid_Udyr(Ratings):
pass
class NA_Rengar_Mid_Urgot(Ratings):
pass
class NA_Rengar_Mid_Varus(Ratings):
pass
class NA_Rengar_Mid_Vayne(Ratings):
pass
class NA_Rengar_Mid_Veigar(Ratings):
pass
class NA_Rengar_Mid_Velkoz(Ratings):
pass
class NA_Rengar_Mid_Vi(Ratings):
pass
class NA_Rengar_Mid_Viktor(Ratings):
pass
class NA_Rengar_Mid_Vladimir(Ratings):
pass
class NA_Rengar_Mid_Volibear(Ratings):
pass
class NA_Rengar_Mid_Warwick(Ratings):
pass
class NA_Rengar_Mid_Xayah(Ratings):
pass
class NA_Rengar_Mid_Xerath(Ratings):
pass
class NA_Rengar_Mid_XinZhao(Ratings):
pass
class NA_Rengar_Mid_Yasuo(Ratings):
pass
class NA_Rengar_Mid_Yorick(Ratings):
pass
class NA_Rengar_Mid_Zac(Ratings):
pass
class NA_Rengar_Mid_Zed(Ratings):
pass
class NA_Rengar_Mid_Ziggs(Ratings):
pass
class NA_Rengar_Mid_Zilean(Ratings):
pass
class NA_Rengar_Mid_Zyra(Ratings):
pass
| 15.695444
| 46
| 0.766692
| 5,944
| 0.908174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
a100629a10b0553407de408897d5616acb03768b
| 3,372
|
py
|
Python
|
fixtures_browsers.py
|
aleksandr-kotlyar/python_tests_and_hacks
|
291e3c33b70ef35deb9ba687885e70e6d23fe82f
|
[
"Apache-2.0"
] | 9
|
2020-02-07T05:15:00.000Z
|
2022-01-19T10:19:02.000Z
|
fixtures_browsers.py
|
aleksandr-kotlyar/python_tests_and_hacks
|
291e3c33b70ef35deb9ba687885e70e6d23fe82f
|
[
"Apache-2.0"
] | 5
|
2020-05-03T07:34:03.000Z
|
2021-03-25T18:18:30.000Z
|
fixtures_browsers.py
|
aleksandr-kotlyar/python_tests_and_hacks
|
291e3c33b70ef35deb9ba687885e70e6d23fe82f
|
[
"Apache-2.0"
] | 1
|
2021-07-26T06:24:36.000Z
|
2021-07-26T06:24:36.000Z
|
import logging
import pytest
from selene import Browser, Config
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from webdriver_manager.chrome import ChromeDriverManager
@pytest.fixture(scope='function')
def browser_func(choose_driver):
"""Browser that closes after each test function or method."""
yield choose_driver
choose_driver.quit()
@pytest.fixture(scope='class')
def browser_class(choose_driver):
"""Browser that closes after each test class."""
yield choose_driver
choose_driver.quit()
@pytest.fixture(scope='module')
def browser_module(choose_driver):
"""Browser that closes after each test module."""
yield choose_driver
choose_driver.quit()
@pytest.fixture(scope='session')
def choose_driver(is_remote, t_browser):
"""Remote or local browser selector fixture."""
if is_remote:
return remote_driver(t_browser)
return custom_driver(t_browser)
def custom_driver(t_browser):
""" Custom driver """
logging.debug('custom driver config start')
if t_browser == 'chrome':
driver = webdriver.Chrome(executable_path=ChromeDriverManager().install(),
options=headless_chrome_options())
else:
        raise ValueError(f'unsupported t_browser: {t_browser}')
driver.set_page_load_timeout(10)
browser = Browser(Config(
driver=driver,
timeout=10,
window_width=1366,
window_height=1200,
))
logging.debug('custom driver config finish')
return browser
def headless_chrome_options():
""" Custom chrome options """
logging.info('set chromedriver options start')
chrome_options = Options()
chrome_options.set_capability("pageLoadStrategy", "eager")
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--disable-gpu")
chrome_options.add_argument("--disable-notifications")
chrome_options.add_argument("--disable-extensions")
chrome_options.add_argument("--disable-infobars")
chrome_options.add_argument("--enable-automation")
chrome_options.add_argument("--headless")
chrome_options.add_argument("--disable-dev-shm-usage")
chrome_options.add_argument("--disable-setuid-sandbox")
logging.info('set chromedriver options finish')
return chrome_options
def remote_driver(t_browser, page_load_strategy=None):
""" Remote driver """
logging.debug('remote driver config start')
remote_mapping = {
'chrome': {
'command_executor': 'http://selenium__standalone-chrome:4444/wd/hub',
'options': webdriver.ChromeOptions()
},
'firefox': {
'command_executor': 'http://selenium__standalone-firefox:4444/wd/hub',
'options': webdriver.FirefoxOptions()
}
}
    if page_load_strategy:
        # apply the eager page-load strategy to the selected browser options,
        # mirroring the set_capability call used in headless_chrome_options above
        remote_mapping[t_browser]['options'].set_capability("pageLoadStrategy", "eager")
driver = webdriver.Remote(command_executor=remote_mapping[t_browser]['command_executor'],
options=remote_mapping[t_browser]['options'])
driver.set_page_load_timeout(20)
browser = Browser(Config(
driver=driver,
timeout=10,
window_width=1500,
window_height=1200,
))
logging.debug('remote driver config finish')
return browser
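# Usage sketch: how a test might consume the function-scoped fixture above.
# This assumes the `is_remote` and `t_browser` fixtures are provided elsewhere
# (e.g. in conftest.py); the URL is an illustrative placeholder.
def test_example_page_opens(browser_func):
    browser_func.open('https://example.com')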
| 32.423077
| 93
| 0.695136
| 0
| 0
| 419
| 0.124259
| 735
| 0.217972
| 0
| 0
| 946
| 0.280546
|
a101053cd887c912399a70d0a235e2cfdc45a962
| 34
|
py
|
Python
|
evaluation/__init__.py
|
Luxios22/Dual_Norm
|
b404a03b15fc05749e0c648d9e46ffe70f6b2a80
|
[
"MIT"
] | null | null | null |
evaluation/__init__.py
|
Luxios22/Dual_Norm
|
b404a03b15fc05749e0c648d9e46ffe70f6b2a80
|
[
"MIT"
] | null | null | null |
evaluation/__init__.py
|
Luxios22/Dual_Norm
|
b404a03b15fc05749e0c648d9e46ffe70f6b2a80
|
[
"MIT"
] | null | null | null |
from .evaluation import evaluation
| 34
| 34
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
a1016a14567b8bcc8f6f0d1e157f8a64f32c5aaf
| 7,034
|
py
|
Python
|
utils/file_utils.py
|
lkrmbhlz/MVSC_3D
|
7e32f1b507eb0bc85fae2649da0c8bfa89672064
|
[
"MIT"
] | 2
|
2022-01-22T15:09:22.000Z
|
2022-01-22T15:09:48.000Z
|
utils/file_utils.py
|
lkrmbhlz/MVSC_3D
|
7e32f1b507eb0bc85fae2649da0c8bfa89672064
|
[
"MIT"
] | null | null | null |
utils/file_utils.py
|
lkrmbhlz/MVSC_3D
|
7e32f1b507eb0bc85fae2649da0c8bfa89672064
|
[
"MIT"
] | null | null | null |
import open3d as o3d
import numpy as np
from pclpy import pcl
from tqdm import tqdm
import os
def o3d_meshes(dataset_name: str, path_to_data_folder='../../data'):
"""
    Read in mesh (.ply, .stl, .off) files. The function assumes that each class of objects is in
    a separate folder and is highly specific to our needs, which is why the list of objects for
    each data set is provided as a hard-coded array of strings.
You can download the data sets referenced in [#1]_ and [#2]_ and use the path to them.
References
----------
.. [#1] http://modelnet.cs.princeton.edu/ Z. Wu, S. Song, A. Khosla, F. Yu, L. Zhang, X. Tang and J. Xiao. 3D ShapeNets: A Deep Representation for Volumetric Shapes. Proceedings of 28th IEEE Conference on Computer Vision and Pattern Recognition (CVPR2015)
.. [#2] http://www.cim.mcgill.ca/~shape/benchMark/ K. Siddiqi, J. Zhang, D. Macrini, A. Shokoufandeh, S. Bouix & S. Dickinson. Retrieving Articulated 3D Models Using Medial Surfaces. Machine Vision and Applications, 19(4), 261--274, 2008.
Parameters
----------
dataset_name: Should correspond to the name of the folder with the data set
path_to_data_folder
Returns
-------
o3d_meshes : array-like, shape (number of objects)
The Open3D mesh representations of the objects as a list.
labels : array-like, shape (number of objects)
The labels of the objects as a list of integers starting from 0.
"""
# http://modelnet.cs.princeton.edu/
if dataset_name == 'modelnet10':
objects = ['bathtub', 'bed', 'chair', 'desk', 'dresser', 'monitor', 'night_stand', 'sofa', 'table', 'toilet']
elif dataset_name == 'tali_15':
objects = ['Manching', 'Milet']
elif dataset_name == 'mixed_bones':
objects = ['capra', 'ovis_aries']
# http://www.cim.mcgill.ca/~shape/benchMark/
elif dataset_name == 'mc_gill':
objects = ['airplanes_ply', 'dinosaurs_ply', 'fishes_ply']
else:
raise ValueError('Unknown dataset')
o3d_meshes = []
labels = []
print('Read in %d classes of mesh files...' % len(objects))
for i, obj in enumerate(tqdm(objects)):
if dataset_name == 'modelnet10':
objects_o3d = [o3d.io.read_triangle_mesh(file) for file in
list_files(path_to_data_folder + '/' + dataset_name + '/' + obj + '/test')]
else:
objects_o3d = [o3d.io.read_triangle_mesh(file) for file in
list_files(path_to_data_folder + '/' + dataset_name + '/' + obj)]
# print('class ', i, ': ', len(objects_o3d), ' objects')
o3d_meshes.extend(objects_o3d)
labels.extend([i] * len(objects_o3d))
return o3d_meshes, labels
def save_o3d_arrays(o3d_meshes, labels, data_set_name: str):
os.makedirs('../generated/' + data_set_name, exist_ok=True)
np.save('../generated/' + data_set_name + '/' + data_set_name + '_labels', labels)
np.save('../generated/' + data_set_name + '/' + data_set_name + '_vertices',
[np.asarray(obj.vertices_list, dtype=object) for obj in o3d_meshes])
def save_meshes_as_pcd_pointclouds(mesh_list, data_set_name: str):
path = '../generated/' + data_set_name + '/pcd_pointclouds/'
os.makedirs(path, exist_ok=True)
for i, mesh in enumerate(mesh_list):
pcd = o3d.geometry.PointCloud()
pcd.points = mesh.vertices
pcd.normals = mesh.vertex_normals
o3d.io.write_point_cloud(path + data_set_name + '_' + str(i) + '.pcd', pcd)
def save_meshes_as_poisson_sampled_pointclouds(mesh_list, data_set_name: str, number_of_points, init_factor):
path = '../../generated/' + data_set_name + '/pcd_pointclouds_poisson/' + str(number_of_points) + '/'
os.makedirs(path, exist_ok=True)
print('Save %d meshes as .pcd point clouds...' % len(mesh_list))
for i, mesh in enumerate(tqdm(mesh_list)):
file_path = path + data_set_name + '_' + str(i).zfill(2) + '.pcd'
pcd = mesh.sample_points_poisson_disk(number_of_points=number_of_points, init_factor=init_factor)
o3d.io.write_point_cloud(file_path, pcd)
def save_meshes_as_ply_files(mesh_list, data_set_name: str):
path = '../generated/' + data_set_name + '/ply_files/'
os.makedirs(path, exist_ok=True)
for i, mesh in enumerate(mesh_list):
o3d.io.write_triangle_mesh(path + data_set_name + '_' + str(i) + '.ply', mesh)
def read_pcd_pointclouds(data_set_name: str, max_point_cloud_size=50000, path_to_generated_folder='../generated'):
pcd_files = list_files(path_to_generated_folder + '/' + data_set_name + '/pcd_pointclouds/')
print('Start reading %d pcd pointclouds.' % len(pcd_files))
reader = pcl.io.PCDReader()
pcl_point_clouds = []
normals = []
ignored = []
for i, file in enumerate(pcd_files):
pc = pcl.PointCloud.PointXYZ()
reader.read(file, pc)
p3d_pc = o3d.io.read_point_cloud(file)
if len(pc.points) < max_point_cloud_size:
pcl_point_clouds.append(pc)
normals.append(p3d_pc.normals)
else:
ignored.append(i)
print('%d files were ignored because they are too large.' % len(ignored))
return pcl_point_clouds, normals, ignored
def read_pcd_pointclouds_poisson(data_set_name: str, number_of_points: int, path_to_generated_folder='../../generated'):
pcd_files = list_files(path_to_generated_folder + '/' + data_set_name + '/pcd_pointclouds_poisson/'
+ str(number_of_points) + '/')
reader = pcl.io.PCDReader()
pcl_point_clouds = []
open3d_point_clouds = []
print('Start reading %d poisson sampled pcd pointclouds...' % len(pcd_files))
for i, file in enumerate(tqdm(pcd_files)):
pc = pcl.PointCloud.PointXYZ()
reader.read(file, pc)
pcl_point_clouds.append(pc)
open3d_point_clouds.append(o3d.io.read_point_cloud(file))
# print(file)
return pcl_point_clouds, open3d_point_clouds
def read_ply_files_with_pcl(data_set_name: str, max_point_cloud_size=50000):
pcd_files = list_files('../generated/' + data_set_name + '/ply_files/')
print('Start reading %d pcd pointclouds.' % len(pcd_files))
reader = pcl.io.PLYReader()
pcl_point_clouds = []
normals = []
ignored = []
for i, file in enumerate(pcd_files):
pc = pcl.PointCloud.PointXYZ()
mesh = pcl.PolygonMesh()
ply = pcl.io.loadPLYFile(file, mesh)
reader.read(file, pc)
p3d_pc = o3d.io.read_point_cloud(file)
if len(pc.points) < max_point_cloud_size:
pcl_point_clouds.append(pc)
normals.append(p3d_pc.normals)
else:
ignored.append(i)
print('%d files were ignored because they are too large.' % len(ignored))
return pcl_point_clouds, normals, ignored
def list_files(directory):
r = []
for root, dirs, files in os.walk(directory):
for name in files:
r.append(os.path.join(root, name))
return r
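# Usage sketch: a hedged end-to-end example of the helpers above. The dataset
# name and sampling parameters are illustrative and require the folder layout
# described in the o3d_meshes docstring.
if __name__ == '__main__':
    meshes, labels = o3d_meshes('mc_gill', path_to_data_folder='../../data')
    save_meshes_as_poisson_sampled_pointclouds(meshes, 'mc_gill',
                                               number_of_points=1024,
                                               init_factor=5)
    clouds, o3d_clouds = read_pcd_pointclouds_poisson('mc_gill', 1024)
    print('%d point clouds ready' % len(clouds))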
| 38.228261
| 259
| 0.65681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,326
| 0.33068
|
a101ea954f07ea0e68e1799f7386155f6a1d887a
| 9,523
|
py
|
Python
|
Program.py
|
aakash-lambton/project
|
04a1991fc5e65e0cb8988029adbb1fda03656612
|
[
"Apache-2.0"
] | null | null | null |
Program.py
|
aakash-lambton/project
|
04a1991fc5e65e0cb8988029adbb1fda03656612
|
[
"Apache-2.0"
] | null | null | null |
Program.py
|
aakash-lambton/project
|
04a1991fc5e65e0cb8988029adbb1fda03656612
|
[
"Apache-2.0"
] | null | null | null |
import pymongo
import random
def create_database(db):
user_collection = db['users']
user_collection.drop()
user_collection = db['users']
post_collection = db['posts']
post_collection.drop()
post_collection = db['posts']
comment_collection = db['comments']
comment_collection.drop()
comment_collection = db['comments']
like_collection = db['likes']
like_collection.drop()
like_collection = db['likes']
status_collection = db['status']
status_collection.drop()
status_collection = db['status']
followers_collection = db['followers']
followers_collection.drop()
followers_collection = db['followers']
user_data = [
{
"_id": 1,
"name": "Aakash",
"email": "aakash@gmail.com",
"phone":"647632134",
"friends": 456,
"pictures": 34,
"contacts": 90,
"isVerified": True
},
{
"_id": 2,
"name": "Anmol Monga",
"email": "anmol@gmail.com",
"phone": "6476546784",
"friends": 665,
"pictures": 76,
"contacts": 50,
"isVerified": True
},
{
"_id": 3,
"name": "Harjant Singh",
"email": "harjant@gmail.com",
"phone": "6478765545",
"friends": 987,
"pictures": 64,
"contacts": 75,
"isVerified": False
},
{
"_id": 4,
"name": "Prabhjot Singh",
"email": "prabh@gmail.com",
"phone": "6478672134",
"friends": 654,
"pictures": 68,
"contacts": 46,
"isVerified": True
},
{
"_id": 5,
"name": "Harkaran",
"email": "harkaran@gmail.com",
"phone": "64768664335",
"friends": 786,
"pictures": 74,
"contacts": 87,
"isVerified": False
},
{
"_id": 6,
"name": "Dipanshu",
"email": "deep@gmail.com",
"phone": "416082134",
"friends": 756,
"pictures": 86,
"contacts": 34,
"isVerified": True
},
{
"_id": 7,
"name": "Hrsimran",
"email": "harsimran@gmail.com",
"phone": "643732939",
"friends": 234,
"pictures": 74,
"contacts": 70,
"isVerified": False
},
{
"_id": 8,
"name": "Harpreet ",
"email": "harpreet@gmail.com",
"phone": "324776566",
"friends": 856,
"pictures": 94,
"contacts": 50,
"isVerified": True
},
]
user_status = [
{
"status": "Having Dinner at Bahamas",
"uploadTime": "20:44",
"location": "indonesia",
"likes": 46,
"comments": 34,
"user": "Anmol"
},
{
"status": "Playing cricket at Key Land Field",
"uploadTime": "10:44",
"location": "india",
"likes": 56,
"comments": 14,
"user": "Aakash"
},
{
"status": "Watching Movie at Cineplex Theatre",
"uploadTime": "16:44",
"location": "Canada",
"likes": 96,
"comments": 66,
"user": "Harjant"
},
{
"status": "Reading novel at pearson library",
"uploadTime": "19:34",
"location": "Toronto",
"likes": 51,
"comments": 34,
"user": "Prabhjot"
},
{
"status": "Playing Golf at Wee Golf Course",
"uploadTime": "11:22",
"location": "USA",
"likes": 12,
"comments": 3,
"user": "Harkaran"
},
]
followers = [
{
"name": "Ali",
"active": False,
"lastSeen": "20-8-2020",
"followers": 943,
"username": "ali_zumair"
},
{
"name": "Alex",
"active": True,
"lastSeen": "10-8-2020",
"followers": 443,
"username": "alex_scott"
},
{
"name": "Lee",
"active": False,
"lastSeen": "10-3-2020",
"followers": 333,
"username": "lee_you"
},
{
"name": "joe",
"active": True,
"lastSeen": "09-1-2020",
"followers": 567,
"username": "joe_Tribiani"
},
{
"name": "Ross",
"active": False,
"lastSeen": "05-7-2020",
"followers": 133,
"username": "ross_geller"
}
]
#ADD DATA INTO COLLECTION
user_ids = user_collection.insert_many(user_data)
status_collection.insert_many(user_status)
followers_collection.insert_many(followers)
user_id_list = user_ids.inserted_ids
like_id = 1
post_id = 1
comment_id = 1
#ADD DUMMY POSTS
for user_id in user_ids.inserted_ids:
post_data = [{"_id": post_id,
"user_id": user_id,
"content": 'Dummy post', "view_count": 10,
"likes": [{"like_id": like_id}],
"comments": [{"comment_id": comment_id}]}]
like_id += 1
comment_id += 1
post_id += 1
post_collection.insert_many(post_data)
comment_id = 1
comment_all = []
for p_id in range(1, post_id):
comment_data = [{"_id": comment_id, "post_id": p_id,
"user_id": random.choice(user_id_list),
"comment": "Looks good"}]
comment_collection.insert_many(comment_data)
comment_all.append(comment_id)
comment_id += 1
like_id = 1
for p_id in range(1, post_id):
like_data = [{"_id": like_id, "post_id": p_id,
"user_id": random.choice(user_id_list),
"comment_id": random.choice(comment_all)}]
like_collection.insert_many(like_data)
like_id += 1
#PRINT ALL USERS
def read_all_users(db):
user_collection = db['users']
for user in user_collection.find():
print(user)
#PRINT SINGLE USER
def read_single_users(db):
user_id = int(input("Enter user id: "))
user_collection = db['users']
for user in user_collection.find({"_id": user_id}):
print(user)
#READ ALL POSTS
def read_all_post(db):
post_collection = db['posts']
for post in post_collection.find():
print(post)
#PRINT SINGLE POST
def read_single_post(db):
user_id = int(input("Enter user id: "))
post_collection = db['posts']
for post in post_collection.find({"user_id": user_id}):
print(post)
#PRINT ALL COMMENTS
def read_all_comments(db):
comment_collection = db['comments']
for comment in comment_collection.find():
print(comment)
#PRINT SINGLE COMMENTS
def read_single_comment(db):
user_id = int(input("Enter user id: "))
comment_collection = db['comments']
for comment in comment_collection.find({"user_id": user_id}):
print(comment)
#READ POST DATA
def read_post_comment(db):
post_id = int(input("Enter post id: "))
comment_collection = db['comments']
for comment in comment_collection.find({"post_id": post_id}):
print(comment)
#INSERT NEW USER INTO COLLECTION
def insert_user(db):
users = db["users"]
name = input("User name: ")
email = input("User Email: ")
ids = users.insert_many([{"name": name, "email": email}])
print(ids.inserted_ids)
#DELETE COMMENT
def delete_comment(db):
comment_id = int(input("Enter comment Id: "))
comment_collection = db['comments']
comment = comment_collection.find_one({"_id": comment_id})
    # pull the comment reference from the parent post (the collection is "posts")
    db.posts.update_one(
{"_id": comment["post_id"]},
{"$pull": {"comments": {"comment_id": comment["_id"]}}}
)
comment_collection.delete_one({"_id": comment_id})
#UPDATE POST CONTENT
def update_post_content(db):
post_id = int(input("Enter post Id: "))
post_content = input("Enter post content: ")
post_query = {"_id": post_id}
update_data = {"$set": {"content": post_content}}
db.posts.update_one(post_query, update_data)
if __name__ == '__main__': #CONNECT TO MONGO ATLAS
client = pymongo.MongoClient("mongodb+srv://akash:lambton123@db.di1ed.mongodb.net/db?retryWrites=true&w=majority")
database = client["feeddb"]
create_database(database)
print("Reading all users")
read_all_users(database)
print("Reading single user")
read_single_users(database)
print("Reading all posts")
read_all_post(database)
print("Reading single post")
read_single_post(database)
print("Reading all comments")
read_all_comments(database)
print("Reading single comment")
read_single_comment(database)
print("Reading all comments of a post")
read_post_comment(database)
print("Inserting new user")
insert_user(database)
print("Deleting comment")
delete_comment(database)
print("Reading all comments")
read_all_comments(database)
print("Updating the post")
update_post_content(database)
print("Reading all posts")
read_all_post(database)
| 26.825352
| 118
| 0.522209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,005
| 0.315552
|
a102475986cb4c83a3d10579c02a0bf8df165a0a
| 530
|
py
|
Python
|
Mundo 2/ex053.py
|
judigunkel/judi-exercicios-python
|
c61bb75b1ae6141defcf42214194e141a70af15d
|
[
"MIT"
] | null | null | null |
Mundo 2/ex053.py
|
judigunkel/judi-exercicios-python
|
c61bb75b1ae6141defcf42214194e141a70af15d
|
[
"MIT"
] | null | null | null |
Mundo 2/ex053.py
|
judigunkel/judi-exercicios-python
|
c61bb75b1ae6141defcf42214194e141a70af15d
|
[
"MIT"
] | 1
|
2021-03-06T02:41:36.000Z
|
2021-03-06T02:41:36.000Z
|
"""
Crie um programa que leia um a frase qualquer e diga se ela é um palíndromo,
desconsiderando os espaços.
ex:
apos a sopa
a sacada da casa
a torre da derrota
o lobo ama o bolo
anotaram a data da maratona
"""
frase = input('Type a phrase (without accents): ').replace(' ', '').upper()
inverso = ''
for c in range(len(frase) - 1, -1, -1):
inverso += frase[c]
print(f'The reverse of {frase} is {inverso}')
if frase == inverso:
    print('The phrase you typed is a palindrome.')
else:
    print('The phrase you typed is not a palindrome.')
| 26.5
| 76
| 0.681132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 368
| 0.682746
|
a1027c07377717af9273b6289963cf9e75ece183
| 1,546
|
py
|
Python
|
inferfuzzy/base_set.py
|
leynier/inferfuzzy
|
bc9dd3a3d0d59f323c5c573423ff7d20ba771eeb
|
[
"MIT"
] | 3
|
2020-11-23T21:05:31.000Z
|
2020-11-25T17:33:27.000Z
|
inferfuzzy/base_set.py
|
leynier/fuzzpy
|
bc9dd3a3d0d59f323c5c573423ff7d20ba771eeb
|
[
"MIT"
] | null | null | null |
inferfuzzy/base_set.py
|
leynier/fuzzpy
|
bc9dd3a3d0d59f323c5c573423ff7d20ba771eeb
|
[
"MIT"
] | null | null | null |
from typing import Any, Callable
import matplotlib.pyplot as plt
from numpy import arange
from .membership import Membership
class BaseSet:
def __init__(
self,
name: str,
membership: Membership,
aggregation: Callable[[Any, Any], Any],
):
self.name = name
self.membership = membership
self.aggregation = aggregation
def __add__(self, arg: "BaseSet"):
memb = Membership(
lambda x: self.aggregation(
self.membership(x),
arg.membership(x),
),
self.membership.items + arg.membership.items,
)
return BaseSet(
f"({self.name})_union_({arg.name})",
memb,
aggregation=self.aggregation,
)
def domain(self, step=0.05):
start = self.membership.items[0]
end = self.membership.items[-1]
result = list(arange(start, end, step))
result += self.membership.items
result.sort()
return result
def __iter__(self):
return iter(self.domain())
def __len__(self):
return len(self.domain())
def __str__(self) -> str:
return self.name
def graph(self, step: float = 0.05):
x_data = self.domain(step=step)
y_data = [self.membership(x) for x in x_data]
plt.figure()
plt.title(self.name)
plt.xlabel("Domain values")
plt.ylabel("Membership grade")
plt.plot(x_data, y_data)
plt.savefig(f"set_{self.name}.png")
| 25.766667
| 57
| 0.568564
| 1,416
| 0.915912
| 0
| 0
| 0
| 0
| 0
| 0
| 99
| 0.064036
|
a104d65ea80539f94a6a62d27d42b32939f7ca2a
| 9,911
|
py
|
Python
|
play/play_loop.py
|
wmloh/ChessAI
|
b8eafd673ecb8162e464d78fccd32979a0c28126
|
[
"MIT"
] | 1
|
2021-09-07T20:40:44.000Z
|
2021-09-07T20:40:44.000Z
|
play/play_loop.py
|
wmloh/ChessAI
|
b8eafd673ecb8162e464d78fccd32979a0c28126
|
[
"MIT"
] | null | null | null |
play/play_loop.py
|
wmloh/ChessAI
|
b8eafd673ecb8162e464d78fccd32979a0c28126
|
[
"MIT"
] | null | null | null |
import numpy as np
import chess
import chess.engine
from tkinter.filedialog import asksaveasfilename
from parsing.math_encode import tensor_encode, tensor_decode
from inference.infer_action import get_action
class PlayLoop:
__doc__ = '''
    An interactive REPL environment for playing against a trained chess AI
'''
TRACE_FORMAT = '{:<7}{:<18}{:<42}{:<45}{:<7}{:<18}{:<42}{}'
TRACE_HEADER = ('move', 'WHITE', 'source', 'target', 'move', 'BLACK', 'source', 'target')
def __init__(self, policy, secondary_policy=None, engine_path='../engine/stockfish.exe'):
'''
Constructs a PlayLoop instance
:param policy: PolicyModel - primary AI agent to simulate
:param secondary_policy: None/PolicyModel/'same'/'stockfish' - Agent used to replace player moves
(if None, human is playing; if 'same', secondary_policy=policy)
'''
self.policy = policy
self.player_white = None
self.board = None
self.keep_trace = False
self.trace = None
self.engine = None
if secondary_policy is not None:
if secondary_policy == 'same':
self.player_move_func = lambda: self._get_action_from_policy(policy)
elif secondary_policy == 'stockfish':
self.player_move_func = self._get_stockfish
self.engine = chess.engine.SimpleEngine.popen_uci(engine_path)
else:
self.player_move_func = lambda: self._get_action_from_policy(secondary_policy)
else:
self.player_move_func = self._get_player_move
def init_game(self, player_side, keep_trace=True):
'''
        Sets up a game, indicating which side the player will play as
:param player_side: 'w'/'b' - side to play as
:param keep_trace: bool - if True, accumulates the trace for the entire game
:return: None
'''
if self.board is not None:
            raise RuntimeWarning('Board already initialized; call reset() before starting a new game')
if player_side == 'w':
self.player_white = True
elif player_side == 'b':
self.player_white = False
else:
raise ValueError(f'Expected "w" or "b" for player_side but given {player_side}')
self.board = chess.Board()
self.keep_trace = keep_trace
if keep_trace:
self.trace = list()
def reset(self):
'''
Resets the PlayLoop state (except the trace)
:return: None
'''
self.board = None
self.keep_trace = False
def loop(self, verbose=True):
'''
Runs the loop until the termination of a game
:param verbose: bool - prints messages if True
:return: None
'''
if self.board is None:
raise RuntimeError('init_game was not called to configure game settings!')
if self.board.is_game_over():
            raise RuntimeError('Game has already ended. Call reset and init_game before calling loop')
trace_collector = list()
if not self.player_white:
move, policy = self._get_action_from_policy()
if self.keep_trace: self._store_trace(move, trace_collector, policy=policy)
if verbose: print(f'\nAI made {move} move\n')
while not self.board.is_game_over():
if verbose: print(self.board)
# player/secondary_policy move
move, policy = self.player_move_func()
if self.keep_trace: self._store_trace(move, trace_collector, policy=policy)
if verbose: print(f'\nPlayer made {move} move\n')
if self.board.is_game_over():
break
# policy move
move, policy = self._get_action_from_policy()
if self.keep_trace: self._store_trace(move, trace_collector, policy=policy)
if verbose: print(f'\nAI made {move} move\n')
if len(trace_collector) != 0:
self._store_trace(move, trace_collector, policy=policy, force_flush=True)
if verbose: print('Game completed')
def get_trace(self, printable=True):
'''
Returns the trace
        :param printable: bool - If True, returns a printable, formatted string of the trace
:return: str/list(str)
'''
if printable:
return '\n'.join(self.trace)
return self.trace
def save_trace(self, file_path=None, interactive=True):
'''
Saves trace in a text file
Automatically appends ".txt" at the end of the file_path if the suffix is not found
:param file_path: None/str - file path to save to
:param interactive: bool - if True, using Tkinter GUI to select file path
:return: None
'''
if interactive:
file_path = asksaveasfilename(filetypes=[('Text file', '*.txt')])
if file_path[-4:] != '.txt':
file_path = file_path + '.txt'
with open(file_path, 'w') as f:
f.write(self.get_trace(printable=True))
def _get_action_from_policy(self, external_policy=None):
'''
Gets UCI representation of the move using the policy loaded and pushes the move on the board
        :param external_policy: None/PolicyModel - policy to use (if None, defaults to the loaded policy)
:return: str
'''
policy = self.policy
flip = self.player_white
if external_policy: # player is an AI
policy = external_policy
flip = not flip
src, tgt, _ = policy.infer(tensor_encode(self.board, mirror=flip))
if flip: # perform mirror flips
src = np.flip(src[0, ...], 0)
tgt = np.flip(tgt[0, ...], 0)
else:
src = src[0, ...]
tgt = tgt[0, ...]
move = get_action(self.board, src, tgt)
self.board.push(chess.Move.from_uci(move))
return move, (src, tgt)
def _get_player_move(self):
'''
Obtains the move from the player by command line and pushes the move on the board
:return: str, None
'''
while True: # handles invalid player moves
try:
move_input = input('Enter your move: ')
move = chess.Move.from_uci(move_input)
if move in self.board.legal_moves:
self.board.push(move)
else:
raise AssertionError(f'{move_input} is not a valid move')
except AssertionError as e:
print(f'ERROR: {e}')
else:
break
return move_input, None
def _get_stockfish(self, time=0.001, depth=1):
'''
Obtains the move from the Stockfish engine with the lowest ELO ratings
:param time: float - time limit for the engine
:param depth: int - maximum search depth
:return: str, None
'''
move = self.engine.play(self.board, chess.engine.Limit(time=time, depth=depth),
ponder=False, options={'uci_elo': 1350}).move
self.board.push(move)
return move.uci(), None
def _store_trace(self, move, trace_collector, policy=None, force_flush=False):
'''
        Collects the trace onto trace_collector and, once both white and black have moved,
        appends it to the main trace list
:param move: str - UCI representation of the move
:param trace_collector: list(str) - string accumulator
:param policy: None/tuple(np.ndarray, np.ndarray) - policy output
:param force_flush: bool - if True, appends incomplete trace
:return: None
'''
trace_collector.append(str(self.board))
trace_collector.append(move)
if policy is None:
trace_collector.append('N/A\n\n\n\n\n\n\n')
trace_collector.append('N/A\n\n\n\n\n\n\n')
else:
trace_collector.append(str(np.around(policy[0], 2)).replace('[[', '')
.replace(' [ ', '').replace(' [', '').replace(']', ''))
trace_collector.append(str(np.around(policy[1], 2)).replace('[[', '')
.replace(' [ ', '').replace(' [', '').replace(']', ''))
        if len(trace_collector) == 8:  # two half-moves have been made
self.trace.append(PlayLoop.TRACE_FORMAT.format(*PlayLoop.TRACE_HEADER))
for b1, src1, tgt1, b2, src2, tgt2 in zip(trace_collector[0].split('\n'),
trace_collector[2].split('\n'),
trace_collector[3].split('\n'),
trace_collector[4].split('\n'),
trace_collector[6].split('\n'),
trace_collector[7].split('\n')):
self.trace.append(PlayLoop.TRACE_FORMAT.format(trace_collector[1], b1, src1, tgt1,
trace_collector[5], b2, src2, tgt2))
trace_collector[1] = ''
trace_collector[5] = ''
self.trace.append('\n')
trace_collector.clear()
elif force_flush:
self.trace.append(PlayLoop.TRACE_FORMAT.format(*PlayLoop.TRACE_HEADER))
for b1, src1, tgt1 in zip(trace_collector[0].split('\n'),
trace_collector[2].split('\n'),
trace_collector[3].split('\n')):
self.trace.append(PlayLoop.TRACE_FORMAT.format(trace_collector[1], b1, src1, tgt1,
'', '', '', ''))
trace_collector[1] = ''
self.trace.append('\n')
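# Usage sketch: a hedged outline of the intended call sequence. `policy` must
# be a trained PolicyModel (see the inference module), so the example is left
# as comments rather than executable code:
#   loop = PlayLoop(policy, secondary_policy='stockfish')
#   loop.init_game('w', keep_trace=True)   # the AI plays black
#   loop.loop(verbose=True)
#   loop.save_trace(interactive=False, file_path='trace.txt')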
| 39.486056
| 105
| 0.563112
| 9,699
| 0.97861
| 0
| 0
| 0
| 0
| 0
| 0
| 3,279
| 0.330845
|
a10591815a24a01b78e2571e754c9c37c5e03b4b
| 205
|
py
|
Python
|
wave/synth/wave/wave/base/curve.py
|
jedhsu/wave
|
a05d8f4b0a96722bdc2f5a514646c7a44681982b
|
[
"Apache-2.0"
] | null | null | null |
wave/synth/wave/wave/base/curve.py
|
jedhsu/wave
|
a05d8f4b0a96722bdc2f5a514646c7a44681982b
|
[
"Apache-2.0"
] | null | null | null |
wave/synth/wave/wave/base/curve.py
|
jedhsu/wave
|
a05d8f4b0a96722bdc2f5a514646c7a44681982b
|
[
"Apache-2.0"
] | null | null | null |
from dataclasses import dataclass
from typing import Generic, Mapping, TypeVar
__all__ = ["Curve"]
T = TypeVar("T")
U = TypeVar("U")
@dataclass
class _Curve(Generic[T, U]):
    mapping: Mapping[T, U]
# __all__ advertises "Curve", but only _Curve is defined in this file; a public
# alias is a minimal fix (assuming the alias was accidentally dropped here).
Curve = _Curve
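# Usage sketch: the illustrative mapping below treats a curve as discrete
# (x -> y) samples; the concrete values are assumptions for demonstration.
if __name__ == "__main__":
    c = Curve({0.0: 0.0, 0.5: 0.7, 1.0: 1.0})
    print(c.mapping[0.5])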
| 14.642857
| 44
| 0.692683
| 55
| 0.268293
| 0
| 0
| 66
| 0.321951
| 0
| 0
| 13
| 0.063415
|
a10652730ddf79d36acced38c1989dd4d1acb1fa
| 877
|
py
|
Python
|
src/jellyroll/providers/utils/anyetree.py
|
blturner/jellyroll
|
8a3b96e84d6cfbaac478bb8f9e406aabff5a77f3
|
[
"BSD-3-Clause"
] | 3
|
2015-03-02T06:34:45.000Z
|
2016-11-24T18:53:59.000Z
|
src/jellyroll/providers/utils/anyetree.py
|
blturner/jellyroll
|
8a3b96e84d6cfbaac478bb8f9e406aabff5a77f3
|
[
"BSD-3-Clause"
] | null | null | null |
src/jellyroll/providers/utils/anyetree.py
|
blturner/jellyroll
|
8a3b96e84d6cfbaac478bb8f9e406aabff5a77f3
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Get an Etree library. Usage::
>>> from anyetree import etree
Returns some etree library. Looks for (in order of decreasing preference):
* ``lxml.etree`` (http://cheeseshop.python.org/pypi/lxml/)
* ``xml.etree.cElementTree`` (built into Python 2.5)
* ``cElementTree`` (http://effbot.org/zone/celementtree.htm)
* ``xml.etree.ElementTree`` (built into Python 2.5)
* ``elementtree.ElementTree`` (http://effbot.org/zone/element-index.htm)
"""
__all__ = ['etree']
SEARCH_PATHS = [
"lxml.etree",
"xml.etree.cElementTree",
"cElementTree",
"xml.etree.ElementTree",
"elementtree.ElementTree",
]
etree = None
for name in SEARCH_PATHS:
try:
etree = __import__(name, '', '', [''])
break
except ImportError:
continue
if etree is None:
raise ImportError("No suitable ElementTree implementation found.")
| 25.057143
| 74
| 0.652223
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 624
| 0.711517
|
a1081e4aca80f13d81fb5c284f116c973136197c
| 608
|
py
|
Python
|
libs/dispatch/dispatcher.py
|
eeshakumar/hythe
|
52ca795c8370ddfb2aa6fb87ff3f63a85c55f913
|
[
"MIT"
] | null | null | null |
libs/dispatch/dispatcher.py
|
eeshakumar/hythe
|
52ca795c8370ddfb2aa6fb87ff3f63a85c55f913
|
[
"MIT"
] | null | null | null |
libs/dispatch/dispatcher.py
|
eeshakumar/hythe
|
52ca795c8370ddfb2aa6fb87ff3f63a85c55f913
|
[
"MIT"
] | null | null | null |
from abc import abstractmethod, ABC
class Dispatcher(ABC):
    def __init__(self, dispatch_dict=None):
        self._dispatch_dict = dispatch_dict
        self._process_list = []
def set_dispatch_dict(self, dispatch_dict):
self._dispatch_dict = dispatch_dict
@abstractmethod
def dispatch(self):
raise NotImplementedError
@staticmethod
def execute(dispatch_obj):
try:
print("Running experiment with seed:", dispatch_obj[0])
dispatch_obj[1].run()
except IOError as io_error:
            print(io_error)  # IOError instances have no .message attribute in Python 3
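# Usage sketch: a minimal concrete Dispatcher, assuming dispatch_dict maps
# seeds to experiment objects exposing a run() method, as execute() expects.
class SerialDispatcher(Dispatcher):
    def dispatch(self):
        for seed, experiment in self._dispatch_dict.items():
            Dispatcher.execute((seed, experiment))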
| 22.518519
| 67
| 0.648026
| 567
| 0.932566
| 0
| 0
| 304
| 0.5
| 0
| 0
| 31
| 0.050987
|
a10a14a640ca1ca76f6da0a67be2551ab7a5efc8
| 766
|
py
|
Python
|
3_TT_FLIM.py
|
swabianinstruments/swabianinstruments-web-demo
|
2d59f79958a942ed61f04ea7dd44c98ab2cf17df
|
[
"MIT"
] | null | null | null |
3_TT_FLIM.py
|
swabianinstruments/swabianinstruments-web-demo
|
2d59f79958a942ed61f04ea7dd44c98ab2cf17df
|
[
"MIT"
] | null | null | null |
3_TT_FLIM.py
|
swabianinstruments/swabianinstruments-web-demo
|
2d59f79958a942ed61f04ea7dd44c98ab2cf17df
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 25 11:01:41 2020
@author: liu
"""
from time import sleep
import plot_TT
from TimeTagger import createTimeTagger, freeAllTimeTagger, TimeDifferences
# create a Time Tagger instance
tagger = createTimeTagger()
tagger.reset()
# assign channels for measurement
phot_ch = 1
strt_ch = 2
next_ch = 3
sync_ch = 4
# initialize measurement parameters
binwidth = 10000 # 10 ns
bins = 100
n_pix = 100
# measure FLIM
image = TimeDifferences(tagger, phot_ch, strt_ch, next_ch, sync_ch, binwidth, bins, n_pix)
print("\nFLIM measurement is running.")
sleep(10)
xFLIM = image.getIndex()
yFLIM = image.getData()
plot_TT.BarChart2D(xFLIM, yFLIM)
# free the Time Tagger
freeAllTimeTagger()
| 19.15
| 91
| 0.707572
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 267
| 0.348564
|
a10b1c87fe2ffd2a2fe1dee4b23ec1fe16f8cf15
| 287
|
py
|
Python
|
electroPyy/io/__init__.py
|
ludo67100/electroPyy_Dev
|
3b940adbfdf005dd8231e7ac61aca708033d5a95
|
[
"OML"
] | null | null | null |
electroPyy/io/__init__.py
|
ludo67100/electroPyy_Dev
|
3b940adbfdf005dd8231e7ac61aca708033d5a95
|
[
"OML"
] | null | null | null |
electroPyy/io/__init__.py
|
ludo67100/electroPyy_Dev
|
3b940adbfdf005dd8231e7ac61aca708033d5a95
|
[
"OML"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 21 14:54:51 2019
@author: Ludovic.SPAETH
"""
from electroPyy.io.BaseRawIO import BaseRawIO
from electroPyy.io.HdF5IO import HdF5IO
from electroPyy.io.NeuroExIO import NeuroExIO
from electroPyy.io.WinWcpRawIO import WinWcpRawIO
| 23.916667
| 50
| 0.745645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 96
| 0.334495
|
a10d6496b80a4c774fdd41dcbb4c0a5e756986a0
| 317
|
py
|
Python
|
torch_geometric_temporal/signal/__init__.py
|
tforgaard/pytorch_geometric_temporal
|
d3a6a55119cb8cc38cb6d941ba8f74879d02c4b8
|
[
"MIT"
] | 1,410
|
2020-06-27T03:36:19.000Z
|
2022-03-31T23:29:22.000Z
|
torch_geometric_temporal/signal/__init__.py
|
tforgaard/pytorch_geometric_temporal
|
d3a6a55119cb8cc38cb6d941ba8f74879d02c4b8
|
[
"MIT"
] | 124
|
2020-07-07T16:11:09.000Z
|
2022-03-31T07:21:53.000Z
|
torch_geometric_temporal/signal/__init__.py
|
tforgaard/pytorch_geometric_temporal
|
d3a6a55119cb8cc38cb6d941ba8f74879d02c4b8
|
[
"MIT"
] | 230
|
2020-07-27T11:13:52.000Z
|
2022-03-31T14:31:29.000Z
|
from .dynamic_graph_temporal_signal import *
from .dynamic_graph_temporal_signal_batch import *
from .static_graph_temporal_signal import *
from .static_graph_temporal_signal_batch import *
from .dynamic_graph_static_signal import *
from .dynamic_graph_static_signal_batch import *
from .train_test_split import *
| 28.818182
| 50
| 0.858044
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
a10e01e242ade75c580d5f9cde2741f0eeac1fca
| 3,605
|
py
|
Python
|
sdks/python/apache_beam/examples/streaming_wordcount_debugging_test.py
|
aaltay/incubator-beam
|
b150ace0884c88bc93da21f6dfe3b7684f886e94
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 9
|
2016-09-28T18:25:24.000Z
|
2019-05-09T12:28:29.000Z
|
sdks/python/apache_beam/examples/streaming_wordcount_debugging_test.py
|
aaltay/incubator-beam
|
b150ace0884c88bc93da21f6dfe3b7684f886e94
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 28
|
2020-03-04T22:01:48.000Z
|
2022-03-12T00:59:47.000Z
|
sdks/python/apache_beam/examples/streaming_wordcount_debugging_test.py
|
aaltay/incubator-beam
|
b150ace0884c88bc93da21f6dfe3b7684f886e94
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 6
|
2020-12-02T09:51:34.000Z
|
2022-03-15T23:09:26.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit test for the streaming wordcount example with debug."""
# pytype: skip-file
import unittest
import mock
import pytest
import apache_beam as beam
from apache_beam.examples import streaming_wordcount_debugging
from apache_beam.testing.test_stream import TestStream
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
# Protect against environments where the PubSub library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
from google.cloud import pubsub
except ImportError:
pubsub = None
# pylint: enable=wrong-import-order, wrong-import-position
@pytest.mark.examples_postcommit
class StreamingWordcountDebugging(unittest.TestCase):
@unittest.skipIf(pubsub is None, 'GCP dependencies are not installed')
@mock.patch('apache_beam.io.ReadFromPubSub')
@mock.patch('apache_beam.io.WriteToPubSub')
def test_streaming_wordcount_debugging(self, *unused_mocks):
def FakeReadFromPubSub(topic=None, subscription=None, values=None):
expected_topic = topic
expected_subscription = subscription
def _inner(topic=None, subscription=None):
assert topic == expected_topic
assert subscription == expected_subscription
return TestStream().add_elements(values)
return _inner
class AssertTransform(beam.PTransform):
def __init__(self, matcher):
self.matcher = matcher
def expand(self, pcoll):
assert_that(pcoll, self.matcher)
def FakeWriteToPubSub(topic=None, values=None):
expected_topic = topic
def _inner(topic=None, subscription=None):
assert topic == expected_topic
return AssertTransform(equal_to(values))
return _inner
input_topic = 'projects/fake-beam-test-project/topic/intopic'
input_values = [
'150', '151', '152', '153', '154', '210', '211', '212', '213', '214'
]
output_topic = 'projects/fake-beam-test-project/topic/outtopic'
output_values = [
'150: 1',
'151: 1',
'152: 1',
'153: 1',
'154: 1',
'210: 1',
'211: 1',
'212: 1',
'213: 1',
'214: 1'
]
beam.io.ReadFromPubSub = (
FakeReadFromPubSub(
topic=input_topic,
values=list(x.encode('utf-8') for x in input_values)))
beam.io.WriteToPubSub = (
FakeWriteToPubSub(
topic=output_topic,
values=list(x.encode('utf-8') for x in output_values)))
streaming_wordcount_debugging.run([
'--input_topic',
'projects/fake-beam-test-project/topic/intopic',
'--output_topic',
'projects/fake-beam-test-project/topic/outtopic'
],
save_main_session=False)
if __name__ == '__main__':
unittest.main()
| 32.477477
| 76
| 0.691262
| 2,098
| 0.581969
| 0
| 0
| 2,131
| 0.591123
| 0
| 0
| 1,512
| 0.419417
|
a10e3d1311566cfbb4eeacef8a5558e6389ab6c2
| 147
|
py
|
Python
|
rest_framework_bulk/__init__.py
|
xordoquy/django-rest-framework-bulk
|
484df717a790591a7bc58d5fed34f958ae82929a
|
[
"MIT"
] | 1
|
2019-08-20T02:08:33.000Z
|
2019-08-20T02:08:33.000Z
|
rest_framework_bulk/__init__.py
|
xordoquy/django-rest-framework-bulk
|
484df717a790591a7bc58d5fed34f958ae82929a
|
[
"MIT"
] | null | null | null |
rest_framework_bulk/__init__.py
|
xordoquy/django-rest-framework-bulk
|
484df717a790591a7bc58d5fed34f958ae82929a
|
[
"MIT"
] | null | null | null |
__version__ = '0.1.3'
__author__ = 'Miroslav Shubernetskiy'
try:
from .generics import *
from .mixins import *
except Exception:
pass
| 16.333333
| 37
| 0.687075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 31
| 0.210884
|
a10e6a87e856699221521cf8bdbfca12b9ee5a97
| 1,773
|
py
|
Python
|
random_forest_classifier.py
|
duongntbk/FashionMNIST
|
982f31ac7d857b5deadfde37f979bc6a047fa007
|
[
"MIT"
] | null | null | null |
random_forest_classifier.py
|
duongntbk/FashionMNIST
|
982f31ac7d857b5deadfde37f979bc6a047fa007
|
[
"MIT"
] | 10
|
2020-01-28T22:19:43.000Z
|
2022-02-10T00:30:45.000Z
|
random_forest_classifier.py
|
duongntbk/FashionMNIST
|
982f31ac7d857b5deadfde37f979bc6a047fa007
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import pickle
from sklearn.ensemble import RandomForestClassifier
from base_shallow_classifier import BaseShallowClassifier
class RFClassifier(BaseShallowClassifier):
'''
Image classification using random forest classifier (RFC).
Can reach 87.82% accuracy on test set of FashionMNIST datasets
using the following parameters:
- n_estimators=160
- min_samples_split=2
    Note: actual accuracy may vary based on the initial seed.
'''
def __init__(self, load_data=True):
'''
Simply calls parent's constructor,
which in turn calls load_data method (if needed).
'''
super().__init__(load_data)
def get_algorithm(self):
'''
Returns the algorithm in use (which is RFC),
this method is used in cross_validation method.
'''
return RandomForestClassifier()
def train_model(self, save_path, max_obs=None,
n_estimators=10, min_samples_split=2):
'''
Trains the model on training set of FashionMNIST datasets,
using RFC algorithm. n_estimators and min_samples_split
can be set from parameters.
'''
if self.train_data is None or self.train_labels is None:
raise ValueError('Fashion MNIST datasets is not loaded')
last_train_index = max_obs if max_obs else self.train_data.shape[0]
train_data = self.train_data[:last_train_index]
train_labels = self.train_labels[:last_train_index]
self.model = RandomForestClassifier(n_estimators=n_estimators,
min_samples_split=min_samples_split)
self.model.fit(train_data, train_labels)
with open(save_path, 'wb') as f:
f.write(pickle.dumps(self.model))
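# Hedged usage sketch (not part of the original module): trains an RFC with
# the parameters quoted in the class docstring. 'rfc_model.pkl' is a
# hypothetical output path, and this assumes BaseShallowClassifier can load
# the FashionMNIST datasets locally.
if __name__ == '__main__':
    clf = RFClassifier(load_data=True)
    clf.train_model('rfc_model.pkl', n_estimators=160, min_samples_split=2)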
| 31.105263
| 75
| 0.674563
| 1,619
| 0.913142
| 0
| 0
| 0
| 0
| 0
| 0
| 773
| 0.435984
|
a10f0a0a33562a06ed9b546b2f53186a7237246b
| 2,387
|
py
|
Python
|
setup.py
|
mehta-lab/recOrder
|
67f2edb9ab13114dfe41d57e465ae24f961b0004
|
[
"Unlicense"
] | 2
|
2022-01-19T21:13:32.000Z
|
2022-02-24T19:40:24.000Z
|
setup.py
|
mehta-lab/recOrder
|
67f2edb9ab13114dfe41d57e465ae24f961b0004
|
[
"Unlicense"
] | 55
|
2021-06-24T18:53:18.000Z
|
2022-03-30T21:05:14.000Z
|
setup.py
|
mehta-lab/recOrder
|
67f2edb9ab13114dfe41d57e465ae24f961b0004
|
[
"Unlicense"
] | null | null | null |
import os.path as osp
from setuptools import setup, find_packages
# todo: modify as we decide on versions, names, descriptions. readme
MIN_PY_VER = '3.7'
DISTNAME = 'recOrder'
DESCRIPTION = 'computational microscopy toolkit for label-free imaging'
with open("README.md", "r") as fh:
LONG_DESCRIPTION = fh.read()
LONG_DESCRIPTION_content_type = "text/markdown"
LICENSE = 'Chan Zuckerberg Biohub Software License'
INSTALL_REQUIRES = ['numpy', 'scipy', 'matplotlib', 'pycromanager']
REQUIRES = []
# todo: modify for python dependency
CLASSIFIERS = [
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Visualization',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Utilities',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS'
]
# populate packages
PACKAGES = find_packages()
# parse requirements
with open(osp.join('requirements', 'default.txt')) as f:
requirements = [line.strip() for line in f
if line and not line.startswith('#')]
# populate requirements
for l in requirements:
sep = l.split(' #')
INSTALL_REQUIRES.append(sep[0].strip())
if len(sep) == 2:
REQUIRES.append(sep[1].strip())
if __name__ == '__main__':
setup(
name=DISTNAME,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
long_description_content_type=LONG_DESCRIPTION_content_type,
license=LICENSE,
version="0.0.1",
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
python_requires=f'>={MIN_PY_VER}',
dependency_links=['https://github.com/mehta-lab/waveorder.git#egg=waveorder'],
packages=PACKAGES,
include_package_data=True,
entry_points={
'console_scripts': [
'recOrder.reconstruct = recOrder.cli_module:main',
'recOrder.convert = scripts.convert_tiff_to_zarr:main'
]
}
)
| 33.619718
| 86
| 0.6615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,123
| 0.470465
|
a1102b00cc945569015366b5d33e47090c8e92f5
| 6,457
|
py
|
Python
|
oscrypto/_openssl/_libssl_ctypes.py
|
frennkie/oscrypto
|
24aff3148379b931d9c72ab3b069e537dc2195f8
|
[
"MIT"
] | 1
|
2020-05-17T06:44:51.000Z
|
2020-05-17T06:44:51.000Z
|
oscrypto/_openssl/_libssl_ctypes.py
|
frennkie/oscrypto
|
24aff3148379b931d9c72ab3b069e537dc2195f8
|
[
"MIT"
] | null | null | null |
oscrypto/_openssl/_libssl_ctypes.py
|
frennkie/oscrypto
|
24aff3148379b931d9c72ab3b069e537dc2195f8
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import platform
import sys
from ctypes.util import find_library
from ctypes import CDLL, CFUNCTYPE, POINTER, c_void_p, c_char_p, c_int, c_size_t, c_long
from .. import _backend_config
from .._ffi import FFIEngineError
from ..errors import LibraryNotFoundError
from ._libcrypto import libcrypto_version_info
__all__ = [
'libssl',
]
libssl_path = _backend_config().get('libssl_path')
if libssl_path is None:
libssl_path = find_library('ssl')
if not libssl_path:
    raise LibraryNotFoundError('The library libssl could not be found')
# if we are on Catalina, we want to strongly version libssl since the unversioned library has a non-stable ABI
if sys.platform == 'darwin' and platform.mac_ver()[0].startswith('10.15') and libssl_path.endswith('libssl.dylib'):
    # libssl.44.dylib ships with libressl-2.6, which has an OpenSSL 1.0.1-compatible API
    libssl_path = libssl_path.replace('libssl.dylib', 'libssl.44.dylib')
libssl = CDLL(libssl_path, use_errno=True)
P_SSL_METHOD = POINTER(c_void_p)
P_SSL_CTX = POINTER(c_void_p)
P_SSL_SESSION = POINTER(c_void_p)
P_SSL = POINTER(c_void_p)
P_BIO_METHOD = POINTER(c_void_p)
P_BIO = POINTER(c_void_p)
X509 = c_void_p
P_X509 = POINTER(X509)
P_X509_STORE = POINTER(c_void_p)
P_X509_STORE_CTX = POINTER(c_void_p)
_STACK = c_void_p
P_STACK = POINTER(_STACK)
try:
if libcrypto_version_info < (1, 1):
libssl.sk_num.argtypes = [P_STACK]
libssl.sk_num.restype = c_int
libssl.sk_value.argtypes = [P_STACK, c_int]
libssl.sk_value.restype = P_X509
libssl.SSL_library_init.argtypes = []
libssl.SSL_library_init.restype = c_int
libssl.OPENSSL_add_all_algorithms_noconf.argtypes = []
libssl.OPENSSL_add_all_algorithms_noconf.restype = None
libssl.SSLv23_method.argtypes = []
libssl.SSLv23_method.restype = P_SSL_METHOD
else:
libssl.OPENSSL_sk_num.argtypes = [P_STACK]
libssl.OPENSSL_sk_num.restype = c_int
libssl.OPENSSL_sk_value.argtypes = [P_STACK, c_int]
libssl.OPENSSL_sk_value.restype = P_X509
libssl.TLS_method.argtypes = []
libssl.TLS_method.restype = P_SSL_METHOD
libssl.BIO_s_mem.argtypes = []
libssl.BIO_s_mem.restype = P_BIO_METHOD
libssl.BIO_new.argtypes = [
P_BIO_METHOD
]
libssl.BIO_new.restype = P_BIO
libssl.BIO_free.argtypes = [
P_BIO
]
libssl.BIO_free.restype = c_int
libssl.BIO_read.argtypes = [
P_BIO,
c_char_p,
c_int
]
libssl.BIO_read.restype = c_int
libssl.BIO_write.argtypes = [
P_BIO,
c_char_p,
c_int
]
libssl.BIO_write.restype = c_int
libssl.BIO_ctrl_pending.argtypes = [
P_BIO
]
libssl.BIO_ctrl_pending.restype = c_size_t
libssl.SSL_CTX_new.argtypes = [
P_SSL_METHOD
]
libssl.SSL_CTX_new.restype = P_SSL_CTX
libssl.SSL_CTX_set_timeout.argtypes = [
P_SSL_CTX,
c_long
]
libssl.SSL_CTX_set_timeout.restype = c_long
verify_callback = CFUNCTYPE(c_int, c_int, P_X509_STORE_CTX)
setattr(libssl, 'verify_callback', verify_callback)
libssl.SSL_CTX_set_verify.argtypes = [
P_SSL_CTX,
c_int,
POINTER(verify_callback)
]
libssl.SSL_CTX_set_verify.restype = None
libssl.SSL_CTX_set_default_verify_paths.argtypes = [
P_SSL_CTX
]
libssl.SSL_CTX_set_default_verify_paths.restype = c_int
libssl.SSL_CTX_load_verify_locations.argtypes = [
P_SSL_CTX,
c_char_p,
c_char_p
]
libssl.SSL_CTX_load_verify_locations.restype = c_int
libssl.SSL_get_verify_result.argtypes = [
P_SSL
]
libssl.SSL_get_verify_result.restype = c_long
libssl.SSL_CTX_get_cert_store.argtypes = [
P_SSL_CTX
]
libssl.SSL_CTX_get_cert_store.restype = P_X509_STORE
libssl.X509_STORE_add_cert.argtypes = [
P_X509_STORE,
P_X509
]
libssl.X509_STORE_add_cert.restype = c_int
libssl.SSL_CTX_set_cipher_list.argtypes = [
P_SSL_CTX,
c_char_p
]
libssl.SSL_CTX_set_cipher_list.restype = c_int
    libssl.SSL_CTX_ctrl.argtypes = [
P_SSL_CTX,
c_int,
c_long,
c_void_p
]
libssl.SSL_CTX_ctrl.restype = c_long
libssl.SSL_CTX_free.argtypes = [
P_SSL_CTX
]
libssl.SSL_CTX_free.restype = None
libssl.SSL_new.argtypes = [
P_SSL_CTX
]
libssl.SSL_new.restype = P_SSL
libssl.SSL_free.argtypes = [
P_SSL
]
libssl.SSL_free.restype = None
libssl.SSL_set_bio.argtypes = [
P_SSL,
P_BIO,
P_BIO
]
libssl.SSL_set_bio.restype = None
    libssl.SSL_ctrl.argtypes = [
P_SSL,
c_int,
c_long,
c_void_p
]
libssl.SSL_ctrl.restype = c_long
libssl.SSL_get_peer_cert_chain.argtypes = [
P_SSL
]
libssl.SSL_get_peer_cert_chain.restype = P_STACK
libssl.SSL_get1_session.argtypes = [
P_SSL
]
libssl.SSL_get1_session.restype = P_SSL_SESSION
libssl.SSL_set_session.argtypes = [
P_SSL,
P_SSL_SESSION
]
libssl.SSL_set_session.restype = c_int
libssl.SSL_SESSION_free.argtypes = [
P_SSL_SESSION
]
libssl.SSL_SESSION_free.restype = None
libssl.SSL_set_connect_state.argtypes = [
P_SSL
]
libssl.SSL_set_connect_state.restype = None
libssl.SSL_do_handshake.argtypes = [
P_SSL
]
libssl.SSL_do_handshake.restype = c_int
libssl.SSL_get_error.argtypes = [
P_SSL,
c_int
]
libssl.SSL_get_error.restype = c_int
libssl.SSL_get_version.argtypes = [
P_SSL
]
libssl.SSL_get_version.restype = c_char_p
libssl.SSL_read.argtypes = [
P_SSL,
c_char_p,
c_int
]
libssl.SSL_read.restype = c_int
libssl.SSL_write.argtypes = [
P_SSL,
c_char_p,
c_int
]
libssl.SSL_write.restype = c_int
libssl.SSL_pending.argtypes = [
P_SSL
]
libssl.SSL_pending.restype = c_int
libssl.SSL_shutdown.argtypes = [
P_SSL
]
libssl.SSL_shutdown.restype = c_int
except AttributeError:
raise FFIEngineError('Error initializing ctypes')
setattr(libssl, '_STACK', _STACK)
setattr(libssl, 'X509', X509)
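# Hedged sketch (assumption, not from this module): with the prototypes bound
# above, a context can be created and torn down directly through ctypes:
#
#     method = libssl.TLS_method() if libcrypto_version_info >= (1, 1) else libssl.SSLv23_method()
#     ctx = libssl.SSL_CTX_new(method)
#     ssl = libssl.SSL_new(ctx)
#     libssl.SSL_free(ssl)
#     libssl.SSL_CTX_free(ctx)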
| 24.093284
| 119
| 0.671055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 382
| 0.059161
|
a1102cc6df4e46f14ab22665f1a454bf74d422a0
| 382
|
py
|
Python
|
etl/etl.py
|
amalshehu/exercism-python
|
eb469246504fb22463e036a989dc9b44e0a83410
|
[
"MIT"
] | 2
|
2016-08-25T10:58:44.000Z
|
2017-11-13T12:58:04.000Z
|
etl/etl.py
|
amalshehu/exercism-python
|
eb469246504fb22463e036a989dc9b44e0a83410
|
[
"MIT"
] | 1
|
2016-08-25T10:59:23.000Z
|
2016-08-25T12:20:19.000Z
|
etl/etl.py
|
amalshehu/exercism-python
|
eb469246504fb22463e036a989dc9b44e0a83410
|
[
"MIT"
] | null | null | null |
# File: etl.py
# Purpose: To do the `Transform` step of an Extract-Transform-Load.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 22 September 2016, 03:40 PM
def transform(words):
new_words = dict()
for point, letters in words.items():
for letter in letters:
new_words[letter.lower()] = point
return new_words
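# Hedged example (not in the original file): legacy Scrabble-style data keyed
# by score is flattened to a letter -> score mapping.
if __name__ == '__main__':
    legacy = {1: ['A', 'E'], 10: ['Q', 'Z']}
    assert transform(legacy) == {'a': 1, 'e': 1, 'q': 10, 'z': 10}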
| 27.285714
| 71
| 0.63089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 191
| 0.5
|
a11034c8715f1c4364caa1c40989aaba6b81cecc
| 2,983
|
py
|
Python
|
codango/account/api.py
|
NdagiStanley/silver-happiness
|
67fb6dd4047c603a84276f88a021d4489cf3b41e
|
[
"MIT"
] | 2
|
2019-10-17T01:03:12.000Z
|
2021-11-24T07:43:14.000Z
|
codango/account/api.py
|
NdagiStanley/silver-happiness
|
67fb6dd4047c603a84276f88a021d4489cf3b41e
|
[
"MIT"
] | 49
|
2019-09-05T02:48:04.000Z
|
2021-06-28T02:29:42.000Z
|
codango/account/api.py
|
NdagiStanley/silver-happiness
|
67fb6dd4047c603a84276f88a021d4489cf3b41e
|
[
"MIT"
] | 1
|
2021-11-25T10:19:27.000Z
|
2021-11-25T10:19:27.000Z
|
import psycopg2
from rest_framework import generics, permissions
from serializers import UserSerializer, UserFollowSerializer, UserSettingsSerializer
from serializers import AllUsersSerializer, UserRegisterSerializer
from userprofile import serializers, models
from django.contrib.auth.models import User
from rest_framework import permissions
class IsOwner(permissions.BasePermission):
    """
    Variant of IsOwnerOrReadOnly(permissions.BasePermission)
    that raises an API exception instead of allowing read-only
    access: we do not want a ReadOnly fallback.
    """
    def has_object_permission(self, request, view, obj):
        # Authentication (permissions.IsAuthenticated) is enforced on the
        # views; here the instance being checked is the user itself
        return obj.id == request.user.id
class UserListAPIView(generics.ListAPIView):
"""For /api/v1/users/ url path"""
queryset = User.objects.all()
serializer_class = AllUsersSerializer
permission_classes = (permissions.IsAdminUser,)
class UserDetailAPIView(generics.RetrieveUpdateAPIView):
"""For /api/v1/users/<id> url path"""
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = (IsOwner, )
class UserRegisterAPIView(generics.CreateAPIView):
"""For /api/v1/auth/register url path"""
permission_classes = (permissions.AllowAny,)
queryset = User.objects.all()
serializer_class = UserRegisterSerializer
class UserLogoutAPIView(generics.UpdateAPIView):
"""For /api/v1/auth/logout url path"""
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = (IsOwner, )
class UserFollowAPIView(generics.CreateAPIView):
"""
For api/v1/users/<>/follow/ url path
To enable user to add or remove those that they follow
"""
serializer_class = UserFollowSerializer
def get_queryset(self):
to_be_followed = User.objects.filter(id=self.kwargs['pk']).first()
return to_be_followed
def perform_create(self, serializer):
self.user = User.objects.filter(id=self.request.user.id).first()
try:
models.Follow.objects.create(
follower=self.user, followed=self.get_queryset())
return {"message":
"You have followed user'{}'".format(
self.get_queryset())}, 201
        except Exception:
            raise serializers.serializers.ValidationError(
                'You have already followed this person')
class UserSettingsAPIView(generics.RetrieveUpdateAPIView):
"""
For api/v1/users/<>/settings/ url path
To enable user to update those that their:
- update's frequency, github account and image
"""
"""For api/v1/users/<>/settings/ url path"""
queryset = models.UserSettings.objects.all()
serializer_class = UserSettingsSerializer
permission_classes = (IsOwner,)
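# Hedged sketch of URL wiring for these views (the patterns below are
# illustrative assumptions, not taken from this project):
#
#     from django.conf.urls import url
#     urlpatterns = [
#         url(r'^api/v1/users/$', UserListAPIView.as_view()),
#         url(r'^api/v1/users/(?P<pk>\d+)/follow/$', UserFollowAPIView.as_view()),
#     ]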
| 31.072917
| 86
| 0.706336
| 2,529
| 0.847804
| 0
| 0
| 0
| 0
| 0
| 0
| 832
| 0.278914
|
a1105853736e4203adc6fff03b4073278e494bcb
| 3,597
|
py
|
Python
|
backend/app/apis/v1/resources.py
|
williamsyb/StockTick
|
1dd10101d44fa3a0584f849b022fc8254c2e66c7
|
[
"MIT"
] | 2
|
2020-11-23T13:38:49.000Z
|
2021-08-17T15:37:04.000Z
|
backend/app/apis/v1/resources.py
|
williamsyb/StockTick
|
1dd10101d44fa3a0584f849b022fc8254c2e66c7
|
[
"MIT"
] | null | null | null |
backend/app/apis/v1/resources.py
|
williamsyb/StockTick
|
1dd10101d44fa3a0584f849b022fc8254c2e66c7
|
[
"MIT"
] | null | null | null |
# -*- coding:UTF-8 -*-
from flask import Blueprint, current_app, request
import pandas as pd
from app.protocol import serialize
from app.utils import Utils
from app.database.crud import db_mgr
from app.cache import redis_mgr
api_v1 = Blueprint('api_v1', __name__)
@api_v1.route('/show_raw_data', methods=['GET'])
def show_raw_data():
raw_key = current_app.config['CACHE_RAW_KEY']
data = redis_mgr.load_df_from_redis(raw_key)
if data is None:
depth_raw = db_mgr.query('depth_raw')
order_raw = db_mgr.query('order_raw')
trade_raw = db_mgr.query('trade_raw')
data = db_mgr.combine_raw(depth_raw, order_raw, trade_raw, resample_freq='1s')
data.time = data.time.astype(str)
data.fillna(method='ffill', inplace=True)
data.dropna(axis=0, how='all', inplace=True)
redis_mgr.store_df_to_redis(raw_key, data)
data = serialize(data)
return Utils.build_resp(0, 'success', data)
@api_v1.route('/show_bar_data/<freq>', methods=['GET'])
def show_bar_data(freq):
"""
data的key為 trade_volume: list
order_volume: list
ohlc: 二維list
time: list
"""
freq_key = 'CACHE_' + freq.upper()
if freq_key not in current_app.config:
        return Utils.build_resp(-1, f'{freq} is not supported', {})
freq_key = current_app.config[freq_key]
result: dict = redis_mgr.get_from_redis(freq_key)
if result is None:
ohlc_freq = db_mgr.query('ohlc_' + freq_key)
order_freq = db_mgr.query('order_' + freq_key)
trade_freq = db_mgr.query('trade_' + freq_key)
data: pd.DataFrame = db_mgr.combine_raw(ohlc_freq, order_freq, trade_freq)
# print(data.head())
result: dict = Utils.treat_bar(data[:])
redis_mgr.set_to_redis(freq_key, result)
return Utils.build_resp(0, 'success', result)
@api_v1.route('/show_statistic', methods=['GET'])
def show_statistic():
print('args:', request.args)
print('json:', request.json)
start_time = str(request.args.get('start_time'))
end_time = str(request.args.get('end_time'))
print('start_time:', start_time)
print('end_time:', end_time)
    # Check the cache first
statistic_key = f'start_time:{start_time}_end_time:{end_time}'
statistic_dict = redis_mgr.get_from_redis(statistic_key)
if statistic_dict is not None:
return Utils.build_resp(0, 'success', statistic_dict)
    # No cache hit: rebuild the data from the database
raw_key = current_app.config['CACHE_RAW_KEY']
data = redis_mgr.load_df_from_redis(raw_key)
if data is None:
depth_raw = db_mgr.query('depth_raw')
order_raw = db_mgr.query('order_raw')
trade_raw = db_mgr.query('trade_raw')
data = db_mgr.combine_raw(depth_raw, order_raw, trade_raw, resample_freq='1s')
redis_mgr.store_df_to_redis(raw_key, data)
# res_df = data.loc['2020-11-20 09:30:03':'2020-11-20 09:30:10'].agg(
# {'price': ['max', 'min'], 'OrderVolume': 'sum', 'TradeVolume': 'sum'})
data.set_index('time', inplace=True)
res_df = data.loc[start_time:end_time].agg(
{'price': ['max', 'min'], 'OrderVolume': 'sum', 'TradeVolume': 'sum'})
max_price = res_df.loc['max', 'price']
min_price = res_df.loc['min', 'price']
total_order = res_df.loc['sum', 'OrderVolume']
total_trade = res_df.loc['sum', 'TradeVolume']
result = dict(
max_price=max_price,
min_price=min_price,
total_order=total_order,
total_trade=total_trade
)
redis_mgr.set_to_redis(statistic_key, result)
return Utils.build_resp(0, 'success', result)
| 38.677419
| 86
| 0.659438
| 0
| 0
| 0
| 0
| 3,369
| 0.924787
| 0
| 0
| 906
| 0.248696
|
a111862555b1576ad0436f2aab598c4b8d1d29a9
| 708
|
py
|
Python
|
report/api/hooks.py
|
Aaron-DH/openstack_sample_project
|
711a56311806d52b632e4394743bd4bdbacb103a
|
[
"Apache-2.0"
] | null | null | null |
report/api/hooks.py
|
Aaron-DH/openstack_sample_project
|
711a56311806d52b632e4394743bd4bdbacb103a
|
[
"Apache-2.0"
] | null | null | null |
report/api/hooks.py
|
Aaron-DH/openstack_sample_project
|
711a56311806d52b632e4394743bd4bdbacb103a
|
[
"Apache-2.0"
] | null | null | null |
from oslo_log import log
from oslo_config import cfg
from report import storage
from pecan import hooks
LOG = log.getLogger(__name__)
class RPCHook(hooks.PecanHook):
    def __init__(self, rpc_client):
        self._rpc_client = rpc_client
def before(self, state):
state.request.rpc_client = self._rpc_client
class DBHook(hooks.PecanHook):
def __init__(self):
self.storage_connection = storage.get_connection_from_config(cfg.CONF)
if not self.storage_connection:
raise Exception("Api failed to start. "
"Failed to connect to database.")
def before(self, state):
state.request.storage_conn = self.storage_connection
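# Hedged sketch (assumed wiring, not from this file): Pecan hooks are attached
# when the WSGI app is built, after which each request sees the injected
# attributes, e.g. state.request.storage_conn:
#
#     import pecan
#     app = pecan.make_app('report.api.controllers.root.RootController',
#                          hooks=[RPCHook(rpc_client), DBHook()])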
| 25.285714
| 78
| 0.69209
| 567
| 0.800847
| 0
| 0
| 0
| 0
| 0
| 0
| 55
| 0.077684
|
a1119377e73c71b58b46883ef014d640d56156e5
| 117
|
py
|
Python
|
garageofcode/semantic/main.py
|
tpi12jwe/garageofcode
|
3cfaf01f6d77130bb354887e6ed9921c791db849
|
[
"MIT"
] | 2
|
2020-02-11T10:32:06.000Z
|
2020-02-11T17:00:47.000Z
|
garageofcode/semantic/main.py
|
tpi12jwe/garageofcode
|
3cfaf01f6d77130bb354887e6ed9921c791db849
|
[
"MIT"
] | null | null | null |
garageofcode/semantic/main.py
|
tpi12jwe/garageofcode
|
3cfaf01f6d77130bb354887e6ed9921c791db849
|
[
"MIT"
] | null | null | null |
def have(subj, obj):
    subj.add(obj)
def change(subj, obj, state):
    pass
def main():
    # Minimal driver (assumption: the original main() was not in this file);
    # it just exercises have() on a set-like subject.
    subject = set()
    have(subject, 'apple')
    print(subject)
if __name__ == '__main__':
    main()
| 14.625
| 29
| 0.606838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 0.08547
|
a111d2ca236c2a067c9980e65999cf841b19dd21
| 548
|
py
|
Python
|
scholariumat/products/migrations/0012_auto_20181125_1221.py
|
valuehack/scholariumat
|
47c13f3429b95b9ad5ca59b45cf971895260bb5c
|
[
"MIT"
] | null | null | null |
scholariumat/products/migrations/0012_auto_20181125_1221.py
|
valuehack/scholariumat
|
47c13f3429b95b9ad5ca59b45cf971895260bb5c
|
[
"MIT"
] | 232
|
2018-06-30T11:40:52.000Z
|
2020-04-29T23:55:41.000Z
|
scholariumat/products/migrations/0012_auto_20181125_1221.py
|
valuehack/scholariumat
|
47c13f3429b95b9ad5ca59b45cf971895260bb5c
|
[
"MIT"
] | 3
|
2018-05-31T12:57:03.000Z
|
2020-02-27T16:25:44.000Z
|
# Generated by Django 2.0.9 on 2018-11-25 11:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0011_auto_20181123_1446'),
]
operations = [
migrations.RenameField(
model_name='item',
old_name='amount',
new_name='_amount',
),
migrations.AddField(
model_name='itemtype',
name='default_amount',
field=models.SmallIntegerField(blank=True, null=True),
),
]
| 22.833333
| 66
| 0.578467
| 455
| 0.830292
| 0
| 0
| 0
| 0
| 0
| 0
| 131
| 0.239051
|
a113c8e85fbfe0a4e5ea8110782dae46220ba93c
| 262
|
py
|
Python
|
setup.py
|
geickelb/hsip441_neiss_python
|
0ad88a664b369ea058b28d79ed98d02ff8418aad
|
[
"MIT"
] | null | null | null |
setup.py
|
geickelb/hsip441_neiss_python
|
0ad88a664b369ea058b28d79ed98d02ff8418aad
|
[
"MIT"
] | null | null | null |
setup.py
|
geickelb/hsip441_neiss_python
|
0ad88a664b369ea058b28d79ed98d02ff8418aad
|
[
"MIT"
] | null | null | null |
from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.0.1',
description='compiling code for HSIP441 using python to explore the Neiss database',
author='Garrett Eickelberg',
license='MIT',
)
| 23.818182
| 88
| 0.70229
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 108
| 0.412214
|
a114b71d6021e2552fc945ad4a1ac94774faab77
| 189
|
py
|
Python
|
test.py
|
j178/spotlight
|
1e65ff35826fee9a9d522b502cd781e86fbed01f
|
[
"WTFPL"
] | 5
|
2016-12-06T04:03:16.000Z
|
2020-09-24T14:08:49.000Z
|
test.py
|
j178/spotlight
|
1e65ff35826fee9a9d522b502cd781e86fbed01f
|
[
"WTFPL"
] | 1
|
2020-05-04T02:19:09.000Z
|
2020-06-10T08:44:11.000Z
|
test.py
|
j178/spotlight
|
1e65ff35826fee9a9d522b502cd781e86fbed01f
|
[
"WTFPL"
] | null | null | null |
from weibo import WeiboClient
from weibo.watchyou import fetch_replies
for r in fetch_replies():  # the 'weibo' global that fetch_replies depends on lives in the watchyou module; code here cannot reach that module's globals
print(r['text'])
| 31.5
| 91
| 0.793651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 136
| 0.533333
|
a114be84d6fa960cedd6c469ba949d63204c8275
| 8,181
|
py
|
Python
|
tests/unit/test_db_config_options.py
|
feddovanede/cf-mendix-buildpack-heapdump
|
584678bfab90a2839cfbac4126b08d6359885f91
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_db_config_options.py
|
feddovanede/cf-mendix-buildpack-heapdump
|
584678bfab90a2839cfbac4126b08d6359885f91
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_db_config_options.py
|
feddovanede/cf-mendix-buildpack-heapdump
|
584678bfab90a2839cfbac4126b08d6359885f91
|
[
"Apache-2.0"
] | null | null | null |
import datetime
import json
import os
from unittest import TestCase, mock
from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
from buildpack.infrastructure.database import (
UrlDatabaseConfiguration,
get_config,
)
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509 import NameAttribute
from cryptography.x509.base import Certificate
from cryptography.x509.oid import NameOID
class TestDatabaseConfigOptions(TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.cert_map = {}
@mock.patch.dict(os.environ, {}, clear=True)
def test_no_setup(self):
with self.assertRaises(RuntimeError):
get_config()
@mock.patch.dict(
os.environ,
{
"MXRUNTIME_DatabaseType": "PostgreSQL",
"MXRUNTIME_DatabaseJdbcUrl": "jdbc:postgresql://username:password@rdsbroker-testfree-nonprod-1-eu-west-1.asdbjasdg.eu-west-1.rds.amazonaws.com:5432/testdatabase",
},
clear=True,
)
def test_mx_runtime_db_config(self):
# Test if MXRUNTIME variables are set up if no database configuration is returned
# based on DATABASE_URL or VCAP_SERVICES
config = get_config()
assert not config
DATABASE_URL_ENV = {
"DATABASE_URL": "postgres://user:secret@host:5432/database"
}
VALID_PARAMS_ENV = {
"DATABASE_CONNECTION_PARAMS": json.dumps(
{"tcpKeepAlive": "false", "connectionTimeout": 30}
)
}
@mock.patch.dict(
os.environ, {**DATABASE_URL_ENV, **VALID_PARAMS_ENV}, clear=True
)
def test_valid_jdbc_parameters(self):
config = get_config()
assert "tcpKeepAlive=false" in config["DatabaseJdbcUrl"]
INVALID_PARAMS_ENV = {
"DATABASE_CONNECTION_PARAMS": '{"tcpKeepAlive": "false"'
}
@mock.patch.dict(
os.environ, {**DATABASE_URL_ENV, **INVALID_PARAMS_ENV}, clear=True
)
def test_invalid_jdbc_parameters(self):
config = get_config()
assert "tcpKeepAlive=false" not in config["DatabaseJdbcUrl"]
@mock.patch.dict(os.environ, DATABASE_URL_ENV, clear=True)
def test_database_url(self):
config = get_config()
assert config
assert config["DatabaseType"] == "PostgreSQL"
def test_inline_certs(self):
self.cert_map = CertGen().cert_map
c = UrlDatabaseConfiguration
native_params = {
c.SSLCERT: self.get_cert("postgresql.crt"),
c.SSLROOTCERT: self.get_cert("root.crt"),
c.SSLKEY: self.get_cert("postgresql.rsa.key"),
}
parts = urlparse("postgres://user:secret@host/database")
parts = parts._replace(query=urlencode(native_params))
native_url = urlunparse(parts)
with mock.patch.dict(
os.environ, {"DATABASE_URL": native_url}, clear=True
):
config = get_config()
assert config
assert config["DatabaseType"] == "PostgreSQL"
native_params[c.SSLKEY] = self.get_cert("postgresql.pk8")
jdbc_params = parse_qs(urlparse(config["DatabaseJdbcUrl"]).query)
self.cmp_cert(native_params, jdbc_params, c.SSLCERT)
self.cmp_cert(native_params, jdbc_params, c.SSLROOTCERT)
self.cmp_cert(native_params, jdbc_params, c.SSLKEY)
def get_cert(self, cert_resource):
return self.cert_map[cert_resource]
@classmethod
def cmp_cert(cls, native_params, jdbc_params, param):
expected_string = native_params[param]
actual_file = jdbc_params[param][0]
with open(actual_file, "rb") as io_actual:
actual_string = io_actual.read().decode("iso-8859-1")
assert expected_string == actual_string, param + " differ"
os.remove(actual_file)
@mock.patch.dict(
os.environ,
{
"VCAP_SERVICES": """
{
"rds-testfree": [
{
"binding_name": null,
"credentials": {
"db_name": "dbuajsdhkasdhaks",
"host": "rdsbroker-testfree-nonprod-1-eu-west-1.asdbjasdg.eu-west-1.rds.amazonaws.com",
"password": "na8nanlayaona0--anbs",
"uri": "postgres://ua98s7ananla:na8nanlayaona0--anbs@rdsbroker-testfree-nonprod-1-eu-west-1.asdbjasdg.eu-west-1.rds.amazonaws.com:5432/dbuajsdhkasdhaks",
"username": "ua98s7ananla"
},
"instance_name": "ops-432a659e.test.foo.io-database",
"label": "rds-testfree",
"name": "ops-432a659e.test.foo.io-database",
"plan": "shared-psql-testfree",
"provider": null,
"syslog_drain_url": null,
"tags": [
"database",
"RDS",
"postgresql"
],
"volume_mounts": []
}
]
}""" # noqa
},
clear=True,
)
def test_vcap(self):
config = get_config()
assert config
assert config["DatabaseType"] == "PostgreSQL"
# Class to generate a test certificate chain
# https://cryptography.io/en/latest/x509/tutorial/
class CertGen:
def __init__(self):
self.init_root_cert()
self.init_postgresql_cert()
self.dump_to_storage()
def dump_to_storage(self):
self.cert_map = {}
self._dump_cert(self.root_cert, "root.crt")
self._dump_cert(self.postgresql_cert, "postgresql.crt")
self._dump_key(
self.postgresql_key,
"postgresql.rsa.key",
serialization.Encoding.PEM,
serialization.PrivateFormat.TraditionalOpenSSL,
)
self._dump_key(
self.postgresql_key,
"postgresql.pk8",
serialization.Encoding.DER,
serialization.PrivateFormat.PKCS8,
)
def _dump_key(self, key, keyout_name, enc, fmt):
self.cert_map[keyout_name] = key.private_bytes(
encoding=enc,
format=fmt,
encryption_algorithm=serialization.NoEncryption(),
).decode("iso-8859-1")
def _dump_cert(self, cert: Certificate, out_name):
self.cert_map[out_name] = cert.public_bytes(
serialization.Encoding.PEM
).decode("iso-8859-1")
def init_root_cert(self):
self.root_key = self._newkey()
ca_subj = x509.Name(
[
NameAttribute(NameOID.COUNTRY_NAME, u"US"),
NameAttribute(NameOID.ORGANIZATION_NAME, u"Authority, Inc"),
NameAttribute(NameOID.COMMON_NAME, u"Authority CA"),
]
)
self.root_cert = self._sign(
ca_subj, self.root_key, ca_subj, self.root_key.public_key(), 3651
)
    def init_postgresql_cert(self) -> None:
self.postgresql_key = self._newkey()
subj = x509.Name(
[
NameAttribute(NameOID.COUNTRY_NAME, u"US"),
NameAttribute(NameOID.ORGANIZATION_NAME, u"Authority, Inc"),
NameAttribute(NameOID.COMMON_NAME, u"SQL Client"),
]
)
self.postgresql_cert = self._sign(
self.root_cert.subject,
self.root_key,
subj,
self.postgresql_key.public_key(),
3650,
)
@classmethod
def _newkey(cls):
# Generate our key
return rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
)
@classmethod
def _sign(
cls, issuer: x509.Name, ca_key, subject: x509.Name, req_pub_key, days
) -> Certificate:
# pylint: disable=too-many-arguments
return (
x509.CertificateBuilder()
.subject_name(subject)
.issuer_name(issuer)
.public_key(req_pub_key)
.serial_number(x509.random_serial_number())
.not_valid_before(datetime.datetime.utcnow())
            .not_valid_after(
                # Valid for the number of days requested by the caller
                datetime.datetime.utcnow()
                + datetime.timedelta(days=days)
            )
            # Sign the certificate with the issuing CA's private key
            .sign(ca_key, hashes.SHA256())
)
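# Hedged usage sketch (not part of the original tests): CertGen yields an
# in-memory CA plus client certificate map keyed by file-like names.
if __name__ == '__main__':
    certs = CertGen().cert_map
    assert certs['root.crt'].startswith('-----BEGIN CERTIFICATE-----')
    assert 'postgresql.pk8' in certs and 'postgresql.rsa.key' in certs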
| 33.391837
| 174
| 0.620218
| 7,555
| 0.923481
| 0
| 0
| 3,697
| 0.451901
| 0
| 0
| 1,990
| 0.243247
|
a11510f716edaa915f408fd4bc5559303960aa62
| 1,770
|
py
|
Python
|
Computer & Information Science Core courses/2168/A*/graph.py
|
Vaporjawn/Temple-University-Computer-Science-Resources
|
8d54db3a85a1baa8ba344efc90593b440eb6d585
|
[
"MIT"
] | 1
|
2020-07-28T16:18:38.000Z
|
2020-07-28T16:18:38.000Z
|
Computer & Information Science Core courses/2168/A*/graph.py
|
Vaporjawn/Temple-University-Computer-Science-Resources
|
8d54db3a85a1baa8ba344efc90593b440eb6d585
|
[
"MIT"
] | 4
|
2020-07-15T06:40:55.000Z
|
2020-08-13T16:01:30.000Z
|
Computer & Information Science Core courses/2168/A*/graph.py
|
Vaporjawn/Temple-University-Computer-Science-Resources
|
8d54db3a85a1baa8ba344efc90593b440eb6d585
|
[
"MIT"
] | null | null | null |
"""Implement the graph to traverse."""
from collections import Counter
class Node:
"""Node class."""
def __init__(self, value, x, y):
"""Initialize node."""
self.x = x
self.y = y
self.value = value
self.neighbors = []
def add_neighbor(self, n, weight):
"""Add a neighbor to this node."""
self.neighbors.append((n, weight))
class Graph:
"""Graph of nodes."""
def __init__(self):
"""Initialize."""
self.nodes = []
def add_node(self, value, x, y):
"""Add a new node to the graph."""
new_node = Node(value, x, y)
self.nodes.append(new_node)
return new_node
def add_edge(self, node1, node2, weight=1):
"""Connect two nodes with optional edge weight specification."""
node1.add_neighbor(node2, weight)
node2.add_neighbor(node1, weight)
def find_path(self, start, end):
"""Use A* to find a path from start to end in the graph."""
visited_nodes = {}
accessible_nodes = {}
current_distance = 0
current = start
# Loop as long as the end node has not been found
# this is not finished yet!!!
        while current.value != end.value:
            # calculate the cost of each neighbor of the current node
            costs = []
            for n in current.neighbors:
                cost = self.g(n, current_distance) + self.h(n, end)
                costs.append((n, cost))
def g(self, n, current_distance):
"""Calculate the distance from the start node."""
return current_distance + n[1]
def h(self, n, end):
"""Estimate the distance to the end node using Manhattan distance."""
return abs(n[0].x - end.x) + abs(n[0].y - end.y)
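# Hedged usage sketch (not in the original file): build a two-node graph and
# evaluate the Manhattan heuristic on A's stored (neighbor, weight) pair.
if __name__ == '__main__':
    graph = Graph()
    a = graph.add_node('A', 0, 0)
    b = graph.add_node('B', 3, 4)
    graph.add_edge(a, b, weight=2)
    print(graph.h(a.neighbors[0], b))  # |3-3| + |4-4| = 0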
| 28.095238
| 77
| 0.564972
| 1,692
| 0.955932
| 0
| 0
| 0
| 0
| 0
| 0
| 541
| 0.30565
|
a115499f10a5a3acf2f24d7e3dd1a76b57b5b137
| 245
|
py
|
Python
|
Projects/Python_Python2_json/main.py
|
LiuOcean/luban_examples
|
75d5fd7c1b15d79efc0ebbac21a74bf050aed1fb
|
[
"MIT"
] | 44
|
2021-05-06T06:16:55.000Z
|
2022-03-30T06:27:25.000Z
|
Projects/Python_Python2_json/main.py
|
HFX-93/luban_examples
|
5b90e392d404950d12ff803a186b26bdea5e0292
|
[
"MIT"
] | 1
|
2021-07-25T16:35:32.000Z
|
2021-08-23T04:59:49.000Z
|
Projects/Python_Python2_json/main.py
|
HFX-93/luban_examples
|
5b90e392d404950d12ff803a186b26bdea5e0292
|
[
"MIT"
] | 14
|
2021-06-09T10:38:59.000Z
|
2022-03-30T06:27:24.000Z
|
import json
import gen.Types
def loader(f):
return json.load(open('../GenerateDatas/json/' + f + ".json", 'r'), encoding="utf-8")
tables = gen.Types.Tables(loader)
print(tables)
r = tables.TbFullTypes.getDataList()[0].__dict__
print(r)
| 18.846154
| 89
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 41
| 0.167347
|
a115806c8d50f7e45e72b3d28a59a48fb80d6f6e
| 10,255
|
py
|
Python
|
rplugin/python3/defx/base/kind.py
|
kazukazuinaina/defx.nvim
|
376b2a91703b6bf19283e58bf1e7b5ce5baae5af
|
[
"MIT"
] | null | null | null |
rplugin/python3/defx/base/kind.py
|
kazukazuinaina/defx.nvim
|
376b2a91703b6bf19283e58bf1e7b5ce5baae5af
|
[
"MIT"
] | null | null | null |
rplugin/python3/defx/base/kind.py
|
kazukazuinaina/defx.nvim
|
376b2a91703b6bf19283e58bf1e7b5ce5baae5af
|
[
"MIT"
] | null | null | null |
# ============================================================================
# FILE: kind.py
# AUTHOR: Shougo Matsushita <Shougo.Matsu at gmail.com>
# License: MIT license
# ============================================================================
import json
import typing
from pathlib import Path
from defx.action import ActionAttr
from defx.action import ActionTable
from defx.action import do_action
from defx.context import Context
from defx.defx import Defx
from defx.session import Session
from defx.util import Nvim
from defx.view import View
_action_table: typing.Dict[str, ActionTable] = {}
ACTION_FUNC = typing.Callable[[View, Defx, Context], None]
def action(name: str, attr: ActionAttr = ActionAttr.NONE
) -> typing.Callable[[ACTION_FUNC], ACTION_FUNC]:
def wrapper(func: ACTION_FUNC) -> ACTION_FUNC:
_action_table[name] = ActionTable(func=func, attr=attr)
def inner_wrapper(view: View, defx: Defx, context: Context) -> None:
return func(view, defx, context)
return inner_wrapper
return wrapper
class Base:
def __init__(self, vim: Nvim) -> None:
self.vim = vim
self.name = 'base'
def get_actions(self) -> typing.Dict[str, ActionTable]:
return _action_table
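# Hedged note (illustration, not from this file): each @action below registers
# its function in the module-level _action_table, so after the definitions
# _action_table['quit'] holds an ActionTable whose attr is ActionAttr.NO_TAGETS
# and whose func is the original _quit handler.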
@action(name='add_session', attr=ActionAttr.NO_TAGETS)
def _add_session(view: View, defx: Defx, context: Context) -> None:
path = context.args[0] if context.args else defx._cwd
if path[-1] == '/':
# Remove the last slash
path = path[: -1]
opened_candidates = [] if context.args else list(defx._opened_candidates)
session: Session
if path in view._sessions:
old_session = view._sessions[path]
session = Session(
name=old_session.name, path=old_session.path,
opened_candidates=opened_candidates)
else:
name = Path(path).name
session = Session(
name=name, path=path,
opened_candidates=opened_candidates)
view.print_msg(f'session "{name}" is created')
view._sessions[session.path] = session
_save_session(view, defx, context)
@action(name='call', attr=ActionAttr.REDRAW)
def _call(view: View, defx: Defx, context: Context) -> None:
"""
Call the function.
"""
function = context.args[0] if context.args else None
if not function:
return
dict_context = context._asdict()
dict_context['cwd'] = defx._cwd
dict_context['targets'] = [
str(x['action__path']) for x in context.targets]
view._vim.call(function, dict_context)
@action(name='clear_select_all', attr=ActionAttr.MARK | ActionAttr.NO_TAGETS)
def _clear_select_all(view: View, defx: Defx, context: Context) -> None:
for candidate in [x for x in view._candidates
if x['_defx_index'] == defx._index]:
candidate['is_selected'] = False
@action(name='close_tree', attr=ActionAttr.TREE | ActionAttr.CURSOR_TARGET)
def _close_tree(view: View, defx: Defx, context: Context) -> None:
for target in context.targets:
if target['is_directory'] and target['is_opened_tree']:
view.close_tree(target['action__path'], defx._index)
else:
view.close_tree(target['action__path'].parent, defx._index)
view.search_file(target['action__path'].parent, defx._index)
@action(name='delete_session', attr=ActionAttr.NO_TAGETS)
def _delete_session(view: View, defx: Defx, context: Context) -> None:
if not context.args:
return
session_name = context.args[0]
if session_name not in view._sessions:
return
view._sessions.pop(session_name)
_save_session(view, defx, context)
@action(name='load_session', attr=ActionAttr.NO_TAGETS)
def _load_session(view: View, defx: Defx, context: Context) -> None:
session_file = Path(context.session_file)
if not context.session_file or not session_file.exists():
return
loaded_session = json.loads(session_file.read_text())
if 'sessions' not in loaded_session:
return
view._sessions = {}
for path, session in loaded_session['sessions'].items():
view._sessions[path] = Session(**session)
view._vim.current.buffer.vars['defx#_sessions'] = [
x._asdict() for x in view._sessions.values()
]
@action(name='multi')
def _multi(view: View, defx: Defx, context: Context) -> None:
for arg in context.args:
args: typing.List[str]
if isinstance(arg, list):
args = arg
else:
args = [arg]
do_action(view, defx, args[0], context._replace(args=args[1:]))
@action(name='check_redraw', attr=ActionAttr.NO_TAGETS)
def _nop(view: View, defx: Defx, context: Context) -> None:
pass
@action(name='open_tree', attr=ActionAttr.TREE | ActionAttr.CURSOR_TARGET)
def _open_tree(view: View, defx: Defx, context: Context) -> None:
for target in [x for x in context.targets if x['is_directory']]:
view.open_tree(target['action__path'], defx._index, 0)
@action(name='open_tree_recursive',
attr=ActionAttr.TREE | ActionAttr.CURSOR_TARGET)
def _open_tree_recursive(view: View, defx: Defx, context: Context) -> None:
level = int(context.args[0]) if context.args else 20
for target in [x for x in context.targets if x['is_directory']]:
view.open_tree(target['action__path'], defx._index, level)
@action(name='open_or_close_tree',
attr=ActionAttr.TREE | ActionAttr.CURSOR_TARGET)
def _open_or_close_tree(view: View, defx: Defx, context: Context) -> None:
for target in context.targets:
if not target['is_directory'] or target['is_opened_tree']:
_close_tree(view, defx, context._replace(targets=[target]))
else:
_open_tree(view, defx, context._replace(targets=[target]))
@action(name='print')
def _print(view: View, defx: Defx, context: Context) -> None:
for target in context.targets:
view.print_msg(str(target['action__path']))
@action(name='quit', attr=ActionAttr.NO_TAGETS)
def _quit(view: View, defx: Defx, context: Context) -> None:
view.quit()
@action(name='redraw', attr=ActionAttr.NO_TAGETS)
def _redraw(view: View, defx: Defx, context: Context) -> None:
view.redraw(True)
@action(name='repeat', attr=ActionAttr.MARK)
def _repeat(view: View, defx: Defx, context: Context) -> None:
do_action(view, defx, view._prev_action, context)
@action(name='save_session', attr=ActionAttr.NO_TAGETS)
def _save_session(view: View, defx: Defx, context: Context) -> None:
view._vim.current.buffer.vars['defx#_sessions'] = [
x._asdict() for x in view._sessions.values()
]
if not context.session_file:
return
session_file = Path(context.session_file)
session_file.write_text(json.dumps({
'version': view._session_version,
'sessions': {x: y._asdict() for x, y in view._sessions.items()}
}))
@action(name='search', attr=ActionAttr.NO_TAGETS)
def _search(view: View, defx: Defx, context: Context) -> None:
if not context.args or not context.args[0]:
return
search_path = context.args[0]
path = Path(search_path)
parents: typing.List[Path] = []
while view.get_candidate_pos(
path, defx._index) < 0 and path.parent != path:
path = path.parent
parents.append(path)
for parent in reversed(parents):
view.open_tree(parent, defx._index, 0)
view.update_opened_candidates()
view.redraw()
view.search_file(Path(search_path), defx._index)
@action(name='toggle_columns', attr=ActionAttr.REDRAW)
def _toggle_columns(view: View, defx: Defx, context: Context) -> None:
"""
Toggle the current columns.
"""
columns = (context.args[0] if context.args else '').split(':')
if not columns:
return
current_columns = [x.name for x in view._columns]
if columns == current_columns:
# Use default columns
columns = context.columns.split(':')
view._init_columns(columns)
@action(name='toggle_ignored_files', attr=ActionAttr.REDRAW)
def _toggle_ignored_files(view: View, defx: Defx, context: Context) -> None:
defx._enabled_ignored_files = not defx._enabled_ignored_files
@action(name='toggle_select', attr=ActionAttr.MARK | ActionAttr.NO_TAGETS)
def _toggle_select(view: View, defx: Defx, context: Context) -> None:
candidate = view.get_cursor_candidate(context.cursor)
if not candidate:
return
candidate['is_selected'] = not candidate['is_selected']
@action(name='toggle_select_all', attr=ActionAttr.MARK | ActionAttr.NO_TAGETS)
def _toggle_select_all(view: View, defx: Defx, context: Context) -> None:
for candidate in [x for x in view._candidates
if not x['is_root'] and
x['_defx_index'] == defx._index]:
candidate['is_selected'] = not candidate['is_selected']
@action(name='toggle_select_visual',
attr=ActionAttr.MARK | ActionAttr.NO_TAGETS)
def _toggle_select_visual(view: View, defx: Defx, context: Context) -> None:
if context.visual_start <= 0 or context.visual_end <= 0:
return
start = context.visual_start - 1
end = min([context.visual_end, len(view._candidates)])
for candidate in [x for x in view._candidates[start:end]
if not x['is_root'] and
x['_defx_index'] == defx._index]:
candidate['is_selected'] = not candidate['is_selected']
@action(name='toggle_sort', attr=ActionAttr.MARK | ActionAttr.NO_TAGETS)
def _toggle_sort(view: View, defx: Defx, context: Context) -> None:
"""
Toggle the current sort method.
"""
sort = context.args[0] if context.args else ''
if sort == defx._sort_method:
# Use default sort method
defx._sort_method = context.sort
else:
defx._sort_method = sort
@action(name='yank_path')
def _yank_path(view: View, defx: Defx, context: Context) -> None:
yank = '\n'.join([str(x['action__path']) for x in context.targets])
view._vim.call('setreg', '"', yank)
if (view._vim.call('has', 'clipboard') or
view._vim.call('has', 'xterm_clipboard')):
view._vim.call('setreg', '+', yank)
view.print_msg('Yanked:\n' + yank)
| 33.295455
| 78
| 0.660263
| 195
| 0.019015
| 0
| 0
| 8,910
| 0.868844
| 0
| 0
| 1,319
| 0.12862
|
a11589146f3d49dce0f6bfd0ac0a0e58ecd53f6f
| 3,659
|
py
|
Python
|
shopify_listener/dispatcher.py
|
smallwat3r/shopify-webhook-manager
|
1161f070470bc2d2f81c98222b67300bc616121f
|
[
"MIT"
] | 6
|
2019-08-13T18:12:37.000Z
|
2021-05-26T17:55:58.000Z
|
shopify_listener/dispatcher.py
|
smallwat3r/shopify-webhook-manager
|
1161f070470bc2d2f81c98222b67300bc616121f
|
[
"MIT"
] | null | null | null |
shopify_listener/dispatcher.py
|
smallwat3r/shopify-webhook-manager
|
1161f070470bc2d2f81c98222b67300bc616121f
|
[
"MIT"
] | 4
|
2019-10-16T06:14:35.000Z
|
2021-06-03T06:25:26.000Z
|
# -*- coding: utf-8 -*-
# @Author: Matthieu Petiteau
# @Date: 2019-04-26 21:01:07
# @Last Modified by: Matthieu Petiteau
# @Last Modified time: 2019-04-26 21:52:46
"""Dispatch webhook event to specific actions."""
import json
class Dispatcher:
"""Dispatch the different webhook events to the related functions.
The list of all webhook events can be found at:
https://help.shopify.com/en/api/reference/events/webhook
"""
def __init__(self, data):
"""Init webhook data."""
self.data = json.loads(data)
@staticmethod
def name_topic(topic):
"""Rename the topic event to match the function names."""
return "_" + topic.replace('/', '_')
def dispatch_event(self, topic):
"""Dispatch the event to the correct function."""
return getattr(self, self.name_topic(topic))()
def _carts_create(self):
pass
def _carts_update(self):
pass
def _checkout_create(self):
pass
def _checkout_update(self):
pass
def _checkout_delete(self):
pass
def _collections_create(self):
pass
def _collections_update(self):
pass
def _collections_delete(self):
pass
def _collection_listings_add(self):
pass
def _collection_listings_remove(self):
pass
def _collection_listings_update(self):
pass
def _customers_create(self):
pass
def _customers_disable(self):
pass
def _customers_enable(self):
pass
def _customers_update(self):
pass
def _customers_delete(self):
pass
def _customer_groups_create(self):
pass
def _customer_groups_update(self):
pass
def _customer_groups_delete(self):
pass
def _draft_orders_create(self):
pass
def _draft_orders_update(self):
pass
def _draft_orders_delete(self):
pass
def _fulfillments_create(self):
pass
def _fulfillments_update(self):
pass
def _fulfillment_events_create(self):
pass
def _fulfillment_events_delete(self):
pass
def _inventory_items_create(self):
pass
def _inventory_items_update(self):
pass
def _inventory_items_delete(self):
pass
def _inventory_levels_connect(self):
pass
def _inventory_levels_update(self):
pass
def _inventory_levels_disconnect(self):
pass
def _locations_create(self):
pass
def _locations_update(self):
pass
def _locations_delete(self):
pass
def _orders_cancelled(self):
pass
def _orders_create(self):
pass
def _orders_fulfilled(self):
pass
def _orders_paid(self):
pass
def _orders_partially_fulfilled(self):
pass
def _orders_updated(self):
pass
def _orders_delete(self):
pass
def _orders_transactions_create(self):
pass
def _products_create(self):
pass
def _products_update(self):
pass
def _products_delete(self):
pass
def _product_listings_add(self):
pass
def _product_listings_remove(self):
pass
def _product_listings_update(self):
pass
def _refund_create(self):
pass
def _app_uninstalled(self):
pass
def _shop_update(self):
pass
def _tender_transactions_create(self):
pass
def _themes_create(self):
pass
def _theme_publish(self):
pass
def _theme_update(self):
pass
def _theme_delete(self):
pass
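# Hedged usage sketch (not part of the module): route a raw webhook body by
# its topic, e.g. 'orders/create' dispatches to _orders_create().
if __name__ == '__main__':
    Dispatcher('{"id": 123}').dispatch_event('orders/create')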
| 18.20398
| 70
| 0.622301
| 3,425
| 0.936048
| 0
| 0
| 151
| 0.041268
| 0
| 0
| 539
| 0.147308
|
a115d6f4a8b34eb7bb70f84e6420459fec3a66db
| 790
|
py
|
Python
|
open_spiel/higc/bots/test_bot_fail_after_few_actions.py
|
higcompetition/tournament
|
b61688f7fad6d33a6af8097c75cb0bf0bc84faf2
|
[
"Apache-2.0"
] | 4
|
2021-07-22T08:01:26.000Z
|
2021-12-30T07:07:23.000Z
|
open_spiel/higc/bots/test_bot_fail_after_few_actions.py
|
higcompetition/tournament
|
b61688f7fad6d33a6af8097c75cb0bf0bc84faf2
|
[
"Apache-2.0"
] | 1
|
2021-07-22T16:42:31.000Z
|
2021-07-23T09:46:22.000Z
|
open_spiel/higc/bots/test_bot_fail_after_few_actions.py
|
higcompetition/tournament
|
b61688f7fad6d33a6af8097c75cb0bf0bc84faf2
|
[
"Apache-2.0"
] | 3
|
2021-07-21T19:02:56.000Z
|
2021-07-30T17:40:39.000Z
|
# A bot that picks the last legal action for the first couple of rounds,
# and then exits with an exception.
# Used only for tests.
import sys
game_name = input()
play_as = int(input())
print("ready")
while True:
print("start")
num_actions = 0
while True:
message = input()
if message == "tournament over":
print("tournament over")
sys.exit(0)
if message.startswith("match over"):
print("match over")
break
public_buf, private_buf, *legal_actions = message.split(" ")
should_act = len(legal_actions) > 0
if should_act:
num_actions += 1
print(legal_actions[-1])
else:
print("ponder")
if num_actions > 2:
raise RuntimeError
| 26.333333
| 75
| 0.572152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 216
| 0.273418
|
a116cfc21ab7921ef0308c2ab54fca839bd22800
| 2,027
|
py
|
Python
|
python/hsfs/util.py
|
berthoug/feature-store-api
|
85c23ae08c7de65acd79a3b528fa72c07e52a272
|
[
"Apache-2.0"
] | null | null | null |
python/hsfs/util.py
|
berthoug/feature-store-api
|
85c23ae08c7de65acd79a3b528fa72c07e52a272
|
[
"Apache-2.0"
] | null | null | null |
python/hsfs/util.py
|
berthoug/feature-store-api
|
85c23ae08c7de65acd79a3b528fa72c07e52a272
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2020 Logical Clocks AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import json
from pathlib import Path
from hsfs import feature
class FeatureStoreEncoder(json.JSONEncoder):
def default(self, o):
try:
return o.to_dict()
except AttributeError:
return super().default(o)
def validate_feature(ft):
if isinstance(ft, feature.Feature):
return ft
elif isinstance(ft, str):
return feature.Feature(ft)
def parse_features(feature_names):
if isinstance(feature_names, (str, feature.Feature)):
return [validate_feature(feature_names)]
elif isinstance(feature_names, list) and len(feature_names) > 0:
return [validate_feature(feat) for feat in feature_names]
else:
return []
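# Hedged examples (illustration only): parse_features normalizes its input,
# e.g. parse_features("id") -> [Feature("id")] and
# parse_features(["id", "ts"]) -> [Feature("id"), Feature("ts")].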
def get_cert_pw():
"""
Get keystore password from local container
Returns:
Certificate password
"""
hadoop_user_name = "hadoop_user_name"
crypto_material_password = "material_passwd"
material_directory = "MATERIAL_DIRECTORY"
password_suffix = "__cert.key"
pwd_path = Path(crypto_material_password)
if not pwd_path.exists():
username = os.environ[hadoop_user_name]
material_directory = Path(os.environ[material_directory])
pwd_path = material_directory.joinpath(username + password_suffix)
with pwd_path.open() as f:
return f.read()
class VersionWarning(Warning):
pass
class StorageWarning(Warning):
pass
| 27.026667
| 76
| 0.700543
| 261
| 0.128762
| 0
| 0
| 0
| 0
| 0
| 0
| 758
| 0.373952
|
a11724652d428320ddd7198c24a9514a2d3d1923
| 1,720
|
py
|
Python
|
src/map_generation/map_parser.py
|
tbvanderwoude/matching-epea-star
|
13d8716f932bb98398fe8e190e668ee65bcf0f34
|
[
"MIT"
] | 1
|
2021-08-23T18:00:13.000Z
|
2021-08-23T18:00:13.000Z
|
src/map_generation/map_parser.py
|
tbvanderwoude/matching-epea-star
|
13d8716f932bb98398fe8e190e668ee65bcf0f34
|
[
"MIT"
] | null | null | null |
src/map_generation/map_parser.py
|
tbvanderwoude/matching-epea-star
|
13d8716f932bb98398fe8e190e668ee65bcf0f34
|
[
"MIT"
] | 1
|
2021-08-24T08:16:31.000Z
|
2021-08-24T08:16:31.000Z
|
import os.path
from typing import List, Tuple
from mapfmclient import MarkedLocation, Problem
class MapParser:
def __init__(self, root_folder: str):
self.root_folder = root_folder
def parse_map(self, name: str) -> Problem:
with open(os.path.join(self.root_folder, name)) as file:
# Read map width
width_line = file.readline()
width = int(width_line.split(' ')[1])
# Read map height
height_line = file.readline()
height = int(height_line.split(' ')[1])
# Read map
grid = []
for _ in range(height):
grid.append([1 if char == '@' else 0 for char in file.readline()])
# Read number of agents
num_agents = int(file.readline())
starts: List[MarkedLocation] = []
# Read starting positions
for _ in range(num_agents):
line = file.readline().split(' ')
starts.append(MarkedLocation(int(line[2]), int(line[0]), int(line[1])))
# Empty line
file.readline()
# Read goal positions
goals: List[MarkedLocation] = []
for _ in range(num_agents):
line = file.readline().split(' ')
goals.append(MarkedLocation(int(line[2]), int(line[0]), int(line[1])))
return Problem(grid, width, height, starts, goals)
def parse_batch(self, folder: str) -> List[Tuple[str, Problem]]:
paths = os.listdir(f'{self.root_folder}/{folder}')
problems = []
for file in paths:
problems.append((str(file), self.parse_map(f'{folder}/{file}')))
return problems
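# Hedged usage sketch (root folder, file and batch names are hypothetical):
#
#     parser = MapParser('maps')
#     problem = parser.parse_map('20x20_A5.map')
#     named_problems = parser.parse_batch('benchmark')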
| 31.851852
| 87
| 0.55
| 1,622
| 0.943023
| 0
| 0
| 0
| 0
| 0
| 0
| 187
| 0.108721
|
a118bed580cb119e113df0f842732da313be42d4
| 9,803
|
py
|
Python
|
library/oci_api_key.py
|
AndreyAdnreyev/oci-ansible-modules
|
accd6e482ff1e8c2ddd6e85958dfe12cd6114383
|
[
"Apache-2.0"
] | null | null | null |
library/oci_api_key.py
|
AndreyAdnreyev/oci-ansible-modules
|
accd6e482ff1e8c2ddd6e85958dfe12cd6114383
|
[
"Apache-2.0"
] | null | null | null |
library/oci_api_key.py
|
AndreyAdnreyev/oci-ansible-modules
|
accd6e482ff1e8c2ddd6e85958dfe12cd6114383
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2018, 2019, Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_api_key
short_description: Upload and delete API signing key of a user in OCI
description:
- This module allows the user upload and delete API signing keys of a user in OCI. A PEM-format RSA credential for
securing requests to the Oracle Cloud Infrastructure REST API. Also known as an API signing key. Specifically,
this is the public key from the key pair. The private key remains with the user calling the API. For information
about generating a key pair in the required PEM format, see Required Keys and OCIDs.
Note that this is not the SSH key for accessing compute instances.
Each user can have a maximum of three API signing keys.
For more information about user credentials, see
U(https://docs.us-phoenix-1.oraclecloud.com/Content/API/Concepts/apisigningkey.htm).
version_added: "2.5"
options:
user_id:
description: The OCID of the user whose API signing key needs to be created or deleted.
required: true
api_signing_key:
description: The public key. Must be an RSA key in PEM format. Required when the API signing key is
uploaded with I(state=present)
required: false
aliases: ['key']
api_key_id:
description: The API signing key's id. The Id must be of the format TENANCY_OCID/USER_OCID/KEY_FINGERPRINT.
required: false
aliases: ['id']
state:
description: The state of the api signing key that must be asserted to. When I(state=present), and the
api key doesn't exist, the api key is created with the provided C(api_signing_key).
When I(state=absent), the api signing key corresponding to the provided C(fingerprint) is deleted.
required: false
default: "present"
choices: ['present', 'absent']
author: "Sivakumar Thyagarajan (@sivakumart)"
extends_documentation_fragment: [ oracle, oracle_creatable_resource, oracle_wait_options ]
"""
EXAMPLES = """
- name: Upload a new api signing key for the specified user
oci_api_key:
user_id: "ocid1.user.oc1..xxxxxEXAMPLExxxxx"
key: "-----BEGIN PUBLIC KEY-----cmdnMIIBIjANBgkqhkiG9w0BAQEFA......mwIDAQAB-----END PUBLIC KEY-----"
- name: Delete an API signing key for the specified user
oci_api_key:
user_id: "ocid1.user.oc1..xxxxxEXAMPLExxxxx"
"id": "ocid1.tenancy.oc1..xxxxxEXAMPLExxxxx/ocid1.user.oc1..xxxxxEXAMPLExxxxx/08:07:a6:7d:06:b4:73:91:e9:2c:da"
state: "absent"
"""
RETURN = """
oci_api_key:
description: Details of the API signing key
returned: On success
type: dict
sample: {
"fingerprint": "08:07:a6:7d:06:b4:73:91:e9:2c:da:42:c8:cb:df:02",
"inactive_status": null,
"key_id": "ocid1.tenancy.oc1..xxxxxEXAMPLExxxxx/ocid1.user.oc1..xxxxxEXAMPLExxxxx/08:07:a6:7d:06:b4:73:91:e9:2c:da",
"key_value": "-----BEGIN PUBLIC KEY-----...urt/fN8jNz2nZwIDAQAB-----END PUBLIC KEY-----",
"lifecycle_state": "ACTIVE",
"time_created": "2018-01-08T09:33:59.705000+00:00",
"user_id": "ocid1.user.oc1..xxxxxEXAMPLExxxxx"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.oracle import oci_utils
try:
import oci
from oci.identity.identity_client import IdentityClient
from oci.identity.models import CreateApiKeyDetails
from oci.util import to_dict
from oci.exceptions import ServiceError, MaximumWaitTimeExceeded
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
logger = None
RESOURCE_NAME = "api_key"
def set_logger(provided_logger):
global logger
logger = provided_logger
def get_logger():
return logger
def _get_api_key_from_id(identity_client, user_id, api_key_id, module):
try:
resp = oci_utils.call_with_backoff(
identity_client.list_api_keys, user_id=user_id
)
if resp is not None:
for api_key in resp.data:
if api_key.key_id == api_key_id:
return api_key
return None
except ServiceError as ex:
module.fail_json(msg=ex.message)
def delete_api_key(identity_client, user_id, id, module):
result = {}
changed = False
try:
api_key = _get_api_key_from_id(identity_client, user_id, id, module)
oci_utils.call_with_backoff(
identity_client.delete_api_key,
user_id=user_id,
fingerprint=api_key.fingerprint,
)
get_logger().info("Deleted api password %s", id)
changed = True
        # The API key is not returned by list_api_keys after it
        # is deleted, and so we currently reuse the earlier API key object and mark
# its lifecycle state as DELETED.
# Note: This current approach has problems around idempotency.
# We also don't wait, as there is no state transition that we need to wait for.
api_key.lifecycle_state = "DELETED"
result[RESOURCE_NAME] = to_dict(api_key)
except ServiceError as ex:
module.fail_json(msg=ex.message)
result["changed"] = changed
return result
def _is_api_key_active(api_keys, api_key_id):
result = [
api_key
for api_key in api_keys
if api_key.key_id == api_key_id and api_key.lifecycle_state == "ACTIVE"
]
return len(result) == 1
def create_api_key(identity_client, user_id, key, module):
try:
cakd = CreateApiKeyDetails()
cakd.key = key
result = oci_utils.create_resource(
resource_type=RESOURCE_NAME,
create_fn=identity_client.upload_api_key,
kwargs_create={"user_id": user_id, "create_api_key_details": cakd},
module=module,
)
resource = result[RESOURCE_NAME]
api_key_id = resource["key_id"]
get_logger().info("Created API signing key %s", to_dict(resource))
# API keys don't have a get<resource> and so we can't use oci_utils.create_and_wait
# The following logic manually checks if the API key in `list_api_keys` has reached the desired ACTIVE state
response = identity_client.list_api_keys(user_id)
# wait until the created API Key reaches Active state
oci.wait_until(
identity_client,
response,
evaluate_response=lambda resp: _is_api_key_active(resp.data, api_key_id),
)
result[RESOURCE_NAME] = to_dict(
_get_api_key_from_id(identity_client, user_id, api_key_id, module)
)
return result
except ServiceError as ex:
module.fail_json(msg=ex.message)
except MaximumWaitTimeExceeded as mwte:
module.fail_json(msg=str(mwte))
def main():
set_logger(oci_utils.get_logger("oci_api_key"))
module_args = oci_utils.get_common_arg_spec(
supports_create=True, supports_wait=True
)
module_args.update(
dict(
user_id=dict(type="str", required=True),
api_key_id=dict(type="str", required=False, aliases=["id"]),
api_signing_key=dict(type="str", required=False, aliases=["key"]),
state=dict(
type="str",
required=False,
default="present",
choices=["present", "absent"],
),
)
)
module = AnsibleModule(
argument_spec=module_args,
supports_check_mode=False,
required_if=[("state", "absent", ["api_key_id"])],
)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
identity_client = oci_utils.create_service_client(module, IdentityClient)
state = module.params["state"]
result = dict(changed=False)
user_id = module.params.get("user_id", None)
public_key = module.params.get("api_signing_key", None)
api_key_id = module.params.get("api_key_id", None)
if api_key_id is not None:
api_key = _get_api_key_from_id(identity_client, user_id, api_key_id, module)
if state == "absent":
get_logger().debug(
"Delete api password %s for user %s requested", api_key_id, user_id
)
if api_key is not None:
get_logger().debug("Deleting %s", api_key.key_id)
result = delete_api_key(identity_client, user_id, api_key_id, module)
else:
get_logger().debug("API Signing Key %s already deleted.", api_key_id)
elif state == "present":
module.fail_json(msg="API signing key cannot be updated.")
else:
result = oci_utils.check_and_create_resource(
resource_type=RESOURCE_NAME,
create_fn=create_api_key,
kwargs_create={
"identity_client": identity_client,
"user_id": user_id,
"key": public_key,
"module": module,
},
list_fn=identity_client.list_api_keys,
kwargs_list={"user_id": user_id},
module=module,
model=CreateApiKeyDetails(),
create_model_attr_to_get_model_mapping={"key": "key_value"},
)
module.exit_json(**result)
if __name__ == "__main__":
main()
| 36.040441
| 124
| 0.65531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 4,585
| 0.467714
|
a118ceb32497416f45bc3e52e40410e78c21e051
| 836
|
py
|
Python
|
python_modules/dagster/dagster/core/types/builtin_enum.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | 1
|
2019-07-15T17:34:04.000Z
|
2019-07-15T17:34:04.000Z
|
python_modules/dagster/dagster/core/types/builtin_enum.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster/dagster/core/types/builtin_enum.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | null | null | null |
import sys
if sys.version_info.major >= 3:
import typing
class BuiltinEnum:
ANY = typing.Any
BOOL = typing.NewType('Bool', bool)
FLOAT = typing.NewType('Float', float)
INT = typing.NewType('Int', int)
PATH = typing.NewType('Path', str)
STRING = typing.NewType('String', str)
NOTHING = typing.NewType('Nothing', None)
@classmethod
def contains(cls, value):
return any(value == getattr(cls, key) for key in dir(cls))
else:
from enum import Enum
class BuiltinEnum(Enum):
ANY = 'Any'
BOOL = 'Bool'
FLOAT = 'Float'
INT = 'Int'
PATH = 'Path'
STRING = 'String'
NOTHING = 'Nothing'
@classmethod
def contains(cls, value):
return isinstance(value, cls)
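# A minimal usage sketch (illustrative, not part of the original module); on
# Python 3 contains() compares against the typing aliases defined above, on
# Python 2 it falls back to an Enum membership check.
if __name__ == '__main__':
    assert BuiltinEnum.contains(BuiltinEnum.INT)
    assert not BuiltinEnum.contains('not a builtin')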
| 22.594595
| 70
| 0.551435
| 728
| 0.870813
| 0
| 0
| 205
| 0.245215
| 0
| 0
| 87
| 0.104067
|
a11a0df896228fb34c45a26a79b430c991c408ae
| 1,173
|
py
|
Python
|
sallybrowse/extensions/document/__init__.py
|
XiuyuanLu/browse
|
ee5ca57e54fe492d5b109b7cae87d1c8a45dbe25
|
[
"MIT"
] | null | null | null |
sallybrowse/extensions/document/__init__.py
|
XiuyuanLu/browse
|
ee5ca57e54fe492d5b109b7cae87d1c8a45dbe25
|
[
"MIT"
] | null | null | null |
sallybrowse/extensions/document/__init__.py
|
XiuyuanLu/browse
|
ee5ca57e54fe492d5b109b7cae87d1c8a45dbe25
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys, os, re, html
from flask import request, Response
from sallybrowse.extensions import BaseExtension
from subprocess import Popen, PIPE
class Extension(BaseExtension):
PATTERN = re.compile(r".*\.(pdf|xlsx|xls|docx|doc|ods|odt|xlt|ppt|pptx|odp|otp|ltx)$", re.IGNORECASE)
PRIORITY = 100
def __init__(self, *args, **kwargs):
BaseExtension.__init__(self, *args, **kwargs)
def preview(self):
response = Response()
error = False
if request.path.lower().endswith(".pdf"):
try:
data = open(request.path, "rb").read()
except:
error = True
else:
try:
pipe = Popen(("unoconv", "--stdout", "-f", "pdf", request.path), stdout = PIPE)
data, _ = pipe.communicate()
if pipe.returncode != 0:
error = True
except:
error = True
if error:
response.data = """
<html>
<head>
<style>
* {
font-size: 1.1em;
}
body {
background: white;
}
</style>
</head>
<body>
%s
</body>
</html>
""" % self.ERROR_NO_PREVIEW
else:
response.data = data
response.mimetype = "application/pdf"
return response
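# A minimal standalone sketch of the unoconv call used above (assumes the
# `unoconv` binary is installed and on PATH; the input path is illustrative):
#
#     pipe = Popen(("unoconv", "--stdout", "-f", "pdf", "/tmp/example.docx"), stdout=PIPE)
#     pdf_bytes, _ = pipe.communicate()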
| 18.919355
| 102
| 0.597613
| 1,002
| 0.85422
| 0
| 0
| 0
| 0
| 0
| 0
| 359
| 0.306053
|
a11c3d72105134f3cd78ad0e461a7ff2f92aa01d
| 4,713
|
py
|
Python
|
Tests/testGalaxy.py
|
elsiehupp/traveller_pyroute
|
32a43665910894896b807576125acee56ef02797
|
[
"MIT"
] | 12
|
2017-02-09T08:58:16.000Z
|
2021-09-04T22:12:57.000Z
|
Tests/testGalaxy.py
|
elsiehupp/traveller_pyroute
|
32a43665910894896b807576125acee56ef02797
|
[
"MIT"
] | 23
|
2017-07-14T05:04:30.000Z
|
2022-03-27T02:20:06.000Z
|
Tests/testGalaxy.py
|
elsiehupp/traveller_pyroute
|
32a43665910894896b807576125acee56ef02797
|
[
"MIT"
] | 4
|
2016-12-31T06:23:47.000Z
|
2022-03-03T19:36:43.000Z
|
"""
Created on Nov 30, 2021
@author: CyberiaResurrection
"""
import unittest
import re
import sys
sys.path.append('../PyRoute')
from Galaxy import Galaxy
from Galaxy import Sector
class testGalaxy(unittest.TestCase):
"""
A very simple, barebones test to check that Verge and Reft end up in their correct relative positions
- Verge being immediately rimward of Reft
"""
def testVerticalOrdering(self):
galaxy = Galaxy(0)
reft = Sector("Reft", "# -3, 0")
self.assertEqual(-3, reft.x)
self.assertEqual(0, reft.y)
verge = Sector("Verge", "# -3, -1")
self.assertEqual(-3, verge.x)
self.assertEqual(-1, verge.y)
galaxy.sectors[reft.name] = reft
galaxy.sectors[verge.name] = verge
# verify, before bounding sectors gets run, nothing is hooked up
self.assertIsNone(galaxy.sectors[reft.name].coreward)
self.assertIsNone(galaxy.sectors[reft.name].rimward)
self.assertIsNone(galaxy.sectors[reft.name].spinward)
self.assertIsNone(galaxy.sectors[reft.name].trailing)
self.assertIsNone(galaxy.sectors[verge.name].coreward)
self.assertIsNone(galaxy.sectors[verge.name].rimward)
self.assertIsNone(galaxy.sectors[verge.name].spinward)
self.assertIsNone(galaxy.sectors[verge.name].trailing)
# set bounding sectors
galaxy.set_bounding_sectors()
# now assert that Reft is coreward from Verge, and (likewise), Verge is rimward from Reft, and nothing else
# got set
self.assertEqual(galaxy.sectors[reft.name], galaxy.sectors[verge.name].coreward, "Reft should be coreward of Verge")
self.assertIsNone(galaxy.sectors[verge.name].rimward, "Nothing should be rimward of Verge")
self.assertIsNone(galaxy.sectors[verge.name].spinward, "Nothing should be spinward of Verge")
self.assertIsNone(galaxy.sectors[verge.name].trailing, "Nothing should be trailing of Verge")
self.assertIsNone(galaxy.sectors[reft.name].coreward, "Nothing should be coreward of Reft")
self.assertIsNone(galaxy.sectors[reft.name].trailing, "Nothing should be trailing of Reft")
self.assertIsNone(galaxy.sectors[reft.name].spinward, "Nothing should be spinward of Reft")
self.assertEqual(galaxy.sectors[verge.name], galaxy.sectors[reft.name].rimward, "Verge should be rimward of Reft")
"""
A very simple, barebones test to check that Dagudashaag and Core end up in their correct relative positions
- Dagudashaag being immediately spinward of Core
"""
def testHorizontalOrdering(self):
galaxy = Galaxy(0)
core = Sector("Core", "# 0, 0")
self.assertEqual(0, core.x)
self.assertEqual(0, core.y)
dagudashaag = Sector("Dagudashaag", "# -1, 0")
self.assertEqual(-1, dagudashaag.x)
self.assertEqual(0, dagudashaag.y)
galaxy.sectors[core.name] = core
galaxy.sectors[dagudashaag.name] = dagudashaag
# verify, before bounding sectors gets run, nothing is hooked up
self.assertIsNone(galaxy.sectors[core.name].coreward)
self.assertIsNone(galaxy.sectors[core.name].rimward)
self.assertIsNone(galaxy.sectors[core.name].spinward)
self.assertIsNone(galaxy.sectors[core.name].trailing)
self.assertIsNone(galaxy.sectors[dagudashaag.name].coreward)
self.assertIsNone(galaxy.sectors[dagudashaag.name].rimward)
self.assertIsNone(galaxy.sectors[dagudashaag.name].spinward)
self.assertIsNone(galaxy.sectors[dagudashaag.name].trailing)
# set bounding sectors
galaxy.set_bounding_sectors()
# now assert that Dagudashaag is spinward from Core, Core is trailing of Dagudashaag, and nothing else
# got set
self.assertEqual(galaxy.sectors[dagudashaag.name], galaxy.sectors[core.name].spinward, "Dagudashaag should be spinward of core")
self.assertIsNone(galaxy.sectors[core.name].coreward, "Nothing should be coreward of Core")
self.assertIsNone(galaxy.sectors[core.name].rimward, "Nothing should be rimward of Core")
self.assertIsNone(galaxy.sectors[core.name].trailing, "Nothing should be trailing of core")
self.assertIsNone(galaxy.sectors[dagudashaag.name].coreward, "Nothing should be coreward of Dagudashaag")
self.assertIsNone(galaxy.sectors[dagudashaag.name].rimward, "Nothing should be rimward of Dagudashaag")
self.assertIsNone(galaxy.sectors[dagudashaag.name].spinward, "Nothing should be spinward of Dagudashaag")
self.assertEqual(galaxy.sectors[core.name], galaxy.sectors[dagudashaag.name].trailing, "Core should be trailing of Dagudashaag")
| 46.205882
| 136
| 0.70401
| 4,526
| 0.960323
| 0
| 0
| 0
| 0
| 0
| 0
| 1,479
| 0.313813
|
a11c870ae3ef5f8dd838f6f8d4edc0a12f86fa5e
| 188
|
py
|
Python
|
py_boot/test.py
|
davidcawork/Investigacion
|
ed25678cbab26e30370e9e2d07b84029bbad4d0b
|
[
"Apache-2.0"
] | null | null | null |
py_boot/test.py
|
davidcawork/Investigacion
|
ed25678cbab26e30370e9e2d07b84029bbad4d0b
|
[
"Apache-2.0"
] | null | null | null |
py_boot/test.py
|
davidcawork/Investigacion
|
ed25678cbab26e30370e9e2d07b84029bbad4d0b
|
[
"Apache-2.0"
] | null | null | null |
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
driver = webdriver.Firefox()
driver.get('https://www.google.com')
time.sleep(60)
driver.close()
| 20.888889
| 47
| 0.787234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 24
| 0.12766
|
a11d080c34ade0f2e6de40e4b89c652d910ddf38
| 1,240
|
py
|
Python
|
tests/test_dlms_state.py
|
Layty/dlms-cosem
|
95b67054a1dfb928e960547b0246b7b6794f0594
|
[
"MIT"
] | 1
|
2021-08-20T09:19:07.000Z
|
2021-08-20T09:19:07.000Z
|
tests/test_dlms_state.py
|
Layty/dlms-cosem
|
95b67054a1dfb928e960547b0246b7b6794f0594
|
[
"MIT"
] | null | null | null |
tests/test_dlms_state.py
|
Layty/dlms-cosem
|
95b67054a1dfb928e960547b0246b7b6794f0594
|
[
"MIT"
] | null | null | null |
import pytest
from dlms_cosem import enumerations, state
from dlms_cosem.exceptions import LocalDlmsProtocolError
from dlms_cosem.protocol import acse
from dlms_cosem.protocol.acse import UserInformation
from dlms_cosem.protocol.xdlms import Conformance, InitiateRequestApdu
def test_non_aarq_on_initial_raises_protocol_error():
s = state.DlmsConnectionState()
with pytest.raises(LocalDlmsProtocolError):
s.process_event(acse.ReleaseResponseApdu())
def test_aarq_makes_dlms_waiting_for_aare():
s = state.DlmsConnectionState()
s.process_event(
acse.ApplicationAssociationRequestApdu(
user_information=UserInformation(
InitiateRequestApdu(proposed_conformance=Conformance())
)
)
)
assert s.current_state == state.AWAITING_ASSOCIATION_RESPONSE
def test_aare_sets_ready_on_waiting_aare_response():
s = state.DlmsConnectionState(current_state=state.AWAITING_ASSOCIATION_RESPONSE)
s.process_event(
acse.ApplicationAssociationResponseApdu(
enumerations.AssociationResult.ACCEPTED,
result_source_diagnostics=enumerations.AcseServiceUserDiagnostics.NULL,
)
)
assert s.current_state == state.READY
| 32.631579
| 84
| 0.765323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
a11ebc5157787a925779b80587bf0be3060a8389
| 705
|
py
|
Python
|
sets-add.py
|
limeonion/Python-Programming
|
90cbbbd7651fc04669e21be2adec02ba655868cf
|
[
"MIT"
] | null | null | null |
sets-add.py
|
limeonion/Python-Programming
|
90cbbbd7651fc04669e21be2adec02ba655868cf
|
[
"MIT"
] | null | null | null |
sets-add.py
|
limeonion/Python-Programming
|
90cbbbd7651fc04669e21be2adec02ba655868cf
|
[
"MIT"
] | null | null | null |
'''
If we want to add a single element to an existing set, we can use the .add() operation.
It adds the element to the set and returns 'None'.
Example
>>> s = set('HackerRank')
>>> s.add('H')
>>> print s
set(['a', 'c', 'e', 'H', 'k', 'n', 'r', 'R'])
>>> print s.add('HackerRank')
None
>>> print s
set(['a', 'c', 'e', 'HackerRank', 'H', 'k', 'n', 'r', 'R'])
Input Format
The first line contains an integer N, the total number of country stamps.
The next N lines contains the name of the country where the stamp is from.
Output Format
Output the total number of distinct country stamps on a single line.
'''
n = int(input())
countries = set()
for i in range(n):
countries.add(input())
print(len(countries))
| 22.741935
| 87
| 0.635461
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 600
| 0.851064
|
a120a8bf6158dc27ba03b14f3d39ab89d4fa4e32
| 2,331
|
py
|
Python
|
lesson_08/lesson_08_06.py
|
amindmobile/geekbrains-python-002
|
4bc2f7af755d00e73ddc48f1138830cb78e87034
|
[
"MIT"
] | null | null | null |
lesson_08/lesson_08_06.py
|
amindmobile/geekbrains-python-002
|
4bc2f7af755d00e73ddc48f1138830cb78e87034
|
[
"MIT"
] | null | null | null |
lesson_08/lesson_08_06.py
|
amindmobile/geekbrains-python-002
|
4bc2f7af755d00e73ddc48f1138830cb78e87034
|
[
"MIT"
] | null | null | null |
# 6. Continue working on the second task. Implement a validation mechanism for the data entered by the user. For
# example, a string type cannot be used to specify the number of printers sent to the warehouse.
# Hint: try to implement as many of the features covered in the OOP lessons as possible in the
# "Office equipment warehouse" project.
class StoreMashines:
    def __init__(self, name, price, quantity, number_of_lists, *args):
        self.name = name
        self.price = price
        self.quantity = quantity
        self.numb = number_of_lists
        self.my_store_full = []
        self.my_store = []
        self.my_unit = {'Device model': self.name, 'Unit price': self.price, 'Quantity': self.quantity}
    def __str__(self):
        return f'{self.name} price {self.price} quantity {self.quantity}'
    # @classmethod
    # @staticmethod
    def reception(self):
        # print('Q to quit, Enter to continue')
        # while True:
        try:
            unit = input('Enter item name ')
            unit_p = int(input('Enter unit price '))
            unit_q = int(input('Enter quantity '))
            unique = {'Device model': unit, 'Unit price': unit_p, 'Quantity': unit_q}
            self.my_unit.update(unique)
            self.my_store.append(self.my_unit)
            print(f'Current list -\n {self.my_store}')
        except ValueError:
            return 'Data input error: price and quantity must be integers'
        print('Q to quit, Enter to continue')
        q = input('---> ')
        if q == 'Q' or q == 'q':
            self.my_store_full.append(self.my_store)
            print(f'Entire warehouse -\n {self.my_store_full}')
            return 'Exit'
        else:
            return StoreMashines.reception(self)
class Printer(StoreMashines):
def to_print(self):
return f'to print smth {self.numb} times'
class Scanner(StoreMashines):
def to_scan(self):
return f'to scan smth {self.numb} times'
class Copier(StoreMashines):
def to_copier(self):
return f'to copier smth {self.numb} times'
unit_1 = Printer('hp', 2000, 5, 10)
unit_2 = Scanner('Canon', 1200, 5, 10)
unit_3 = Copier('Xerox', 1500, 1, 15)
print(unit_1.reception())
print(unit_2.reception())
print(unit_3.reception())
print(unit_1.to_print())
print(unit_3.to_copier())
| 33.3
| 118
| 0.637066
| 1,940
| 0.683099
| 0
| 0
| 0
| 0
| 0
| 0
| 1,437
| 0.505986
|
a120f8eceb39d652a13f796940ef296a98d1bfaa
| 1,212
|
py
|
Python
|
epicteller/core/dao/credential.py
|
KawashiroNitori/epicteller
|
264b11e7e6eb58beb0f67ecbbb811d268a533f7a
|
[
"MIT"
] | null | null | null |
epicteller/core/dao/credential.py
|
KawashiroNitori/epicteller
|
264b11e7e6eb58beb0f67ecbbb811d268a533f7a
|
[
"MIT"
] | null | null | null |
epicteller/core/dao/credential.py
|
KawashiroNitori/epicteller
|
264b11e7e6eb58beb0f67ecbbb811d268a533f7a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from typing import Optional
from epicteller.core import redis
from epicteller.core.model.credential import Credential
class CredentialDAO:
r = redis.pool
@classmethod
async def set_access_credential(cls, credential: Credential):
await cls.r.pool.set(f'access_token:{credential.token}', credential.json(), expire=credential.ttl)
@classmethod
async def revoke_access_credential(cls, token: str):
await cls.r.pool.expire(f'access_token:{token}', 10)
@classmethod
async def set_email_validate_token(cls, action: str, token: str, email: str):
await cls.r.pool.set(f'email_validate:{action}:{token}', email, expire=600)
@classmethod
async def get_email_validate_token(cls, action: str, token: str) -> Optional[str]:
email = await cls.r.pool.get(f'email_validate:{action}:{token}')
if not email:
return
return email.decode('utf8')
@classmethod
async def get_access_credential(cls, token: str) -> Optional[Credential]:
data = await cls.r.pool.get(f'access_token:{token}')
if not data:
return
return Credential.parse_raw(data)
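# A minimal usage sketch (assumes an initialised redis pool and an existing
# Credential instance `cred`; both names are illustrative):
#
#     await CredentialDAO.set_access_credential(cred)
#     restored = await CredentialDAO.get_access_credential(cred.token)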
| 32.756757
| 106
| 0.679868
| 1,044
| 0.861386
| 0
| 0
| 975
| 0.804455
| 890
| 0.734323
| 198
| 0.163366
|
a1219febbcf1d75638bb20d8c543ddf6a4896780
| 1,101
|
py
|
Python
|
models.py
|
drigobon/prep.gg
|
5db0e5eb99acd7bd41d7546629d6f1b53db2f46e
|
[
"MIT"
] | null | null | null |
models.py
|
drigobon/prep.gg
|
5db0e5eb99acd7bd41d7546629d6f1b53db2f46e
|
[
"MIT"
] | null | null | null |
models.py
|
drigobon/prep.gg
|
5db0e5eb99acd7bd41d7546629d6f1b53db2f46e
|
[
"MIT"
] | null | null | null |
class LeagueGame:
def __init__(self, data):
self.patch = data['patch']
self.win = data['win']
self.side = data['side']
self.opp = data['opp']
self.bans = data['bans']
self.vs_bans = data['vs_bans']
self.picks = data['picks']
self.vs_picks = data['vs_picks']
self.players = data['players']
class LeaguePlayer:
def __init__(self, n_games, n_wins, data):
self.n_games = n_games
self.n_wins = n_wins
self.K = data['K']
self.D = data['D']
self.A = data['A']
self.CS = data['CS']
self.CSM = data['CSM']
self.G = data['G']
self.GM = data['GM']
self.KPAR = data['KPAR']
self.KS = data['KS']
self.GS = data['GS']
class LeagueTeam:
def __init__(self, players, data):
self.players = players
self.region = data['region']
self.season = data['season']
self.WL = data['WL']
self.avg_gm_dur = data['avg_gm_dur']
self.most_banned_by = data['most_banned_by']
self.most_banned_vs = data['most_banned_vs']
self.economy = data['economy']
self.aggression = data['aggression']
self.objectives = data['objectives']
self.vision = data['vision']
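# A minimal construction sketch (all field values are illustrative):
#
#     game = LeagueGame({'patch': '11.3', 'win': True, 'side': 'blue',
#                        'opp': 'T1', 'bans': [], 'vs_bans': [],
#                        'picks': [], 'vs_picks': [], 'players': []})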
| 22.9375
| 46
| 0.643052
| 1,086
| 0.986376
| 0
| 0
| 0
| 0
| 0
| 0
| 208
| 0.188919
|
a121e58fcc354bb0486144293e6dc4511324fbba
| 1,046
|
py
|
Python
|
option.py
|
lotress/new-DL
|
adc9f6f94538088d3d70327d9c7bb089ef7e1638
|
[
"MIT"
] | null | null | null |
option.py
|
lotress/new-DL
|
adc9f6f94538088d3d70327d9c7bb089ef7e1638
|
[
"MIT"
] | null | null | null |
option.py
|
lotress/new-DL
|
adc9f6f94538088d3d70327d9c7bb089ef7e1638
|
[
"MIT"
] | null | null | null |
from common import *
from model import vocab
option = dict(edim=256, epochs=1.5, maxgrad=1., learningrate=1e-3, sdt_decay_step=1, batchsize=8, vocabsize=vocab, fp16=2, saveInterval=10, logInterval=.4)
option['loss'] = lambda opt, model, y, out, *_, rewards=[]: F.cross_entropy(out.transpose(-1, -2), y, reduction='none')
option['criterion'] = lambda y, out, mask, *_: (out[:,:,1:vocab].max(-1)[1] + 1).ne(y).float() * mask.float()
option['startEnv'] = lambda x, y, l, *args: (x, y, l, *args)
option['stepEnv'] = lambda i, pred, l, *args: (False, 1., None, None) # done episode, fake reward, Null next input, Null length, Null args
option['cumOut'] = False # True to keep trajectory
option['devices'] = [0] if torch.cuda.is_available() else [] # list of GPUs
option['init_method'] = 'file:///tmp/sharedfile' # initial configuration for multiple-GPU training
try:
from qhoptim.pyt import QHAdam
option['newOptimizer'] = lambda opt, params, _: QHAdam(params, lr=opt.learningrate, nus=(.7, .8), betas=(0.995, 0.999))
except ImportError: pass
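# A minimal access sketch (illustrative): downstream training code would pull
# hyperparameters and callbacks from this dict, e.g.
#
#     lr = option['learningrate']
#     loss_fn = option['loss']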
| 69.733333
| 155
| 0.686424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 266
| 0.254302
|
a122487d9193d1e9db5e1e4904c5779cf5ab0b4a
| 1,713
|
py
|
Python
|
Release/cyberbot-micropython/Examples/Terminal_DA_AD.py
|
parallaxinc/cyberbot
|
f7c4d355ee0310dcfef81027802cc41ac6ce90e1
|
[
"MIT"
] | 4
|
2019-03-18T20:49:41.000Z
|
2022-03-24T01:44:36.000Z
|
Release/cyberbot-micropython/Examples/Terminal_DA_AD.py
|
parallaxinc/cyberbot
|
f7c4d355ee0310dcfef81027802cc41ac6ce90e1
|
[
"MIT"
] | 5
|
2019-06-07T18:09:27.000Z
|
2021-04-08T17:16:55.000Z
|
Release/cyberbot-micropython/Examples/Terminal_DA_AD.py
|
parallaxinc/cyberbot
|
f7c4d355ee0310dcfef81027802cc41ac6ce90e1
|
[
"MIT"
] | null | null | null |
# Terminal_DA_AD.py
# Circuit
# D/A0---A/D0, D/A1---A/D1,
# pot A---3.3V, potB---GND, pot wiper---A/D2
# Procedure
# Run, then open REPL and then CTRL + D
# Twist pot input while program runs to see ad2 vary
# Notes
# micro:bit ground is 0.4 V below cyber:bot board ground
# micro:bit 3.3 V = 3.245 V WRT cyber:bot board ground
# cyber:bot 3.3 V = 3.326 V WRT cyber:bot board ground
# Output example
# da0 = 0, da1 = 1024, ad0 = 13, ad1 = 623, ad2 = 7
# da0 = 64, da1 = 960, ad0 = 72, ad1 = 998, ad2 = 7
# da0 = 128, da1 = 896, ad0 = 137, ad1 = 934, ad2 = 7
# da0 = 192, da1 = 832, ad0 = 203, ad1 = 871, ad2 = 7
# da0 = 256, da1 = 768, ad0 = 266, ad1 = 805, ad2 = 87
# da0 = 320, da1 = 704, ad0 = 332, ad1 = 744, ad2 = 150
# da0 = 384, da1 = 640, ad0 = 398, ad1 = 680, ad2 = 211
# da0 = 448, da1 = 576, ad0 = 461, ad1 = 617, ad2 = 261
# da0 = 512, da1 = 512, ad0 = 526, ad1 = 554, ad2 = 308
# da0 = 576, da1 = 448, ad0 = 588, ad1 = 490, ad2 = 372
# da0 = 640, da1 = 384, ad0 = 652, ad1 = 425, ad2 = 469
# da0 = 704, da1 = 320, ad0 = 716, ad1 = 360, ad2 = 629
# da0 = 768, da1 = 256, ad0 = 779, ad1 = 295, ad2 = 806
# da0 = 832, da1 = 192, ad0 = 845, ad1 = 231, ad2 = 867
# da0 = 896, da1 = 128, ad0 = 907, ad1 = 165, ad2 = 947
# da0 = 960, da1 = 64, ad0 = 970, ad1 = 100, ad2 = 1023
from cyberbot import *
bot(22).tone(2000, 300)
while True:
for da in range(0, 1024, 64):
bot(20).write_analog(da)
bot(21).write_analog(1024 - da)
sleep(20)
ad0 = pin0.read_analog()
ad1 = pin1.read_analog()
ad2 = pin2.read_analog()
print("da0 = %d, da1 = %d, ad0 = %d, ad1 = %d, ad2 = %d" % (da, 1024 - da, ad0, ad1, ad2))
sleep(150)
print(" ")
sleep(500)
| 32.320755
| 92
| 0.565674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,354
| 0.790426
|
a122b64cab542d8bb7f50552627ee57f6ed6232b
| 4,781
|
py
|
Python
|
cinebot_mini_render_server/animation_routes.py
|
cheng-chi/cinebot_mini
|
708a7c80d2f203dfe3b52bf84d9cbafac7673d27
|
[
"MIT"
] | null | null | null |
cinebot_mini_render_server/animation_routes.py
|
cheng-chi/cinebot_mini
|
708a7c80d2f203dfe3b52bf84d9cbafac7673d27
|
[
"MIT"
] | null | null | null |
cinebot_mini_render_server/animation_routes.py
|
cheng-chi/cinebot_mini
|
708a7c80d2f203dfe3b52bf84d9cbafac7673d27
|
[
"MIT"
] | null | null | null |
import bpy
from aiohttp import web
import numpy as np
from mathutils import Matrix, Vector
import asyncio
from cinebot_mini_render_server.blender_timer_executor import EXECUTOR
routes = web.RouteTableDef()
def delete_animation_helper(obj):
if not obj.animation_data:
return False
if not obj.animation_data.action:
return False
if not obj.animation_data.action.fcurves:
return False
action = obj.animation_data.action
remove_types = ["location", "scale", "rotation"]
fcurves = [fc for fc in action.fcurves
for type in remove_types
if fc.data_path.startswith(type)]
while fcurves:
fc = fcurves.pop()
action.fcurves.remove(fc)
return True
def handle_object_animation_get_helper(obj_name):
scene = bpy.context.scene
obj = bpy.data.objects[obj_name]
fc = obj.animation_data.action.fcurves[0]
start, end = fc.range()
transforms = []
for t in range(int(start), int(end)):
scene.frame_set(t)
matrix_world = np.array(obj.matrix_world)
tf_data = {
"frame_number": t,
"matrix_world": matrix_world.tolist()
}
transforms.append(tf_data)
return transforms
@routes.get('/api/object/{obj_name}/animation')
async def handle_object_animation_get(request):
obj_name = request.match_info.get('obj_name', "None")
if obj_name not in bpy.data.objects:
raise web.HTTPBadRequest()
loop = asyncio.get_event_loop()
result = await loop.run_in_executor(EXECUTOR,
handle_object_animation_get_helper, obj_name)
data = {
"result": result,
"url": '/api/object/{}/animation'.format(obj_name),
"method": "GET"
}
return web.json_response(data)
def handle_object_animation_put_helper(input_data, obj_name):
scene = bpy.context.scene
obj = bpy.data.objects[obj_name]
print("before delete")
delete_animation_helper(obj)
print("after delete")
if not obj.animation_data:
obj.animation_data_create()
if not obj.animation_data.action:
obj.animation_data.action = bpy.data.actions.new(name=obj_name + "_action")
f_curves_loc = [obj.animation_data.action.fcurves.new(data_path="location", index=i) for i in range(3)]
f_curves_rot = [obj.animation_data.action.fcurves.new(data_path="rotation_euler", index=i) for i in range(3)]
[x.keyframe_points.add(len(input_data["transforms"])) for x in f_curves_loc]
[x.keyframe_points.add(len(input_data["transforms"])) for x in f_curves_rot]
for i, frame in enumerate(input_data["transforms"]):
frame_number = frame["frame_number"]
location = None
rotation_euler = None
if "matrix_world" in frame:
matrix_world = frame["matrix_world"]
m = Matrix(matrix_world)
location = m.to_translation()
rotation_euler = m.to_euler()
elif "location" in frame and "rotation_euler" in frame:
location = frame["location"]
rotation_euler = frame["rotation_euler"]
else:
return False
for j in range(3):
f_curves_loc[j].keyframe_points[i].co = [float(frame_number), location[j]]
f_curves_rot[j].keyframe_points[i].co = [float(frame_number), rotation_euler[j]]
return True
@routes.put('/api/object/{obj_name}/animation')
async def handle_object_animation_put(request):
input_data = await request.json()
obj_name = request.match_info.get('obj_name', "None")
if obj_name not in bpy.data.objects:
raise web.HTTPBadRequest()
loop = asyncio.get_event_loop()
result = await loop.run_in_executor(EXECUTOR,
handle_object_animation_put_helper, input_data, obj_name)
data = {
"result": "SUCCESS" if result else "FAILED",
"url": '/api/object/{}/animation'.format(obj_name),
"method": "PUT"
}
return web.json_response(data=data)
def handle_object_animation_delete_helper(obj_name):
scene = bpy.context.scene
obj = bpy.data.objects[obj_name]
result = delete_animation_helper(obj)
return result
@routes.delete('/api/object/{obj_name}/animation')
async def handle_object_animation_delete(request):
obj_name = request.match_info.get('obj_name', "None")
if obj_name not in bpy.data.objects:
raise web.HTTPBadRequest()
loop = asyncio.get_event_loop()
result = await loop.run_in_executor(EXECUTOR,
handle_object_animation_delete_helper, obj_name)
data = {
"result": "SUCCESS" if result else "FAILED",
"url": '/api/object/{}/animation'.format(obj_name),
"method": "DELETE"
}
return web.json_response(data=data)
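# A minimal wiring sketch (assumes this module runs inside Blender's Python,
# where bpy is importable; the app setup below is illustrative):
#
#     app = web.Application()
#     app.add_routes(routes)
#     web.run_app(app)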
| 31.873333
| 113
| 0.665551
| 0
| 0
| 0
| 0
| 1,758
| 0.367706
| 1,611
| 0.336959
| 592
| 0.123823
|
a124c13c10af7bc999fd4983d83bef5b21b878ff
| 64
|
py
|
Python
|
notebooks/_solutions/13-raster-processing32.py
|
jorisvandenbossche/DS-python-geospatial
|
893a12edc5c203a75815f6dcb5f1e18c577c8cd5
|
[
"BSD-3-Clause"
] | 58
|
2020-10-09T10:10:59.000Z
|
2022-03-07T14:58:07.000Z
|
notebooks/_solutions/13-raster-processing32.py
|
jorisvandenbossche/DS-python-geospatial
|
893a12edc5c203a75815f6dcb5f1e18c577c8cd5
|
[
"BSD-3-Clause"
] | 24
|
2020-09-30T19:57:14.000Z
|
2021-10-05T07:21:09.000Z
|
notebooks/_solutions/13-raster-processing32.py
|
jorisvandenbossche/DS-python-geospatial
|
893a12edc5c203a75815f6dcb5f1e18c577c8cd5
|
[
"BSD-3-Clause"
] | 19
|
2020-10-05T09:32:18.000Z
|
2022-03-20T00:09:14.000Z
|
roads_subset = roads[roads["frc_omschrijving"].isin(road_types)]
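# A self-contained sketch of the same .isin() filter (the pandas import and toy
# frame below are illustrative, not from the notebook):
#
#     import pandas as pd
#     roads = pd.DataFrame({"frc_omschrijving": ["highway", "local road", "cycleway"]})
#     road_types = ["highway", "cycleway"]
#     roads_subset = roads[roads["frc_omschrijving"].isin(road_types)]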
| 64
| 64
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 18
| 0.28125
|
a124d29567d8f21e6a4afe3634fc6ada1ceff28c
| 4,176
|
py
|
Python
|
pade/tests/v1/script_2_revisado_local.py
|
AndreCrescenzo/multi-agents
|
1c76595aab2fd37cc2de0d44a3b3daadcfe6f4f3
|
[
"MIT"
] | 72
|
2018-10-02T21:02:36.000Z
|
2022-02-19T11:24:10.000Z
|
pade/tests/v1/script_2_revisado_local.py
|
AndreCrescenzo/multi-agents
|
1c76595aab2fd37cc2de0d44a3b3daadcfe6f4f3
|
[
"MIT"
] | 47
|
2018-10-24T14:57:02.000Z
|
2022-03-16T00:09:31.000Z
|
pade/tests/v1/script_2_revisado_local.py
|
AndreCrescenzo/multi-agents
|
1c76595aab2fd37cc2de0d44a3b3daadcfe6f4f3
|
[
"MIT"
] | 36
|
2018-06-03T11:29:27.000Z
|
2022-03-15T23:53:19.000Z
|
# -*- encoding: utf-8 -*-
from utils import display_message, set_ams, start_loop, config_loop
#config_loop(gui=True)
from agent import Agent
from messages import ACLMessage
from aid import AID
from protocols import FipaContractNetProtocol
from filters import Filter
from pickle import loads, dumps
from time import sleep
#===============================================================================
# What is needed to create an agent with standardized protocols behaviours?
# First, the protocol class needs to be defined
# Second, this protocol class needs to be associated with the agent's
# behaviour
#===============================================================================
class BookstoreAgentBehaviour(FipaContractNetProtocol):
def __init__(self, agent):
super(BookstoreAgentBehaviour, self).__init__(agent, is_initiator=False)
def handle_cfp(self, message):
FipaContractNetProtocol.handle_cfp(self, message)
display_message(self.agent.aid.name, 'Request Received')
order = loads(message.content)
for book in self.agent.booksList:
if book['title'] == order['title'] and book['author'] == order['author']:
if book['qty'] >= order['qty']:
response = message.create_reply()
response.set_performative(ACLMessage.PROPOSE)
book['book store'] = self.agent.aid.name
response.set_content(dumps(book))
self.agent.send(response)
else:
response = message.create_reply()
response.set_performative(ACLMessage.REJECT_PROPOSAL)
response.set_content('Request Rejected')
self.agent.send(response)
def handle_accept_propose(self, message):
FipaContractNetProtocol.handle_accept_propose(self, message)
display_message(self.agent.aid.name, 'Proposal Accepted')
response = message.create_reply()
response.set_performative(ACLMessage.INFORM)
response.set_content('Purchase Approved')
self.agent.send(response)
def handle_reject_proposes(self, message):
FipaContractNetProtocol.handle_reject_proposes(self, message)
display_message(self.agent.aid.name, 'Proposal Rejected')
class BookstoreAgent(Agent):
def __init__(self, aid, booksList):
Agent.__init__(self, aid)
self.booksList = booksList
behav_ = BookstoreAgentBehaviour(self)
self.addBehaviour(behav_)
if __name__ == '__main__':
booksList_Saraiva = [{'title' : 'The Lord of the Rings', 'author' : 'J. R. R. Tolkien', 'qty' : 10, 'how much is' : 53.50},
{'title' : 'Harry Potter', 'author' : 'J. K. Roling', 'qty' : 10, 'how much is' : 33.70},
{'title' : 'Game of Thrones', 'author' : 'A. M. M. Martin', 'qty' : 10,'how much is' : 23.80}
]
bookslist_Cultura = [{'title' : 'The Lord of the Rings', 'author' : 'J. R. R. Tolkien', 'qty' : 10, 'how much is' : 43.50},
{'title' : 'Harry Potter', 'author' : 'J. K. Roling', 'qty' : 10, 'how much is' : 31.70},
{'title' : 'Game of Thrones', 'author' : 'A. M. M. Martin', 'qty' : 10, 'how much is' : 53.80}
]
bookStoresInfo = [(AID(name='Cultura'), bookslist_Cultura),
(AID(name='Saraiva'), booksList_Saraiva)]
order = {'title' : 'The Lord of the Rings', 'author' : 'J. R. R. Tolkien', 'qty' : 5}
#set_ams('localhost', 8000)
agents = []
#saraiva = BookstoreAgent(AID(name='Saraiva@192.168.0.100:2002'), booksList_Saraiva)
saraiva = BookstoreAgent(AID(name='Saraiva'), booksList_Saraiva)
saraiva.set_ams()
agents.append(saraiva)
#cultura = BookstoreAgent(AID(name='Cultura@192.168.0.100:2003'), bookslist_Cultura)
cultura = BookstoreAgent(AID(name='Cultura'), bookslist_Cultura)
cultura.set_ams()
agents.append(cultura)
start_loop(agents)
| 41.76
| 127
| 0.58477
| 1,935
| 0.463362
| 0
| 0
| 0
| 0
| 0
| 0
| 1,261
| 0.301964
|
a1251c76fb85b2d7c8033d0baea28470e0f14346
| 9,320
|
py
|
Python
|
beta/dump/pbasis.py
|
addschile/pymctdh
|
20a93ce543526de1919757defceef16f9005f423
|
[
"MIT"
] | null | null | null |
beta/dump/pbasis.py
|
addschile/pymctdh
|
20a93ce543526de1919757defceef16f9005f423
|
[
"MIT"
] | null | null | null |
beta/dump/pbasis.py
|
addschile/pymctdh
|
20a93ce543526de1919757defceef16f9005f423
|
[
"MIT"
] | null | null | null |
from copy import deepcopy
from numba import jit,njit
import numpy as np
import pymctdh.opfactory as opfactory
from pymctdh.cy.sparsemat import CSRmat#,matvec
@njit(fastmath=True)
def matvec(nrows,IA,JA,data,vec,outvec):
"""
"""
d_ind = 0
for i in range(nrows):
ncol = IA[i+1]-IA[i]
for j in range(ncol):
col_ind = JA[d_ind]
outvec[i] = outvec[i] + data[d_ind]*vec[col_ind]
d_ind += 1
return outvec
def matadd(nrows,op1,a,op2,b):
"""
"""
if op1 is None:
opout = deepcopy(op2)
opout.data *= b
else:
data = []
JA = []
IA = [0]
ind1 = 0
ind2 = 0
for i in range(nrows):
op1_col = op1.JA[op1.IA[i]:op1.IA[i+1]]
op2_col = op2.JA[op2.IA[i]:op2.IA[i+1]]
inds = np.union1d(op1_col,op2_col)
IA.append( IA[i]+len(inds) )
for ind in inds:
JA.append( ind )
dat = 0.0
if ind in op1_col:
dat += a*op1.data[ind1]
ind1 +=1
if ind in op2_col:
dat += b*op2.data[ind2]
ind2 +=1
data.append( dat )
data = np.array(data)
IA = np.array(IA, dtype=np.intc)
JA = np.array(JA, dtype=np.intc)
opout = CSRmat(data, IA, JA)
return opout
#@njit(fastmath=True)
def kron(nrows1,IA1,JA1,data1,nrows2,IA2,JA2,data2):
    """Kronecker product of two square CSR matrices.
    Output row i*nrows2 + k pairs every stored entry of row i of the first
    matrix with every stored entry of row k of the second matrix.
    """
    data = []
    JA = []
    IA = [0]
    for i in range(nrows1):
        for k in range(nrows2):
            # build output row i*nrows2 + k
            for p1 in range(IA1[i], IA1[i+1]):
                for p2 in range(IA2[k], IA2[k+1]):
                    data.append( data1[p1]*data2[p2] )
                    JA.append( JA1[p1]*nrows2 + JA2[p2] )
            IA.append( len(data) )
    return CSRmat(np.array(data), np.array(IA, dtype=np.intc), np.array(JA, dtype=np.intc))
class PBasis:
"""
"""
def __init__(self, args, sparse=False):
"""
"""
self.params = {}
self.params['basis'] = args[0].lower()
self.sparse = sparse
# set up parameters for basis
if self.params['basis'] == 'ho':
self.params['npbf'] = args[1]
self.params['mass'] = args[2]
self.params['omega'] = args[3]
if len(args) == 5:
self.combined = args[4]
else:
self.combined = False
if self.combined:
self.npbfs = 1
for n in self.params['npbf']:
self.npbfs *= n
self.make_ops = opfactory.make_ho_ops_combined
if not isinstance(self.params['mass'], list):
mlist = [args[2] for i in range(len(args[1]))]
self.params['mass'] = mlist
                if not isinstance(self.params['omega'], list):
                    # replicate the scalar omega (args[3]) across the combined modes
                    omlist = [args[3] for i in range(len(args[1]))]
                    self.params['omega'] = omlist
else:
self.npbfs = self.params['npbf']
self.make_ops = opfactory.make_ho_ops
#self.grid = make_ho_grid(self.params['npbf'])
elif self.params['basis'] == 'sinc':
self.params['npbf'] = args[1]
self.params['qmin'] = args[2]
self.params['qmax'] = args[3]
self.params['dq'] = args[4]
self.params['mass'] = args[5]
if isinstance(self.params['npbf'], list):
self.make_ops = opfactory.make_sinc_ops_combined
else:
self.make_ops = opfactory.make_sinc_ops
            self.grid = np.arange(self.params['qmin'], self.params['qmax']+self.params['dq'], self.params['dq'])
elif self.params['basis'] == 'plane wave':
if args[1]%2 == 0:
self.params['npbf'] = args[1]+1
else:
self.params['npbf'] = args[1]
self.params['nm'] = int((args[1]-1)/2)
self.params['mass'] = args[2]
if len(args) == 4:
self.combined = args[3]
else:
self.combined = False
if self.combined:
raise NotImplementedError
else:
self.make_ops = opfactory.make_planewave_ops
elif self.params['basis'] == 'plane wave dvr':
raise NotImplementedError
#if args[1]%2 == 0:
# self.params['npbf'] = args[1]+1
#else:
# self.params['npbf'] = args[1]
#self.params['nm'] = int((args[1]-1)/2)
#self.params['mass'] = args[2]
#if len(args) == 4:
# self.combined = args[3]
#else:
# self.combined = False
#if self.combined:
# raise NotImplementedError
#else:
# self.make_ops = opfactory.make_planewave_ops
# #self.grid = np.arange(qmin,qmax+dq,dq)
elif self.params['basis'] == 'radial':
raise NotImplementedError
#self.params['npbf'] = args[1]
#self.params['dq'] = args[2]
#self.params['mass'] = args[3]
else:
raise ValueError("Not a valid basis.")
def make_operators(self, ops, matrix=None):
"""Creates matrices for all the relevant operators used in the
calculation. These matrices are then stored in a dictionary called
self.ops.
Input
-----
ops - list of strings, all the operators that are used for this pbf
"""
try:
self.ops
        except AttributeError:
self.ops = {}
if matrix is None:
matrix = [None for i in range(len(ops))]
for i,op in enumerate(ops):
if not op in self.ops:
if matrix[i] is None:
self.ops[op] = self.make_ops(self.params,op,sparse=self.sparse)
else:
self.ops[op] = matrix[i]
## TODO make this for custom operators
#if isinstance(op,str):
# self.ops[op] = self.make_ops(params,op)
#else:
# ind = 'c%d'%(count)
# count += 1
# self.ops[op] = op.copy()
def make_1b_ham(self, nel, terms):
"""Make the 1-body hamiltonians that act on the spfs with this pbf.
"""
op1b = []
for alpha in range(nel):
if self.sparse:
op = None
else:
#op = np.zeros((self.params['npbf'],)*2)
op = np.zeros((self.npbfs,)*2)
for term in terms[alpha]:
opstr = term['ops'][0]
coeff = term['coeff']
if self.sparse:
#op = matadd(self.params['npbf'],op,1.0,self.ops[opstr],coeff)
op = matadd(self.npbfs,op,1.0,self.ops[opstr],coeff)
else:
#print(type(coeff))
op = op.astype(type(coeff))
op += coeff*self.ops[opstr]
op1b.append( op )
self.ops['1b'] = op1b
return
def operate1b(self, spf, alpha):
"""Operate the single-body hamiltonian on a single spf.
"""
if self.sparse:
op = self.ops['1b'][alpha]
outvec = np.zeros(op.nrows, dtype=complex)
return matvec(op.nrows,op.IA,op.JA,op.data,spf,outvec)
#return matvec(op,spf)
else:
return np.dot(self.ops['1b'][alpha], spf)
def operate(self, spf, term):
"""Operate a single-body term on a single spf.
"""
#return self.ops[term]@spf
if self.sparse:
op = self.ops[term]
outvec = np.zeros(op.nrows, dtype=complex)
return matvec(op.nrows,op.IA,op.JA,op.data,spf,outvec)
#return matvec(self.ops[term], spf)
else:
return np.dot(self.ops[term], spf)
if __name__ == "__main__":
# no mode combination
pbf = PBasis(['ho',22,1.0,1.0])
pbf.make_operators(['q','KE','q^2'])
print(pbf.params['basis'])
print(pbf.params['npbf'])
print(pbf.params['mass'])
print(pbf.params['omega'])
opkeys = pbf.ops.keys()
for op in opkeys:
print(op)
print(pbf.ops[op].shape)
print('')
print('')
# mode combination
pbf = PBasis(['ho',[6,6],1.0,1.0,True])
pbf.make_operators(['(q)*(1)','(1)*(q)'])
print(pbf.params['basis'])
print(pbf.params['npbf'])
print(pbf.params['mass'])
print(pbf.params['omega'])
opkeys = pbf.ops.keys()
for op in opkeys:
print(op)
print(pbf.ops[op].shape)
print('')
print('')
# mode combination
pbf = PBasis(['ho',[6,6],[1.0,2.0],[1.0,2.0],True])
pbf.make_operators(['(q)*(1)','(1)*(q)'])
print(pbf.params['basis'])
print(pbf.params['npbf'])
print(pbf.params['mass'])
print(pbf.params['omega'])
opkeys = pbf.ops.keys()
for op in opkeys:
print(op)
print(pbf.ops[op].shape)
print('')
print('')
| 32.932862
| 83
| 0.483584
| 6,042
| 0.648283
| 0
| 0
| 311
| 0.033369
| 0
| 0
| 1,967
| 0.211052
|
a12707fafb28025c41d88777cd9bef6fd6e1e539
| 13,018
|
py
|
Python
|
handyrl/envs/kaggle/hungry_geese.py
|
HantianZheng/HandyRL
|
2a109faab4745b936e4176e079da4c98dff592e8
|
[
"MIT"
] | 1
|
2021-07-14T07:57:51.000Z
|
2021-07-14T07:57:51.000Z
|
handyrl/envs/kaggle/hungry_geese.py
|
HantianZheng/HandyRL
|
2a109faab4745b936e4176e079da4c98dff592e8
|
[
"MIT"
] | null | null | null |
handyrl/envs/kaggle/hungry_geese.py
|
HantianZheng/HandyRL
|
2a109faab4745b936e4176e079da4c98dff592e8
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2020 DeNA Co., Ltd.
# Licensed under The MIT License [see LICENSE for details]
# kaggle_environments licensed under Copyright 2020 Kaggle Inc. and the Apache License, Version 2.0
# (see https://github.com/Kaggle/kaggle-environments/blob/master/LICENSE for details)
# wrapper of Hungry Geese environment from kaggle
import random
import itertools
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import handyrl.envs.kaggle.public_flood_goose as pfg
# You need to install kaggle_environments, requests
from kaggle_environments import make
from kaggle_environments.envs.hungry_geese.hungry_geese import Observation, Configuration, Action, GreedyAgent
from ...environment import BaseEnvironment
class TorusConv2d(nn.Module):
def __init__(self, input_dim, output_dim, kernel_size, bn):
super().__init__()
self.edge_size = (kernel_size[0] // 2, kernel_size[1] // 2)
self.conv = nn.Conv2d(input_dim, output_dim, kernel_size=kernel_size, padding = self.edge_size, padding_mode = 'circular')
self.bn = nn.BatchNorm2d(output_dim) if bn else None
def forward(self, x):
h = self.conv(x)
h = self.bn(h) if self.bn is not None else h
return h
'''
class GeeseNet(nn.Module):
def __init__(self):
super().__init__()
layers, filters = 12, 32
self.conv0 = TorusConv2d(53, filters, (3, 3), True) # TBD
self.blocks = nn.ModuleList([TorusConv2d(filters, filters, (3, 3), True) for _ in range(layers)])
self.head_p = nn.Linear(filters, 4, bias=False)
self.head_v = nn.Linear(filters * 2, 1, bias=False)
def forward(self, x, _=None):
h = F.relu_(self.conv0(x))
for block in self.blocks:
h = F.relu_(h + block(h))
h_head = (h * x[:,:1]).view(h.size(0), h.size(1), -1).sum(-1)
h_avg = h.view(h.size(0), h.size(1), -1).mean(-1)
p = self.head_p(h_head)
v = torch.tanh(self.head_v(torch.cat([h_head, h_avg], 1)))
return {'policy': p, 'value': v}
'''
class GeeseNet(nn.Module):
def __init__(self):
super().__init__()
layers, filters = 14, 32
self.conv0 = TorusConv2d(53, filters, (3, 3), True) # TBD
self.blocks = nn.ModuleList([TorusConv2d(filters, filters, (3, 3), True) for _ in range(layers)])
self.head_p = nn.Linear(filters, 4, bias=False)
self.head_v = nn.Linear(filters * 2, 1, bias=False)
def forward(self, x, _=None):
h = F.relu_(self.conv0(x))
for block in self.blocks:
h = F.relu_(h + block(h))
h_head = (h * x[:,:1]).view(h.size(0), h.size(1), -1).sum(-1)
h_avg = h.view(h.size(0), h.size(1), -1).mean(-1)
p = self.head_p(h_head)
v = torch.tanh(self.head_v(torch.cat([h_head, h_avg], 1)))
return {'policy': p, 'value': v}
class Environment(BaseEnvironment):
ACTION = ['NORTH', 'SOUTH', 'WEST', 'EAST']
DIRECTION = [[-1, 0], [1, 0], [0, -1], [0, 1]]
NUM_AGENTS = 4
ACTION_MAP = {'N': Action.NORTH, 'S': Action.SOUTH, 'W': Action.WEST, 'E': Action.EAST}
pfg_action_map = { Action.NORTH: 'NORTH', Action.SOUTH: 'SOUTH', Action.WEST: 'WEST', Action.EAST: 'EAST'}
def __init__(self, args={}):
super().__init__()
self.env = make("hungry_geese")
self.reset()
def reset(self, args={}):
obs = self.env.reset(num_agents=self.NUM_AGENTS)
self.update((obs, {}), True)
def update(self, info, reset):
obs, last_actions = info
if reset:
self.obs_list = []
self.obs_list.append(obs)
self.last_actions = last_actions
def action2str(self, a, player=None):
return self.ACTION[a]
def str2action(self, s, player=None):
return self.ACTION.index(s)
def direction(self, pos_from, pos_to):
if pos_from is None or pos_to is None:
return None
x, y = pos_from // 11, pos_from % 11
for i, d in enumerate(self.DIRECTION):
nx, ny = (x + d[0]) % 7, (y + d[1]) % 11
if nx * 11 + ny == pos_to:
return i
return None
def __str__(self):
# output state
obs = self.obs_list[-1][0]['observation']
colors = ['\033[33m', '\033[34m', '\033[32m', '\033[31m']
color_end = '\033[0m'
def check_cell(pos):
for i, geese in enumerate(obs['geese']):
if pos in geese:
if pos == geese[0]:
return i, 'h'
if pos == geese[-1]:
return i, 't'
index = geese.index(pos)
pos_prev = geese[index - 1] if index > 0 else None
pos_next = geese[index + 1] if index < len(geese) - 1 else None
directions = [self.direction(pos, pos_prev), self.direction(pos, pos_next)]
return i, directions
if pos in obs['food']:
return 'f'
return None
def cell_string(cell):
if cell is None:
return '.'
elif cell == 'f':
return 'f'
else:
index, directions = cell
if directions == 'h':
return colors[index] + '@' + color_end
elif directions == 't':
return colors[index] + '*' + color_end
elif max(directions) < 2:
return colors[index] + '|' + color_end
elif min(directions) >= 2:
return colors[index] + '-' + color_end
else:
return colors[index] + '+' + color_end
cell_status = [check_cell(pos) for pos in range(7 * 11)]
s = 'turn %d\n' % len(self.obs_list)
for x in range(7):
for y in range(11):
pos = x * 11 + y
s += cell_string(cell_status[pos])
s += '\n'
for i, geese in enumerate(obs['geese']):
s += colors[i] + str(len(geese) or '-') + color_end + ' '
return s
def step(self, actions):
# state transition
obs = self.env.step([self.action2str(actions.get(p, None) or 0) for p in self.players()])
self.update((obs, actions), False)
def diff_info(self, _):
return self.obs_list[-1], self.last_actions
def turns(self):
# players to move
return [p for p in self.players() if self.obs_list[-1][p]['status'] == 'ACTIVE']
def terminal(self):
# check whether terminal state or not
for obs in self.obs_list[-1]:
if obs['status'] == 'ACTIVE':
return False
return True
def outcome(self):
# return terminal outcomes
# 1st: 1.0 2nd: 0.33 3rd: -0.33 4th: -1.00
rewards = {o['observation']['index']: o['reward'] for o in self.obs_list[-1]}
outcomes = {p: 0 for p in self.players()}
for p, r in rewards.items():
for pp, rr in rewards.items():
if p != pp:
if r > rr:
outcomes[p] += 1 / (self.NUM_AGENTS - 1)
elif r < rr:
outcomes[p] -= 1 / (self.NUM_AGENTS - 1)
return outcomes
def legal_actions(self, player):
# return legal action list
return list(range(len(self.ACTION)))
def action_length(self):
# maximum action label (it determines output size of policy function)
return len(self.ACTION)
def players(self):
return list(range(self.NUM_AGENTS))
def rule_based_action(self, player):
agent = GreedyAgent(Configuration({'rows': 7, 'columns': 11}))
agent.last_action = self.ACTION_MAP[self.ACTION[self.last_actions[player]][0]] if player in self.last_actions else None
obs = {**self.obs_list[-1][0]['observation'], **self.obs_list[-1][player]['observation']}
action = agent(Observation(obs))
return self.ACTION.index(action)
def public_flood_goose_based_action(self, player):
obs = {**self.obs_list[-1][0]['observation'], **self.obs_list[-1][player]['observation']}
conf = {'rows': 7, 'columns': 11}
if player in self.last_actions and len(self.obs_list) > 1:
prev_obs = {**self.obs_list[-2][0]['observation'], **self.obs_list[-2][player]['observation']}
pos_int = prev_obs['geese'][prev_obs['index']][0]
pfg.public_flood_agent_goose.last_pos = pfg.Pos(pos_int//11, pos_int%11)
else:
pfg.public_flood_agent_goose.last_pos = None
# print("prev action = ", pfg.public_flood_agent_goose.last_action)
state = pfg.State.from_obs_conf(obs, conf)
action = pfg.public_flood_agent_goose.step(state)
action = state.geo.action_to(state.my_goose.head, action)
# print("action = ", action)
# print("action = ",self.ACTION.index(self.pfg_action_map[action]))
return self.ACTION.index(self.pfg_action_map[action])
def net(self):
return GeeseNet
def observation(self, player): # = None
# if player is None:
# player = 0
b = np.zeros((self.NUM_AGENTS * 13 + 1, 7 * 11), dtype=np.float32) # TBD
obs = self.obs_list[-1][0]['observation']
for p, geese in enumerate(obs['geese']):
# head position
for pos in geese[:1]:
b[0 + (p - player) % self.NUM_AGENTS, pos] = 1
# whole position
for pos in geese:
b[4 + (p - player) % self.NUM_AGENTS, pos] = 1
# body position
for pos in geese[1:-1]:
b[8 + (p - player) % self.NUM_AGENTS, pos] = 1
# tip position
for pos in geese[-1:]:
b[12 + (p - player) % self.NUM_AGENTS, pos] = 1
# previous head positon: see below
# code attached below: line 16,17,18,19
# potential next move
for pos in geese[:1]:
b[20 + (p - player) % self.NUM_AGENTS, (pos - 1)%77] = 1
b[20 + (p - player) % self.NUM_AGENTS, (pos + 1)%77] = 1
b[20 + (p - player) % self.NUM_AGENTS, (pos - 11)%77] = 1
b[20 + (p - player) % self.NUM_AGENTS, (pos + 11)%77] = 1
# the impossible part will be removed in the previous head positions
# snake length for each player
b[24 + (p - player) % self.NUM_AGENTS, :] = len(geese)/77
# snake last second grid
for pos in geese[-2:-1]:
b[28 + (p - player) % self.NUM_AGENTS, pos] = 1
# snake last third grid
for pos in geese[-3:-2]:
b[32 + (p - player) % self.NUM_AGENTS, pos] = 1
# ordered grid snake
for gridi, gridpos in enumerate(geese):
b[36 + (p - player) % self.NUM_AGENTS, gridpos] = (len(geese) - gridi)/20
# previous head position
if len(self.obs_list) > 1:
obs_prev = self.obs_list[-2][0]['observation']
for p, geese in enumerate(obs_prev['geese']):
for pos in geese[:1]:
b[16 + (p - player) % self.NUM_AGENTS, pos] = 1
b[20 + (p - player) % self.NUM_AGENTS, pos] = 0
b[40, :] = b[0:4, :].sum(axis = 0) # all heads
b[41, ] = b[4:8, :].sum(axis = 0) # all wholes
b[42, ] = b[8:12, :].sum(axis = 0) # all bodies
b[43, ] = b[12:16, :].sum(axis = 0) # all tails
b[44, ] = b[16:20, :].sum(axis = 0) # all previous heads
b[45, ] = b[20:24, :].max(axis = 0) # all potential steps
b[46, ] = b[28:32, :].sum(axis = 0) # all last second grid
b[47, ] = b[32:36, :].sum(axis = 0) # all last third grid
b[48, ] = b[36:40, :].sum(axis = 0) # all ordered grid
# food
for pos in obs['food']:
b[49, pos] = 1
# step, distance to next starving
b[50, :] = obs['step']%40 / 40
# step, wether next turn will be starving
b[51, :] = (obs['step']+1)% 40 == 0
b[52, :] = obs['step']/200
# TBD: centralizing
player_head = obs['geese'][player][0]
player_head_x = player_head//11
player_head_y = player_head%11
return b.reshape(-1, 7, 11)
if __name__ == '__main__':
e = Environment()
for _ in range(100):
e.reset()
while not e.terminal():
print(e)
actions = {p: e.legal_actions(p) for p in e.turns()}
print([[e.action2str(a, p) for a in alist] for p, alist in actions.items()])
e.step({p: random.choice(alist) for p, alist in actions.items()})
print(e)
print(e.outcome())
| 38.40118
| 130
| 0.536027
| 11,009
| 0.845675
| 0
| 0
| 0
| 0
| 0
| 0
| 2,758
| 0.211861
|
a128f73c987352e7a2b67ff853ae7ba81f0f3c24
| 198
|
py
|
Python
|
py files/normalization.py
|
kilarinikhil/ComputerVision
|
1cb2985f9c5e45bd0763cb676028ea97fce2b27b
|
[
"Apache-2.0"
] | null | null | null |
py files/normalization.py
|
kilarinikhil/ComputerVision
|
1cb2985f9c5e45bd0763cb676028ea97fce2b27b
|
[
"Apache-2.0"
] | null | null | null |
py files/normalization.py
|
kilarinikhil/ComputerVision
|
1cb2985f9c5e45bd0763cb676028ea97fce2b27b
|
[
"Apache-2.0"
] | 1
|
2020-06-04T18:39:00.000Z
|
2020-06-04T18:39:00.000Z
|
import numpy as np
def normalize(image):
mean = np.mean(image)
meanSubtractedImage = image - mean
return np.divide(meanSubtractedImage,np.power(np.sum(np.power(meanSubtractedImage,2)),0.5))
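# A minimal usage sketch (the input array is illustrative): the result has
# zero mean and unit Euclidean (Frobenius) norm.
if __name__ == '__main__':
    img = np.array([[1.0, 2.0], [3.0, 4.0]])
    out = normalize(img)
    print(out.mean(), np.linalg.norm(out))  # ~0.0, 1.0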
| 22
| 92
| 0.752525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|