# NOTE: removed extraction artifact (dataset markdown-table header "content stringlengths ... |---|").
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 30 11:54:11 2019
@author: Wentao
"""
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
from core import *
from algos import *
from backtest import *
import ffn
ffn.extend_pandas()
# Generate fake daily price data: 900 days of uniform noise for 4 tickers,
# indexed by calendar date ending yesterday (UTC).
data = pd.DataFrame(
    np.random.uniform(0, 900, size=(900, 4)),
    columns=list('ABCD'),
    index=[
        pd.Timestamp(datetime.utcnow().date() + timedelta(days=-i))
        for i in range(900, 0, -1)
    ],
)
# Alternative strategy configurations kept for reference:
#s = Strategy('s1', [RunAfterDays(20),RunMonthly(),SelectAll(),WeighERC(),Rebalance()])
#s = Strategy('s1', [RunAfterDays(20),RunQuarterly(),SelectAll(),WeighInvVol(),Rebalance()])
#s = Strategy('s1', [RunAfterDays(20),RunQuarterly(),SelectRandomly(),WeighMeanVar(),Rebalance()],['A','B'])

# Child momentum strategy over all four tickers.
mom_s = Strategy(
    'mom_s',
    [RunMonthly(), SelectAll(), SelectMomentum(1), WeighEqually(), Rebalance()],
    ['A', 'B', 'C', 'D'],
)
# Master strategy: equal-weight mix of the momentum child and two raw tickers.
s = Strategy(
    'master',
    [RunMonthly(), SelectAll(), WeighEqually(), Rebalance()],
    [mom_s, 'A', 'C'],
)
test = Backtest(s, data)
res = run(test)
res.plot()
res.display()
res.plot_histogram()
# Fix: removed a stray trailing '|' character that made this line a syntax error.
res.plot_security_weights()
import boto3
import os
import requests
import json
from random import (randrange, randint)
from datetime import datetime
from datetime import timedelta
# AppTweak API key, injected via environment variable (None when unset).
APPTWEAK_API_KEY = os.getenv('APPTWEAK_API_KEY')
# Target Kinesis Firehose delivery stream for the transformed review records.
FIREHOSE_STREAM_NAME = 'firehose-blinkist-ratings'
# Two-letter country code used for all review requests.
COUNTRY = 'de'
# Store-specific application identifiers for the Blinkist app.
BLINKIST_APPS = {
    'ios': '568839295',
    'android': 'com.blinkslabs.blinkist.android'
}
def fetch_reviews(api_key, store, app_id, country):
    '''
    Fetch reviews from the AppTweak API.

    Parameters:
        api_key: AppTweak API key, sent in the X-Apptweak-Key header.
        store:   store segment of the API URL (e.g. 'ios' or 'android').
        app_id:  store-specific application identifier.
        country: two-letter country code, passed as a query parameter.

    Returns the decoded JSON response body.
    Returns top 100 arrays of reviews.
    '''
    url = 'https://api.apptweak.com/{store}/applications/{app_id}/reviews.json?country={country}'.format(
        store=store, app_id=app_id, country=country)
    # Fix: added a timeout — requests.get without one can hang indefinitely
    # on a stalled connection, which is fatal in a scheduled/Lambda context.
    response = requests.get(url, headers={'X-Apptweak-Key': api_key}, timeout=30)
    return response.json()
def create_client(service, region):
    """Build a boto3 client for *service* bound to the given AWS *region*."""
    client = boto3.client(service, region_name=region)
    return client
def transform_apptweak_reviews(reviews, metadata):
    '''
    Transform reviews fetched from AppTweak into the record format expected
    by Kinesis / Glue.

    Each output record carries the review date truncated to day precision,
    the review id (exposed under the "author" key), the numeric rating, and
    the store the request was made against.
    '''
    # TODO Specific better failure behavior
    store = metadata['request']['store']
    transformed = []
    for review in reviews:
        published = datetime.strptime(review['date'], '%Y-%m-%dT%H:%M:%Sz')
        transformed.append({
            "published_date": published.strftime('%Y-%m-%d'),
            "author": review.get('id'),
            "rating": review['rating'],
            "platform": store,
        })
    return transformed
def send_kinesis(kinesis_client, kinesis_stream_name, data):
    """Send every record in *data* to the given Kinesis Firehose delivery
    stream, one put_record call per row, then report the total count."""
    # TODO use batch records :)
    for row in data:
        # Firehose wants raw bytes, so serialize each row as UTF-8 JSON.
        payload = bytes(json.dumps(row), 'utf-8')
        kinesis_client.put_record(
            Record={
                "Data": payload  # data byte-encoded
            },
            DeliveryStreamName=kinesis_stream_name,
        )
    print('Total Records sent to Kinesis: {0}'.format(len(data)))
def get_reviews(api_key, store, app_id, country):
    """Fetch reviews for one app from AppTweak and transform them into
    Kinesis-ready records.

    Fix: the original passed the module-level COUNTRY constant to
    fetch_reviews, silently ignoring this function's *country* argument.
    (Behavior is unchanged for existing callers, which all pass COUNTRY.)
    """
    apptweak_reviews = fetch_reviews(api_key, store, app_id, country)
    transformed_data = transform_apptweak_reviews(
        apptweak_reviews['content'], apptweak_reviews['metadata'])
    return transformed_data
def main():
    """Fetch Blinkist reviews for Android and iOS from AppTweak and forward
    the combined records to Kinesis Firehose."""
    # Fetch data without silent errors
    # Android
    android_reviews = get_reviews(
        APPTWEAK_API_KEY, 'android', BLINKIST_APPS['android'], COUNTRY)
    print('Apptweak Android reviews fetched: {}'.format(len(android_reviews)))
    # iOS
    ios_reviews = get_reviews(APPTWEAK_API_KEY, 'ios',
                              BLINKIST_APPS['ios'], COUNTRY)
    # Fix: the original printed len(android_reviews) here, misreporting the
    # iOS count.
    print('Apptweak iOs reviews fetched: {}'.format(len(ios_reviews)))
    kinesis = create_client('firehose', 'eu-west-1')
    send_kinesis(kinesis, FIREHOSE_STREAM_NAME, android_reviews + ios_reviews)
if __name__ == "__main__":
    main()
# NOTE: removed extraction artifact (lone '|' separating concatenated modules).
from cidc_schemas.prism import SUPPORTED_ASSAYS, PROTOCOL_ID_FIELD_NAME
from .utils import (
copy_dict_with_branch,
get_prismify_args,
get_test_trial,
LocalFileUploadEntry,
PrismTestData,
)
# Registry of all assay test-data generator functions, populated via the
# @assay_data_generator decorator below.
assay_data_generators = []
def assay_data_generator(f):
    """Decorator: register *f* in assay_data_generators and return it unchanged."""
    assay_data_generators.append(f)
    return f
@assay_data_generator
def clinical_data() -> PrismTestData:
    """Prism test data for a 'clinical_data' upload: a patch with two
    clinical-file records plus the matching local-file upload entries
    (upload_placeholder UUIDs tie each record to its expected GCS key)."""
    upload_type = "clinical_data"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "clinical_data": {
            "assay_creator": "DFCI",
            "records": [
                {
                    "clinical_file": {
                        "upload_placeholder": "28ec20a1-d2dc-46aa-91be-819b684da268"
                    },
                    "comment": "no comment",
                },
                {
                    "clinical_file": {
                        "upload_placeholder": "38ec20a1-d2dc-46aa-91be-819b684da268"
                    },
                    "comment": "no comment",
                },
            ],
        },
        "protocol_identifier": "test_prism_trial_id",
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="test_file.xlsx",
            gs_key="test_prism_trial_id/clinical/response.xlsx",
            upload_placeholder="28ec20a1-d2dc-46aa-91be-819b684da268",
            metadata_availability=True,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="test_file2.csv",
            gs_key="test_prism_trial_id/clinical/dosage.csv",
            upload_placeholder="38ec20a1-d2dc-46aa-91be-819b684da268",
            metadata_availability=True,
            allow_empty=False,
        ),
    ]
    # Clinical data references no CIMAC samples, hence the empty id list.
    base_trial = get_test_trial([])
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "clinical_data")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def cytof() -> PrismTestData:
    """Prism test data for a 'cytof' upload: one assay batch with two sample
    records (processed FCS files), two batch-level source FCS files, one
    control, antibodies, and barcodes — plus the matching upload entries."""
    upload_type = "cytof"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "assays": {
            "cytof": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "input_files": {
                                "processed_fcs": {
                                    "upload_placeholder": "97c3b6a6-b03d-4ca1-92f8-b8651e51d0c6"
                                },
                            },
                            "concatenation_version": "GHIL",
                            "normalization_version": "ABC",
                            "preprocessing_notes": "a note like any other note",
                        },
                        {
                            "cimac_id": "CTTTPP121.00",
                            "input_files": {
                                "processed_fcs": {
                                    "upload_placeholder": "7e992a16-9c6a-4ef1-90b8-ef1a599b88bc"
                                },
                            },
                            "concatenation_version": "GHIL",
                            "normalization_version": "ABC",
                            "preprocessing_notes": "a different note",
                        },
                    ],
                    "assay_run_id": "test_prism_trial_id_run_1",
                    "assay_creator": "DFCI",
                    "instrument": "PresNixon123",
                    "source_fcs": [
                        {"upload_placeholder": "4918a014-0e63-4a36-a45a-c62d593e225e"},
                        {"upload_placeholder": "0bbd7520-18b9-4ec3-8344-49f02dcadb08"},
                    ],
                    "batch_id": "XYZ1",
                    "injector": "HAT123",
                    "date_of_acquisition": "43355",
                    "acquisition_buffer": "ABC",
                    "bead_removal": True,
                    "normalization_method": "Fluidigm",
                    "debarcoding_protocol": "Fluidigm",
                    # NOTE(review): key spelled "harware_version" — presumably
                    # mirrors a typo in the schema; confirm before renaming.
                    "harware_version": "Fluidigm 3.0.2",
                    "cytof_antibodies": [
                        {
                            "antibody": "CD8",
                            "clone": "C8/144b",
                            "company": "DAKO",
                            "cat_num": "C8-ABC",
                            "lot_num": "3983272",
                            "isotope": "146Nd",
                            "dilution": "100X",
                            "stain_type": "Surface Stain",
                            "usage": "Used",
                        },
                        {
                            "antibody": "PD-L1",
                            "clone": "C2/11p",
                            "company": "DAKO",
                            "cat_num": "C8-AB123",
                            "lot_num": "1231272",
                            "isotope": "146Nb",
                            "dilution": "100X",
                            "stain_type": "Surface Stain",
                            "usage": "Analysis Only",
                        },
                    ],
                    "barcodes": [
                        {
                            "barcode_id": "CTTTPP111.00",
                            "barcode_num": "CTTTPP111",
                            "debarcoding_key": "ABC123",
                        }
                    ],
                    "controls": [
                        {
                            "control_name": "XYZ1-control",
                            "input_files": {
                                "processed_fcs": {
                                    "upload_placeholder": "aaa92a16-9c6a-4ef1-90b8-ef1a599aaaaa"
                                },
                            },
                        }
                    ],
                }
            ]
        },
        "protocol_identifier": "test_prism_trial_id",
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="sample1.fcs",
            gs_key="test_prism_trial_id/cytof/CTTTPP111.00/processed.fcs",
            upload_placeholder="97c3b6a6-b03d-4ca1-92f8-b8651e51d0c6",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="sample2.fcs",
            gs_key="test_prism_trial_id/cytof/CTTTPP121.00/processed.fcs",
            upload_placeholder="7e992a16-9c6a-4ef1-90b8-ef1a599b88bc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="batch1f1.fcs",
            gs_key="test_prism_trial_id/cytof/XYZ1/source_0.fcs",
            upload_placeholder="4918a014-0e63-4a36-a45a-c62d593e225e",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="batch1f2.fcs",
            gs_key="test_prism_trial_id/cytof/XYZ1/source_1.fcs",
            upload_placeholder="0bbd7520-18b9-4ec3-8344-49f02dcadb08",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="XYZ1-control.fcs",
            gs_key="test_prism_trial_id/cytof/controls/XYZ1-control/processed.fcs",
            upload_placeholder="aaa92a16-9c6a-4ef1-90b8-ef1a599aaaaa",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["cytof"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def ihc() -> PrismTestData:
    """Prism test data for an 'ihc' upload: four image records covering the
    different marker_positive values and image extensions, with an
    integer-like protocol identifier ("123") to exercise that edge case."""
    upload_type = "ihc"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "123",  # testing integer protocol id
        "assays": {
            "ihc": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "files": {
                                "ihc_image": {
                                    "upload_placeholder": "e4294fb9-047f-4df6-b614-871289a1a2a8"
                                }
                            },
                            "marker_positive": "positive",
                            "tumor_proportion_score": 0.67,
                            "intensity": 0.0,
                            "percentage_expression": 0.0,
                            "h_score": 0,
                        },
                        {
                            "cimac_id": "CTTTPP121.00",
                            "files": {
                                "ihc_image": {
                                    "upload_placeholder": "fba3f94b-669c-48c7-aee0-f0d5e5e8a341"
                                }
                            },
                            "marker_positive": "no_call",
                            "tumor_proportion_score": 0.1,
                            "intensity": 1.0,
                            "percentage_expression": 10.0,
                            "h_score": 120,
                        },
                        {
                            "cimac_id": "CTTTPP122.00",
                            "files": {
                                "ihc_image": {
                                    "upload_placeholder": "ecd3f6ea-8315-4fa9-bb37-501b4e821aed"
                                }
                            },
                            "marker_positive": "negative",
                            "tumor_proportion_score": 0.1,
                            "intensity": 2.0,
                            "percentage_expression": 40.0,
                            "h_score": 299,
                        },
                        {
                            "cimac_id": "CTTTPP123.00",
                            "files": {
                                "ihc_image": {
                                    "upload_placeholder": "af19deb2-a66e-4c2c-960c-308781245c69"
                                }
                            },
                            "marker_positive": "positive",
                            # "NE" (not evaluable) exercises the non-numeric
                            # tumor_proportion_score case.
                            "tumor_proportion_score": "NE",
                            "intensity": 3.0,
                            "percentage_expression": 100.0,
                            "h_score": 300,
                        },
                    ],
                    "assay_creator": "DFCI",
                    "slide_scanner_model": "Vectra 2.0",
                    "staining_platform": "auto",
                    "autostainer_model": "Bond RX",
                    "antibody": {
                        "antibody": "XYZ",
                        "company": "XYZ",
                        "clone": "XYZ",
                        "cat_num": "ABX.123",
                        "lot_num": "#12345",
                        "dilution": "1:100",
                        "incubation_time": "06:45:00",
                        "incubation_temp": "54c",
                    },
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="path/to/image1.tif",
            gs_key="123/ihc/CTTTPP111.00/ihc_image.tif",
            upload_placeholder="e4294fb9-047f-4df6-b614-871289a1a2a8",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="path/to/image2.tiff",
            gs_key="123/ihc/CTTTPP121.00/ihc_image.tiff",
            upload_placeholder="fba3f94b-669c-48c7-aee0-f0d5e5e8a341",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="path/to/image3.svs",
            gs_key="123/ihc/CTTTPP122.00/ihc_image.svs",
            upload_placeholder="ecd3f6ea-8315-4fa9-bb37-501b4e821aed",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="path/to/image4.qptiff",
            gs_key="123/ihc/CTTTPP123.00/ihc_image.qptiff",
            upload_placeholder="af19deb2-a66e-4c2c-960c-308781245c69",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["ihc"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    # Align the base trial's protocol id with the patch's non-default "123".
    base_trial[PROTOCOL_ID_FIELD_NAME] = "123"
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def hande() -> PrismTestData:
    """Prism test data for an 'hande' (H&E) upload: two image records with
    tissue-percentage metrics, again using the "123" protocol identifier."""
    upload_type = "hande"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "123",
        "assays": {
            "hande": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "files": {
                                "image_file": {
                                    "upload_placeholder": "eeeeeeee-047f-4df6-b614-871289a1a2a8"
                                },
                            },
                            "tumor_tissue_percentage": 1.0,
                            "viable_tumor_percentage": 1.0,
                            "viable_stroma_percentage": 1.0,
                            "necrosis_percentage": 1.0,
                            "fibrosis_percentage": 1.0,
                            "comment": "a comment",
                        },
                        {
                            "cimac_id": "CTTTPP121.00",
                            "files": {
                                "image_file": {
                                    "upload_placeholder": "eeeeeeee-669c-48c7-aee0-f0d5e5e8a341"
                                }
                            },
                            "tumor_tissue_percentage": 2.0,
                            "viable_tumor_percentage": 2.0,
                            "viable_stroma_percentage": 2.0,
                            "necrosis_percentage": 2.0,
                            "fibrosis_percentage": 2.0,
                            "comment": "another comment",
                        },
                    ],
                    "assay_creator": "DFCI",
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="path/to/image1.svs",
            gs_key="123/hande/CTTTPP111.00/image_file.svs",
            upload_placeholder="eeeeeeee-047f-4df6-b614-871289a1a2a8",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="path/to/image2.svs",
            gs_key="123/hande/CTTTPP121.00/image_file.svs",
            upload_placeholder="eeeeeeee-669c-48c7-aee0-f0d5e5e8a341",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["hande"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    # Align the base trial's protocol id with the patch's non-default "123".
    base_trial[PROTOCOL_ID_FIELD_NAME] = "123"
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def wes_bam() -> PrismTestData:
    """Prism test data for a 'wes_bam' upload: two WES sample records, each
    with two BAM files mapped to reads_0.bam / reads_1.bam GCS keys."""
    upload_type = "wes_bam"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "wes": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "files": {
                                "bam": [
                                    {
                                        "upload_placeholder": "d75a0a45-50dd-4aa5-bd46-2793bd5c84e5"
                                    },
                                    {
                                        "upload_placeholder": "3385fc87-9630-440b-9924-448168050170"
                                    },
                                ]
                            },
                            "sequencing_date": "2010-01-01 00:00:00",
                            "quality_flag": 1.0,
                        },
                        {
                            "cimac_id": "CTTTPP121.00",
                            "files": {
                                "bam": [
                                    {
                                        "upload_placeholder": "c2ffea21-0771-45ca-bd08-f384b012afb9"
                                    },
                                    {
                                        "upload_placeholder": "b5952706-527d-4a6c-b085-97cb02059da6"
                                    },
                                ]
                            },
                            "sequencing_date": "2010-01-01 00:00:00",
                            "quality_flag": 1.0,
                        },
                    ],
                    "assay_creator": "Mount Sinai",
                    "sequencing_protocol": "Express Somatic Human WES (Deep Coverage) v1.1",
                    "library_kit": "Hyper Prep ICE Exome Express: 1.0",
                    "sequencer_platform": "Illumina - NextSeq 550",
                    "paired_end_reads": "Paired",
                    "read_length": 100,
                    "bait_set": "whole_exome_illumina_coding_v1",
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="gs://local/path/to/fwd.1.1.1.bam",
            gs_key="test_prism_trial_id/wes/CTTTPP111.00/reads_0.bam",
            upload_placeholder="d75a0a45-50dd-4aa5-bd46-2793bd5c84e5",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="gs://local/path/to/fwd.1.1.1_2.bam",
            gs_key="test_prism_trial_id/wes/CTTTPP111.00/reads_1.bam",
            upload_placeholder="3385fc87-9630-440b-9924-448168050170",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="gs://local/path/to/fwd.1.2.1.bam",
            gs_key="test_prism_trial_id/wes/CTTTPP121.00/reads_0.bam",
            upload_placeholder="c2ffea21-0771-45ca-bd08-f384b012afb9",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="gs://local/path/to/fwd.1.2.1_2.bam",
            gs_key="test_prism_trial_id/wes/CTTTPP121.00/reads_1.bam",
            upload_placeholder="b5952706-527d-4a6c-b085-97cb02059da6",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["wes"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def wes_fastq() -> PrismTestData:
    """Prism test data for a 'wes_fastq' upload: two WES sample records, each
    with two r1 and two r2 FASTQ lanes mapped to r{1,2}_L{1,2}.fastq.gz keys."""
    upload_type = "wes_fastq"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "wes": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "files": {
                                "r1": [
                                    {
                                        "upload_placeholder": "3c8b4fe4-780a-4431-908f-aa879c01c009"
                                    },
                                    {
                                        "upload_placeholder": "c665c9ca-7065-46b8-b1c8-b871e15db294"
                                    },
                                ],
                                "r2": [
                                    {
                                        "upload_placeholder": "82bc1123-55e2-4640-a9c9-a259d5756a86"
                                    },
                                    {
                                        "upload_placeholder": "92bc1123-55e2-4640-a9c9-a259d5756a86"
                                    },
                                ],
                            },
                            "sequencing_date": "2010-01-01 00:00:00",
                            "quality_flag": 1.0,
                        },
                        {
                            "cimac_id": "CTTTPP121.00",
                            "files": {
                                "r1": [
                                    {
                                        "upload_placeholder": "4d57fa58-5dd4-4379-878d-935d79d2507f"
                                    },
                                    {
                                        "upload_placeholder": "c24a1b3d-a19a-414a-9fc4-55bcbb7db9ec"
                                    },
                                ],
                                "r2": [
                                    {
                                        "upload_placeholder": "5eb4b639-c2a4-48f8-85f8-e9a04f5233c6"
                                    },
                                    {
                                        "upload_placeholder": "6eb4b639-c2a4-48f8-85f8-e9a04f5233c6"
                                    },
                                ],
                            },
                            "sequencing_date": "2010-01-01 00:00:00",
                            "quality_flag": 1.0,
                        },
                    ],
                    "assay_creator": "Mount Sinai",
                    "sequencing_protocol": "Express Somatic Human WES (Deep Coverage) v1.1",
                    "library_kit": "Hyper Prep ICE Exome Express: 1.0",
                    "sequencer_platform": "Illumina - NextSeq 550",
                    "paired_end_reads": "Paired",
                    "read_length": 100,
                    "bait_set": "whole_exome_illumina_coding_v1",
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.1.1.fastq.gz",
            gs_key="test_prism_trial_id/wes/CTTTPP111.00/r1_L1.fastq.gz",
            upload_placeholder="3c8b4fe4-780a-4431-908f-aa879c01c009",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.1.1_2.fastq.gz",
            gs_key="test_prism_trial_id/wes/CTTTPP111.00/r1_L2.fastq.gz",
            upload_placeholder="c665c9ca-7065-46b8-b1c8-b871e15db294",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.1.1.fastq.gz",
            gs_key="test_prism_trial_id/wes/CTTTPP111.00/r2_L1.fastq.gz",
            upload_placeholder="82bc1123-55e2-4640-a9c9-a259d5756a86",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.1.1_2.fastq.gz",
            gs_key="test_prism_trial_id/wes/CTTTPP111.00/r2_L2.fastq.gz",
            upload_placeholder="92bc1123-55e2-4640-a9c9-a259d5756a86",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.2.1.fastq.gz",
            gs_key="test_prism_trial_id/wes/CTTTPP121.00/r1_L1.fastq.gz",
            upload_placeholder="4d57fa58-5dd4-4379-878d-935d79d2507f",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.2.1_2.fastq.gz",
            gs_key="test_prism_trial_id/wes/CTTTPP121.00/r1_L2.fastq.gz",
            upload_placeholder="c24a1b3d-a19a-414a-9fc4-55bcbb7db9ec",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.2.1.fastq.gz",
            gs_key="test_prism_trial_id/wes/CTTTPP121.00/r2_L1.fastq.gz",
            upload_placeholder="5eb4b639-c2a4-48f8-85f8-e9a04f5233c6",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.2.1_2.fastq.gz",
            gs_key="test_prism_trial_id/wes/CTTTPP121.00/r2_L2.fastq.gz",
            upload_placeholder="6eb4b639-c2a4-48f8-85f8-e9a04f5233c6",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["wes"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def atacseq_fastq() -> PrismTestData:
    """Prism test data for an 'atacseq_fastq' upload: four records for the
    same sample (CTTTPP111.00) covering replicates 1 and 2 across two lanes
    (gs_keys distinguish lanes as r{1,2}_L{1,2} under each replicate)."""
    upload_type = "atacseq_fastq"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "atacseq": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "replicate_number": 1,
                            "files": {
                                "r1": [
                                    {
                                        "upload_placeholder": "3c8b4fe4-780a-4431-908f-aa879c01c009"
                                    },
                                ],
                                "r2": [
                                    {
                                        "upload_placeholder": "82bc1123-55e2-4640-a9c9-a259d5756a86"
                                    },
                                ],
                            },
                            "sequencing_date": "2010-01-01 00:00:00",
                            "quality_flag": 1.0,
                            "percent_q30": 90.0,
                        },
                        {
                            "cimac_id": "CTTTPP111.00",
                            "replicate_number": 2,
                            "files": {
                                "r1": [
                                    {
                                        "upload_placeholder": "4d57fa58-5dd4-4379-878d-935d79d2507f"
                                    },
                                ],
                                "r2": [
                                    {
                                        "upload_placeholder": "5eb4b639-c2a4-48f8-85f8-e9a04f5233c6"
                                    },
                                ],
                            },
                            "sequencing_date": "2010-01-01 00:00:00",
                            "quality_flag": 1.0,
                            "percent_q30": 90.0,
                        },
                        # NOTE(review): the next two records repeat the same
                        # cimac_id/replicate_number pairs — per the upload
                        # entries' gs_keys they represent a second lane (L2).
                        {
                            "cimac_id": "CTTTPP111.00",
                            "replicate_number": 1,
                            "files": {
                                "r1": [
                                    {
                                        "upload_placeholder": "c665c9ca-7065-46b8-b1c8-b871e15db294"
                                    },
                                ],
                                "r2": [
                                    {
                                        "upload_placeholder": "92bc1123-55e2-4640-a9c9-a259d5756a86"
                                    },
                                ],
                            },
                            "sequencing_date": "2010-01-01 00:00:00",
                            "quality_flag": 1.0,
                            "percent_q30": 90.0,
                        },
                        {
                            "cimac_id": "CTTTPP111.00",
                            "replicate_number": 2,
                            "files": {
                                "r1": [
                                    {
                                        "upload_placeholder": "c24a1b3d-a19a-414a-9fc4-55bcbb7db9ec"
                                    },
                                ],
                                "r2": [
                                    {
                                        "upload_placeholder": "6eb4b639-c2a4-48f8-85f8-e9a04f5233c6"
                                    },
                                ],
                            },
                            "sequencing_date": "2010-01-01 00:00:00",
                            "quality_flag": 1.0,
                            "percent_q30": 90.0,
                        },
                    ],
                    "batch_id": "XYZ",
                    "assay_creator": "Mount Sinai",
                    "sequencer_platform": "Illumina - NextSeq 550",
                    "paired_end_reads": "Paired",
                    "read_length": 100,
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.1.1.fastq.gz",
            gs_key="test_prism_trial_id/atacseq/CTTTPP111.00/1/r1_L1.fastq.gz",
            upload_placeholder="3c8b4fe4-780a-4431-908f-aa879c01c009",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.1.1.fastq.gz",
            gs_key="test_prism_trial_id/atacseq/CTTTPP111.00/1/r2_L1.fastq.gz",
            upload_placeholder="82bc1123-55e2-4640-a9c9-a259d5756a86",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.1.1_2.fastq.gz",
            gs_key="test_prism_trial_id/atacseq/CTTTPP111.00/1/r1_L2.fastq.gz",
            upload_placeholder="c665c9ca-7065-46b8-b1c8-b871e15db294",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.1.1_2.fastq.gz",
            gs_key="test_prism_trial_id/atacseq/CTTTPP111.00/1/r2_L2.fastq.gz",
            upload_placeholder="92bc1123-55e2-4640-a9c9-a259d5756a86",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.2.1.fastq.gz",
            gs_key="test_prism_trial_id/atacseq/CTTTPP111.00/2/r1_L1.fastq.gz",
            upload_placeholder="4d57fa58-5dd4-4379-878d-935d79d2507f",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.2.1.fastq.gz",
            gs_key="test_prism_trial_id/atacseq/CTTTPP111.00/2/r2_L1.fastq.gz",
            upload_placeholder="5eb4b639-c2a4-48f8-85f8-e9a04f5233c6",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.2.1_2.fastq.gz",
            gs_key="test_prism_trial_id/atacseq/CTTTPP111.00/2/r1_L2.fastq.gz",
            upload_placeholder="c24a1b3d-a19a-414a-9fc4-55bcbb7db9ec",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.2.1_2.fastq.gz",
            gs_key="test_prism_trial_id/atacseq/CTTTPP111.00/2/r2_L2.fastq.gz",
            upload_placeholder="6eb4b639-c2a4-48f8-85f8-e9a04f5233c6",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs (duplicates are fine for get_test_trial).
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["atacseq"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def misc_data() -> PrismTestData:
    """Prism test data for a 'misc_data' upload: four arbitrary files (with
    and without descriptions) whose gs_keys use the metadata file names."""
    upload_type = "misc_data"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "misc_data": [
                {
                    "assay_creator": "DFCI",
                    "files": [
                        {
                            "name": "foo.txt",
                            "file": {
                                "upload_placeholder": "3c8b4fe4-780a-4431-908f-aa879c01c009"
                            },
                            "file_location": "foo",
                        },
                        {
                            "name": "file_name.txt",
                            "file": {
                                "upload_placeholder": "82bc1123-55e2-4640-a9c9-a259d5756a86"
                            },
                            "file_location": "bar",
                        },
                        {
                            "name": "baz.txt",
                            "file": {
                                "upload_placeholder": "c665c9ca-7065-46b8-b1c8-b871e15db294"
                            },
                            "file_location": "baz",
                            "description": "this is a description",
                        },
                        {
                            "name": "file_name.barbaz",
                            "file": {
                                "upload_placeholder": "c665c9ca-7065-46b8-a9c9-a259d5756a86"
                            },
                            "file_location": "barbaz",
                            "description": "this is a description",
                        },
                    ],
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="foo",
            gs_key="test_prism_trial_id/misc_data/foo.txt",
            upload_placeholder="3c8b4fe4-780a-4431-908f-aa879c01c009",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="bar",
            gs_key="test_prism_trial_id/misc_data/file_name.txt",
            upload_placeholder="82bc1123-55e2-4640-a9c9-a259d5756a86",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="baz",
            gs_key="test_prism_trial_id/misc_data/baz.txt",
            upload_placeholder="c665c9ca-7065-46b8-b1c8-b871e15db294",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="barbaz",
            gs_key="test_prism_trial_id/misc_data/file_name.barbaz",
            upload_placeholder="c665c9ca-7065-46b8-a9c9-a259d5756a86",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # misc_data references no CIMAC samples, so the base trial needs none.
    base_trial = get_test_trial()
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def rna_bam() -> PrismTestData:
    """Prism test data for an 'rna_bam' upload: two RNA sample records, each
    with two BAM files mapped to reads_0.bam / reads_1.bam GCS keys; one
    record carries rqs only, the other both rqs and rin."""
    upload_type = "rna_bam"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "rna": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP122.00",
                            "files": {
                                "bam": [
                                    {
                                        "upload_placeholder": "8c85011c-ccee-49b4-a940-be6ece437953"
                                    },
                                    {
                                        "upload_placeholder": "5cebf955-8f5b-4523-807b-3bd3cf5811f6"
                                    },
                                ]
                            },
                            "library_yield_ng": 600.0,
                            "dv200": 0.7,
                            "rqs": 8.0,
                            "quality_flag": 1.0,
                        },
                        {
                            "cimac_id": "CTTTPP123.00",
                            "files": {
                                "bam": [
                                    {
                                        "upload_placeholder": "10859cc5-8258-4d00-9118-9939b354a519"
                                    },
                                    {
                                        "upload_placeholder": "c7cf5b84-b924-48dd-9f7b-a32efd6a7b0d"
                                    },
                                ]
                            },
                            "dv200": 0.8,
                            "rqs": 9.0,
                            "rin": 9.0,
                            "quality_flag": 1.0,
                        },
                    ],
                    "assay_creator": "DFCI",
                    "enrichment_method": "Transcriptome capture v1",
                    "sequencer_platform": "Illumina - HiSeq 3000",
                    "paired_end_reads": "Paired",
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="gs://local/path/to/fwd.1.1.1.bam",
            gs_key="test_prism_trial_id/rna/CTTTPP122.00/reads_0.bam",
            upload_placeholder="8c85011c-ccee-49b4-a940-be6ece437953",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="gs://local/path/to/fwd.1.1.1_2.bam",
            gs_key="test_prism_trial_id/rna/CTTTPP122.00/reads_1.bam",
            upload_placeholder="5cebf955-8f5b-4523-807b-3bd3cf5811f6",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="gs://local/path/to/fwd.1.2.1.bam",
            gs_key="test_prism_trial_id/rna/CTTTPP123.00/reads_0.bam",
            upload_placeholder="10859cc5-8258-4d00-9118-9939b354a519",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="gs://local/path/to/fwd.1.2.1_2.bam",
            gs_key="test_prism_trial_id/rna/CTTTPP123.00/reads_1.bam",
            upload_placeholder="c7cf5b84-b924-48dd-9f7b-a32efd6a7b0d",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["rna"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def rna_fastq() -> PrismTestData:
    """Prism test data for an 'rna_fastq' upload: two RNA sample records with
    two r1 lanes and one r2 lane each, mapped to r{1,2}_{index}.fastq.gz keys."""
    upload_type = "rna_fastq"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "rna": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP122.00",
                            "files": {
                                "r1": [
                                    {
                                        "upload_placeholder": "2635df00-082b-4e2d-92a8-7a5e629483db"
                                    },
                                    {
                                        "upload_placeholder": "b0723fe8-5533-40e0-86cb-16162d8683e4"
                                    },
                                ],
                                "r2": [
                                    {
                                        "upload_placeholder": "1cd2bb4f-3f84-4f78-b387-4edb6dcc5d1b"
                                    }
                                ],
                            },
                            "library_yield_ng": 600.0,
                            "dv200": 0.7,
                            "rqs": 8.0,
                            "quality_flag": 1.0,
                        },
                        {
                            "cimac_id": "CTTTPP123.00",
                            "files": {
                                "r1": [
                                    {
                                        "upload_placeholder": "d49521dc-d531-4555-a874-80aa0ce31dc1"
                                    },
                                    {
                                        "upload_placeholder": "5ebfef93-5c4c-496d-b8ae-13c1978322d2"
                                    },
                                ],
                                "r2": [
                                    {
                                        "upload_placeholder": "ae150200-c6b2-459c-a264-b56bc2aca263"
                                    }
                                ],
                            },
                            "library_yield_ng": 650.0,
                            "dv200": 0.8,
                            "rqs": 9.0,
                            "rin": 9.0,
                            "quality_flag": 1.0,
                        },
                    ],
                    "assay_creator": "DFCI",
                    "enrichment_method": "Transcriptome capture v1",
                    "enrichment_vendor_kit": "Illumina - TruSeq Stranded PolyA mRNA",
                    "sequencer_platform": "Illumina - HiSeq 3000",
                    "paired_end_reads": "Paired",
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.1.1.fastq.gz",
            gs_key="test_prism_trial_id/rna/CTTTPP122.00/r1_0.fastq.gz",
            upload_placeholder="2635df00-082b-4e2d-92a8-7a5e629483db",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.1.1_2.fastq.gz",
            gs_key="test_prism_trial_id/rna/CTTTPP122.00/r1_1.fastq.gz",
            upload_placeholder="b0723fe8-5533-40e0-86cb-16162d8683e4",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.1.1.fastq.gz",
            gs_key="test_prism_trial_id/rna/CTTTPP122.00/r2_0.fastq.gz",
            upload_placeholder="1cd2bb4f-3f84-4f78-b387-4edb6dcc5d1b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.2.1.fastq.gz",
            gs_key="test_prism_trial_id/rna/CTTTPP123.00/r1_0.fastq.gz",
            upload_placeholder="d49521dc-d531-4555-a874-80aa0ce31dc1",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/fwd.1.2.1_2.fastq.gz",
            gs_key="test_prism_trial_id/rna/CTTTPP123.00/r1_1.fastq.gz",
            upload_placeholder="5ebfef93-5c4c-496d-b8ae-13c1978322d2",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/rev.1.2.1.fastq.gz",
            gs_key="test_prism_trial_id/rna/CTTTPP123.00/r2_0.fastq.gz",
            upload_placeholder="ae150200-c6b2-459c-a264-b56bc2aca263",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["rna"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def tcr_adaptive() -> PrismTestData:
    """Prism test data for a 'tcr_adaptive' upload: one negative control and
    two sample TSV records, mapped under the batch-id GCS prefix. Note that
    only sample records (not controls) contribute cimac_ids to the trial."""
    upload_type = "tcr_adaptive"
    prismify_args = get_prismify_args(upload_type)
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "tcr": [
                {
                    "controls": [
                        {
                            "id": "neg",
                            "tsv_file": {
                                "upload_placeholder": "3735df00-082b-4e2d-92a8-7a5e629483dc"
                            },
                        }
                    ],
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "tsv_file": {
                                "upload_placeholder": "3635df00-082b-4e2d-92a8-7a5e629483dc"
                            },
                        },
                        {
                            "cimac_id": "CTTTPP121.00",
                            "tsv_file": {
                                "upload_placeholder": "e49521dc-d531-4555-a874-80aa0ce31dc2"
                            },
                        },
                    ],
                    "assay_creator": "Adaptive",
                    "sequencer_platform": "Adaptive",
                    "batch_id": "XYZ",
                }
            ]
        },
    }
    upload_entries = [
        LocalFileUploadEntry(
            local_path="neg.tsv",
            gs_key="test_prism_trial_id/tcr/XYZ/controls/neg/reads.tsv",
            upload_placeholder="3735df00-082b-4e2d-92a8-7a5e629483dc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="CTTTPP111_00.tsv",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP111.00/reads.tsv",
            upload_placeholder="3635df00-082b-4e2d-92a8-7a5e629483dc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="CTTTPP121_00.tsv",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/reads.tsv",
            upload_placeholder="e49521dc-d531-4555-a874-80aa0ce31dc2",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every cimac_id referenced by the patch so the base trial has
    # matching sample stubs.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["tcr"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def tcr_fastq() -> PrismTestData:
    """Test data for a FASTQ-based TCR-seq upload.

    Two samples in batch "XYZ"; the second sample (CTTTPP121.00) has two
    replicates. Each replicate carries r1/r2 read files and i1/i2 index
    files, and the batch has a single shared sample sheet.
    """
    upload_type = "tcr_fastq"
    prismify_args = get_prismify_args(upload_type)
    # The metadata patch prism is expected to produce for this upload.
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "tcr": [
                {
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "files": {
                                "replicates": [
                                    {
                                        "replicate_id": "1A",
                                        "r1": [
                                            {
                                                "upload_placeholder": "3635df00-082b-4e2d-92a8-7a5e629483dc"
                                            }
                                        ],
                                        "r2": [
                                            {
                                                "upload_placeholder": "2cd2bb4f-3f84-4f78-b387-4edb6dcc5d1c"
                                            }
                                        ],
                                        "i1": [
                                            {
                                                "upload_placeholder": "aa35df00-082b-4e2d-92a8-7a5e629483dc"
                                            }
                                        ],
                                        "i2": [
                                            {
                                                "upload_placeholder": "bbd2bb4f-3f84-4f78-b387-4edb6dcc5d1c"
                                            }
                                        ],
                                        "rna_quantity_ng": 600.0,
                                    }
                                ]
                            },
                        },
                        {
                            "cimac_id": "CTTTPP121.00",
                            "files": {
                                "replicates": [
                                    {
                                        "replicate_id": "1A",
                                        "r1": [
                                            {
                                                "upload_placeholder": "e49521dc-d531-4555-a874-80aa0ce31dc2"
                                            }
                                        ],
                                        "r2": [
                                            {
                                                "upload_placeholder": "be150200-c6b2-459c-a264-b56bc2aca264"
                                            }
                                        ],
                                        "i1": [
                                            {
                                                "upload_placeholder": "cc9521dc-d531-4555-a874-80aa0ce31dc2"
                                            }
                                        ],
                                        "i2": [
                                            {
                                                "upload_placeholder": "dd150200-c6b2-459c-a264-b56bc2aca264"
                                            }
                                        ],
                                        "rna_quantity_ng": 650.0,
                                    },
                                    {
                                        "replicate_id": "2A",
                                        "r1": [
                                            {
                                                "upload_placeholder": "r29521dc-d531-4555-a874-80aa0ce31dc2"
                                            }
                                        ],
                                        "r2": [
                                            {
                                                "upload_placeholder": "r2150200-c6b2-459c-a264-b56bc2aca264"
                                            }
                                        ],
                                        "i1": [
                                            {
                                                "upload_placeholder": "r29521dc-d531-4555-a874-80aa0ce31dc3"
                                            }
                                        ],
                                        "i2": [
                                            {
                                                "upload_placeholder": "r2150200-c6b2-459c-a264-b56bc2aca265"
                                            }
                                        ],
                                        "rna_quantity_ng": 10.0,
                                    },
                                ]
                            },
                        },
                    ],
                    "assay_creator": "Mount Sinai",
                    "sequencer_platform": "Illumina - HiSeq 3000",
                    "batch_id": "XYZ",
                    "sequencing_run_date": "2019-12-12 00:00:00",
                    "sample_sheet": {
                        "upload_placeholder": "rb150200-c6b2-459c-a264-b56bc2aca26a"
                    },
                }
            ]
        },
    }
    # Expected GCS destinations: one entry per upload_placeholder in the patch,
    # keyed as <trial>/tcr/<batch>/<cimac_id>/replicate_<id>/<file>.fastq.gz.
    upload_entries = [
        LocalFileUploadEntry(
            local_path="/local/path/to/read1_1.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP111.00/replicate_1A/r1.fastq.gz",
            upload_placeholder="3635df00-082b-4e2d-92a8-7a5e629483dc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/read2_1.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP111.00/replicate_1A/r2.fastq.gz",
            upload_placeholder="2cd2bb4f-3f84-4f78-b387-4edb6dcc5d1c",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/index1_1.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP111.00/replicate_1A/i1.fastq.gz",
            upload_placeholder="aa35df00-082b-4e2d-92a8-7a5e629483dc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/index2_1.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP111.00/replicate_1A/i2.fastq.gz",
            upload_placeholder="bbd2bb4f-3f84-4f78-b387-4edb6dcc5d1c",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/read1_2.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/replicate_1A/r1.fastq.gz",
            upload_placeholder="e49521dc-d531-4555-a874-80aa0ce31dc2",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/read2_2.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/replicate_1A/r2.fastq.gz",
            upload_placeholder="be150200-c6b2-459c-a264-b56bc2aca264",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/index1_2.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/replicate_1A/i1.fastq.gz",
            upload_placeholder="cc9521dc-d531-4555-a874-80aa0ce31dc2",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/index2_2.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/replicate_1A/i2.fastq.gz",
            upload_placeholder="dd150200-c6b2-459c-a264-b56bc2aca264",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/read1_3.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/replicate_2A/r1.fastq.gz",
            upload_placeholder="r29521dc-d531-4555-a874-80aa0ce31dc2",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/read2_3.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/replicate_2A/r2.fastq.gz",
            upload_placeholder="r2150200-c6b2-459c-a264-b56bc2aca264",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/index1_3.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/replicate_2A/i1.fastq.gz",
            upload_placeholder="r29521dc-d531-4555-a874-80aa0ce31dc3",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/index2_3.fastq.gz",
            gs_key="test_prism_trial_id/tcr/XYZ/CTTTPP121.00/replicate_2A/i2.fastq.gz",
            upload_placeholder="r2150200-c6b2-459c-a264-b56bc2aca265",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="/local/path/to/sample_sheet.csv",
            gs_key="test_prism_trial_id/tcr/XYZ/SampleSheet.csv",
            upload_placeholder="rb150200-c6b2-459c-a264-b56bc2aca26a",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Every sample referenced by the patch must exist on the base trial.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["tcr"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    # Merging the patch's "assays" branch into the base trial yields the target.
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def olink() -> PrismTestData:
    """Test data for an Olink upload.

    One batch ("batch1") with two chip records (NPX workbook + raw CT file
    each), a batch-level combined NPX file, and a study-level NPX file.
    No per-sample cimac_ids are involved, so the base trial needs no samples.
    """
    upload_type = "olink"
    prismify_args = get_prismify_args(upload_type)
    # The metadata patch prism is expected to produce for this upload.
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "olink": {
                "batches": [
                    {
                        "batch_id": "batch1",
                        "records": [
                            {
                                "chip_barcode": "1111",
                                "files": {
                                    "assay_npx": {
                                        "upload_placeholder": "d658b480-ed78-4717-b622-3e84bde632b6"
                                    },
                                    "assay_raw_ct": {
                                        "upload_placeholder": "4e9d0a47-90dc-4134-9ad6-3e3dd83619d6"
                                    },
                                },
                                "run_date": "2019-12-12 00:00:00",
                                "run_time": "10:11:00",
                                "instrument": "MIOMARKHD411",
                                "fludigm_application_version": "4.1.3",
                                "fludigm_application_build": "20140305.43",
                                "probe_type": "FAM-MGB",
                                "passive_reference": "ROX",
                                "quality_threshold": 0.5,
                                "baseline_correction": "Linear",
                                "number_of_samples": 90.0,
                                "number_of_samples_failed": 5.0,
                                "npx_manager_version": "Olink NPX Manager 0.0.82.0",
                            },
                            {
                                "chip_barcode": "1112",
                                "files": {
                                    "assay_npx": {
                                        "upload_placeholder": "9855c579-82e0-42ee-8225-7c1c736bb69f"
                                    },
                                    "assay_raw_ct": {
                                        "upload_placeholder": "b387e41a-1c6a-42b5-aa16-dccf6249e404"
                                    },
                                },
                                "run_date": "2019-12-12 00:00:00",
                                "run_time": "10:11:00",
                                "instrument": "MIOMARKHD411",
                                "fludigm_application_version": "4.1.3",
                                "fludigm_application_build": "20140305.43",
                                "probe_type": "FAM-MGB",
                                "passive_reference": "ROX",
                                "quality_threshold": 0.5,
                                "baseline_correction": "Linear",
                                "number_of_samples": 80.0,
                                "number_of_samples_failed": 10.0,
                                "npx_manager_version": "Olink NPX Manager 0.0.82.0",
                            },
                        ],
                        "assay_creator": "DFCI",
                        "panel": "Olink INFLAMMATION(v.3004)",
                        "assay_panel_lot": "1",
                        "combined": {
                            "npx_file": {
                                "upload_placeholder": "1b0b3b8f-6417-4a37-85dc-e8aa75594678"
                            },
                            "npx_manager_version": "Olink NPX Manager 0.0.82.0",
                        },
                    }
                ],
                "study": {
                    "npx_file": {
                        "upload_placeholder": "19b31c40-a3dd-4be1-b9bd-022b9ff08dfd"
                    },
                    "npx_manager_version": "Olink NPX Manager 0.0.82.0",
                },
            }
        },
    }
    # Expected GCS destinations: one entry per upload_placeholder in the patch.
    # NPX workbooks carry extractable metadata (metadata_availability=True).
    upload_entries = [
        LocalFileUploadEntry(
            local_path="olink_assay_1_NPX.xlsx",
            gs_key="test_prism_trial_id/olink/batch_batch1/chip_1111/assay_npx.xlsx",
            upload_placeholder="d658b480-ed78-4717-b622-3e84bde632b6",
            metadata_availability=True,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="olink_assay_1_CT.csv",
            gs_key="test_prism_trial_id/olink/batch_batch1/chip_1111/assay_raw_ct.csv",
            upload_placeholder="4e9d0a47-90dc-4134-9ad6-3e3dd83619d6",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="olink_assay_2_NPX.xlsx",
            gs_key="test_prism_trial_id/olink/batch_batch1/chip_1112/assay_npx.xlsx",
            upload_placeholder="9855c579-82e0-42ee-8225-7c1c736bb69f",
            metadata_availability=True,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="olink_assay_2_CT.csv",
            gs_key="test_prism_trial_id/olink/batch_batch1/chip_1112/assay_raw_ct.csv",
            upload_placeholder="b387e41a-1c6a-42b5-aa16-dccf6249e404",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="olink_assay_combined.xlsx",
            gs_key="test_prism_trial_id/olink/study_npx.xlsx",
            upload_placeholder="19b31c40-a3dd-4be1-b9bd-022b9ff08dfd",
            metadata_availability=True,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="olink_assay_batch_combined.xlsx",
            gs_key="test_prism_trial_id/olink/batch_batch1/combined_npx.xlsx",
            upload_placeholder="1b0b3b8f-6417-4a37-85dc-e8aa75594678",
            metadata_availability=True,
            allow_empty=False,
        ),
    ]
    base_trial = get_test_trial()
    # Merging the patch's "assays" branch into the base trial yields the target.
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def elisa() -> PrismTestData:
    """Test data for an ELISA upload: one run with two antigens and one workbook."""
    upload_type = "elisa"
    prismify_args = get_prismify_args(upload_type)
    # Two antigens on the panel: a GST protein and a p53 peptide.
    antigens = [
        {
            "antigen": "GST",
            "antigen_type": "protein",
            "final_concentration": 1.0,
            "final_concentration_units": "Nanogram per Microliter",
        },
        {
            "antigen": "p53 16-32",
            "antigen_type": "peptide",
            "final_concentration": 1.0,
            "final_concentration_units": "Micromolar",
        },
    ]
    # The metadata patch prism is expected to produce for this upload.
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "elisa": [
                {
                    "antigens": antigens,
                    "assay_creator": "DFCI",
                    "assay_run_id": "test_prism_trial_id_run_1",
                    "assay_xlsx": {
                        "upload_placeholder": "69b033d4-c895-4fb4-88e8-9b2a5e264874"
                    },
                }
            ]
        },
    }
    # Single expected GCS upload: the assay workbook (metadata is extractable).
    upload_entries = [
        LocalFileUploadEntry(
            local_path="otest_prism_trial_id_run_1_ELISA.xlsx",
            gs_key="test_prism_trial_id/elisa/test_prism_trial_id_run_1/assay.xlsx",
            upload_placeholder="69b033d4-c895-4fb4-88e8-9b2a5e264874",
            metadata_availability=True,
            allow_empty=False,
        )
    ]
    base_trial = get_test_trial()
    # Merging the patch's "assays" branch into the base trial yields the target.
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def mif() -> PrismTestData:
    """Test data for a multiplex immunofluorescence (mIF) upload.

    Two samples: CTTTPP111.00 has one region of interest (ROI) with two
    exports (CD4, CD8); CTTTPP121.00 has two ROIs with one CD4 export each.
    Each ROI carries a composite image, an .im3 multispectral file, and
    component data; each export carries segmentation maps/data, rendered
    images, and one or more score-data files.
    """
    upload_type = "mif"
    prismify_args = get_prismify_args(upload_type)
    # The metadata patch prism is expected to produce for this upload.
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "mif": [
                {
                    "panel": "Panel 1: PD-L1, CD68, PD-1, CD8, CD3, pan-cytokeratin, DAPI",
                    "antibodies": [
                        {
                            "antibody": "CD8",
                            "export_name": "CD8 (Opal 540)",
                            "clone": "C8/144b",
                            "company": "DAKO",
                            "cat_num": "C8-ABC",
                            "lot_num": "3983272",
                            "staining_order": 2,
                            "fluor_wavelength": 520,
                            "primary_ab_dilution": "1:5000",
                            "dilutent": "DV",
                            "fluor_dilution": "1:100",
                            "antigen_retrieval_time": "00:01:00",
                            "primary_incubation_time": "00:01:00",
                            "amplification_time": "00:01:00",
                        },
                        {
                            "antibody": "PD-L1",
                            "export_name": "PD-L1 (Opal 200)",
                            "clone": "9A11",
                            "company": "CST",
                            "cat_num": "9A-ABC",
                            "lot_num": "29387234",
                            "staining_order": 3,
                            "fluor_wavelength": 540,
                            "primary_ab_dilution": "1:5000",
                            "dilutent": "VENTANA",
                            "fluor_dilution": "1:200",
                            "antigen_retrieval_time": "00:01:00",
                            "primary_incubation_time": "00:01:00",
                            "amplification_time": "00:01:00",
                        },
                    ],
                    "records": [
                        {
                            "cimac_id": "CTTTPP111.00",
                            "files": {
                                "regions_of_interest": [
                                    {
                                        "roi_id": "1",
                                        "composite_image": {
                                            "upload_placeholder": "6aaaaaaa-047f-4df6-b614-871289a1a2a"
                                        },
                                        "im3": {
                                            "upload_placeholder": "7aaaaaaa-047f-4df6-b614-871289a1a2a"
                                        },
                                        "component_data": {
                                            "upload_placeholder": "8aaaaaaa-047f-4df6-b614-871289a1a2a"
                                        },
                                        "exports": [
                                            {
                                                "export_id": "CD4",
                                                "binary_seg_maps": {
                                                    "upload_placeholder": "1aaaaaaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "cell_seg_data": {
                                                    "upload_placeholder": "2aaaaaaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "cell_seg_data_summary": {
                                                    "upload_placeholder": "3aaaaaaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "tissue_seg_data": {
                                                    "upload_placeholder": "2zaaaaaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "tissue_seg_data_summary": {
                                                    "upload_placeholder": "3zaaaaaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "phenotype_map": {
                                                    "upload_placeholder": "4aaaaaaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_all_seg": {
                                                    "upload_placeholder": "4aaaaaab-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_cell_seg_map": {
                                                    "upload_placeholder": "4aaaaabb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_phenotype_map": {
                                                    "upload_placeholder": "4aaaabbb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_tissue_seg": {
                                                    "upload_placeholder": "4aaabbbb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "score_data": [
                                                    {
                                                        "upload_placeholder": "5aaaaaa1-047f-4df6-b614-871289a1a2a"
                                                    },
                                                    {
                                                        "upload_placeholder": "5aaaaaa2-047f-4df6-b614-871289a1a2a"
                                                    },
                                                ],
                                            },
                                            {
                                                "export_id": "CD8",
                                                "binary_seg_maps": {
                                                    "upload_placeholder": "1aaaaaaa-047f-4df6-b614-871289a1a2b"
                                                },
                                                "cell_seg_data": {
                                                    "upload_placeholder": "2aaaaaaa-047f-4df6-b614-871289a1a2b"
                                                },
                                                "cell_seg_data_summary": {
                                                    "upload_placeholder": "3aaaaaaa-047f-4df6-b614-871289a1a2b"
                                                },
                                                "tissue_seg_data": {
                                                    "upload_placeholder": "2zaaaaaa-047f-4df6-b614-871289a1a2b"
                                                },
                                                "tissue_seg_data_summary": {
                                                    "upload_placeholder": "3zaaaaaa-047f-4df6-b614-871289a1a2b"
                                                },
                                                "phenotype_map": {
                                                    "upload_placeholder": "4aaaaaaa-047f-4df6-b614-871289a1a2b"
                                                },
                                                "image_with_all_seg": {
                                                    "upload_placeholder": "4aaaaaab-047f-4df6-b614-871289a1a2b"
                                                },
                                                "image_with_cell_seg_map": {
                                                    "upload_placeholder": "4aaaaabb-047f-4df6-b614-871289a1a2b"
                                                },
                                                "image_with_phenotype_map": {
                                                    "upload_placeholder": "4aaaabbb-047f-4df6-b614-871289a1a2b"
                                                },
                                                "image_with_tissue_seg": {
                                                    "upload_placeholder": "4aaabbbb-047f-4df6-b614-871289a1a2b"
                                                },
                                                "score_data": [
                                                    {
                                                        "upload_placeholder": "5aaaaaa1-047f-4df6-b614-871289a1a2b"
                                                    },
                                                    {
                                                        "upload_placeholder": "5aaaaaa2-047f-4df6-b614-871289a1a2b"
                                                    },
                                                ],
                                            },
                                        ],
                                    }
                                ]
                            },
                        },
                        {
                            "cimac_id": "CTTTPP121.00",
                            "files": {
                                "regions_of_interest": [
                                    {
                                        "roi_id": "1",
                                        "composite_image": {
                                            "upload_placeholder": "6bbbbbbb-047f-4df6-b614-871289a1a2a"
                                        },
                                        "im3": {
                                            "upload_placeholder": "7bbbbbbb-047f-4df6-b614-871289a1a2a"
                                        },
                                        "component_data": {
                                            "upload_placeholder": "8bbbbbbb-047f-4df6-b614-871289a1a2a"
                                        },
                                        "exports": [
                                            {
                                                "export_id": "CD4",
                                                "binary_seg_maps": {
                                                    "upload_placeholder": "1bbbbbbb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "cell_seg_data": {
                                                    "upload_placeholder": "2bbbbbbb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "cell_seg_data_summary": {
                                                    "upload_placeholder": "3bbbbbbb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "tissue_seg_data": {
                                                    "upload_placeholder": "2abbbbbb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "tissue_seg_data_summary": {
                                                    "upload_placeholder": "3abbbbbb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "phenotype_map": {
                                                    "upload_placeholder": "4bbbbbbb-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_all_seg": {
                                                    "upload_placeholder": "4bbbbbba-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_cell_seg_map": {
                                                    "upload_placeholder": "4bbbbbaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_phenotype_map": {
                                                    "upload_placeholder": "4bbbbaaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_tissue_seg": {
                                                    "upload_placeholder": "4bbbaaaa-047f-4df6-b614-871289a1a2a"
                                                },
                                                "score_data": [
                                                    {
                                                        "upload_placeholder": "5bbbbbbb-047f-4df6-b614-871289a1a2a"
                                                    }
                                                ],
                                            }
                                        ],
                                    },
                                    {
                                        "roi_id": "2",
                                        "composite_image": {
                                            "upload_placeholder": "6ccccccc-047f-4df6-b614-871289a1a2a"
                                        },
                                        "im3": {
                                            "upload_placeholder": "7ccccccc-047f-4df6-b614-871289a1a2a"
                                        },
                                        "component_data": {
                                            "upload_placeholder": "8ccccccc-047f-4df6-b614-871289a1a2a"
                                        },
                                        "exports": [
                                            {
                                                "export_id": "CD4",
                                                "binary_seg_maps": {
                                                    "upload_placeholder": "1ccccccc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "cell_seg_data": {
                                                    "upload_placeholder": "2ccccccc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "cell_seg_data_summary": {
                                                    "upload_placeholder": "3ccccccc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "tissue_seg_data": {
                                                    "upload_placeholder": "2acccccc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "tissue_seg_data_summary": {
                                                    "upload_placeholder": "3acccccc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "phenotype_map": {
                                                    "upload_placeholder": "4ccccccc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_all_seg": {
                                                    "upload_placeholder": "4bbbbbbc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_cell_seg_map": {
                                                    "upload_placeholder": "4bbbbbcc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_phenotype_map": {
                                                    "upload_placeholder": "4bbbbccc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "image_with_tissue_seg": {
                                                    "upload_placeholder": "4bbbcccc-047f-4df6-b614-871289a1a2a"
                                                },
                                                "score_data": [
                                                    {
                                                        "upload_placeholder": "5ccccccc-047f-4df6-b614-871289a1a2a"
                                                    }
                                                ],
                                            }
                                        ],
                                    },
                                ]
                            },
                        },
                    ],
                    "assay_creator": "DFCI",
                    "slide_scanner_model": "Hamamatsu",
                    "image_analysis_software": "InForm",
                    "image_analysis_software_version": "2.4.2",
                    "cell_segmentation_model": "proprietary",
                    "positive_cell_detection": "proprietary",
                    "staining": "Bond RX",
                    "staining_date": "2001-01-01 00:00:00",
                    "imaging_date": "2001-01-01 00:00:00",
                    "imaging_status": "Yes",
                }
            ]
        },
    }
    # Expected GCS destinations: one entry per upload_placeholder in the patch,
    # keyed as <trial>/mif/<cimac_id>/roi_<id>[/<export_id>]/<file>.
    upload_entries = [
        LocalFileUploadEntry(
            local_path="111/1_score_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/score_data_0.txt",
            upload_placeholder="5aaaaaa1-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111_extra/1_score_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/score_data_1.txt",
            upload_placeholder="5aaaaaa2-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/binary_seg_maps.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/binary_seg_maps.tif",
            upload_placeholder="1aaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/cell_seg_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/cell_seg_data.txt",
            upload_placeholder="2aaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/cell_seg_data_summary.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/cell_seg_data_summary.txt",
            upload_placeholder="3aaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/tissue_seg_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/tissue_seg_data.txt",
            upload_placeholder="2zaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/tissue_seg_data_summary.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/tissue_seg_data_summary.txt",
            upload_placeholder="3zaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/phenotype_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/phenotype_map.tif",
            upload_placeholder="4aaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/image_with_all_seg.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/image_with_all_seg.tif",
            upload_placeholder="4aaaaaab-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/image_with_cell_seg_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/image_with_cell_seg_map.tif",
            upload_placeholder="4aaaaabb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/image_with_phenotype_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/image_with_phenotype_map.tif",
            upload_placeholder="4aaaabbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD4/image_with_tissue_seg.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD4/image_with_tissue_seg.tif",
            upload_placeholder="4aaabbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1_composite_image.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/composite_image.tif",
            upload_placeholder="6aaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1.im3",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/multispectral.im3",
            upload_placeholder="7aaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1_component_data.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/component_data.tif",
            upload_placeholder="8aaaaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1_score_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/score_data_0.txt",
            upload_placeholder="5aaaaaa1-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111_extra/1_score_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/score_data_1.txt",
            upload_placeholder="5aaaaaa2-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/binary_seg_maps.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/binary_seg_maps.tif",
            upload_placeholder="1aaaaaaa-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/cell_seg_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/cell_seg_data.txt",
            upload_placeholder="2aaaaaaa-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/cell_seg_data_summary.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/cell_seg_data_summary.txt",
            upload_placeholder="3aaaaaaa-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/tissue_seg_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/tissue_seg_data.txt",
            upload_placeholder="2zaaaaaa-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/tissue_seg_data_summary.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/tissue_seg_data_summary.txt",
            upload_placeholder="3zaaaaaa-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/phenotype_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/phenotype_map.tif",
            upload_placeholder="4aaaaaaa-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/image_with_all_seg.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/image_with_all_seg.tif",
            upload_placeholder="4aaaaaab-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/image_with_cell_seg_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/image_with_cell_seg_map.tif",
            upload_placeholder="4aaaaabb-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/image_with_phenotype_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/image_with_phenotype_map.tif",
            upload_placeholder="4aaaabbb-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="111/1/CD8/image_with_tissue_seg.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP111.00/roi_1/CD8/image_with_tissue_seg.tif",
            upload_placeholder="4aaabbbb-047f-4df6-b614-871289a1a2b",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/binary_seg_maps.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/binary_seg_maps.tif",
            upload_placeholder="1bbbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/cell_seg_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/cell_seg_data.txt",
            upload_placeholder="2bbbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/cell_seg_data_summary.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/cell_seg_data_summary.txt",
            upload_placeholder="3bbbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/tissue_seg_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/tissue_seg_data.txt",
            upload_placeholder="2abbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/tissue_seg_data_summary.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/tissue_seg_data_summary.txt",
            upload_placeholder="3abbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/phenotype_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/phenotype_map.tif",
            upload_placeholder="4bbbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/image_with_all_seg.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/image_with_all_seg.tif",
            upload_placeholder="4bbbbbba-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/image_with_cell_seg_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/image_with_cell_seg_map.tif",
            upload_placeholder="4bbbbbaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/image_with_phenotype_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/image_with_phenotype_map.tif",
            upload_placeholder="4bbbbaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1/CD4/image_with_tissue_seg.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/image_with_tissue_seg.tif",
            upload_placeholder="4bbbaaaa-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1_score_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/CD4/score_data_0.txt",
            upload_placeholder="5bbbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1_composite_image.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/composite_image.tif",
            upload_placeholder="6bbbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1.im3",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/multispectral.im3",
            upload_placeholder="7bbbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/1_component_data.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_1/component_data.tif",
            upload_placeholder="8bbbbbbb-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/binary_seg_maps.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/binary_seg_maps.tif",
            upload_placeholder="1ccccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/cell_seg_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/cell_seg_data.txt",
            upload_placeholder="2ccccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/cell_seg_data_summary.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/cell_seg_data_summary.txt",
            upload_placeholder="3ccccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/tissue_seg_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/tissue_seg_data.txt",
            upload_placeholder="2acccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/tissue_seg_data_summary.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/tissue_seg_data_summary.txt",
            upload_placeholder="3acccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/phenotype_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/phenotype_map.tif",
            upload_placeholder="4ccccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/image_with_all_seg.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/image_with_all_seg.tif",
            upload_placeholder="4bbbbbbc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/image_with_cell_seg_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/image_with_cell_seg_map.tif",
            upload_placeholder="4bbbbbcc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/image_with_phenotype_map.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/image_with_phenotype_map.tif",
            upload_placeholder="4bbbbccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2/CD4/image_with_tissue_seg.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/image_with_tissue_seg.tif",
            upload_placeholder="4bbbcccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2_score_data.txt",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/CD4/score_data_0.txt",
            upload_placeholder="5ccccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2_composite_image.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/composite_image.tif",
            upload_placeholder="6ccccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2.im3",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/multispectral.im3",
            upload_placeholder="7ccccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="121/2_component_data.tif",
            gs_key="test_prism_trial_id/mif/CTTTPP121.00/roi_2/component_data.tif",
            upload_placeholder="8ccccccc-047f-4df6-b614-871289a1a2a",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Every sample referenced by the patch must exist on the base trial.
    cimac_ids = [
        record["cimac_id"]
        for batch in prismify_patch["assays"]["mif"]
        for record in batch["records"]
    ]
    base_trial = get_test_trial(cimac_ids)
    # Merging the patch's "assays" branch into the base trial yields the target.
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
@assay_data_generator
def nanostring() -> PrismTestData:
    """Build the PrismTestData fixture for a nanostring assay upload.

    The prismify patch defines one batch ("test_batch") with batch-level raw
    and normalized data files plus two runs (RUN01 with two samples, RUN02
    with one).  Each upload_placeholder UUID in the patch is paired with a
    LocalFileUploadEntry below that maps a local file to its GCS object key.
    """
    upload_type = "nanostring"
    prismify_args = get_prismify_args(upload_type)
    # NOTE(review): some placeholder "UUIDs" end in non-hex chars (…69g, …69h);
    # they only need to be unique strings for the test, not valid UUIDs.
    prismify_patch = {
        "protocol_identifier": "test_prism_trial_id",
        "assays": {
            "nanostring": [
                {
                    "assay_creator": "DFCI",
                    "batch_id": "test_batch",
                    "data": {
                        "raw": {
                            "upload_placeholder": "d658b480-ed78-4717-b622-3e84bde632b6"
                        },
                        "normalized": {
                            "upload_placeholder": "4e9d0a47-90dc-4134-9ad6-3e3dd83619d6"
                        },
                    },
                    "runs": [
                        {
                            "run_id": "RUN01",
                            "control_raw_rcc": {
                                "upload_placeholder": "1b0b3b8f-6417-4a37-85dc-e8aa75594678"
                            },
                            "samples": [
                                {
                                    "cimac_id": "CTTTPP111.00",
                                    "raw_rcc": {
                                        "upload_placeholder": "9855c579-82e0-42ee-8225-7c1c736bb69f"
                                    },
                                },
                                {
                                    "cimac_id": "CTTTPP112.00",
                                    "raw_rcc": {
                                        "upload_placeholder": "9855c579-82e0-42ee-8225-7c1c736bb69g"
                                    },
                                },
                            ],
                        },
                        {
                            "run_id": "RUN02",
                            "control_raw_rcc": {
                                "upload_placeholder": "1b0b3b8f-6417-4a37-85dc-e8aa75594679"
                            },
                            "samples": [
                                {
                                    "cimac_id": "CTTTPP111.00",
                                    "raw_rcc": {
                                        "upload_placeholder": "9855c579-82e0-42ee-8225-7c1c736bb69h"
                                    },
                                }
                            ],
                        },
                    ],
                }
            ]
        },
    }
    # One entry per upload_placeholder above; gs_key is the final bucket path
    # (trial / assay / batch [/ run] / canonical file name).
    upload_entries = [
        LocalFileUploadEntry(
            local_path="raw_data.csv",
            upload_placeholder="d658b480-ed78-4717-b622-3e84bde632b6",
            gs_key="test_prism_trial_id/nanostring/test_batch/raw_data.csv",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="normalized.csv",
            upload_placeholder="4e9d0a47-90dc-4134-9ad6-3e3dd83619d6",
            gs_key="test_prism_trial_id/nanostring/test_batch/normalized_data.csv",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="RUN01_reference.rcc",
            upload_placeholder="1b0b3b8f-6417-4a37-85dc-e8aa75594678",
            gs_key="test_prism_trial_id/nanostring/test_batch/RUN01/control.rcc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="RUN01_111.rcc",
            upload_placeholder="9855c579-82e0-42ee-8225-7c1c736bb69f",
            gs_key="test_prism_trial_id/nanostring/test_batch/RUN01/CTTTPP111.00.rcc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="RUN01_112.rcc",
            upload_placeholder="9855c579-82e0-42ee-8225-7c1c736bb69g",
            gs_key="test_prism_trial_id/nanostring/test_batch/RUN01/CTTTPP112.00.rcc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="RUN02_reference.rcc",
            upload_placeholder="1b0b3b8f-6417-4a37-85dc-e8aa75594679",
            gs_key="test_prism_trial_id/nanostring/test_batch/RUN02/control.rcc",
            metadata_availability=False,
            allow_empty=False,
        ),
        LocalFileUploadEntry(
            local_path="RUN02_111.rcc",
            upload_placeholder="9855c579-82e0-42ee-8225-7c1c736bb69h",
            gs_key="test_prism_trial_id/nanostring/test_batch/RUN02/CTTTPP111.00.rcc",
            metadata_availability=False,
            allow_empty=False,
        ),
    ]
    # Collect every sample's CIMAC id (across all runs) so the base trial
    # contains matching participants/samples for the patch to merge into.
    cimac_ids = [
        sample["cimac_id"]
        for batch in prismify_patch["assays"]["nanostring"]
        for runs in batch["runs"]
        for sample in runs["samples"]
    ]
    base_trial = get_test_trial(cimac_ids)
    target_trial = copy_dict_with_branch(base_trial, prismify_patch, "assays")
    return PrismTestData(
        upload_type,
        prismify_args,
        prismify_patch,
        upload_entries,
        base_trial,
        target_trial,
    )
# Sanity check: every supported assay must have a test-data generator defined above.
generator_names = {f.__name__ for f in assay_data_generators}
missing = set(SUPPORTED_ASSAYS) - generator_names
assert not missing, f"Missing assay test data generators for {missing}"
|
#!/usr/bin/env python
""" generated source for module EWrapperMsgGenerator """
#
# Original file copyright original author(s).
# This file copyright Troy Melhase, troy@gci.net.
#
# WARNING: all changes to this file will be lost.
from ib.ext.AnyWrapperMsgGenerator import AnyWrapperMsgGenerator
from ib.ext.EClientSocket import EClientSocket
from ib.ext.MarketDataType import MarketDataType
from ib.ext.TickType import TickType
from ib.ext.Util import Util
from ib.lib import Double
# package: com.ib.client
class EWrapperMsgGenerator(AnyWrapperMsgGenerator):
    """Builds human-readable log strings for EWrapper (TWS API) callbacks.

    Each classmethod mirrors an EWrapper callback of the same name and returns
    a plain-string summary of that callback's arguments; no state is kept.
    Ported from the generated Java ``com.ib.client`` sources.

    Review fixes vs. the generated original (all identity-preserving when the
    argument is already a str):
      * ``contractDetails`` now wraps ``reqId`` in ``str()`` — consistent with
        ``bondContractDetails``/``contractDetailsEnd``; the bare concatenation
        raised TypeError for int request ids.
      * ``openOrder`` now wraps ``order.m_eTradeOnly`` in ``str()`` — it was the
        only field of the message built without ``str()`` and raised TypeError
        for bool values.
      * ``nextValidId`` now wraps ``orderId`` in ``str()`` for the same reason.
      * ``execDetails`` previously relied on accidental implicit string-literal
        concatenation for its trailing "end" line; the ``+`` is now explicit
        (same resulting string).
    """
    # Header prefix emitted by scannerParameters().
    SCANNER_PARAMETERS = "SCANNER PARAMETERS:"
    # Header prefix emitted by receiveFA().
    FINANCIAL_ADVISOR = "FA:"
    @classmethod
    def tickPrice(cls, tickerId, field, price, canAutoExecute):
        """Format a tickPrice callback; canAutoExecute != 0 marks an auto-executable quote."""
        return "id=" + str(tickerId) + " " + TickType.getField(field) + "=" + str(price) + " " + (" canAutoExecute" if (canAutoExecute != 0) else " noAutoExecute")
    @classmethod
    def tickSize(cls, tickerId, field, size):
        """Format a tickSize callback."""
        return "id=" + str(tickerId) + " " + TickType.getField(field) + "=" + str(size)
    @classmethod
    def tickOptionComputation(cls, tickerId, field, impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice):
        """Format a tickOptionComputation callback; unavailable values print as N/A."""
        # Double.MAX_VALUE / out-of-range values are the API's "not available" sentinels.
        toAdd = "id=" + str(tickerId) + " " + TickType.getField(field) \
            + ": vol = " + (str(impliedVol) if (impliedVol >= 0 and impliedVol != Double.MAX_VALUE) else "N/A") \
            + " delta = " + (str(delta) if (abs(delta) <= 1) else "N/A") \
            + " gamma = " + (str(gamma) if (abs(gamma) <= 1) else "N/A") \
            + " vega = " + (str(vega) if (abs(vega) <= 1) else "N/A") \
            + " theta = " + (str(theta) if (abs(theta) <= 1) else "N/A") \
            + " optPrice = " + (str(optPrice) if (optPrice >= 0 and optPrice != Double.MAX_VALUE) else "N/A") \
            + " pvDividend = " + (str(pvDividend) if (pvDividend >= 0 and pvDividend != Double.MAX_VALUE) else "N/A") \
            + " undPrice = " + (str(undPrice) if (undPrice >= 0 and undPrice != Double.MAX_VALUE) else "N/A")
        return toAdd
    @classmethod
    def tickGeneric(cls, tickerId, tickType, value):
        """Format a tickGeneric callback."""
        return "id=" + str(tickerId) + " " + TickType.getField(tickType) + "=" + str(value)
    @classmethod
    def tickString(cls, tickerId, tickType, value):
        """Format a tickString callback."""
        return "id=" + str(tickerId) + " " + TickType.getField(tickType) + "=" + str(value)
    @classmethod
    def tickEFP(cls, tickerId, tickType, basisPoints, formattedBasisPoints, impliedFuture, holdDays, futureExpiry, dividendImpact, dividendsToExpiry):
        """Format a tickEFP (exchange-for-physical) callback.

        ``formattedBasisPoints`` and ``futureExpiry`` are concatenated bare and
        are therefore assumed to be strings — TODO confirm against callers.
        """
        return "id=" + str(tickerId) + " " + TickType.getField(tickType) \
            + ": basisPoints = " + str(basisPoints) + "/" + formattedBasisPoints \
            + " impliedFuture = " + str(impliedFuture) + " holdDays = " + str(holdDays) \
            + " futureExpiry = " + futureExpiry + " dividendImpact = " + str(dividendImpact) \
            + " dividends to expiry = " + str(dividendsToExpiry)
    @classmethod
    def orderStatus(cls, orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeld):
        """Format an orderStatus callback (``status``/``whyHeld`` are strings)."""
        return "order status: orderId=" + str(orderId) + " clientId=" + str(clientId) \
            + " permId=" + str(permId) + " status=" + status + " filled=" + str(filled) \
            + " remaining=" + str(remaining) + " avgFillPrice=" + str(avgFillPrice) \
            + " lastFillPrice=" + str(lastFillPrice) + " parent Id=" + str(parentId) \
            + " whyHeld=" + whyHeld
    @classmethod
    def openOrder(cls, orderId, contract, order, orderState):
        """Format an openOrder callback: contract, order, and order-state details."""
        # NOTE(review): " localSymbol=" appears twice in the message; kept as in
        # the generated original. MaxString helpers render the MAX_VALUE
        # "unset" sentinels as empty strings.
        msg = "open order: orderId=" + str(orderId) \
            + " action=" + str(order.m_action) \
            + " quantity=" + str(order.m_totalQuantity) \
            + " conid=" + str(contract.m_conId) \
            + " symbol=" + str(contract.m_symbol) \
            + " secType=" + str(contract.m_secType) \
            + " expiry=" + str(contract.m_expiry) \
            + " strike=" + str(contract.m_strike) \
            + " right=" + str(contract.m_right) \
            + " multiplier=" + str(contract.m_multiplier) \
            + " exchange=" + str(contract.m_exchange) \
            + " primaryExch=" + str(contract.m_primaryExch) \
            + " currency=" + str(contract.m_currency) \
            + " localSymbol=" + str(contract.m_localSymbol) \
            + " tradingClass=" + str(contract.m_tradingClass) \
            + " type=" + str(order.m_orderType) \
            + " lmtPrice=" + Util.DoubleMaxString(order.m_lmtPrice) \
            + " auxPrice=" + Util.DoubleMaxString(order.m_auxPrice) \
            + " TIF=" + str(order.m_tif) \
            + " localSymbol=" + str(contract.m_localSymbol) \
            + " client Id=" + str(order.m_clientId) \
            + " parent Id=" + str(order.m_parentId) \
            + " permId=" + str(order.m_permId) \
            + " outsideRth=" + str(order.m_outsideRth) \
            + " hidden=" + str(order.m_hidden) \
            + " discretionaryAmt=" + str(order.m_discretionaryAmt) \
            + " displaySize=" + str(order.m_displaySize) \
            + " triggerMethod=" + str(order.m_triggerMethod) \
            + " goodAfterTime=" + str(order.m_goodAfterTime) \
            + " goodTillDate=" + str(order.m_goodTillDate) \
            + " faGroup=" + str(order.m_faGroup) \
            + " faMethod=" + str(order.m_faMethod) \
            + " faPercentage=" + str(order.m_faPercentage) \
            + " faProfile=" + str(order.m_faProfile) \
            + " shortSaleSlot=" + str(order.m_shortSaleSlot) \
            + " designatedLocation=" + str(order.m_designatedLocation) \
            + " exemptCode=" + str(order.m_exemptCode) \
            + " ocaGroup=" + str(order.m_ocaGroup) \
            + " ocaType=" + str(order.m_ocaType) \
            + " rule80A=" + str(order.m_rule80A) \
            + " allOrNone=" + str(order.m_allOrNone) \
            + " minQty=" + Util.IntMaxString(order.m_minQty) \
            + " percentOffset=" + Util.DoubleMaxString(order.m_percentOffset) \
            + " eTradeOnly=" + str(order.m_eTradeOnly) \
            + " firmQuoteOnly=" + str(order.m_firmQuoteOnly) \
            + " nbboPriceCap=" + Util.DoubleMaxString(order.m_nbboPriceCap) \
            + " optOutSmartRouting=" + str(order.m_optOutSmartRouting) \
            + " auctionStrategy=" + str(order.m_auctionStrategy) \
            + " startingPrice=" + Util.DoubleMaxString(order.m_startingPrice) \
            + " stockRefPrice=" + Util.DoubleMaxString(order.m_stockRefPrice) \
            + " delta=" + Util.DoubleMaxString(order.m_delta) \
            + " stockRangeLower=" + Util.DoubleMaxString(order.m_stockRangeLower) \
            + " stockRangeUpper=" + Util.DoubleMaxString(order.m_stockRangeUpper) \
            + " volatility=" + Util.DoubleMaxString(order.m_volatility) \
            + " volatilityType=" + str(order.m_volatilityType) \
            + " deltaNeutralOrderType=" + str(order.m_deltaNeutralOrderType) \
            + " deltaNeutralAuxPrice=" + Util.DoubleMaxString(order.m_deltaNeutralAuxPrice) \
            + " deltaNeutralConId=" + str(order.m_deltaNeutralConId) \
            + " deltaNeutralSettlingFirm=" + str(order.m_deltaNeutralSettlingFirm) \
            + " deltaNeutralClearingAccount=" + str(order.m_deltaNeutralClearingAccount) \
            + " deltaNeutralClearingIntent=" + str(order.m_deltaNeutralClearingIntent) \
            + " deltaNeutralOpenClose=" + str(order.m_deltaNeutralOpenClose) \
            + " deltaNeutralShortSale=" + str(order.m_deltaNeutralShortSale) \
            + " deltaNeutralShortSaleSlot=" + str(order.m_deltaNeutralShortSaleSlot) \
            + " deltaNeutralDesignatedLocation=" + str(order.m_deltaNeutralDesignatedLocation) \
            + " continuousUpdate=" + str(order.m_continuousUpdate) \
            + " referencePriceType=" + str(order.m_referencePriceType) \
            + " trailStopPrice=" + Util.DoubleMaxString(order.m_trailStopPrice) \
            + " trailingPercent=" + Util.DoubleMaxString(order.m_trailingPercent) \
            + " scaleInitLevelSize=" + Util.IntMaxString(order.m_scaleInitLevelSize) \
            + " scaleSubsLevelSize=" + Util.IntMaxString(order.m_scaleSubsLevelSize) \
            + " scalePriceIncrement=" + Util.DoubleMaxString(order.m_scalePriceIncrement) \
            + " scalePriceAdjustValue=" + Util.DoubleMaxString(order.m_scalePriceAdjustValue) \
            + " scalePriceAdjustInterval=" + Util.IntMaxString(order.m_scalePriceAdjustInterval) \
            + " scaleProfitOffset=" + Util.DoubleMaxString(order.m_scaleProfitOffset) \
            + " scaleAutoReset=" + str(order.m_scaleAutoReset) \
            + " scaleInitPosition=" + Util.IntMaxString(order.m_scaleInitPosition) \
            + " scaleInitFillQty=" + Util.IntMaxString(order.m_scaleInitFillQty) \
            + " scaleRandomPercent=" + str(order.m_scaleRandomPercent) \
            + " hedgeType=" + str(order.m_hedgeType) \
            + " hedgeParam=" + str(order.m_hedgeParam) \
            + " account=" + str(order.m_account) \
            + " settlingFirm=" + str(order.m_settlingFirm) \
            + " clearingAccount=" + str(order.m_clearingAccount) \
            + " clearingIntent=" + str(order.m_clearingIntent) \
            + " notHeld=" + str(order.m_notHeld) \
            + " whatIf=" + str(order.m_whatIf)
        # Combo (BAG) contracts additionally carry per-leg details.
        if "BAG" == contract.m_secType:
            if contract.m_comboLegsDescrip is not None:
                msg += " comboLegsDescrip=" + str(contract.m_comboLegsDescrip)
            msg += " comboLegs={"
            if contract.m_comboLegs is not None:
                i = 0
                while i < len(contract.m_comboLegs):
                    comboLeg = contract.m_comboLegs[i]
                    msg += " leg " + str(i + 1) + ": "
                    msg += "conId=" + str(comboLeg.m_conId)
                    msg += " ratio=" + str(comboLeg.m_ratio)
                    msg += " action=" + str(comboLeg.m_action)
                    msg += " exchange=" + str(comboLeg.m_exchange)
                    msg += " openClose=" + str(comboLeg.m_openClose)
                    msg += " shortSaleSlot=" + str(comboLeg.m_shortSaleSlot)
                    msg += " designatedLocation=" + str(comboLeg.m_designatedLocation)
                    msg += " exemptCode=" + str(comboLeg.m_exemptCode)
                    # Per-leg prices exist only when the order carries one leg
                    # entry per contract leg.
                    if order.m_orderComboLegs is not None and len(contract.m_comboLegs) == len(order.m_orderComboLegs):
                        orderComboLeg = order.m_orderComboLegs[i]
                        msg += " price=" + Util.DoubleMaxString(orderComboLeg.m_price)
                    msg += ";"
                    i += 1
            msg += "}"
            if order.m_basisPoints != Double.MAX_VALUE:
                msg += " basisPoints=" + Util.DoubleMaxString(order.m_basisPoints)
                msg += " basisPointsType=" + Util.IntMaxString(order.m_basisPointsType)
        # Delta-neutral underlying component, if attached to the contract.
        if contract.m_underComp is not None:
            underComp = contract.m_underComp
            msg += " underComp.conId =" + str(underComp.m_conId) + " underComp.delta =" + str(underComp.m_delta) + " underComp.price =" + str(underComp.m_price)
        # Algo strategy and its tag=value parameters, if an algo is attached.
        if not Util.StringIsEmpty(order.m_algoStrategy):
            msg += " algoStrategy=" + str(order.m_algoStrategy)
            msg += " algoParams={"
            if order.m_algoParams is not None:
                algoParams = order.m_algoParams
                i = 0
                while i < len(algoParams):
                    param = algoParams[i]
                    if i > 0:
                        msg += ","
                    msg += str(param.m_tag) + "=" + str(param.m_value)
                    i += 1
            msg += "}"
        # Smart combo routing params apply to BAG contracts only.
        if "BAG" == contract.m_secType:
            msg += " smartComboRoutingParams={"
            if order.m_smartComboRoutingParams is not None:
                smartComboRoutingParams = order.m_smartComboRoutingParams
                i = 0
                while i < len(smartComboRoutingParams):
                    param = smartComboRoutingParams[i]
                    if i > 0:
                        msg += ","
                    msg += str(param.m_tag) + "=" + str(param.m_value)
                    i += 1
            msg += "}"
        orderStateMsg = " status=" + str(orderState.m_status) \
            + " initMargin=" + str(orderState.m_initMargin) \
            + " maintMargin=" + str(orderState.m_maintMargin) \
            + " equityWithLoan=" + str(orderState.m_equityWithLoan) \
            + " commission=" + Util.DoubleMaxString(orderState.m_commission) \
            + " minCommission=" + Util.DoubleMaxString(orderState.m_minCommission) \
            + " maxCommission=" + Util.DoubleMaxString(orderState.m_maxCommission) \
            + " commissionCurrency=" + str(orderState.m_commissionCurrency) \
            + " warningText=" + str(orderState.m_warningText)
        return msg + orderStateMsg
    @classmethod
    def openOrderEnd(cls):
        """Format the openOrderEnd terminator message."""
        return " =============== end ==============="
    @classmethod
    def updateAccountValue(cls, key, value, currency, accountName):
        """Format an updateAccountValue callback (all arguments are strings)."""
        return "updateAccountValue: " + key + " " + value + " " + currency + " " + accountName
    @classmethod
    def updatePortfolio(cls, contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName):
        """Format an updatePortfolio callback for one position."""
        msg = "updatePortfolio: " + cls.contractMsg(contract) + \
            str(position) + " " + str(marketPrice) + " " + str(marketValue) + \
            " " + str(averageCost) + " " + str(unrealizedPNL) + " " + \
            str(realizedPNL) + " " + accountName
        return msg
    @classmethod
    def updateAccountTime(cls, timeStamp):
        """Format an updateAccountTime callback (``timeStamp`` is a string)."""
        return "updateAccountTime: " + timeStamp
    @classmethod
    def accountDownloadEnd(cls, accountName):
        """Format an accountDownloadEnd callback."""
        return "accountDownloadEnd: " + accountName
    @classmethod
    def nextValidId(cls, orderId):
        """Format a nextValidId callback (``str()`` added: order ids may be ints)."""
        return "Next Valid Order ID: " + str(orderId)
    @classmethod
    def contractDetails(cls, reqId, contractDetails):
        """Format a contractDetails callback (``str(reqId)`` for consistency with bondContractDetails)."""
        contract = contractDetails.m_summary
        msg = "reqId = " + str(reqId) + " ===================================\n" + \
            " ---- Contract Details begin ----\n" + \
            cls.contractMsg(contract) + cls.contractDetailsMsg(contractDetails) + \
            " ---- Contract Details End ----\n"
        return msg
    @classmethod
    def contractDetailsMsg(cls, contractDetails):
        """Format the contract-details portion shared by contractDetails()."""
        msg = "marketName = " + str(contractDetails.m_marketName) + "\n" \
            + "minTick = " + str(contractDetails.m_minTick) + "\n" \
            + "price magnifier = " + str(contractDetails.m_priceMagnifier) + "\n" \
            + "orderTypes = " + str(contractDetails.m_orderTypes) + "\n" \
            + "validExchanges = " + str(contractDetails.m_validExchanges) + "\n" \
            + "underConId = " + str(contractDetails.m_underConId) + "\n" \
            + "longName = " + str(contractDetails.m_longName) + "\n" \
            + "contractMonth = " + str(contractDetails.m_contractMonth) + "\n" \
            + "industry = " + str(contractDetails.m_industry) + "\n" \
            + "category = " + str(contractDetails.m_category) + "\n" \
            + "subcategory = " + str(contractDetails.m_subcategory) + "\n" \
            + "timeZoneId = " + str(contractDetails.m_timeZoneId) + "\n" \
            + "tradingHours = " + str(contractDetails.m_tradingHours) + "\n" \
            + "liquidHours = " + str(contractDetails.m_liquidHours) + "\n" \
            + "evRule = " + str(contractDetails.m_evRule) + "\n" \
            + "evMultiplier = " + str(contractDetails.m_evMultiplier) + "\n" \
            + cls.contractDetailsSecIdList(contractDetails)
        return msg
    @classmethod
    def contractMsg(cls, contract):
        """Format the common contract-description lines."""
        msg = "conid = " + str(contract.m_conId) + "\n" \
            + "symbol = " + str(contract.m_symbol) + "\n" \
            + "secType = " + str(contract.m_secType) + "\n" \
            + "expiry = " + str(contract.m_expiry) + "\n" \
            + "strike = " + str(contract.m_strike) + "\n" \
            + "right = " + str(contract.m_right) + "\n" \
            + "multiplier = " + str(contract.m_multiplier) + "\n" \
            + "exchange = " + str(contract.m_exchange) + "\n" \
            + "primaryExch = " + str(contract.m_primaryExch) + "\n" \
            + "currency = " + str(contract.m_currency) + "\n" \
            + "localSymbol = " + str(contract.m_localSymbol) + "\n" \
            + "tradingClass = " + str(contract.m_tradingClass) + "\n"
        return msg
    @classmethod
    def bondContractDetails(cls, reqId, contractDetails):
        """Format a bondContractDetails callback."""
        contract = contractDetails.m_summary
        msg = "reqId = " + str(reqId) + " ===================================\n" \
            + " ---- Bond Contract Details begin ----\n" \
            + "symbol = " + str(contract.m_symbol) + "\n" \
            + "secType = " + str(contract.m_secType) + "\n" \
            + "cusip = " + str(contractDetails.m_cusip) + "\n" \
            + "coupon = " + str(contractDetails.m_coupon) + "\n" \
            + "maturity = " + str(contractDetails.m_maturity) + "\n" \
            + "issueDate = " + str(contractDetails.m_issueDate) + "\n" \
            + "ratings = " + str(contractDetails.m_ratings) + "\n" \
            + "bondType = " + str(contractDetails.m_bondType) + "\n" \
            + "couponType = " + str(contractDetails.m_couponType) + "\n" \
            + "convertible = " + str(contractDetails.m_convertible) + "\n" \
            + "callable = " + str(contractDetails.m_callable) + "\n" \
            + "putable = " + str(contractDetails.m_putable) + "\n" \
            + "descAppend = " + str(contractDetails.m_descAppend) + "\n" \
            + "exchange = " + str(contract.m_exchange) + "\n" \
            + "currency = " + str(contract.m_currency) + "\n" \
            + "marketName = " + str(contractDetails.m_marketName) + "\n" \
            + "tradingClass = " + str(contract.m_tradingClass) + "\n" \
            + "conid = " + str(contract.m_conId) + "\n" \
            + "minTick = " + str(contractDetails.m_minTick) + "\n" \
            + "orderTypes = " + str(contractDetails.m_orderTypes) + "\n" \
            + "validExchanges = " + str(contractDetails.m_validExchanges) + "\n" \
            + "nextOptionDate = " + str(contractDetails.m_nextOptionDate) + "\n" \
            + "nextOptionType = " + str(contractDetails.m_nextOptionType) + "\n" \
            + "nextOptionPartial = " + str(contractDetails.m_nextOptionPartial) + "\n" \
            + "notes = " + str(contractDetails.m_notes) + "\n" \
            + "longName = " + str(contractDetails.m_longName) + "\n" \
            + "evRule = " + str(contractDetails.m_evRule) + "\n" \
            + "evMultiplier = " + str(contractDetails.m_evMultiplier) + "\n" \
            + cls.contractDetailsSecIdList(contractDetails) \
            + " ---- Bond Contract Details End ----\n"
        return msg
    @classmethod
    def contractDetailsSecIdList(cls, contractDetails):
        """Format the secIdList tag=value pairs as ``secIdList={...}``."""
        msg = "secIdList={"
        if contractDetails.m_secIdList is not None:
            secIdList = contractDetails.m_secIdList
            i = 0
            while i < len(secIdList):
                param = secIdList[i]
                if i > 0:
                    msg += ","
                msg += str(param.m_tag) + "=" + str(param.m_value)
                i += 1
        msg += "}\n"
        return msg
    @classmethod
    def contractDetailsEnd(cls, reqId):
        """Format a contractDetailsEnd terminator message."""
        return "reqId = " + str(reqId) + " =============== end ==============="
    @classmethod
    def execDetails(cls, reqId, contract, execution):
        """Format an execDetails callback (one execution report)."""
        # Fixed: the trailing "end" line was joined via implicit string-literal
        # concatenation (missing "+"); now explicit, same resulting string.
        msg = " ---- Execution Details begin ----\n" \
            + "reqId = " + str(reqId) + "\n" \
            + "orderId = " + str(execution.m_orderId) + "\n" \
            + "clientId = " + str(execution.m_clientId) + "\n" \
            + cls.contractMsg(contract) \
            + "execId = " + str(execution.m_execId) + "\n" \
            + "time = " + str(execution.m_time) + "\n" \
            + "acctNumber = " + str(execution.m_acctNumber) + "\n" \
            + "executionExchange = " + str(execution.m_exchange) + "\n" \
            + "side = " + str(execution.m_side) + "\n" \
            + "shares = " + str(execution.m_shares) + "\n" \
            + "price = " + str(execution.m_price) + "\n" \
            + "permId = " + str(execution.m_permId) + "\n" \
            + "liquidation = " + str(execution.m_liquidation) + "\n" \
            + "cumQty = " + str(execution.m_cumQty) + "\n" \
            + "avgPrice = " + str(execution.m_avgPrice) + "\n" \
            + "orderRef = " + str(execution.m_orderRef) + "\n" \
            + "evRule = " + str(execution.m_evRule) + "\n" \
            + "evMultiplier = " + str(execution.m_evMultiplier) + "\n" \
            + " ---- Execution Details end ----\n"
        return msg
    @classmethod
    def execDetailsEnd(cls, reqId):
        """Format an execDetailsEnd terminator message."""
        return "reqId = " + str(reqId) + " =============== end ==============="
    @classmethod
    def updateMktDepth(cls, tickerId, position, operation, side, price, size):
        """Format an updateMktDepth callback."""
        return "updateMktDepth: " + str(tickerId) + " " + str(position) + " " + str(operation) + " " + str(side) + " " + str(price) + " " + str(size)
    @classmethod
    def updateMktDepthL2(cls, tickerId, position, marketMaker, operation, side, price, size):
        """Format an updateMktDepthL2 callback.

        NOTE(review): the prefix says "updateMktDepth" (not L2), as in the
        generated Java original; kept for log compatibility.
        """
        return "updateMktDepth: " + str(tickerId) + " " + str(position) + " " + marketMaker + " " + str(operation) + " " + str(side) + " " + str(price) + " " + str(size)
    @classmethod
    def updateNewsBulletin(cls, msgId, msgType, message, origExchange):
        """Format an updateNewsBulletin callback."""
        return "MsgId=" + str(msgId) + " :: MsgType=" + str(msgType) + " :: Origin=" + origExchange + " :: Message=" + message
    @classmethod
    def managedAccounts(cls, accountsList):
        """Format a managedAccounts callback (``accountsList`` is a comma-separated string)."""
        return "Connected : The list of managed accounts are : [" + accountsList + "]"
    @classmethod
    def receiveFA(cls, faDataType, xml):
        """Format a receiveFA callback with the FA data-type name and raw XML."""
        return cls.FINANCIAL_ADVISOR + " " + EClientSocket.faMsgTypeName(faDataType) + " " + xml
    @classmethod
    def historicalData(cls, reqId, date, open, high, low, close, volume, count, WAP, hasGaps):
        """Format one historicalData bar (``open`` shadows the builtin; signature kept)."""
        return "id=" + str(reqId) \
            + " date = " + date \
            + " open=" + str(open) \
            + " high=" + str(high) \
            + " low=" + str(low) \
            + " close=" + str(close) \
            + " volume=" + str(volume) \
            + " count=" + str(count) \
            + " WAP=" + str(WAP) \
            + " hasGaps=" + str(hasGaps)
    @classmethod
    def realtimeBar(cls, reqId, time, open, high, low, close, volume, wap, count):
        """Format one realtimeBar update."""
        return "id=" + str(reqId) \
            + " time = " + str(time) \
            + " open=" + str(open) \
            + " high=" + str(high) \
            + " low=" + str(low) \
            + " close=" + str(close) \
            + " volume=" + str(volume) \
            + " count=" + str(count) \
            + " WAP=" + str(wap)
    @classmethod
    def scannerParameters(cls, xml):
        """Format a scannerParameters callback: header line plus the raw XML."""
        return cls.SCANNER_PARAMETERS + "\n" + xml
    @classmethod
    def scannerData(cls, reqId, rank, contractDetails, distance, benchmark, projection, legsStr):
        """Format one scannerData row (distance/benchmark/projection/legsStr are strings)."""
        contract = contractDetails.m_summary
        return "id = " + str(reqId) \
            + " rank=" + str(rank) \
            + " symbol=" + str(contract.m_symbol) \
            + " secType=" + str(contract.m_secType) \
            + " expiry=" + str(contract.m_expiry) \
            + " strike=" + str(contract.m_strike) \
            + " right=" + str(contract.m_right) \
            + " exchange=" + str(contract.m_exchange) \
            + " currency=" + str(contract.m_currency) \
            + " localSymbol=" + str(contract.m_localSymbol) \
            + " marketName=" + str(contractDetails.m_marketName) \
            + " tradingClass=" + str(contract.m_tradingClass) \
            + " distance=" + distance \
            + " benchmark=" + benchmark \
            + " projection=" + projection \
            + " legsStr=" + legsStr
    @classmethod
    def scannerDataEnd(cls, reqId):
        """Format a scannerDataEnd terminator message."""
        return "id = " + str(reqId) + " =============== end ==============="
    @classmethod
    def currentTime(cls, time):
        """Format a currentTime callback."""
        return "current time = " + str(time)
    @classmethod
    def fundamentalData(cls, reqId, data):
        """Format a fundamentalData callback: id, payload length, then the payload."""
        return "id = " + str(reqId) + " len = " + str(len(data)) + '\n' + data
    @classmethod
    def deltaNeutralValidation(cls, reqId, underComp):
        """Format a deltaNeutralValidation callback."""
        return "id = " + str(reqId) + " underComp.conId =" + str(underComp.m_conId) + " underComp.delta =" + str(underComp.m_delta) + " underComp.price =" + str(underComp.m_price)
    @classmethod
    def tickSnapshotEnd(cls, tickerId):
        """Format a tickSnapshotEnd terminator message."""
        return "id=" + str(tickerId) + " =============== end ==============="
    @classmethod
    def marketDataType(cls, reqId, marketDataType):
        """Format a marketDataType callback."""
        return "id=" + str(reqId) + " marketDataType = " + MarketDataType.getField(marketDataType)
    @classmethod
    def commissionReport(cls, commissionReport):
        """Format a commissionReport callback."""
        msg = "commission report:" \
            + " execId=" + str(commissionReport.m_execId) \
            + " commission=" + Util.DoubleMaxString(commissionReport.m_commission) \
            + " currency=" + str(commissionReport.m_currency) \
            + " realizedPNL=" + Util.DoubleMaxString(commissionReport.m_realizedPNL) \
            + " yield=" + Util.DoubleMaxString(commissionReport.m_yield) \
            + " yieldRedemptionDate=" \
            + Util.IntMaxString(commissionReport.m_yieldRedemptionDate)
        return msg
    @classmethod
    def position(cls, account, contract, position, avgCost):
        """Format a position callback."""
        msg = " ---- Position begin ----\n" \
            + "account = " + str(account) + "\n" \
            + cls.contractMsg(contract) \
            + "position = " + Util.IntMaxString(position) + "\n" \
            + "avgCost = " + Util.DoubleMaxString(avgCost) + "\n" + \
            " ---- Position end ----\n"
        return msg
    @classmethod
    def positionEnd(cls):
        """Format a positionEnd terminator message."""
        return " =============== end ==============="
    @classmethod
    def accountSummary(cls, reqId, account, tag, value, currency):
        """Format an accountSummary callback."""
        msg = " ---- Account Summary begin ----\n" \
            + "reqId = " + str(reqId) + "\n" \
            + "account = " + str(account) + "\n" \
            + "tag = " + str(tag) + "\n" \
            + "value = " + str(value) + "\n" \
            + "currency = " + str(currency) + "\n" \
            + " ---- Account Summary end ----\n"
        return msg
    @classmethod
    def accountSummaryEnd(cls, reqId):
        """Format an accountSummaryEnd terminator message."""
        return "id=" + str(reqId) + " =============== end ==============="
|
class EvaluationResult:
    """Holds test-set evaluation metrics for one individual.

    Metrics default to -1.0, presumably a "not yet evaluated" sentinel —
    TODO confirm against the evaluation pipeline that consumes this.
    """
    def __init__(self, individual_id: str, accuracy_test: float = -1.0, f1_test: float = -1.0):
        self.individual_id = individual_id  # identifier of the evaluated individual
        self.accuracy_test = accuracy_test  # accuracy on the test set (-1.0 if unset)
        self.f1_test = f1_test  # F1 score on the test set (-1.0 if unset)
    def __repr__(self) -> str:
        # Debug-friendly representation mirroring the constructor signature.
        return (f"{type(self).__name__}(individual_id={self.individual_id!r}, "
                f"accuracy_test={self.accuracy_test}, f1_test={self.f1_test})")
|
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def deps(repo_mapping = {}):
    """Declares the picojson external repository unless it already exists.

    Args:
        repo_mapping: optional repository remapping dict forwarded to http_archive.
    """
    # Guard clause: a repository may only be declared once per workspace.
    if "com_github_kazuho_picojson" in native.existing_rules():
        return
    http_archive(
        name = "com_github_kazuho_picojson",
        url = "https://github.com/kazuho/picojson/archive/v1.3.0.tar.gz",
        sha256 = "056805ca2691798f5545935a14bb477f2e1d827c9fb862e6e449dbea22801c7d",
        strip_prefix = "picojson-1.3.0",
        repo_mapping = repo_mapping,
        build_file = "@com_github_3rdparty_bazel_rules_picojson//:BUILD.bazel",
    )
|
#MIT License
#
#Copyright (c) 2017 TheChyz
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
from s2clientprotocol import sc2api_pb2
import logging
log = logging.getLogger(__name__)
# Values of x and y must be from (and including) 0-64
# Simulates click on minimap
def moveCamera(x, y):
    """Build a Request that re-centers the camera at minimap point (x, y).

    x and y must be in 0-64 inclusive; equivalent to a minimap click.
    """
    request = sc2api_pb2.Request()
    camera_action = request.action.actions.add()
    minimap_center = camera_action.action_feature_layer.camera_move.center_minimap
    minimap_center.x = x
    minimap_center.y = y
    return request
# Sends a message to "all chat"
# TODO Response returns "Error"
# message ActionChat {
# enum Channel {
# Broadcast = 1;
# Team = 2;
# }
# optional Channel channel = 1;
# optional string message = 2;
# }
def chat(message):
    """Build a Request that sends `message` to the broadcast ("all") chat channel."""
    request = sc2api_pb2.Request()
    chat_action = request.action.actions.add()
    chat_entry = chat_action.chat.add()
    chat_entry.channel = sc2api_pb2.ActionChat.Broadcast
    chat_entry.message = message
    return request
# Selects units in a rectangle on the screen
# Can also add units to the selection
# resolutions of x and y come from "request.join_game.options.feature_layer"
# x_start: 0-84
# y_start: 0-84
# x_end: 0-84
# y_end: 0-84
# add: bool value; if the area selected should add the units highlighted to the current selection
def unitSelectScreenArea(x_start, y_start, x_end, y_end, add):
    """Build a Request that drag-selects the screen rectangle (x_start, y_start)-(x_end, y_end).

    Coordinates are in feature-layer screen resolution (0-84). When `add` is
    True the highlighted units are added to the current selection instead of
    replacing it.
    """
    request = sc2api_pb2.Request()
    select_action = request.action.actions.add()
    rect_action = select_action.action_feature_layer.unit_selection_rect
    rect = rect_action.selection_screen_coord.add()
    rect.p0.x = x_start
    rect.p0.y = y_start
    rect.p1.x = x_end
    rect.p1.y = y_end
    rect_action.selection_add = add
    return request
# Selects a unit at a point on the screen
# resolutions of x and y come from "request.join_game.options.feature_layer"
# x: 0-84
# y: 0-84
# click_type: one of the following (all starting with "spatial_pb2.ActionSpatialUnitSelectionPoint")
# .Select (normal click. Changes selection to unit.)
# .Toggle (shift+click. Toggle selection of unit)
# .AllType (control+click. Selects all units of a given type.)
# .AddAllType (shift+control+click. Selects all units of a given type.)
# TODO response sometimes gives error, but that may be due to not having anything clickable at that location
# TODO seems to work, but should be debugged further to confirm
def unitSelectPoint(x, y, click_type):
# log.debug("Selecting Point:\nx=%d\ny=%d\nclick_type=%d" % (x, y, click_type))
request = sc2api_pb2.Request()
action = request.action.actions.add()
action.action_feature_layer.unit_selection_point.selection_screen_coord.x = x
action.action_feature_layer.unit_selection_point.selection_screen_coord.y = y
action.action_feature_layer.unit_selection_point.type = click_type
return request
def unitCommandScreen(ability_id, x, y, queue_command):
    """Build a Request issuing an ability at a screen coordinate.

    Coordinate resolution comes from ``request.join_game.options.feature_layer``
    (x and y are in 0-84).

    Args:
        ability_id: Numeric id of the ability to use.
        x, y: Screen-space target of the command.
        queue_command: If True the order is queued (shift-style) rather than
            replacing the unit's current order.

    Returns:
        An ``sc2api_pb2.Request`` ready to be sent to the game.
    """
    req = sc2api_pb2.Request()
    cmd = req.action.actions.add().action_feature_layer.unit_command
    cmd.ability_id = ability_id
    cmd.target_screen_coord.x = x
    cmd.target_screen_coord.y = y
    cmd.queue_command = queue_command
    return req
def unitCommandMinimap(ability_id, x, y, queue_command):
    """Build a Request issuing an ability at a minimap coordinate.

    Coordinate resolution comes from ``request.join_game.options.feature_layer``
    (x and y are in 0-64 for the minimap).

    Args:
        ability_id: Numeric id of the ability to use.
        x, y: Minimap-space target of the command.
        queue_command: If True the order is queued (shift-style) rather than
            replacing the unit's current order.

    Returns:
        An ``sc2api_pb2.Request`` ready to be sent to the game.
    """
    req = sc2api_pb2.Request()
    cmd = req.action.actions.add().action_feature_layer.unit_command
    cmd.ability_id = ability_id
    cmd.target_minimap_coord.x = x
    cmd.target_minimap_coord.y = y
    cmd.queue_command = queue_command
    return req
def controlGroupRecall(action_input, index):
    """Build a Request performing a control-group action.

    ``action_input`` is a ``ui_pb2.ActionControlGroup`` value:
    ``.Recall`` (number hotkey: replace selection with the group),
    ``.Set`` (ctrl+number: set group to current selection),
    ``.Append`` (shift+number: add selection to group),
    ``.SetAndSteal`` (ctrl+alt+number) or ``.AppendAndSteal`` (shift+alt+number);
    the *Steal* variants also remove the units from other groups.

    ``index`` is the control-group slot (presumably the keyboard values 0-9
    -- TODO confirm).
    """
    req = sc2api_pb2.Request()
    group = req.action.actions.add().action_ui.control_group
    group.action = action_input
    group.control_group_index = index
    return req
def selectArmy(add):
    """Build a Request for the select-army hotkey.

    Args:
        add: If True the army is added to the current selection instead of
            replacing it.

    TODO(review): untested against a live game.
    """
    req = sc2api_pb2.Request()
    req.action.actions.add().action_ui.select_army.selection_add = add
    return req
def selectWarpGates(add):
    """Build a Request for the select-warp-gates hotkey.

    Args:
        add: If True the warp gates are added to the current selection
            instead of replacing it.

    TODO(review): untested against a live game.
    """
    req = sc2api_pb2.Request()
    req.action.actions.add().action_ui.select_warp_gates.selection_add = add
    return req
def selectLarva():
    """Build a Request that selects all larva (zerg hotkey).

    The ``select_larva`` submessage carries no fields, so it must be
    explicitly marked present on the parent message for it to be serialized.

    Returns:
        An ``sc2api_pb2.Request`` ready to be sent to the game.
    """
    request = sc2api_pb2.Request()
    action = request.action.actions.add()
    # Bug fix: the protobuf Python generated-message API method is
    # SetInParent() (CamelCase); the previous lowercase setInParent()
    # raises AttributeError at runtime.
    action.action_ui.select_larva.SetInParent()
    return request
def selectIdleWorker(click_type):
    """Build a Request selecting idle worker(s).

    ``click_type`` is a ``ui_pb2.ActionSelectIdleWorker`` value:
    ``.Set`` (plain click: replace selection with one idle worker),
    ``.Add`` (shift+click: add one idle worker),
    ``.All`` (ctrl+click: select every idle worker) or
    ``.AddAll`` (shift+ctrl+click: add every idle worker).

    TODO(review): untested against a live game.
    """
    req = sc2api_pb2.Request()
    req.action.actions.add().action_ui.select_idle_worker.type = click_type
    return req
def multiPanel(panel_type, unit_index):
    """Build a Request clicking a unit icon in the multi-unit panel.

    ``panel_type`` is a ``ui_pb2.ActionMultiPanel`` value:
    ``.SingleSelect`` (click), ``.DeselectUnit`` (shift+click),
    ``.SelectAllOfType`` (ctrl+click) or ``.DeselectAllOfType``
    (ctrl+shift+click). ``unit_index`` is the icon's position in the panel.

    TODO(review): untested against a live game.
    """
    req = sc2api_pb2.Request()
    panel = req.action.actions.add().action_ui.multi_panel
    panel.type = panel_type
    panel.unit_index = unit_index
    return req
def cargoPanel(unit_index):
    """Build a Request acting on an entry of the cargo panel.

    Exact in-game semantics are unclear from this code alone -- presumably
    clicking the unit at ``unit_index`` inside a transport's cargo panel;
    TODO confirm. Untested.
    """
    req = sc2api_pb2.Request()
    req.action.actions.add().action_ui.cargo_panel.unit_index = unit_index
    return req
def productionPanel(unit_index):
    """Build a Request acting on an entry of the production panel.

    Exact in-game semantics are unclear from this code alone -- presumably
    clicking the queue item at ``unit_index`` in a structure's production
    panel; TODO confirm. Untested.
    """
    req = sc2api_pb2.Request()
    req.action.actions.add().action_ui.production_panel.unit_index = unit_index
    return req
def toggleAutocast(ability_id):
    """Build a Request toggling autocast for the given ability.

    Args:
        ability_id: Numeric id of the ability whose autocast is toggled.

    TODO(review): untested against a live game.
    """
    req = sc2api_pb2.Request()
    req.action.actions.add().action_ui.toggle_autocast.ability_id = ability_id
    return req
|
# Copyright 2022 The etils Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tree API."""
import concurrent.futures
import functools
from typing import Any, Callable, Iterable, Iterator, Optional, TypeVar
from etils import enp
from etils import etqdm
from etils.array_types import Array
from etils.etree import backend as backend_lib
from etils.etree.typing import Tree
# Pseudo type-variables for tree leaf types. The first two are `Any` for
# now because of tooling limitations noted inline.
_T = Any  # TODO(pytype): Replace by `TypeVar`
_Tin = Any  # Could make this TypeVar if typing support variadic
_Tout = TypeVar('_Tout')
class TreeAPI:
    """Tree API, using either `jax.tree_utils`, `tf.nest` or `tree` backend."""

    def __init__(self, backend: backend_lib.Backend):
        self.backend = backend

    def parallel_map(
        self,
        map_fn: Callable[..., _Tout],  # Callable[[_Tin0, _Tin1,...], Tout]
        *trees: Tree[_Tin],  # _Tin0, _Tin1,...
        num_threads: Optional[int] = None,
        progress_bar: bool = False,
    ) -> Tree[_Tout]:
        """Like `map_structure`, but each `map_fn` call runs in a worker thread.

        Args:
            map_fn: Worker function.
            *trees: Nested inputs passed to `map_fn`.
            num_threads: Number of workers (default to CPU count * 5).
            progress_bar: If True, display a progression bar.

        Returns:
            The nested structure after `map_fn` has been applied.
        """
        # TODO(epot): Allow nesting `parallel_map` while keeping max num threads
        # constant. How to avoid dead locks ?
        with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as pool:
            submit_one = functools.partial(pool.submit, map_fn)
            future_tree = self.backend.map(submit_one, *trees)
            flat_futures, _ = self.backend.flatten(future_tree)
            completed = concurrent.futures.as_completed(flat_futures)
            if progress_bar:
                completed = etqdm.tqdm(completed, total=len(flat_futures))
            # Iterate as futures finish so any worker exception is re-raised
            # in the main thread before results are collected.
            for future in completed:
                err = future.exception()
                if err:
                    raise err
            return self.backend.map(lambda future: future.result(), future_tree)

    def unzip(self, tree: Tree[Iterable[_T]]) -> Iterator[Tree[_T]]:
        """Unpack a tree of iterables.

        This is the reverse operation of `tree.map_structure(zip, *trees)`.

        Example:

        ```python
        etree.unzip({'a': np.array([1, 2, 3])}) == [{'a': 1}, {'a': 2}, {'a': 3}]
        ```

        Args:
            tree: The tree to unzip.

        Yields:
            Trees with the same structure as the input, holding individual
            elements.
        """
        flat, structure = self.backend.flatten(tree)
        for leaf_values in zip(*flat):  # TODO(py310): check=True
            yield self.backend.unflatten(structure, leaf_values)

    def spec_like(
        self,
        tree: Tree[Array],
        *,
        ignore_other: bool = True,
    ) -> Tree[enp.ArraySpec]:
        """Inspect a tree of arrays; works with any array type.

        Example:

        ```python
        model = MyModel()
        variables = model.init(jax.random.PRNGKey(0), x)
        # Inspect the `variables` tree structures
        print(etree.spec_like(variables))
        ```

        Args:
            tree: The tree of arrays.
            ignore_other: If `True`, non-array leaves are forwarded as-is.

        Returns:
            The tree of `enp.ArraySpec`.
        """
        def _as_spec(leaf):
            # Non-array leaves are either passed through or rejected,
            # depending on `ignore_other`.
            if enp.ArraySpec.is_array(leaf):
                return enp.ArraySpec.from_array(leaf)
            if ignore_other:
                return leaf
            raise TypeError(f'Unknown array type: {leaf!r}')

        return self.backend.map(_as_spec, tree)
|
"""Django views for core CAP collector functionality."""
__author__ = "Arkadii Yakovets (arcadiy@google.com)"
import json
from bs4 import BeautifulSoup
from core import models
from core import utils
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.template.loader import render_to_string
from django.utils.decorators import method_decorator
from django.views.generic import TemplateView
from django.views.generic import View
class FeedView(View):
    """Feed representation (either XML or HTML)."""

    def get(self, request, *args, **kwargs):
        """Serve the whole feed, or one alert as raw CAP XML or rendered HTML."""
        feed_type = kwargs["feed_type"]
        if "alert_id" not in kwargs:
            # No specific alert requested: serve the generated feed.
            return HttpResponse(utils.GenerateFeed(feed_type),
                                content_type="text/%s" % feed_type)
        try:
            alert = models.Alert.objects.get(uuid=kwargs["alert_id"])
        except models.Alert.DoesNotExist:
            raise Http404
        if feed_type != "html":
            return HttpResponse(alert.content, content_type="text/xml")
        rendered = render_to_string(
            "core/alert.html.tmpl",
            {"alert": utils.ParseAlert(alert.content, feed_type, alert.uuid)})
        return HttpResponse(BeautifulSoup(rendered, feed_type).prettify())
class AlertTemplateView(View):
    """Area/message templates view."""

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(AlertTemplateView, self).dispatch(*args, **kwargs)

    def get(self, request, *args, **kwargs):
        """Return the raw XML content of one area or message template.

        Responds 400 when template_type/template_id is missing or the type is
        unknown, 404 when no template with that id exists.
        """
        template_id = request.GET.get("template_id")
        if "template_type" not in kwargs or not template_id:
            return HttpResponseBadRequest()
        template_type = kwargs["template_type"]
        if template_type == "area":
            template_model = models.AreaTemplate
        elif template_type == "message":
            template_model = models.MessageTemplate
        else:
            # Bug fix: an unknown template_type previously left template_model
            # unbound, raising UnboundLocalError (HTTP 500) instead of 400.
            return HttpResponseBadRequest()
        try:
            template = template_model.objects.get(id=template_id)
        except template_model.DoesNotExist:
            raise Http404
        return HttpResponse(template.content, content_type="text/xml")
class IndexView(TemplateView):
    """Main page: lists area/message templates and the default map viewport."""
    template_name = "index.html.tmpl"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(IndexView, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(IndexView, self).get_context_data(**kwargs)
        # Templates are listed alphabetically by title in the UI.
        context["area_templates"] = models.AreaTemplate.objects.order_by("title")
        context["message_templates"] = models.MessageTemplate.objects.order_by("title")
        context["map_default_viewport"] = settings.MAP_DEFAULT_VIEWPORT
        return context
class PostView(View):
    """Handles new alert creation."""

    @method_decorator(login_required)
    def post(self, request, *args, **kwargs):
        """Validate credentials and create an alert from the posted CAP XML.

        Expects POST fields: uid, password, xml. Responds 400 on missing
        fields, 403 (PermissionDenied) on bad credentials or missing group
        membership, otherwise a JSON body with error/uuid/valid.
        """
        username = request.POST.get("uid")
        password = request.POST.get("password")
        xml_string = request.POST.get("xml")
        if not username or not password or not xml_string:
            return HttpResponseBadRequest()
        user = authenticate(username=username, password=password)
        # Only members of the alert-creators group may post alerts.
        if (not user or
            not user.groups.filter(name=settings.ALERT_CREATORS_GROUP_NAME)):
            raise PermissionDenied
        # Bug fix / cleanup: removed dead pre-initializations of alert_id,
        # error_message and is_valid -- they were unconditionally overwritten
        # here and never read on the PermissionDenied path.
        alert_id, is_valid, error_message = utils.CreateAlert(xml_string, username)
        response = {
            "error": error_message,
            "uuid": alert_id,
            "valid": is_valid,
        }
        return HttpResponse(json.dumps(response), content_type="application/json")
|
#!/usr/bin/env python
"""
Generic PUT/update script
usage: create.py key url [key=value ...]
"""
import os, sys
sys.path.insert(0, os.path.dirname(__file__))
from common import update

# Parse the trailing key=value arguments into a dict. Split on the first
# '=' only, so values may themselves contain '=' characters.
data = dict(kwarg.split('=', 1) for kwarg in sys.argv[3:])
update(sys.argv[1], sys.argv[2], data)
|
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, IntegerField
from wtforms.validators import DataRequired
from flask_login import current_user
from app.models import Address
class Delivery(FlaskForm):
    """Checkout delivery-details form.

    Collects the contact and shipping information needed to deliver an
    order; every field is required.
    """
    # NOTE(review): IntegerField drops leading zeros and rejects '+'
    # prefixes, which is lossy for phone numbers -- confirm whether a
    # StringField would be more appropriate.
    phone_number = IntegerField('Phone Number', validators=[DataRequired()])
    street_address = StringField('Address', validators=[DataRequired()])
    # NOTE(review): label reads 'Postal Address' but the field stores a
    # postal code -- confirm intended label.
    postal_code = StringField('Postal Address', validators=[DataRequired()])
    city = StringField('City', validators=[DataRequired()])
    submit = SubmitField('Post')
|
import argparse
import sys
import os
from . import LModule, bash, yn, optionSelect
class Stop (LModule):
    """
    Stop node
    """
    NAME = "stop"
    DESCRIPTION = 'stop node'

    def parseArgs(self):
        """Parse CLI args after the subcommand and point lnode at basepath."""
        self.args = self.parser.parse_args(sys.argv[2::])
        self.lnode.setPath(self.args.basepath)
        return self

    def run(self):
        """Stop the node via pm2; abort if no instance is running."""
        if not self.lnode.isRunning():
            print('An instance of lisk-core is not running!')
            # Bug fix: execution previously fell through and ran `pm2 stop`
            # anyway, right after reporting that nothing was running.
            return
        print('=> Stopping Lisk')
        # NOTE(review): basepath is interpolated into a shell command;
        # assumes it is a trusted local path -- confirm.
        os.system('pm2 stop %s/pm2.conf.json' % self.args.basepath)
|
# Copyright (c) 2018 FlashX, LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import base64
import graphene
import os
import flask
import requests
from gtmcore.inventory.inventory import InventoryManager
from gtmcore.configuration import Configuration
from gtmcore.dispatcher import Dispatcher, jobs
from gtmcore.exceptions import GigantumException
from gtmcore.logging import LMLogger
from gtmcore.workflows import MergeOverride
from gtmcore.workflows.gitlab import GitLabManager, ProjectPermissions
from gtmcore.labbook import LabBook
from lmsrvcore.api import logged_mutation
from lmsrvcore.auth.identity import parse_token
from lmsrvcore.api.mutations import ChunkUploadMutation, ChunkUploadInput
from lmsrvcore.auth.user import get_logged_in_username, get_logged_in_author
from lmsrvlabbook.api.connections.labbook import LabbookConnection
from lmsrvlabbook.api.objects.labbook import Labbook as LabbookObject
# Module-level logger shared by all mutation classes in this file.
logger = LMLogger.get_logger()
class PublishLabbook(graphene.relay.ClientIDMutation):
    """Publish a local labbook to its remote as a background job."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)
        set_public = graphene.Boolean(required=False)

    job_key = graphene.String()

    @classmethod
    @logged_mutation
    def mutate_and_get_payload(cls, root, info, owner, labbook_name, set_public=False,
                               client_mutation_id=None):
        username = get_logged_in_username()
        labbook = InventoryManager().load_labbook(username, owner, labbook_name,
                                                  author=get_logged_in_author())
        # Publishing requires an authenticated session (Bearer token).
        environ = info.context.headers.environ
        if "HTTP_AUTHORIZATION" not in environ:
            raise ValueError("Authorization header not provided. Must have a valid session to query for collaborators")
        token = parse_token(environ["HTTP_AUTHORIZATION"])
        job_key = Dispatcher().dispatch_task(
            jobs.publish_repository,
            kwargs={'repository': labbook,
                    'username': username,
                    'access_token': token,
                    'public': set_public},
            metadata={'method': 'publish_labbook',
                      'labbook': labbook.key})
        logger.info(f"Publishing LabBook {labbook.root_dir} in background job with key {job_key.key_str}")
        return PublishLabbook(job_key=job_key.key_str)
class SyncLabbook(graphene.relay.ClientIDMutation):
    """Sync (push/pull) a labbook with its default remote as a background job."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)
        pull_only = graphene.Boolean(required=False, default=False)
        override_method = graphene.String(default="abort")

    job_key = graphene.String()

    @classmethod
    @logged_mutation
    def mutate_and_get_payload(cls, root, info, owner, labbook_name, pull_only=False,
                               override_method="abort", client_mutation_id=None):
        username = get_logged_in_username()
        labbook = InventoryManager().load_labbook(username, owner, labbook_name,
                                                  author=get_logged_in_author())
        # Syncing requires an authenticated session (Bearer token).
        token = None
        if hasattr(info.context.headers, 'environ'):
            environ = info.context.headers.environ
            if "HTTP_AUTHORIZATION" in environ:
                token = parse_token(environ["HTTP_AUTHORIZATION"])
        if not token:
            raise ValueError("Authorization header not provided. "
                             "Must have a valid session to query for collaborators")
        git_config = labbook.client_config.config['git']
        default_remote = git_config['default_remote']
        admin_service = None
        if default_remote in git_config['remotes']:
            admin_service = git_config['remotes'][default_remote]['admin_service']
        if not admin_service:
            raise ValueError('admin_service could not be found')
        # Configure git credentials so the background worker can reach the remote.
        mgr = GitLabManager(default_remote, admin_service, access_token=token)
        mgr.configure_git_credentials(default_remote, username)
        job_key = Dispatcher().dispatch_task(
            jobs.sync_repository,
            kwargs={'repository': labbook,
                    'pull_only': pull_only,
                    'username': username,
                    'override': MergeOverride(override_method)},
            metadata={'method': 'sync_labbook',
                      'labbook': labbook.key})
        logger.info(f"Syncing LabBook {labbook.root_dir} in background job with key {job_key.key_str}")
        return SyncLabbook(job_key=job_key.key_str)
class SetVisibility(graphene.relay.ClientIDMutation):
    """Set a remote labbook repository to public or private."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)
        visibility = graphene.String(required=True)

    new_labbook_edge = graphene.Field(LabbookConnection.Edge)

    @classmethod
    @logged_mutation
    def mutate_and_get_payload(cls, root, info, owner, labbook_name, visibility,
                               client_mutation_id=None):
        username = get_logged_in_username()
        labbook = InventoryManager().load_labbook(username, owner, labbook_name,
                                                  author=get_logged_in_author())
        # Changing visibility requires an authenticated session (Bearer token).
        token = None
        if hasattr(info.context.headers, 'environ'):
            environ = info.context.headers.environ
            if "HTTP_AUTHORIZATION" in environ:
                token = parse_token(environ["HTTP_AUTHORIZATION"])
        if not token:
            raise ValueError("Authorization header not provided. Must have a valid session to query for collaborators")
        git_config = labbook.client_config.config['git']
        default_remote = git_config['default_remote']
        admin_service = None
        if default_remote in git_config['remotes']:
            admin_service = git_config['remotes'][default_remote]['admin_service']
        if not admin_service:
            raise ValueError('admin_service could not be found')
        # Configure git creds
        mgr = GitLabManager(default_remote, admin_service, access_token=token)
        mgr.configure_git_credentials(default_remote, username)
        if visibility not in ['public', 'private']:
            raise ValueError(f'Visibility must be either "public" or "private";'
                             f'("{visibility}" invalid)')
        with labbook.lock():
            mgr.set_visibility(namespace=owner, repository_name=labbook_name, visibility=visibility)
        cursor = base64.b64encode(f"{0}".encode('utf-8'))
        edge = LabbookConnection.Edge(node=LabbookObject(owner=owner, name=labbook_name),
                                      cursor=cursor)
        return SetVisibility(new_labbook_edge=edge)
class ImportRemoteLabbook(graphene.relay.ClientIDMutation):
    """Import a labbook from a remote git URL as a background job."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)
        remote_url = graphene.String(required=True)

    job_key = graphene.String()

    @classmethod
    def mutate_and_get_payload(cls, root, info, owner, labbook_name, remote_url, client_mutation_id=None):
        """Dispatch a background job that clones `remote_url` for the current user.

        Raises:
            ValueError: if the request lacks an Authorization header, or no
                admin service is configured for the default remote.
        """
        username = get_logged_in_username()
        logger.info(f"Importing remote labbook from {remote_url}")
        lb = LabBook(author=get_logged_in_author())
        default_remote = lb.client_config.config['git']['default_remote']
        admin_service = None
        for remote in lb.client_config.config['git']['remotes']:
            if default_remote == remote:
                admin_service = lb.client_config.config['git']['remotes'][remote]['admin_service']
                break
        # Consistency/bug fix: the sibling mutations (SyncLabbook,
        # SetVisibility, DeleteRemoteLabbook) validate this; previously a
        # missing admin service fell through to GitLabManager(..., None).
        if not admin_service:
            raise ValueError('admin_service could not be found')
        # Extract valid Bearer token
        if hasattr(info.context, 'headers') and "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
        else:
            raise ValueError("Authorization header not provided. Must have a valid session to query for collaborators")
        gl_mgr = GitLabManager(default_remote, admin_service=admin_service, access_token=token)
        gl_mgr.configure_git_credentials(default_remote, username)
        job_metadata = {'method': 'import_labbook_from_remote'}
        job_kwargs = {
            'remote_url': remote_url,
            'username': username
        }
        dispatcher = Dispatcher()
        job_key = dispatcher.dispatch_task(jobs.import_labbook_from_remote, metadata=job_metadata,
                                           kwargs=job_kwargs)
        logger.info(f"Dispatched import_labbook_from_remote({remote_url}) to Job {job_key}")
        return ImportRemoteLabbook(job_key=job_key.key_str)
class AddLabbookRemote(graphene.relay.ClientIDMutation):
    """Attach a named git remote to a local labbook."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)
        remote_name = graphene.String(required=True)
        remote_url = graphene.String(required=True)

    success = graphene.Boolean()

    @classmethod
    def mutate_and_get_payload(cls, root, info, owner, labbook_name,
                               remote_name, remote_url,
                               client_mutation_id=None):
        current_user = get_logged_in_username()
        labbook = InventoryManager().load_labbook(current_user, owner, labbook_name,
                                                  author=get_logged_in_author())
        # Mutating git configuration requires the repository lock.
        with labbook.lock():
            labbook.add_remote(remote_name, remote_url)
        return AddLabbookRemote(success=True)
class AddLabbookCollaborator(graphene.relay.ClientIDMutation):
    """Add (or re-add with new permissions) a collaborator on a remote labbook."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)
        username = graphene.String(required=True)
        permissions = graphene.String(required=True)

    updated_labbook = graphene.Field(LabbookObject)

    @classmethod
    def mutate_and_get_payload(cls, root, info, owner, labbook_name, username, permissions,
                               client_mutation_id=None):
        """Grant `username` the given permission level on owner/labbook_name.

        Args:
            username: the collaborator to add (NOT the logged-in user).
            permissions: one of 'readonly', 'readwrite' or 'owner'.

        Raises:
            ValueError: on a missing Authorization header or unknown permission.
        """
        #TODO(billvb/dmk) - Here "username" refers to the intended recipient username.
        # it should probably be renamed here and in the frontend to "collaboratorUsername"
        logged_in_username = get_logged_in_username()
        lb = InventoryManager().load_labbook(logged_in_username, owner, labbook_name,
                                             author=get_logged_in_author())
        # TODO: Future work will look up remote in LabBook data, allowing user to select remote.
        default_remote = lb.client_config.config['git']['default_remote']
        admin_service = None
        for remote in lb.client_config.config['git']['remotes']:
            if default_remote == remote:
                admin_service = lb.client_config.config['git']['remotes'][remote]['admin_service']
                break
        # Extract valid Bearer token
        if "HTTP_AUTHORIZATION" in info.context.headers.environ:
            token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
        else:
            raise ValueError("Authorization header not provided. "
                             "Must have a valid session to query for collaborators")
        if permissions == 'readonly':
            perm = ProjectPermissions.READ_ONLY
        elif permissions == 'readwrite':
            perm = ProjectPermissions.READ_WRITE
        elif permissions == 'owner':
            perm = ProjectPermissions.OWNER
        else:
            raise ValueError(f"Unknown permission set: {permissions}")
        # Keyword form of access_token for consistency with the other mutations here.
        mgr = GitLabManager(default_remote, admin_service, access_token=token)
        existing_collabs = mgr.get_collaborators(owner, labbook_name)
        if username not in [n[1] for n in existing_collabs]:
            # Bug fix: the split f-strings were missing a separating space,
            # producing "...labbookwith permission..." in the logs.
            logger.info(f"Adding user {username} to {owner}/{labbook_name} "
                        f"with permission {perm}")
            mgr.add_collaborator(owner, labbook_name, username, perm)
        else:
            # Bug fix: same missing-space issue ("...on{owner}/...").
            logger.warning(f"Changing permission of {username} on "
                           f"{owner}/{labbook_name} to {perm}")
            # Change permissions by removing and re-adding the collaborator.
            mgr.delete_collaborator(owner, labbook_name, username)
            mgr.add_collaborator(owner, labbook_name, username, perm)
        create_data = {"owner": owner,
                       "name": labbook_name}
        return AddLabbookCollaborator(updated_labbook=LabbookObject(**create_data))
class DeleteLabbookCollaborator(graphene.relay.ClientIDMutation):
    """Remove a collaborator from a remote labbook."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)
        username = graphene.String(required=True)

    updated_labbook = graphene.Field(LabbookObject)

    @classmethod
    def mutate_and_get_payload(cls, root, info, owner, labbook_name, username, client_mutation_id=None):
        logged_in_username = get_logged_in_username()
        labbook = InventoryManager().load_labbook(logged_in_username, owner, labbook_name,
                                                  author=get_logged_in_author())
        # TODO: Future work will look up remote in LabBook data, allowing user to select remote.
        git_config = labbook.client_config.config['git']
        default_remote = git_config['default_remote']
        admin_service = None
        if default_remote in git_config['remotes']:
            admin_service = git_config['remotes'][default_remote]['admin_service']
        # A Bearer token is required to talk to the remote admin service.
        environ = info.context.headers.environ
        if "HTTP_AUTHORIZATION" not in environ:
            raise ValueError("Authorization header not provided. Must have a valid session to query for collaborators")
        token = parse_token(environ["HTTP_AUTHORIZATION"])
        # Remove the collaborator via the remote service.
        mgr = GitLabManager(default_remote, admin_service, token)
        mgr.delete_collaborator(owner, labbook_name, username)
        return DeleteLabbookCollaborator(
            updated_labbook=LabbookObject(owner=owner, name=labbook_name))
class DeleteRemoteLabbook(graphene.ClientIDMutation):
    """Delete a labbook from the remote repository."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)
        confirm = graphene.Boolean(required=True)

    success = graphene.Boolean()

    @classmethod
    def mutate_and_get_payload(cls, root, info, owner, labbook_name, confirm, client_mutation_id=None):
        """Delete owner/labbook_name on the remote and its cloud index entry.

        Requires confirm=True; otherwise this is a dry run that returns
        success=False. Also returns success=False when the cloud index
        removal fails -- that failure is tolerated but surfaced.
        """
        if confirm is True:
            # Load config data
            configuration = Configuration().config
            # Extract valid Bearer token
            token = None
            if hasattr(info.context.headers, 'environ'):
                if "HTTP_AUTHORIZATION" in info.context.headers.environ:
                    token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
            if not token:
                raise ValueError("Authorization header not provided. Cannot perform remote delete operation.")
            # Get remote server configuration
            default_remote = configuration['git']['default_remote']
            admin_service = None
            index_service = None
            for remote in configuration['git']['remotes']:
                if default_remote == remote:
                    admin_service = configuration['git']['remotes'][remote]['admin_service']
                    index_service = configuration['git']['remotes'][remote]['index_service']
                    break
            if not admin_service:
                raise ValueError('admin_service could not be found')
            # Perform delete operation
            mgr = GitLabManager(default_remote, admin_service, access_token=token)
            mgr.remove_repository(owner, labbook_name)
            logger.info(f"Deleted {owner}/{labbook_name} from the remote repository {default_remote}")
            # Call Index service to remove project from cloud index and search.
            # Don't raise an exception if the index delete fails, since this
            # can be handled relatively gracefully for now, but do report it.
            success = True
            access_token = flask.g.get('access_token', None)
            id_token = flask.g.get('id_token', None)
            repo_id = mgr.get_repository_id(owner, labbook_name)
            response = requests.delete(f"https://{index_service}/index/{repo_id}",
                                       headers={"Authorization": f"Bearer {access_token}",
                                                "Identity": id_token}, timeout=30)
            if response.status_code != 204:
                logger.error(f"Failed to remove project from cloud index. "
                             f"Status Code: {response.status_code}")
                logger.error(response.json())
                # Bug fix: `success` was never set to False here and the
                # return value was hard-coded True, so index failures were
                # silently reported as success.
                success = False
            else:
                logger.info(f"Deleted remote repository {owner}/{labbook_name} from cloud index")
            # Remove locally any references to that cloud repo that's just been deleted.
            try:
                username = get_logged_in_username()
                lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                                     author=get_logged_in_author())
                lb.remove_remote()
                lb.remove_lfs_remotes()
            except GigantumException as e:
                logger.warning(e)
            return DeleteRemoteLabbook(success=success)
        else:
            logger.info(f"Dry run deleting {labbook_name} from remote repository -- not deleted.")
            return DeleteRemoteLabbook(success=False)
class ExportLabbook(graphene.relay.ClientIDMutation):
    """Export a labbook as a zip archive in a background job."""

    class Input:
        owner = graphene.String(required=True)
        labbook_name = graphene.String(required=True)

    job_key = graphene.String()

    @classmethod
    def mutate_and_get_payload(cls, root, info, owner, labbook_name, client_mutation_id=None):
        username = get_logged_in_username()
        working_directory = Configuration().config['git']['working_directory']
        labbook = InventoryManager().load_labbook(username, owner, labbook_name,
                                                  author=get_logged_in_author())
        # Archives land in <working_directory>/export.
        export_directory = os.path.join(working_directory, 'export')
        job_key = Dispatcher().dispatch_task(
            jobs.export_labbook_as_zip,
            kwargs={'labbook_path': labbook.root_dir,
                    'lb_export_directory': export_directory},
            metadata={'method': 'export_labbook_as_zip',
                      'labbook': labbook.key})
        return ExportLabbook(job_key=job_key.key_str)
class ImportLabbook(graphene.relay.ClientIDMutation, ChunkUploadMutation):
    """Import a labbook from an uploaded zip archive (chunked upload)."""

    class Input:
        chunk_upload_params = ChunkUploadInput(required=True)

    import_job_key = graphene.String()

    @classmethod
    def mutate_and_wait_for_chunks(cls, info, **kwargs):
        # Nothing to do until the final chunk has arrived.
        return ImportLabbook()

    @classmethod
    def mutate_and_process_upload(cls, info, upload_file_path, upload_filename, **kwargs):
        if not upload_file_path:
            logger.error('No file uploaded')
            raise ValueError('No file uploaded')
        username = get_logged_in_username()
        # NOTE(review): 'import_labboook_from_zip' (triple 'o') mirrors the
        # job's spelling in gtmcore.dispatcher.jobs -- confirm before renaming.
        job_key = Dispatcher().dispatch_task(
            jobs.import_labboook_from_zip,
            kwargs={
                'archive_path': upload_file_path,
                'username': username,
                'owner': username
            },
            metadata={'method': 'import_labbook_from_zip'})
        return ImportLabbook(import_job_key=job_key.key_str)
|
"""
Copyright (c) 2015 SONATA-NFV
ALL RIGHTS RESERVED.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Neither the name of the SONATA-NFV [, ANY ADDITIONAL AFFILIATION]
nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
This work has been performed in the framework of the SONATA project,
funded by the European Commission under Grant number 671517 through
the Horizon 2020 and 5G-PPP programmes. The authors would like to
acknowledge the contributions of their colleagues of the SONATA
partner consortium (www.sonata-nfv.eu).
"""
import logging
import os
import time
import yaml
import paramiko
import tempfile
from collections import namedtuple
from ansible.parsing.dataloader import DataLoader
from ansible.vars.manager import VariableManager
from ansible.inventory.manager import InventoryManager
from ansible.executor.playbook_executor import PlaybookExecutor
from sonsmbase.smbase import sonSMbase
import configparser
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)
class FirewallFSM(sonSMbase):
    """FSM for the pfSense-based firewall VNF of the SONATA psa-service.

    Subscribes to the ``generic.fsm.<sfuuid>`` topic and handles start, stop,
    configure and scale life cycle events by SSH-ing into the firewall VM and
    adjusting its routing / monitoring configuration.
    """

    #: VM image URL that identifies the pfSense VDU inside a VNFR.
    PFSENSE_VM_IMAGE = ('http://files.sonata-nfv.eu/son-psa-pilot/pfSense-vnf/'
                        'pfSense.qcow2')
    #: Service platform IP used as monitoring target.
    #: TODO(review): should eventually be retrieved from the NSR
    #: (was sp.int3.sonata-nfv.eu).
    SP_IP = '10.30.0.112'

    def __init__(self):
        """Register this FSM with the specific manager registry.

        Attribute meanings (passed to sonSMbase):
            specific_manager_type: 'fsm' or 'ssm'.
            service_name: name of the service this manager belongs to.
            function_name: name of the function this manager belongs to
                (null in the SSM case).
            specific_manager_name: actual manager name (e.g. scaling,
                placement).
            id_number: distinguishes multiple SSM/FSMs created for the same
                objective.
            version / description: self-explanatory.
        """
        self.specific_manager_type = 'fsm'
        self.service_name = 'psa-service'
        self.function_name = 'firewall-vnf'
        self.specific_manager_name = 'firewall-config'
        self.id_number = '1'
        self.version = 'v0.1'
        self.description = "An FSM that subscribes to start, stop and configuration topic for Firewall VNF"
        self.is_running_in_emulator = 'SON_EMULATOR' in os.environ
        LOG.debug('Running in the emulator is %s', self.is_running_in_emulator)
        # Zero-argument super(): super(self.__class__, self) breaks if this
        # class is ever subclassed (infinite recursion).
        super().__init__(specific_manager_type=self.specific_manager_type,
                         service_name=self.service_name,
                         function_name=self.function_name,
                         specific_manager_name=self.specific_manager_name,
                         id_number=self.id_number,
                         version=self.version,
                         description=self.description)

    def on_registration_ok(self):
        """Report status to the SMR and subscribe to the FSM topic."""
        LOG.debug("Received registration ok event.")
        # Send the status to the SMR.
        status = 'Subscribed, waiting for alert message'
        message = {'name': self.specific_manager_id,
                   'status': status}
        self.manoconn.publish(topic='specific.manager.registry.ssm.status',
                              message=yaml.dump(message))
        # Subscribe to the topic that the FSM needs to listen on.
        topic = "generic.fsm." + str(self.sfuuid)
        self.manoconn.subscribe(self.message_received, topic)
        LOG.info("Subscribed to " + topic + " topic.")

    def message_received(self, ch, method, props, payload):
        """Dispatch a message received on the generic.fsm.<sfuuid> topic.

        The 'fsm_type' field in the payload indicates which life cycle event
        this message requests (start / stop / configure / scale).
        """
        LOG.debug('<-- message_received app_id=%s', props.app_id)
        # SafeLoader: the payload comes from the broker and must not be able
        # to construct arbitrary Python objects (plain yaml.load is unsafe
        # and deprecated without an explicit Loader).
        request = yaml.load(payload, Loader=yaml.SafeLoader)
        # Don't trigger on non-request messages.
        if "fsm_type" not in request:
            LOG.info("Received a non-request message, ignoring...")
            return
        LOG.info('Handling message with fsm_type=%s', request["fsm_type"])
        response = None
        fsm_type = str(request["fsm_type"])
        if fsm_type == "start":
            LOG.info("Start event received: " + str(request["content"]))
            response = self.start_event(request["content"])
        elif fsm_type == "stop":
            LOG.info("Stop event received: " + str(request["content"]))
            response = self.stop_event(request["content"])
        elif fsm_type == "configure":
            LOG.info("Config event received: " + str(request["content"]))
            response = self.configure_event(request["content"])
        elif fsm_type == "scale":
            LOG.info("Scale event received: " + str(request["content"]))
            response = self.scale_event(request["content"])
        # If a response message was generated, send it back to the FLM.
        if response is not None:
            LOG.info("Response to request generated:" + str(response))
            topic = "generic.fsm." + str(self.sfuuid)
            self.manoconn.notify(topic,
                                 yaml.dump(response),
                                 correlation_id=props.correlation_id)
            return
        LOG.info("Request received for other type of FSM, ignoring...")

    @classmethod
    def _vnfr_mgmt_ip(cls, vnfr):
        """Return the mgmt IP of the pfSense VDU in `vnfr`, or None.

        The VDU is recognised by its vm_image URL; the address is taken from
        the first connection point of the first VNFC instance.
        """
        vdu = vnfr['virtual_deployment_units'][0]
        if vdu['vm_image'] == cls.PFSENSE_VM_IMAGE:
            return (vdu['vnfc_instance'][0]['connection_points'][0]
                    ['interface']['address'])
        return None

    @staticmethod
    def _run_ssh_command(ssh, command):
        """Run `command` over `ssh`; log and return decoded (stdout, stderr)."""
        _, ssh_stdout, ssh_stderr = ssh.exec_command(command)
        sout = ssh_stdout.read().decode('utf-8')
        serr = ssh_stderr.read().decode('utf-8')
        LOG.info("stdout: {0}\nstderr: {1}".format(sout, serr))
        return sout, serr

    def start_event(self, content):
        """Handle a life cycle start event.

        Fixes the routing inside the firewall VM: removes the static route to
        8.8.8.8, forces the input subnet onto vtnet1, and re-adds routes for
        the FSM, the service platform and the debug network via the original
        default gateway.
        """
        LOG.info("Performing life cycle start event")
        LOG.info("content: " + str(content.keys()))
        mgmt_ip = self._vnfr_mgmt_ip(content["vnfr"])
        if not mgmt_ip:
            LOG.error("Couldn't obtain mgmt IP address from VNFR during start")
            return None
        ssh = self._create_ssh_connection(mgmt_ip)
        if not ssh:
            LOG.error('Unable to establish an SSH connection during the start event')
            return None
        LOG.info("Remove the static route to 8.8.8.8")
        self._run_ssh_command(ssh, "route del -host 8.8.8.8 || true")
        LOG.info("Get the subnet of vtnet1 (input)")
        # NOTE(review): the grep targets vtnet2 although the comment and the
        # route below talk about vtnet1 — looks intentional (the subnet is
        # currently attached to the wrong interface) but verify.
        sout, _ = self._run_ssh_command(
            ssh, "netstat -rn4 | grep -E 'link[#].+vtnet2' | awk '{print $1}'")
        vtnet1_subnet = sout.strip()
        LOG.info("Fix the routing and force {0} to use vtnet1".format(vtnet1_subnet))
        self._run_ssh_command(
            ssh,
            "route change -net {0} -interface vtnet1 -ifp vtnet1".format(vtnet1_subnet))
        LOG.info("Retrieve FSM IP address")
        # $SSH_CLIENT holds "client_ip client_port server_port" for this
        # session, i.e. the FSM's own address as seen by the VM.
        sout, _ = self._run_ssh_command(ssh, "echo $SSH_CLIENT | awk '{ print $1}'")
        fsm_ip = sout.strip()
        LOG.info("FSM IP: {0}".format(fsm_ip))
        LOG.info("Get current default GW")
        sout, _ = self._run_ssh_command(
            ssh, "/usr/bin/netstat -nr| awk '/default/ { print $2 }'")
        default_gw = sout.strip()
        LOG.info("Default GW: {0}".format(str(default_gw)))
        LOG.info("Configure route for FSM IP")
        self._run_ssh_command(ssh, "route add {0} {1}".format(fsm_ip, default_gw))
        if self.SP_IP != fsm_ip:
            LOG.info("Configure route for monitoring ")
            self._run_ssh_command(
                ssh, "route add {0} {1}".format(self.SP_IP, default_gw))
        else:
            LOG.info("The SP and the FSM have the same ip")
        LOG.info("Always use ethO (mgmt) for connection from 10.230.x.x for debug")
        self._run_ssh_command(
            ssh, "route add -net 10.230.0.0/16 {0}".format(default_gw))
        # Firewall activation (pfctl -e) deliberately not performed here.
        ssh.close()  # don't leak the SSH session
        # TODO: complete the response
        return {'status': 'COMPLETED'}

    def stop_event(self, content):
        """Handle a life cycle stop event.

        Currently only validates SSH reachability of the firewall VM;
        deactivating the firewall (pfctl -d) is deliberately not performed.
        """
        LOG.info("Performing life cycle stop event")
        LOG.info("content: " + str(content.keys()))
        mgmt_ip = self._vnfr_mgmt_ip(content["vnfr"])
        if not mgmt_ip:
            LOG.error("Couldn't obtain IP address from VNFR during stop")
            return None
        ssh = self._create_ssh_connection(mgmt_ip)
        if not ssh:
            LOG.error('Unable to establish an SSH connection during the stop event')
            return None
        ssh.close()  # don't leak the SSH session
        # TODO: complete the response
        return {'status': 'COMPLETED'}

    def configure_event(self, content):
        """Handle a life cycle configure event.

        Replaces the default gateway with the next VNF in the chain (or the
        stored vtnet2/wan gateway if there is none), then installs and starts
        the monitoring probe on the firewall VM.
        """
        LOG.info("Performing life cycle configure event")
        LOG.info("content: " + str(content.keys()))
        # `or None` preserves the original semantics: falsy values (e.g. '')
        # are treated as missing.
        mgmt_ip = content['management_ip'] or None
        next_ip = content['next_ip'] or None
        if not mgmt_ip:
            LOG.error("Couldn't obtain mgmt IP address from VNFR during configuration")
            return None
        ssh = self._create_ssh_connection(mgmt_ip)
        if not ssh:
            LOG.error('Unable to establish an SSH connection during the configure event')
            return None
        LOG.info("Delete default GW")
        self._run_ssh_command(ssh, "route del default")
        # If the next VNF does not exist, use the vtnet2 (wan) gateway that
        # pfSense stored on disk.
        if not next_ip:
            sout, _ = self._run_ssh_command(ssh, "cat /tmp/vtnet2_router")
            next_ip = sout.strip()
            LOG.info("New default GW: {0}".format(str(next_ip)))
        # Fixed broken format string: the original message had no {0}
        # placeholder so the gateway value was silently dropped from the log.
        LOG.info("Configure default GW for next VNF or gateway {0}".format(next_ip))
        self._run_ssh_command(
            ssh, "route add default -ifp vtnet2 {0}".format(next_ip))
        # Configure and activate the monitoring probe.
        LOG.info("Create remote directory")
        self._run_ssh_command(ssh, "mkdir -p /home/sonata/monitoring")
        LOG.info("Create monitoring conf")
        self.createConf(self.SP_IP, 4, 'vfw-vnf')
        sftp = ssh.open_sftp()
        LOG.info("SFTP connection established")
        sftp.put('node.conf', '/home/sonata/monitoring/node.conf')
        sftp.close()
        self._run_ssh_command(ssh, "/etc/rc.d/sonmonprobe start")
        ssh.close()
        # TODO: complete the response
        LOG.info("Returning response")
        return {'status': 'COMPLETED'}

    def scale_event(self, content):
        """Handle a life cycle scale event (no scaling logic implemented yet)."""
        LOG.info("Performing life cycle scale event")
        LOG.info("content: " + str(content.keys()))
        # TODO: add the scale logic; `content` carries the required data.
        return {'status': 'COMPLETED'}

    def fw_configure(self, fw_vnfr):
        """Configure the firewall VM via the ansible playbook ./ansible/site.yml.

        Returns True on success, False if the playbook is missing or the
        mgmt IP cannot be determined.
        """
        fw_cpinput_network = '10.30.0.2/30'
        loader = DataLoader()
        with tempfile.NamedTemporaryFile() as fp:
            # Build a one-host ansible inventory on the fly.
            fp.write(b'[firewallserver]\n')
            if self.is_running_in_emulator:
                fp.write(b'mn.vnf_fw')
            else:
                # BUG FIX: `mgmt_ip` was an undefined name here (NameError at
                # runtime); derive it from the VNFR the same way the life
                # cycle handlers do.
                mgmt_ip = self._vnfr_mgmt_ip(fw_vnfr)
                if not mgmt_ip:
                    LOG.error("Couldn't obtain mgmt IP address from VNFR for ansible inventory")
                    return False
                fp.write(mgmt_ip.encode('utf-8'))
            fp.flush()
            inventory = InventoryManager(loader=loader, sources=[fp.name])
            variable_manager = VariableManager(loader=loader, inventory=inventory)
            playbook_path = os.path.abspath('./ansible/site.yml')
            LOG.debug('Targeting the ansible playbook: %s', playbook_path)
            if not os.path.exists(playbook_path):
                LOG.error('The playbook does not exist')
                return False
            Options = namedtuple('Options',
                                 ['listtags', 'listtasks', 'listhosts',
                                  'syntax', 'connection', 'module_path',
                                  'forks', 'remote_user', 'private_key_file',
                                  'ssh_common_args', 'ssh_extra_args',
                                  'sftp_extra_args', 'scp_extra_args',
                                  'become', 'become_method', 'become_user',
                                  'verbosity', 'check', 'diff'])
            options = Options(listtags=False, listtasks=False, listhosts=False,
                              syntax=False, connection='ssh', module_path=None,
                              forks=100, remote_user='slotlocker',
                              private_key_file=None, ssh_common_args=None,
                              ssh_extra_args=None, sftp_extra_args=None,
                              scp_extra_args=None, become=True,
                              become_method=None, become_user='root',
                              verbosity=None, check=False, diff=True)
            # NOTE(review): the docker connection is forced unconditionally,
            # not only in the emulator — verify this is intended.
            options = options._replace(connection='docker', become=False)
            variable_manager.extra_vars = {'FW_CPINPUT_NETWORK': fw_cpinput_network,
                                           'SON_EMULATOR': self.is_running_in_emulator}
            pbex = PlaybookExecutor(playbooks=[playbook_path],
                                    inventory=inventory,
                                    variable_manager=variable_manager,
                                    loader=loader, options=options,
                                    passwords={})
            pbex.run()
        return True

    def _create_ssh_connection(self, mgmt_ip):
        """Open an SSH connection to the firewall VM, retrying up to 30 times.

        Returns:
            A connected paramiko.SSHClient, or None if all retries failed.
        """
        port = 22
        username = 'root'
        password = 'pfsense'
        ssh = paramiko.SSHClient()
        # The VM's host key is not known in advance; accept it automatically.
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        num_retries = 30
        for _ in range(num_retries):
            try:
                ssh.connect(mgmt_ip, port, username, password)
                return ssh
            except paramiko.BadHostKeyException as e:
                LOG.info("Bad entry in ~/.ssh/known_hosts: %s", e)
                time.sleep(1)
            except EOFError:
                LOG.info('Unexpected Error from SSH Connection, retry in 10 seconds')
                time.sleep(10)
            except Exception as e:
                # The VM may still be booting; keep retrying.
                LOG.info('SSH Connection refused from %s, will retry in 10 seconds (%s)',
                         mgmt_ip, e)
                time.sleep(10)
        LOG.error('Could not establish SSH connection within max retries')
        return None

    def createConf(self, pw_ip, interval, name):
        """Write the monitoring probe config file 'node.conf' (cwd).

        Args:
            pw_ip (str): IP of the Prometheus push gateway host.
            interval (int): probe post frequency in seconds.
            name (str): node name reported by the probe.
        """
        config = configparser.RawConfigParser()
        config.add_section('vm_node')
        config.add_section('Prometheus')
        config.set('vm_node', 'node_name', name)
        # BUG FIX: configparser requires string option values in Python 3;
        # passing the int directly raised TypeError.
        config.set('vm_node', 'post_freq', str(interval))
        config.set('Prometheus', 'server_url', 'http://' + pw_ip + ':9091/metrics')
        with open('node.conf', 'w') as configfile:
            config.write(configfile)
        # Read the file back for debug logging, closing it deterministically.
        with open('node.conf', 'r') as f:
            LOG.debug('Mon Config-> ' + "\n" + f.read())
def main(working_dir=None):
    """Start the firewall FSM and keep the process alive.

    Args:
        working_dir (str): optional directory to chdir into before starting
            (relative paths such as ./ansible/site.yml are resolved from it).
    """
    if working_dir:
        os.chdir(working_dir)
    LOG.info('Welcome to the main in %s', __name__)
    # Instantiating the FSM registers it with the broker; message handling
    # runs on background threads, so the main thread just sleeps forever.
    FirewallFSM()
    while True:
        time.sleep(10)
if __name__ == '__main__':
    main()
|
from robot.api.deco import keyword # imported from here by keyword modules
from .action import ActionKeywords
from .context import (
ActionNotPossible,
ElementNotFound,
LibraryContext,
ControlNotFound,
MultipleControlsFound,
TimeoutException,
WindowControlError,
with_timeout,
)
from .elements import ElementKeywords
from .locators import Locator, LocatorKeywords
from .window import WindowKeywords
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# pylint: disable=C0301
# pylint: disable=C0111
import os
import re
import sys
import time
from urllib import parse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.ui import Select
def post_ad_mandatory_fields_set(driver, ad):
    """Fill every mandatory field of the "post ad" form.

    Iterates over all labels with class ``formgroup-label-mandatory`` and
    fills the control each label points at (via its ``for`` attribute):

    * ids matching ``...<name>_s...`` are treated as combo boxes and set from
      ``ad[<name>]``, falling back to the first option with a non-empty value;
    * all other ids are treated as text fields and set from
      ``ad["field_" + id]``, falling back to a placeholder value.

    Args:
        driver: selenium webdriver with the "post ad" page loaded.
        ad (dict): field name -> value mapping for the ad.
    """
    for el in driver.find_elements_by_xpath('//*[@class="formgroup-label-mandatory"]'):
        sForId = el.get_attribute("for")
        if sForId is None:
            continue
        print("Detected mandatory field (Name='%s', ID='%s')" % (el.text, sForId))
        # Combo box ids look like "<prefix>.<name>_s<suffix>"; the raw string
        # fixes the invalid "\." escape of the original non-raw pattern.
        reMatch = re.search(r'.*\.(.*)_s.*', sForId, re.IGNORECASE)
        if reMatch is not None:
            sForIdRaw = reMatch.group(1)
            if sForIdRaw in ad:
                Select(driver.find_element_by_id(sForId)).select_by_visible_text(ad[sForIdRaw])
            else:
                print("*** Warning: No value for combo box '%s' defined, setting to default (first entry)" % (sForIdRaw,))
                s = Select(driver.find_element_by_id(sForId))
                # Pick the first option that has a non-empty value attribute.
                iOpt = 0
                for o in s.options:
                    if len(o.get_attribute("value")):
                        break
                    iOpt += 1
                s.select_by_value(s.options[iOpt].get_attribute("value"))
        else:
            sForIdRaw = sForId
            if "field_" + sForIdRaw in ad:
                sValue = ad["field_" + sForIdRaw]
            else:
                print("*** Warning: No value for text field '%s' defined, setting to empty value" % (sForIdRaw,))
                sValue = 'Nicht angegeben'
            try:
                driver.find_element_by_id(sForId).send_keys(sValue)
            except Exception:
                # Best effort: some labelled "fields" are not typable
                # controls.  The bare `except:` used before also swallowed
                # KeyboardInterrupt/SystemExit.
                pass
# HACK: parse_qs is fed the *whole* URL, so the resulting dict key is the full
# "https://...#?path" prefix instead of just "path" — the lookup below relies
# on that.  A cleaner approach would split off the fragment with urlparse
# first; kept as-is because downstream code depends on the exact key.
dQuery = parse.parse_qs('https://www.ebay-kleinanzeigen.de/p-kategorie-aendern.html#?path=161/225/netzwerk_modem&isParent=false')
# NOTE(review): this loop only rebinds the loop variable — the 'cat_'-prefixed
# category ids are computed and immediately discarded; presumably a leftover
# from category-selection code, verify before removing.
for sPathCat in dQuery.get('https://www.ebay-kleinanzeigen.de/p-kategorie-aendern.html#?path')[0].split('/'):
    sPathCat = 'cat_' + sPathCat
# Sample ad data used by post_ad_mandatory_fields_set ("field_" + element id).
ad = {
    'field_postad-title' : 'foobar',
    'field_groesse' : '110'
}
#
#driver = webdriver.Firefox()
#driver.get("file:///home/jiinx/Downloads/kleinanzeigen_test/fields.html")
#post_ad_mandatory_fields_set(driver, ad)
#driver.close()
#
|
# Keep this file separate
def oauth():
    """Return the Twitter OAuth credentials as a dict.

    SECURITY NOTE: credentials were hard-coded in source control.  They now
    act only as fallback defaults; set the TWITTER_* environment variables to
    override them (the returned keys and default values are unchanged, so
    existing callers are unaffected).
    """
    import os
    return {
        "consumer_key": os.environ.get(
            "TWITTER_CONSUMER_KEY", "8nGiBTEaZQ9wYlcXonD80GyaK"),
        "consumer_secret": os.environ.get(
            "TWITTER_CONSUMER_SECRET",
            "tPSTUnHqFLLLtpXVIv7ZMH0x4RoXgHhT6FtV7YqCgvk9GWy4ej"),
        "token_key": os.environ.get(
            "TWITTER_TOKEN_KEY",
            "215665928-IEJ3KCbG4NX1xfb0dE2M2qvgbWSTmLj15ZRfO5TI"),
        "token_secret": os.environ.get(
            "TWITTER_TOKEN_SECRET",
            "kqunLoUGTTvcpdzKriIwMbSsK3M52dA0rdsawRGrxU5dP"),
    }
|
# This data processing script for a customly segmented dataset is heavily based
# on the preparation scripts of fairseq speech-to-text
# https://github.com/pytorch/fairseq/blob/main/examples/speech_to_text/
import argparse
import shutil
from itertools import groupby
from pathlib import Path
from typing import Tuple
import pandas as pd
import soundfile as sf
import torch
import yaml
from torch.utils.data import Dataset
from tqdm import tqdm
from examples.speech_to_text.data_utils import (convert_waveform, create_zip,
extract_fbank_features,
filter_manifest_df,
get_zip_manifest,
save_df_to_tsv)
from fairseq.data.audio.audio_utils import get_waveform
MANIFEST_COLUMNS = ["id", "audio", "n_frames", "tgt_text", "speaker", "tgt_lang"]
SR = 16_000
class CustomDataset(Dataset):
    """
    Create a Dataset from a yaml segmentation file.
    Each item is a tuple of the form:
    waveform, sample_rate, source utterance, target utterance, speaker_id,
    utterance_id

    (source/target utterance and speaker_id are "NA" placeholders here: the
    segmentation yaml carries timings only, no transcripts.)
    """
    def __init__(self, path_to_yaml: str, path_to_wavs: str) -> None:
        # NOTE(review): despite the str annotations, callers pass
        # pathlib.Path objects (the `/` join below requires it) — verify.
        # Load audio segments
        with open(path_to_yaml) as f:
            segments = yaml.load(f, Loader=yaml.BaseLoader)
        # (str -> float) to have a correct sorting of the segments for each talk
        for i, segm in enumerate(segments):
            segments[i]["offset"] = float(segm["offset"])
        # Gather info
        # NOTE(review): itertools.groupby only groups *consecutive* items,
        # so this assumes the yaml lists all segments of a wav contiguously
        # — confirm for custom segmentations.
        self.data = []
        for wav_filename, _seg_group in groupby(segments, lambda x: x["wav"]):
            wav_path = path_to_wavs / wav_filename
            sample_rate = sf.info(wav_path.as_posix()).samplerate
            seg_group = sorted(_seg_group, key=lambda x: x["offset"])
            for i, segment in enumerate(seg_group):
                # offset/duration are seconds; convert to frame counts.
                offset = int(float(segment["offset"]) * sample_rate)
                n_frames = int(float(segment["duration"]) * sample_rate)
                _id = f"{wav_path.stem}_{i}"
                self.data.append(
                    (
                        wav_path.as_posix(),
                        offset,
                        n_frames,
                        sample_rate,
                        "NA",  # source utterance (not available)
                        "NA",  # target utterance (not available)
                        "NA",  # speaker id (not available)
                        _id,
                    )
                )
    def __getitem__(self, n: int) -> Tuple[torch.Tensor, int, str, str, str, str]:
        # Read only this segment's frames from the wav (no full-file load).
        wav_path, offset, n_frames, sr, src_utt, tgt_utt, spk_id, utt_id = self.data[n]
        waveform, _ = get_waveform(wav_path, frames=n_frames, start=offset)
        waveform = torch.from_numpy(waveform)
        return waveform, sr, src_utt, tgt_utt, spk_id, utt_id
    def __len__(self) -> int:
        return len(self.data)
def prepare_custom_dataset(
    path_to_yaml: str,
    path_to_wavs: str,
    tgt_lang: str,
    use_audio_input: int,
):
    """Extract audio/features for a custom segmentation and write a TSV manifest.

    Outputs (written next to the yaml file):
      * a zip of either 16 kHz mono flac files (use_audio_input truthy) or
        80-dim log-mel filterbank features (.npy, falsy);
      * a <yaml_name>.tsv manifest referencing the zipped entries.

    Args:
        path_to_yaml: path to the yaml segmentation file.
        path_to_wavs: directory containing the wavs named in the yaml.
        tgt_lang: target language tag stored in the manifest (may be "").
        use_audio_input: int flag, converted to bool below.
    """
    use_audio_input = bool(use_audio_input)
    path_to_yaml = Path(path_to_yaml)
    path_to_wavs = Path(path_to_wavs)
    path_to_custom_dataset = path_to_yaml.parent
    yaml_name = path_to_yaml.stem
    # Extract features
    audio_root = path_to_custom_dataset / ("flac" if use_audio_input else "fbank80")
    audio_root.mkdir(exist_ok=True, parents=True)
    zip_path = path_to_custom_dataset / f"{audio_root.name}.zip"
    dataset = CustomDataset(path_to_yaml, path_to_wavs)
    # First pass over the dataset: write one flac/npy file per utterance.
    for waveform, sample_rate, _, _, _, utt_id in tqdm(dataset):
        if use_audio_input:
            wf, _ = convert_waveform(
                waveform,
                sample_rate,
                to_mono=True,
                to_sample_rate=SR,
            )
            sf.write(
                audio_root / f"{utt_id}.flac",
                wf.numpy().T,
                SR,
            )
        else:
            _ = extract_fbank_features(
                waveform, sample_rate, audio_root / f"{utt_id}.npy"
            )
    # Pack features into ZIP
    print("ZIPing audios/features...")
    create_zip(audio_root, zip_path)
    print("Fetching ZIP manifest...")
    audio_paths, audio_lengths = get_zip_manifest(
        zip_path,
        is_audio=use_audio_input,
    )
    # Generate TSV manifest
    print("Generating manifest...")
    # Second pass: this re-reads every segment's audio via __getitem__ even
    # though only the ids/placeholders are used here.
    manifest = {c: [] for c in MANIFEST_COLUMNS}
    for _, _, _, tgt_utt, speaker_id, utt_id in tqdm(dataset):
        manifest["id"].append(utt_id)
        manifest["audio"].append(audio_paths[utt_id])
        manifest["n_frames"].append(audio_lengths[utt_id])
        manifest["tgt_text"].append(tgt_utt)
        manifest["speaker"].append(speaker_id)
        manifest["tgt_lang"].append(tgt_lang)
    df = pd.DataFrame.from_dict(manifest)
    df = filter_manifest_df(df, is_train_split=False, is_audio=use_audio_input)
    save_df_to_tsv(df, path_to_custom_dataset / f"{yaml_name}.tsv")
    # Clean up: the per-utterance files are now inside the zip.
    shutil.rmtree(audio_root)
if __name__ == "__main__":
    # Command-line front-end for prepare_custom_dataset.
    arg_parser = argparse.ArgumentParser()
    cli_options = (
        (("--path_to_yaml", "-y"),
         dict(type=str, required=True,
              help="absolute path to the yaml of the custom segmentation")),
        (("--path_to_wavs", "-w"),
         dict(type=str, required=True,
              help="absolute path to the directory with wavs")),
        (("--tgt_lang", "-l"),
         dict(type=str, default="",
              help="optionally indicate the target language")),
        (("--use_audio_input", "-i"),
         dict(type=int, default=0,
              help="whether the input is waveforms or fbank features")),
    )
    for flags, options in cli_options:
        arg_parser.add_argument(*flags, **options)
    cli_args = arg_parser.parse_args()
    prepare_custom_dataset(
        cli_args.path_to_yaml,
        cli_args.path_to_wavs,
        cli_args.tgt_lang,
        cli_args.use_audio_input,
    )
|
"""Converter that converts HTML files from the Ghbook library
to OpenITI mARkdown.
The converter has two main functions:
* convert_file: convert a single html file.
* convert_files_in_folder: convert all html files in a given folder
Usage examples:
>>> from html_converter_Ghbook import convert_file
>>> folder = r"test/Ghbook/"
>>> convert_file(folder+"10584.html", dest_fp=folder+"converted/10584")
>>> from html_converter_Ghbook import convert_files_in_folder
>>> convert_files_in_folder(folder, dest_folder=folder+"converted")
Both functions use the GhbookHtmlConverter class to do the heavy lifting.
The GhbookHtmlConverter is a subclass of GenericHtmlConverter,
which in its turn inherits many functions from the GenericConverter.
GenericConverter
\_ GenericHtmlConverter
\_ EShiaHtmlConverter
\_ GhbookHtmlConverter
\_ ...
Overview of the methods of these classes:
(methods of GenericConverter are inherited by GenericHtmlConverter;
and methods of GenericHtmlConverter are inherited by GhbookHtmlConverter.
Methods of the superclass with the same name
in the subclass are overwritten by the latter)
================================== ========================== ==========================
GenericConverter GenericHtmlConverter GhbookHtmlConverter
================================== ========================== ==========================
__init__ __init__ (inherited)
convert_files_in_folder (inherited) (inherited)
convert_file                       (inherited)                (inherited)
make_dest_fp (inherited - generic!) (inherited - generic!)
get_metadata (dummy) (inherited - dummy!) get_metadata
get_data get_data (inherited)
pre_process (inherited) pre_process
add_page_numbers (dummy) (inherited - dummy!) add_page_numbers
add_structural_annotations (dummy) add_structural_annotations add_structural_annotations
remove_notes (dummy) remove_notes remove_notes
reflow (inherited) (inherited)
add_milestones (dummy) (inherited - dummy!) (inherited - dummy!)
post_process (inherited - generic!) post_process
compose (inherited) (inherited)
save_file (inherited) (inherited)
inspect_tags_in_html (inherited)
inspect_tags_in_folder (inherited)
find_example_of_tag (inherited)
================================== ========================== ==========================
The GhbookHtmlConverter's add_structural_annotations method uses html2md_Ghbook,
an adaptation of the generic html2md (based on markdownify)
to convert the html tags to OpenITI annotations.
Examples:
>>> from html_converter_Ghbook import GhbookHtmlConverter
>>> conv = GhbookHtmlConverter()
>>> conv.dest_folder = r"test/Ghbook/converted"
>>> conv.VERBOSE = False
>>> folder = r"test/Ghbook/"
>>> conv.convert_file(folder+"10584.html")
>>> conv.convert_files_in_folder(folder, ["html"])
"""
from bs4 import BeautifulSoup
import re
if __name__ == '__main__':
from os import sys, path
root_folder = path.dirname(path.dirname(path.abspath(__file__)))
root_folder = path.dirname(path.dirname(root_folder))
sys.path.append(root_folder)
from openiti.new_books.convert.html_converter_generic import GenericHtmlConverter
from openiti.new_books.convert.helper import html2md_Ghbook
def convert_file(fp, dest_fp=None):
    """Convert one file to OpenITI format.

    Args:
        fp (str): path to the file that must be converted.
        dest_fp (str): path to the converted file. Defaults to None
            (in which case, the converted folder will be put in a folder
            named "converted" in the same folder as the source file)

    Returns:
        None
    """
    conv = GhbookHtmlConverter()
    conv.convert_file(fp, dest_fp=dest_fp)
def convert_files_in_folder(src_folder, dest_folder=None,
                            extensions=None, exclude_extensions=None,
                            fn_regex=None):
    r"""Convert all files in a folder to OpenITI format.\
    Use the `extensions` and `exclude_extensions` lists to filter\
    the files to be converted.

    Args:
        src_folder (str): path to the folder that contains
            the files that must be converted.
        dest_folder (str): path to the folder where converted files
            will be stored.
        extensions (list): list of extensions; if this list is not empty,
            only files with an extension in the list should be converted.
            Defaults to ["html"].
        exclude_extensions (list): list of extensions;
            if this list is not empty,
            only files whose extension is not in the list will be converted.
            Defaults to ["yml"].
        fn_regex (str): regular expression defining the filename pattern
            e.g., "-(ara|per)\d". If `fn_regex` is defined,
            only files whose filename matches the pattern will be converted.

    Returns:
        None
    """
    # None-defaults avoid the mutable-default-argument trap: the previous
    # list defaults were shared between calls and could be mutated downstream.
    # (The raw docstring also fixes the invalid "\d" escape warning.)
    if extensions is None:
        extensions = ["html"]
    if exclude_extensions is None:
        exclude_extensions = ["yml"]
    conv = GhbookHtmlConverter()
    conv.convert_files_in_folder(src_folder, dest_folder=dest_folder,
                                 extensions=extensions,
                                 exclude_extensions=exclude_extensions,
                                 fn_regex=fn_regex)
################################################################################
class GhbookHtmlConverter(GenericHtmlConverter):
    """Converter for html files of the Ghbook library to OpenITI mARkdown.

    Overrides the pre/post-processing and annotation hooks of
    GenericHtmlConverter; see the module docstring for the method overview.
    """

    def pre_process(self, text):
        """Remove superfluous elements from the html file before processing."""
        def remove_html_elements(soup, tag_name, class_=None, contains_str=None):
            """Remove all html elements with tag `tag` and class `class_` \
            if they contain `contains_str`

            Args:
                soup (BeautifulSoup object): BeautifulSoup representation
                    of the html file
                tag_name (str): name of the tag that needs to be removed
                    (e.g., "p", "div", "span").
                class_ (str): class of the tag that needs to be removed.
                    Defaults to None. If None, all `tag_name` elements
                    will be removed, regardless of their class.
                contains_str (str): defaults to None. If not None,
                    `tag_name` tags will only be removed if the text within them
                    contains the `contains_str` string.
            """
            if class_:
                elements = soup.find_all(tag_name, class_=class_)
            else:
                elements = soup.find_all(tag_name)
            for el in elements:
                if contains_str:
                    if contains_str in el.text:
                        el.extract()
                else:
                    el.extract()
        text = super().pre_process(text)
        # attach separated wa- and a- prefixes to the following word:
        text = re.sub(r"\b([وأ])[\s~]+", r"\1", text)
        # remove superfluous html elements:
        # NOTE(review): no parser is passed to BeautifulSoup, so behavior
        # depends on which parser bs4 picks; html parsers usually lowercase
        # tag names, in which case the upper-case "STYLE"/"TITLE" lookups
        # would match nothing — verify against a sample file.
        soup = BeautifulSoup(text)
        remove_html_elements(soup, "STYLE")
        remove_html_elements(soup, "TITLE")
        # Mark the start of the footnote section (below the content_hr rule)
        # so later steps can split notes from the body.
        for fn_line in soup.find_all("hr", class_="content_hr"):
            fn_line.insert_after("FOOTNOTES")
        text = soup.prettify()
        return text

    def add_page_numbers(self, text, source_fp):
        """Convert the page numbers in the text into OpenITI mARkdown format

        In Ghbook texts, the page numbers are in the page header
        (<div class="PageHead">).
        Volume numbers are not mentioned in the html files,
        but every volume is a different html file
        and volume numbers should be marked in the file names as VOLxxx.
        The script gets the volume number from the file name
        and the page number from the page header,
        joins these together in the OpenITI page number format PageVxxPxxx
        and adds this into the html at the end of the page.
        It also deletes the page header after extracting the page number.
        """
        # try to get the volume number from the filename:
        try:
            vol_no = int(re.findall("VOL(\d+)", source_fp)[0])
            # leave the page part as an unfilled {:03d} placeholder for below:
            vol_no = "PageV{:02d}P{}".format(vol_no, "{:03d}")
        except:
            # no VOLxxx marker in the filename: default to volume 1
            vol_no = "PageV01P{:03d}"
        # add the page number
        soup = BeautifulSoup(text)
        for span in soup.find_all("SPAN", class_="content_text"):
            span_text = span.text.strip()
            # page markers look like "ص:45" (Arabic "page: 45")
            if span_text.startswith("ص:"):
                page_no = re.findall("\d+", span_text)[0]
                page_no = vol_no.format(int(page_no))
                span.insert_after(page_no)
                span.extract()
        return soup.prettify()

    def remove_notes(self, text):
        """Remove footnotes from text, and format them as endnotes.

        Footnotes in Ghbook html files are below a horizontal line
        (<HR class=content_hr>), located just below the page number
        (<P class=content_paragraph><SPAN class=content_text>ص:45</SPAN></P></DIV></DIV>)
        each footnote in a <DIV id=content_note_PAGE_NOTENUMBER class=content_note>(footnote text)</DIV>
        This function extracts the footnotes from the texts
        and turns them into endnotes.
        The markers that indicate the location of the notes
        within the text are not removed.

        Returns:
            tuple: (text without footnotes, endnote section string)
        """
        # split on page numbers so each note can be tagged with its page:
        split_text = re.split("(PageV\d+P\d+)", text)
        text = []
        footnotes = []
        for i, t in enumerate(split_text):
            if re.match("PageV\d+P\d+", t):
                text.append(t)
            else:
                notes = re.findall("content_note>([^<]+)", t)
                if notes:
                    try:
                        # tag the notes with the preceding page number:
                        notes = "\n".join(notes) + "\n" + split_text[i-1] + "\n\n"
                    except:
                        # no preceding page number (start of text):
                        notes = "\n".join(notes) + "\n" + "PageV00P000\n\n"
                    footnotes.append(notes)
                text.append(re.sub("<DIV [^>]+? class=content_note>[^<]+?</DIV>", "", t))
        text = "\n\n".join(text)
        notes = "\n\n".join(footnotes)
        notes = re.sub("\n+#* *\n+", "\n\n", notes)
        # endnote_splitter is inherited from the generic converter
        notes = self.endnote_splitter + notes
        return text, notes

    def add_structural_annotations(self, html):
        """Convert html to mARkdown text using a html2md converter."""
        text = html2md_Ghbook.markdownify(html)
        return text

    def post_process(self, text):
        """Deal with formatting problems introduced during the conversion process."""
        text = super().post_process(text)
        # remove page numbers of empty pages:
        text = re.sub("(PageV\d+P\d+)\s*PageV\d+P\d+", r"\1", text)
        # remove empty paragraphs:
        text = re.sub(r"[\r\n]+# *[\r\n]+", "\n", text)
        # adjust spacing after closing brackets and punctuation:
        fmt = ")»،؛:.!؟\-"
        fmt2 = fmt + "\d\s"
        text = re.sub("([{}]+)([^{}])".format(fmt, fmt2), r"\1 \2", text)
        text = re.sub("\) ([{}])".format(fmt), r")\1", text)
        # adjust spacing before opening brackets:
        text = re.sub("(\w)([(«])", r"\1 \2", text)
        # remove superfluous new lines before a new paragraph/page number
        text = re.sub("[\r\n]+(# |Page)", r"\n\1", text)
        return text
if __name__ == "__main__":
    # Run the doctests embedded in the module docstring (they convert the
    # sample files under test/Ghbook/), then pause before any further work.
    import doctest
    doctest.testmod()
    input("Passed all tests. Continue?")
|
import logging
from typing import Awaitable, Callable, Dict, Optional, Set, Tuple, Type, Union
from immutables import Map
from tickit.core.management.event_router import InverseWiring, Wiring
from tickit.core.management.schedulers.base import BaseScheduler
from tickit.core.state_interfaces.state_interface import StateConsumer, StateProducer
from tickit.core.typedefs import (
Changes,
ComponentID,
ComponentPort,
Input,
Output,
PortID,
SimTime,
)
# Module-level logger for scheduler diagnostics.
LOGGER = logging.getLogger(__name__)
class SlaveScheduler(BaseScheduler):
    """A slave scheduler which orchestrates nested tickit simulations."""

    def __init__(
        self,
        wiring: Union[Wiring, InverseWiring],
        state_consumer: Type[StateConsumer],
        state_producer: Type[StateProducer],
        expose: Dict[PortID, ComponentPort],
        raise_interrupt: Callable[[], Awaitable[None]],
    ) -> None:
        """A slave scheduler constructor which adds wiring and saves values for reference.

        Args:
            wiring (Union[Wiring, InverseWiring]): A wiring or inverse wiring object
                representing the connections between components in the system.
            state_consumer (Type[StateConsumer]): The state consumer class to be used
                by the component.
            state_producer (Type[StateProducer]): The state producer class to be used
                by the component.
            expose (Dict[PortID, ComponentPort]): A mapping of slave scheduler
                outputs to internal component ports.
            raise_interrupt (Callable[[], Awaitable[None]]): A callback to request that
                the slave scheduler is updated immediately.
        """
        # Insert the mock "expose" component so requested outputs are routed
        # back to this scheduler before handing wiring to the base class.
        wiring = self.add_exposing_wiring(wiring, expose)
        super().__init__(wiring, state_consumer, state_producer)
        self.raise_interrupt = raise_interrupt
        # Components which have requested an interrupt since the last tick.
        self.interrupts: Set[ComponentID] = set()

    @staticmethod
    def add_exposing_wiring(
        wiring: Union[Wiring, InverseWiring],
        expose: Dict[PortID, ComponentPort],
    ) -> InverseWiring:
        """Adds wiring to expose slave scheduler outputs.

        Adds wiring to expose slave scheduler outputs; this is performed by creating a
        mock "expose" component with inverse wiring set by expose.

        Args:
            wiring (Union[Wiring, InverseWiring]): A wiring or inverse wiring object
                representing the connections between components in the system.
            expose (Dict[PortID, ComponentPort]): A mapping of slave scheduler
                outputs to internal component ports.

        Returns:
            InverseWiring:
                An inverse wiring object representing the connections between
                components in the system and the "expose" component which acts as the
                slave scheduler output.
        """
        if isinstance(wiring, Wiring):
            wiring = InverseWiring.from_wiring(wiring)
        # NOTE(review): assumes indexing a missing component id yields an
        # empty, updatable mapping — confirm InverseWiring semantics.
        wiring[ComponentID("expose")].update(expose)
        return wiring

    async def update_component(self, input: Input) -> None:
        """Sends an input to the corresponding component. Mocks I/O for "external" or "expose".

        For real components the input is sent in a message to their input topic, for
        the mock component named "external", external inputs are injected, whilst for
        the mock component named "expose" the input is stored for use as the
        scheduler output.

        Args:
            input (Input): The input message to be sent to the component.
        """
        if input.target == ComponentID("external"):
            # Inject the externally supplied changes into the simulation.
            await self.ticker.propagate(
                Output(ComponentID("external"), input.time, self.input_changes, None)
            )
        elif input.target == ComponentID("expose"):
            # Capture changes destined for the outside world; propagate an
            # empty change set so the tick can still complete.
            self.output_changes = input.changes
            await self.ticker.propagate(
                Output(ComponentID("expose"), input.time, Changes(Map()), None)
            )
        else:
            await super().update_component(input)

    async def on_tick(
        self, time: SimTime, changes: Changes
    ) -> Tuple[Changes, Optional[SimTime]]:
        """Routes inputs, performs a tick and returns output changes and a callback time.

        An asynchronous method which determines which components within the simulation
        require being woken up, sets the input changes for use by the "external" mock
        component, performs a tick, determines the period in which the slave scheduler
        should next be updated, and returns the changes collated by the "expose" mock
        component.

        Args:
            time (SimTime): The current simulation time (in nanoseconds).
            changes (Changes): A mapping of changed component inputs and their new
                values.

        Returns:
            Tuple[Changes, Optional[SimTime]]:
                A tuple of a mapping of the changed exposed outputs and their new
                values and optionally a duration in simulation time after which the
                slave scheduler should be called again.
        """
        # Components whose scheduled wakeup time has arrived.
        wakeup_components = {
            component for component, when in self.wakeups.items() if when <= time
        }
        # Roots of this tick: queued interrupts, due wakeups and the mock
        # "external" component carrying the incoming changes.
        root_components: Set[ComponentID] = {
            *self.interrupts,
            *wakeup_components,
            ComponentID("external"),
        }
        for component in wakeup_components:
            del self.wakeups[component]
        self.interrupts.clear()
        self.input_changes = changes
        self.output_changes = Changes(Map())
        await self.ticker(time, root_components)
        _, call_at = self.get_first_wakeups()
        return self.output_changes, call_at

    async def run_forever(self) -> None:
        """Delegates to setup which instantiates the ticker and state interfaces."""
        await self.setup()

    async def schedule_interrupt(self, source: ComponentID) -> None:
        """Schedules the interrupt of a component immediately.

        An asynchronous method which schedules an interrupt immediately by adding it to
        a set of queued interrupts and raising the interrupt to the master scheduler.

        Args:
            source (ComponentID): The source component of the interrupt.
        """
        LOGGER.debug("Adding {} to interrupts".format(source))
        self.interrupts.add(source)
        await self.raise_interrupt()
|
"""
Topology-Attributes Coupling Simmilarity (TACSim) measure.
"""
#!/usr/bin/env python
# Copyright (C) 2015 by
# Xiaming Chen <chen_xm@sjtu.edu.cn>
# All rights reserved.
# BSD license.
import itertools, copy
import numpy as np
import networkx as nx
from typedecorator import params
__author__ = "Xiaming Chen"
__email__ = "chen_xm@sjtu.edu.cn"
# Public API of this module.
__all__ = [ 'tacsim', 'tacsim_combined', 'normalized', 'node_edge_adjacency' ]
def _strength_nodes(nw1, nw2, ew):
return 1.0 * nw1 * nw2 / np.power(ew, 2)
def _strength_edges(ew1, ew2, nw):
return 1.0 * np.power(nw, 2) / (ew1 * ew2)
def _coherence(s1, s2):
return 2.0 * np.sqrt(s1 * s2) / (s1 + s2)
def _converged(nsim, nsim_prev, esim, esim_prev, eps=1e-4):
if np.allclose(nsim, nsim_prev, atol=eps) and \
np.allclose(esim, esim_prev, atol=eps):
return True
return False
def normalized(a, axis=None, order=None):
    """Normalize ``a`` by its (``order``-) norm along ``axis``.

    Slices with a zero norm are left untouched (divided by one) to avoid
    division-by-zero warnings.
    """
    norms = np.atleast_1d(np.linalg.norm(a, order, axis))
    norms = np.where(norms == 0, 1, norms)
    return a / norms
def _mask_lower_values(m, tol=1e-6):
m[abs(m) < tol] = 0.0
return m
def _graph_elements(G, node_attribute='weight', edge_attribute='weight', dummy_eps=1e-3):
    """ Generate strength matrices and node-edge indexes mapping of nodes and edges.

    NOTE(review): uses the networkx 1.x attribute API (``G.node`` / ``G.edge``),
    removed in networkx 2.0 — this module appears to target networkx 1.x.

    Args:
        G: A directed graph with optional node/edge weight attributes.
        node_attribute: Node attribute holding the node weight (missing -> 1).
        edge_attribute: Edge attribute holding the edge weight (missing -> 1).
        dummy_eps: Small positive weight substituted for non-positive
            ("dummy") weights.

    Returns:
        Tuple ``(node_strength_mat, node_edge_map, edge_strength_mat,
        edge_node_map)``: strength matrices where an entry of -1 means
        "no such adjacency", plus maps from node pairs to the connecting
        edge id and from edge pairs to the shared node id.
    """
    nodes = G.nodes()
    edges = G.edges()
    V = len(nodes)
    E = len(edges)
    node_id_lookup_tbl = {}
    edge_id_lookup_tbl = {}
    node_weight_vec = np.ones(V)
    edge_weight_vec = np.ones(E)
    # -1 marks "no adjacency" so later passes can skip absent pairs.
    node_strength_mat = np.empty((V, V)); node_strength_mat.fill(-1)
    edge_strength_mat = np.empty((E, E)); edge_strength_mat.fill(-1)
    node_edge_map = {}
    edge_node_map = {}
    for i, n in enumerate(nodes):
        node_id_lookup_tbl[n] = i
        try:
            node_weight_vec[i] = G.node[n][node_attribute]
            if node_weight_vec[i] < 0: # to fix dummy nodes
                node_weight_vec[i] = dummy_eps
        except KeyError:
            node_weight_vec[i] = 1
    node_weight_vec = normalized(node_weight_vec)
    for i, e in enumerate(edges):
        edge_id_lookup_tbl[e] = i
        try:
            edge_weight_vec[i] = G.edge[e[0]][e[1]][edge_attribute]
            if edge_weight_vec[i] <= 0: # to fix dummy edges
                edge_weight_vec[i] = dummy_eps
        except KeyError:
            edge_weight_vec[i] = 1
    edge_weight_vec = normalized(edge_weight_vec)
    for e in edges:
        n0, n1 = node_id_lookup_tbl[e[0]], node_id_lookup_tbl[e[1]]
        e01 = edge_id_lookup_tbl[e]
        node_strength_mat[n0][n1] = \
            _strength_nodes(node_weight_vec[n0], node_weight_vec[n1], edge_weight_vec[e01])
        # record node-node intersection
        if n0 not in node_edge_map:
            node_edge_map[n0] = {}
        node_edge_map[n0][n1] = e01
    for n in nodes:
        n01 = node_id_lookup_tbl[n]
        preds = G.predecessors(n)
        succs = G.successors(n)
        for p in preds:
            for s in succs:
                e0, e1 = edge_id_lookup_tbl[(p, n)], edge_id_lookup_tbl[(n, s)]
                edge_strength_mat[e0][e1] = \
                    _strength_edges(edge_weight_vec[e0], edge_weight_vec[e1], node_weight_vec[n01])
                # record edge-edge intersection
                if e0 not in edge_node_map:
                    edge_node_map[e0] = {}
                edge_node_map[e0][e1] = n01
    return node_strength_mat, node_edge_map, edge_strength_mat, edge_node_map
def tacsim(G1, G2=None, node_attribute='weight', edge_attribute='weight', max_iter=100, eps=1e-4, tol=1e-6):
    """ Calculate the TACSim measure of two attributed, directed graphs.

    Iteratively refines coupled node- and edge-similarity matrices until
    the change between iterations drops below ``eps`` or ``max_iter`` is
    reached.

    Args:
        G1: An attributed ``networkx.DiGraph``.
        G2: Optional second graph; when None, G1 is compared with itself.
        node_attribute: Node attribute holding the node weight.
        edge_attribute: Edge attribute holding the edge weight.
        max_iter: Maximum number of fixed-point iterations.
        eps: Absolute tolerance for the convergence test.
        tol: Result entries below this magnitude are zeroed.

    Returns:
        Tuple ``(node_similarity, edge_similarity)`` of numpy matrices.

    Raises:
        TypeError: If ``G1`` is a MultiDiGraph, which is not supported.
    """
    # BUG FIX: the original `assert("MultiDiGraph is not supported ...")`
    # asserted a non-empty string literal, which is always truthy, so the
    # guard could never fire.  Raise explicitly instead.
    if isinstance(G1, nx.MultiDiGraph):
        raise TypeError("MultiDiGraph is not supported by TACSim.")
    nsm1, nem1, esm1, enm1 = _graph_elements(G1, node_attribute, edge_attribute)
    if G2 is None:
        # Self-similarity: reuse G1's strength matrices and index maps.
        nsm2, nem2, esm2, enm2 = nsm1, nem1, esm1, enm1
        G2 = G1
    else:
        nsm2, nem2, esm2, enm2 = _graph_elements(G2, node_attribute, edge_attribute)
    N = len(G1.nodes())
    M = len(G2.nodes())
    nsim_prev = np.zeros((N, M))
    nsim = np.ones((N, M))
    P = len(G1.edges())
    Q = len(G2.edges())
    esim_prev = np.zeros((P, Q))
    esim = np.ones((P, Q))
    for itrc in range(max_iter):
        if _converged(nsim, nsim_prev, esim, esim_prev):
            break
        nsim_prev = copy.deepcopy(nsim)
        esim_prev = copy.deepcopy(esim)
        # Update node similarity, in and out node neighbors.
        # A strength of -1 marks "no adjacency" in the strength matrices.
        for i, j in itertools.product(range(N), range(M)):
            u_in = [u for u in range(N) if nsm1[u,i] >= 0]
            v_in = [v for v in range(M) if nsm2[v,j] >= 0]
            for u, v in itertools.product(u_in, v_in):
                u_edge = nem1[u][i]
                v_edge = nem2[v][j]
                nsim[i][j] += 0.5 * _coherence(nsm1[u,i], nsm2[v,j]) * (nsim_prev[u,v] + esim_prev[u_edge][v_edge])
            u_out = [u for u in range(N) if nsm1[i,u] >= 0]
            v_out = [v for v in range(M) if nsm2[j,v] >= 0]
            for u, v in itertools.product(u_out, v_out):
                u_edge = nem1[i][u]
                v_edge = nem2[j][v]
                nsim[i][j] += 0.5 * _coherence(nsm1[i,u], nsm2[j,v]) * (nsim_prev[u,v] + esim_prev[u_edge][v_edge])
        # Update edge similarity, in and out edge neighbors
        for i, j in itertools.product(range(P), range(Q)):
            u_in = [u for u in range(P) if esm1[u,i] >= 0]
            v_in = [v for v in range(Q) if esm2[v,j] >= 0]
            for u, v in itertools.product(u_in, v_in):
                u_node = enm1[u][i]
                v_node = enm2[v][j]
                esim[i][j] += 0.5 * _coherence(esm1[u,i], esm2[v,j]) * (esim_prev[u,v] + nsim_prev[u_node][v_node])
            u_out = [u for u in range(P) if esm1[i,u] >= 0]
            v_out = [v for v in range(Q) if esm2[j,v] >= 0]
            for u, v in itertools.product(u_out, v_out):
                u_node = enm1[i][u]
                v_node = enm2[j][v]
                esim[i][j] += 0.5 * _coherence(esm1[i,u], esm2[j,v]) * (esim_prev[u,v] + nsim_prev[u_node][v_node])
        nsim = normalized(nsim)
        esim = normalized(esim)
    # NOTE: printed even when the loop exhausted max_iter without converging.
    print("Converge after %d iterations (eps=%f)." % (itrc, eps))
    return _mask_lower_values(nsim, tol), _mask_lower_values(esim, tol)
@params(G=nx.DiGraph)
def node_edge_adjacency(G):
    """ Node-edge adjacency matrices: one for source nodes, one for targets.
    """
    nodes = G.nodes()
    edges = G.edges()
    # Map each node to its row index.
    node_index = {node: row for row, node in enumerate(nodes)}
    ne_src_mat = np.zeros([len(nodes), len(edges)])
    ne_dst_mat = np.zeros([len(nodes), len(edges)])
    for col in range(0, len(edges)):
        src, dst = edges[col]
        ne_src_mat[node_index[src]][col] = 1
        ne_dst_mat[node_index[dst]][col] = 1
    return ne_src_mat, ne_dst_mat
def tacsim_combined(G1, G2=None, node_attribute='weight', edge_attribute='weight', lamb = 0.5, norm=True):
    """ Combined similarity based on original tacsim scores. Refer to paper Mesos.
    """
    # X: node similarity; Y: edge similarity
    X, Y = tacsim(G1, G2, node_attribute, edge_attribute)
    src1, dst1 = node_edge_adjacency(G1)
    # Self-comparison reuses G1's adjacency matrices.
    src2, dst2 = (src1, dst1) if G2 is None else node_edge_adjacency(G2)
    # Blend edge similarity with node similarity projected through the
    # source- and target-adjacency matrices, weighted by lamb.
    combined = (Y
                + lamb * src1.T.dot(X).dot(src2)
                + (1 - lamb) * dst1.T.dot(X).dot(dst2))
    return normalized(combined) if norm else combined
if __name__ == '__main__':
    # Smoke test on two small hand-built weighted digraphs.
    # NOTE(review): `G.node[...]` is the networkx 1.x attribute API
    # (removed in networkx 2.0 in favour of `G.nodes[...]`) — this module
    # appears to target networkx 1.x throughout.
    G1 = nx.DiGraph()
    G1.add_weighted_edges_from([(1,0,8), (0,2,12), (1,2,10), (2,3,15)])
    G1.node[0]['weight'] = 1
    G1.node[1]['weight'] = 1
    G1.node[2]['weight'] = 5
    G1.node[3]['weight'] = 1
    G2 = nx.DiGraph()
    G2.add_weighted_edges_from([(0,1,15), (1,2,10)])
    G2.node[0]['weight'] = 1
    G2.node[1]['weight'] = 3
    G2.node[2]['weight'] = 1
    print(tacsim(G1, G2))
    print(tacsim(G1))
    print(tacsim_combined(G1, G2))
|
from __future__ import print_function
import sys
from msp import MSP, MSP_SET_WP, MSP_GET_WP
from serial import Serial
# Lines starting with this character in the waypoint file are skipped.
COMMENT_START_CHAR = '#'

# MSP waypoint action codes (MISSION_* values understood by the FC).
MSP_WAYPOINT_ACTIONS = {
    "MISSION_WAYPOINT" : 1, # Set waypoint
    "MISSION_HOLD_UNLIM" : 2, # Poshold unlimited
    "MISSION_HOLD_TIME" : 3, # Hold for a predetermined time
    "MISSION_RTH" : 4, # Return to HOME
    "MISSION_SET_POI" : 5, # Set POINT of interest
    "MISSION_JUMP" : 6, # Jump to the given step (#times)
    "MISSION_SET_HEADING" : 7, # Set heading to a given orientation (parameter 1 is presumably the heading, 0-359 degrees — TODO confirm)
    "MISSION_LAND" : 8, # Land at the given position
}
# Commented out flags seem to be internal to the flight controller
MSP_WAYPOINT_FLAGS = {
    "MISSION_FLAG_NONE" : 0x00,
    "MISSION_FLAG_END" : 0xA5, # Flags that this is the last step
    # "MISSION_FLAG_CRC_ERROR" : 0xFE, # Returned WP had an EEPROM CRC error
    "MISSION_FLAG_HOME" : 0x01, # Returned WP is the home position
    "MISSION_FLAG_HOLD" : 0x02, # Returned WP is the hold position
    "MISSION_FLAG_DO_LAND" : 0x20, # Land when reached desired point (used in RTH)
    # "MISSION_FLAG_NAV_IN_PROG" : 0xff, # Navigation is in progress, returned wp is home
}

# Indices of fields in a whitespace-separated waypoint line.
IDX_LAT = 0
IDX_LON = 1
IDX_ACTION = 2
IDX_ALTITUDE = 3
IDX_PARAM1 = 4
IDX_PARAM2 = 5
IDX_PARAM3 = 6
IDX_FLAG = 7

# Upper bound on waypoints read from the file.
MAX_WAYPOINTS = 255
def send_waypoint(protocol, params):
    """Send one MSP_SET_WP frame and wait for the flight controller's ack.

    Args:
        protocol: MSP protocol wrapper used to talk to the flight controller.
        params: Dict of waypoint fields (wp_no, action, lat, lon, altitude,
            param1..param3, flag).
    """
    protocol.provide(MSP_SET_WP, params)
    protocol.read_ack(MSP_SET_WP)
if __name__ == '__main__':
    # Waypoint file: one whitespace-separated waypoint per line
    # (lat lon action altitude param1 param2 param3 flag).
    filename = 'waypoints.txt'
    if len(sys.argv) > 1:
        filename = sys.argv[1]
    print('Using {0}'.format(filename))
    with open(filename) as f:
        transport = Serial(port='/dev/ttyACM0',
                           baudrate=115200,
                           timeout=5)
        print(transport)
        protocol = MSP(transport, initialization_delay=15)
        for i in range(0,MAX_WAYPOINTS):
            # NOTE(review): wp_no also advances on comment lines, so waypoint
            # numbers can have gaps after a comment — confirm this is intended.
            wp_no = i+1
            line = f.readline().strip()
            if not line:
                break  # stop at the first empty line / EOF
            waypoint = line.split(' ')
            if waypoint[0].startswith(COMMENT_START_CHAR):
                continue  # skip comment lines
            print(waypoint)
            send_waypoint(protocol,
                          {
                              'wp_no' : wp_no,
                              'action' : MSP_WAYPOINT_ACTIONS[waypoint[IDX_ACTION]],
                              'lat' : int(waypoint[IDX_LAT]),
                              'lon' : int(waypoint[IDX_LON]),
                              'altitude' : int(waypoint[IDX_ALTITUDE]),
                              'param1' : int(waypoint[IDX_PARAM1]),
                              'param2' : int(waypoint[IDX_PARAM2]),
                              'param3' : int(waypoint[IDX_PARAM3]),
                              'flag' : MSP_WAYPOINT_FLAGS[waypoint[IDX_FLAG]],
                          })
            # Read the waypoint back for visual verification.
            print(protocol.request(MSP_GET_WP, {'wp_no': wp_no}))
|
from django.shortcuts import resolve_url as r
from django.test import TestCase
from orcamentos.proposal.models import Work
from .test_base import BaseWorkTest
class WorkTest(BaseWorkTest, TestCase):
    """Unit tests for the Work model."""

    def test_create(self):
        """At least one Work exists after the base fixture setup."""
        self.assertTrue(Work.objects.exists())

    def test_str(self):
        """str() of the fixture object is its display name."""
        self.assertEqual('Ed. Atlanta', str(self.obj))

    def test_ordering(self):
        """Default model ordering is by name_work."""
        self.assertListEqual(['name_work'], Work._meta.ordering)

    def test_get_absolute_url(self):
        """get_absolute_url resolves to the work detail view."""
        expected = r('proposal:work_detail', slug=self.obj.slug)
        self.assertEqual(expected, self.obj.get_absolute_url())
|
class Solution:
    def minWindow(self, S: str, T: str) -> str:
        """Return the smallest window of S containing T as a subsequence.

        Two-pointer sweep (LeetCode 727 "Minimum Window Subsequence"):
        scan forward until T is matched as a subsequence, then walk
        backwards from the match end to shrink the window to its minimal
        start.  O(len(S) * len(T)) time, O(1) extra space.

        Returns "" when no such window exists or T is empty.
        """
        if not T:
            return ""
        n, m = len(S), len(T)
        best_start, best_len = -1, n + 1
        i = 0
        while i < n:
            # Forward pass: match T[0..m) as a subsequence of S.
            j = 0
            while i < n:
                if S[i] == T[j]:
                    j += 1
                    if j == m:
                        break
                i += 1
            if i == n:
                break  # ran out of S before matching all of T
            end = i
            # Backward pass: shrink the window from the left.
            j = m - 1
            while j >= 0:
                if S[i] == T[j]:
                    j -= 1
                i -= 1
            i += 1  # i is now the minimal start for this window
            if end - i + 1 < best_len:
                best_len = end - i + 1
                best_start = i
            i += 1  # resume just after the current window start
        return "" if best_start == -1 else S[best_start:best_start + best_len]
|
# This code is sourced (inspired) from and found at Github under the MIT License.
# All credits to the original authors. If you are the author, Thank You!
import math
import struct
import pyaudio as pa
def play_frequency(frequency, fs, stream, amplitude=5, duration=0.31102):
    """Synthesize one cycle of a sine tone and write it to the stream repeatedly.

    Args:
        frequency: Tone frequency in Hz.
        fs: Sample rate in Hz.
        stream: Audio output with a ``write(bytes)`` method.
        amplitude: Peak amplitude of the sine wave.
        duration: Approximate playing time in seconds (rounded down to
            a whole number of cycles).
    """
    samples_per_cycle = int(fs / frequency)
    cycles = int(frequency * duration)  # repeat the cycle this many times
    step = 1.0 / fs
    # Synthesize a single cycle of the tone.
    one_cycle = [amplitude * math.sin(2 * math.pi * frequency * k * step)
                 for k in range(samples_per_cycle)]
    # todo: get the format from the stream; this assumes Float32
    payload = b''.join(struct.pack('f', sample) for sample in one_cycle)
    for _ in range(cycles):
        stream.write(payload)
import wave
from sys import byteorder
from array import array
from scipy.fftpack import fft
import numpy as np
# Audio capture parameters.
CHUNK = 1024  # frames per buffer read
FORMAT = pa.paInt32  # NOTE(review): record() parses the buffer as 16-bit ('h') — confirm format
CHANNELS = 1  # mono
RATE = 48000  # sample rate in Hz
def record(audio, time_to_listen):
    """Capture ``time_to_listen`` seconds of mono audio and return the samples.

    Args:
        audio: A pyaudio.PyAudio instance; it is terminated before returning.
        time_to_listen: Recording duration in seconds.

    Returns:
        array('h') of recorded samples in host byte order.
    """
    stream = audio.open(format=FORMAT,
                        channels=CHANNELS,
                        rate=RATE,
                        input=True,
                        frames_per_buffer=CHUNK)
    # print("###RECORDING###")
    RECORD_SECONDS = time_to_listen
    s = array('h')
    frames = []
    for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):
        data = stream.read(CHUNK)
        frames.append(data)
        # NOTE(review): FORMAT is paInt32 but the buffer is parsed as 16-bit
        # ('h') samples, so every 32-bit frame becomes two int16 values —
        # confirm whether FORMAT should be paInt16 instead.
        snd_data = array('h', data)
        if byteorder == 'big':
            # Samples arrive little-endian; swap on big-endian hosts.
            snd_data.byteswap()
        s.extend(snd_data)
    # print("###RECORDING DONE###")
    stream.stop_stream()
    stream.close()
    audio.terminate()
    return s #, frames
def record_frequency(time_to_listen):
    """Record for ``time_to_listen`` seconds and return the dominant frequency (Hz)."""
    spectrum = fft(record(pa.PyAudio(), time_to_listen))
    # The strongest FFT bin divided by the capture time gives the frequency;
    # +1 keeps the original off-by-one adjustment.
    peak_bin = np.abs(spectrum).argmax()
    return peak_bin / time_to_listen + 1
|
"""
The draw_bus() function
"""
from easygraphics import *
def draw_bus():
    """
    Draw a simple bus.
    """
    # Outer frame.
    set_color("lightgray")
    rect(0, 0, 210, 130)
    set_color("red")
    set_fill_color("blue")
    # Tyres.
    draw_circle(60, 100, 10)
    draw_circle(140, 100, 10)
    # Chassis.
    rect(20, 20, 190, 100)
    # Windows: evenly spaced small rectangles along the top.
    for left in range(30, 115, 15):
        rect(left, 40, left + 10, 50)
    # Door with a split line and a round handle.
    rect(160, 40, 180, 100)
    line(170, 40, 170, 100)
    circle(170, 70, 5)
    # Caption.
    draw_text(0, 130, "A good old bus.")
|
"""
Test suite for the entire program. Run this to execute all test cases.
Do not import this from other modules.
"""
import unittest
from util import time_function, time_class
import tanks
import tank_components
import shells
class TestStringMethods(unittest.TestCase):
    """Sanity checks for builtin str behaviour."""

    def test_upper(self):
        """upper() maps every character to upper case."""
        self.assertEqual('foo'.upper(), 'FOO')

    def test_isupper(self):
        """isupper() is True only when all cased characters are upper case."""
        self.assertTrue('FOO'.isupper())
        self.assertFalse('Foo'.isupper())

    def test_split(self):
        """split() without arguments splits on whitespace."""
        phrase = 'hello world'
        self.assertEqual(phrase.split(), ['hello', 'world'])
        # check that split fails when the separator is not a string
        with self.assertRaises(TypeError):
            phrase.split(2)
class TestAmmoRack(unittest.TestCase):
    """Behavioural tests for tank_components.AmmoRack."""

    def setUp(self):
        # Capacity of 2 with a single "a" shell pre-loaded.
        self.rack = tank_components.AmmoRack(2, {"a": 1})

    def test_remove_1_key_True(self):
        """Removing a present shell succeeds."""
        self.assertTrue(self.rack.remove("a"))

    def test_remove_2_key_False(self):
        """Removing the same shell twice fails the second time."""
        self.assertTrue(self.rack.remove("a"))
        self.assertFalse(self.rack.remove("a"))

    def test_remove_nokey_False(self):
        """Removing an unknown shell type fails."""
        self.assertFalse(self.rack.remove("b"))

    def test_add_1_notFull_True(self):
        """Adding to a rack with free space succeeds."""
        self.assertTrue(self.rack.add("a"))

    def test_add_2_full_False(self):
        """Adding beyond capacity fails."""
        self.assertFalse(self.rack.add("a", 2))

    def test_get_shell_count_1(self):
        """The freshly set-up rack holds exactly one shell."""
        self.assertEqual(1, self.rack.get_shell_count())

    def test_get_shell_count_2(self):
        """The count reflects a newly added shell type."""
        self.rack.add("b")
        self.assertEqual(2, self.rack.get_shell_count())
class TestTankFactory(unittest.TestCase):
    """Tests for the TankFactory construction helper."""

    def test_get_tank_valid(self):
        """A known TankName yields a tank whose name matches the enum."""
        sherman = tanks.TankFactory.get_tank(tanks.TankName.SHERMAN)
        self.assertEqual(str(tanks.TankName.SHERMAN), sherman.name)

    def test_get_tank_ValueError(self):
        """An unknown tank identifier raises ValueError."""
        with self.assertRaises(ValueError):
            tanks.TankFactory.get_tank("")
if __name__ == '__main__':
    # Run the full suite when executed directly.
    unittest.main()
else:
    # Guard against accidental imports; see the module docstring.
    print("Do not import _test.py from other modules.")
|
"""Tests to check the full era5cli workflow."""
import logging
import pytest
from textwrap import dedent
from era5cli.cli import main
# combine calls with result and possible warning message
call_result = [
{
# orography is translated to geopotential in the query
"call": dedent("""\
era5cli hourly --variables orography --startyear 2008 --dryrun
"""),
"result": dedent("""\
reanalysis-era5-single-levels {'variable': 'geopotential', 'year':
2008, 'month': ['01', '02', '03', '04', '05', '06', '07', '08',
'09', '10', '11', '12'], 'time': ['00:00', '01:00', '02:00',
'03:00', '04:00', '05:00', '06:00', '07:00', '08:00', '09:00',
'10:00', '11:00', '12:00', '13:00', '14:00', '15:00', '16:00',
'17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],
'format': 'netcdf', 'product_type': 'reanalysis', 'day': ['01',
'02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
'13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23',
'24', '25', '26', '27', '28', '29', '30', '31']}
era5_orography_2008_hourly.nc"""),
"warn": "The variable 'orography' has been deprecated by CDS."
},
{
# geopotential needs '--levels surface' to be correctly interpreted
"call": dedent("""\
era5cli hourly --variables geopotential --startyear 2008 --dryrun
--levels surface"""),
"result": dedent("""\
reanalysis-era5-single-levels {'variable': 'geopotential', 'year':
2008, 'month': ['01', '02', '03', '04', '05', '06', '07', '08',
'09', '10', '11', '12'], 'time': ['00:00', '01:00', '02:00',
'03:00', '04:00', '05:00', '06:00', '07:00', '08:00', '09:00',
'10:00', '11:00', '12:00', '13:00', '14:00', '15:00', '16:00',
'17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],
'format': 'netcdf', 'product_type': 'reanalysis', 'day': ['01',
'02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
'13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23',
'24', '25', '26', '27', '28', '29', '30', '31']}
era5_geopotential_2008_hourly.nc"""),
"warn": "Getting variable from surface level data."
},
{
# without --levels surface, geopotential calls pressure level data
"call": dedent("""\
era5cli hourly --variables geopotential --startyear 2008
--dryrun"""),
"result": dedent("""\
reanalysis-era5-pressure-levels {'variable': 'geopotential',
'year': 2008, 'month': ['01', '02', '03', '04', '05', '06', '07',
'08', '09', '10', '11', '12'], 'time': ['00:00', '01:00', '02:00',
'03:00', '04:00', '05:00', '06:00', '07:00', '08:00', '09:00',
'10:00', '11:00', '12:00', '13:00', '14:00', '15:00', '16:00',
'17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],
'format': 'netcdf', 'pressure_level': [1, 2, 3, 5, 7, 10, 20, 30,
50, 70, 100, 125, 150, 175, 200, 225, 250, 300, 350, 400, 450, 500,
550, 600, 650, 700, 750, 775, 800, 825, 850, 875, 900, 925, 950,
975, 1000], 'product_type': 'reanalysis', 'day': ['01', '02', '03',
'04', '05', '06', '07', '08', '09', '10', '11', '12', '13', '14',
'15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25',
'26', '27', '28', '29', '30', '31']}
era5_geopotential_2008_hourly.nc"""),
"warn": "Getting variable from pressure level data."
},
{
# preliminary-back-extension is combined with monthly-means
"call": dedent("""\
era5cli monthly --variables temperature --startyear 1960 --prelimbe
--dryrun"""),
"result": dedent("""\
reanalysis-era5-pressure-levels-monthly-means-preliminary-back-extension
{'variable': 'temperature', 'year': 1960, 'month': ['01', '02',
'03', '04', '05', '06', '07', '08', '09', '10', '11', '12'],
'time': ['00:00'], 'format': 'netcdf', 'pressure_level': [1, 2, 3,
5, 7, 10, 20, 30, 50, 70, 100, 125, 150, 175, 200, 225, 250, 300,
350, 400, 450, 500, 550, 600, 650, 700, 750, 775, 800, 825, 850,
875, 900, 925, 950, 975, 1000], 'product_type':
'reanalysis-monthly-means-of-daily-means'}
era5_temperature_1960_monthly.nc""")
},
{
# era5-Land is combined with monthly means
"call": dedent("""\
era5cli monthly --variables snow_cover --startyear 2008 --land
--dryrun"""),
"result": dedent("""\
reanalysis-era5-land-monthly-means {'variable': 'snow_cover',
'year': 2008, 'month': ['01', '02', '03', '04', '05', '06', '07',
'08', '09', '10', '11', '12'], 'time': ['00:00'], 'format':
'netcdf', 'product_type': 'monthly_averaged_reanalysis'}
era5-land_snow_cover_2008_monthly.nc""")
}
]
def clean_ids(call):
    """Build a readable single-line pytest id from a CLI call string."""
    # Ids are one line each; the --dryrun flag is noise in test names.
    return call.replace('\n', ' ').replace('--dryrun', '')
# Human-readable pytest ids derived from each call string.
ids = [clean_ids(item["call"]) for item in call_result]


@pytest.mark.parametrize("call_result", call_result, ids=ids)
def test_main(call_result, capsys, caplog):
    """Run one era5cli invocation end-to-end and check its output and warnings."""
    call = call_result["call"].split()
    # The dedent()-ed multiline expectation is printed on a single line.
    result = call_result["result"].replace('\n', ' ') + '\n'
    # until the actual fetch is monkeypatched, make sure the tests are dryruns
    if '--dryrun' not in call:
        pytest.fail('call must be a dryrun')
    with caplog.at_level(logging.INFO):
        main(call)
    captured = capsys.readouterr().out
    assert result == captured
    try:
        warn = call_result["warn"]
        assert warn in caplog.text
    except KeyError:
        # Cases without a "warn" entry must not log anything.
        assert caplog.text == ''
|
import pytest
from numpy import expand_dims, ndarray
from nujo import Tensor
from nujo.autodiff._functions._elementary import _Addition
# ====================================================================================================
# Test Tensor value and creator properties
def test_tensor_value(tensors):
    """Every tensor in the fixture wraps a numpy array in .value."""
    for tensor in tensors:
        assert isinstance(tensor.value, ndarray)
def test_tensor_creator(tensors):
    """Leaf tensors have no creator; a sum records its _Addition creator."""
    A, B, C = tensors
    for leaf in (A, B):
        assert leaf.creator is None
    assert isinstance(C.creator, _Addition)
# ====================================================================================================
# Test Tensor backward method
def test_tensor_backward(tensors):
    """backward() seeds the root gradient and propagates ones to the leaves."""
    A, B, C = tensors
    C.backward()
    # The root has no parents; its gradient is seeded with ones.
    assert len(C.parents_outputs) == 0
    assert (C.grad == 1).all()
    # Each addend records C as its single parent output and receives
    # a gradient of ones (d(A+B)/dA = d(A+B)/dB = 1).
    for leaf in (A, B):
        assert len(leaf.parents_outputs) == 1
        assert (leaf.parents_outputs[0] == C).all()
        assert (leaf.grad == 1).all()
# ====================================================================================================
# Test Tensor transpose and shape manipulation
# methods: reshape, repeat, squeeze, unsqueeze
def test_tensor_transpose(tensors):
    """Tensor .T matches numpy transposition of the raw value."""
    first = tensors[0]
    assert (first.T.value == first.value.T).all()
def test_tensor_shape_manipulation(tensors):
    """reshape/squeeze/unsqueeze mirror their numpy counterparts."""
    A, _, _ = tensors
    assert A.shape == A.value.shape
    reshaped, reference = A.reshape(-1, 1), A.value.reshape(-1, 1)
    assert (reshaped == reference).all()
    assert (reshaped.squeeze(1) == reference.squeeze(1)).all()
    assert (reshaped.unsqueeze(1) == expand_dims(reference, 1)).all()
# ====================================================================================================
# Test gradient cleaning method
def test_tensor_zero_grad(tensors):
    """zero_grad resets the gradient to all zeros."""
    first = tensors[0]
    first.zero_grad()
    assert (first.grad == 0).all()
# ====================================================================================================
# Test inplace assignment operator
def test_tensor_inplace_assignment(tensors):
    """`<<=` copies value/grad/graph links but keeps a distinct identity."""
    A, _, C = tensors
    A <<= C
    assert A.id != C.id
    assert A.children == C.children or A.children is None
    assert A.creator == C.creator or A.creator is None
    for attr in ('value', 'grad'):
        assert (getattr(A, attr) == getattr(C, attr)).all()
# ====================================================================================================
# Unit Test fixtures
@pytest.fixture
def tensors():
    """Two differentiable 2x2 tensors and their sum."""
    lhs = Tensor([[1, 2], [3, 4]], diff=True)
    rhs = Tensor([[5, 6], [7, 8]], diff=True)
    return lhs, rhs, lhs + rhs
# ====================================================================================================
|
import pytest
from plenum.common.messages.fields import HexField
# A 22-character hex string used as the canonical valid input.
valid_hex_hash = "0123456789abcdefABCDEF"
validator = HexField(length=len(valid_hex_hash))


def test_valid_hex():
    # NOTE(review): validate() appears to return a falsy value on success
    # and a truthy error otherwise — confirm against HexField.
    assert not validator.validate(valid_hex_hash)


def test_empty_string():
    # Empty input must be rejected.
    assert validator.validate('')


def test_invalid_length():
    # One character short or long must be rejected.
    assert validator.validate(valid_hex_hash[:-1])
    assert validator.validate(valid_hex_hash + "0")


def test_invalid_symbol():
    # 'X' is not a hexadecimal digit.
    assert validator.validate(valid_hex_hash[:-1] + 'X')
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# CDS-ILS is free software; you can redistribute it and/or modify it under
# the terms of the MIT License; see LICENSE file for more details.
"""Admin panel for ldap synchronizations."""
import flask
from flask import Blueprint, current_app, flash, redirect, request
from flask_admin.base import expose
from flask_admin.contrib.sqla import ModelView
from flask_admin.helpers import get_redirect_target
from flask_babelex import gettext as _
from cds_ils.ldap.models import LdapSynchronizationLog
from cds_ils.ldap.tasks import synchronize_users_task
class LdapSynchronizationLogModelView(ModelView):
    """Invenio admin view for LDAP users synchronization logs."""

    # Log entries themselves are read-only; can_create stays True only so
    # that the overridden create view can expose a "start task" button.
    can_create = True
    can_edit = False
    can_delete = False
    can_view_details = True
    form_excluded_columns = LdapSynchronizationLog.__table__.columns
    # Most recent runs first.
    column_default_sort = (LdapSynchronizationLog.start_time, True)

    @expose("/new/", methods=("GET", "POST"))
    def create_view(self):
        """Override the creation form and replace it by an action button."""
        # CONSISTENCY FIX: use the imported `request` proxy throughout
        # (the original mixed `flask.request` and the imported `request`).
        if request.method == "POST":
            try:
                self.start_task()
                flash("The task was successfully started.", "success")
            except Exception as e:
                current_app.logger.exception(e)
                flash("An error occurred while starting the task.", "error")
            return redirect(request.path)  # Redirect after POST
        return_url = get_redirect_target() or self.get_url(".index_view")
        return self.render(
            "cds_ils_admin/create_task.html", return_url=return_url
        )

    @staticmethod
    def start_task():
        """Start the LDAP users synchronization task asynchronously."""
        synchronize_users_task.apply_async()
# Blueprint exposing this admin module's templates and static files.
blueprint = Blueprint(
    "cds_ils_admin",
    __name__,
    template_folder="templates",
    static_folder="static",
)
# Registration entry consumed by invenio-admin to mount the view.
ldap_sync = {
    "model": LdapSynchronizationLog,
    "modelview": LdapSynchronizationLogModelView,
    "name": "Ldap Synchronization",
    "category": _("User Management"),
}
__all__ = ("ldap_sync",)
|
from keras.models import Model
from keras.layers.convolutional import ZeroPadding2D
from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, Conv2DTranspose
from keras.utils.vis_utils import plot_model
def keras_model(x_train, save_model=True):
    """Build a U-Net-style fully convolutional model sized from x_train.

    Args:
        x_train: Training array; only its per-sample shape
            (x_train.shape[1:]) is used to size the input layer.
        save_model: When True, also write a model_architecture.png diagram.

    Returns:
        An uncompiled keras Model mapping the input image to a
        single-channel output (velocity magnitude).
    """
    inputs = Input(x_train.shape[1:]) # Input(shape), return a tensor
    # Initial shape: (None, 50, 150, 1). Here, the channel number / depth might get changed, but don't worry!
    # 2 3x3 convolutions followed by a max pooling
    conv1 = Conv2D(32, (3, 3), activation='relu', padding='same')(inputs) # (None, 50, 150, 32)
    conv1 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv1) # (None, 50, 150, 32)
    conv1 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv1) # (None, 50, 150, 32)
    conv2 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv1) # (None, 50, 150, 32)
    pool1 = MaxPooling2D(pool_size=(2, 2))(conv2) # (None, 25, 75, 32)
    # 2 3x3 convolutions followed by a max pooling
    conv3 = Conv2D(64, (3, 3), activation='relu', padding='same')(pool1) # (None, 25, 75, 64)
    conv3 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv3) # (None, 25, 75, 64)
    conv3 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv3) # (None, 25, 75, 64)
    conv4 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv3) # (None, 25, 75, 64)
    pool2 = MaxPooling2D(pool_size=(2, 2))(conv4) # (None, 12, 37, 64)
    # 2 3x3 convolutions followed by a max pooling
    conv5 = Conv2D(128, (3, 3), activation='relu', padding='same')(pool2) # (None, 12, 37, 128)
    conv5 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv5) # (None, 12, 37, 128)
    conv5 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv5) # (None, 12, 37, 128)
    conv6 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv5) # (None, 12, 37, 128)
    pool3 = MaxPooling2D(pool_size=(2, 2))(conv6) # (None, 6, 18, 128)
    # 2 3x3 convolutions followed by a max pooling
    conv7 = Conv2D(256, (3, 3), activation='relu', padding='same')(pool3) # (None, 6, 18, 256)
    conv7 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv7) # (None, 6, 18, 256)
    conv7 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv7) # (None, 6, 18, 256)
    conv8 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv7) # (None, 6, 18, 256)
    pool4 = MaxPooling2D(pool_size=(2, 2))(conv8) # (None, 3, 9, 256)
    # 2 3x3 convolutions
    conv9 = Conv2D(512, (3, 3), activation='relu', padding='same')(pool4) # (None, 3, 9, 512)
    conv9 = Conv2D(512, (3, 3), activation='relu', padding='same')(conv9) # (None, 3, 9, 512)
    conv9 = Conv2D(512, (3, 3), activation='relu', padding='same')(conv9) # (None, 3, 9, 512)
    conv10 = Conv2D(512, (3, 3), activation='relu', padding='same')(conv9) # (None, 3, 9, 512)
    # 1 3x3 transpose convolution and concat conv8 on the depth dim
    # TODO Learn more about Conv2DTranspose output shape, usually here it double the input shape
    concat1 = concatenate(
        [Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(conv10), conv8],
        axis=3) # (None, 3, 9, 512) -> (None, 6, 18, 256) -> (None, 6, 18, 512)
    # 2 3x3 convolutions
    conv11 = Conv2D(256, (3, 3), activation='relu', padding='same')(concat1) # (None, 6, 18, 256)
    conv11 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv11) # (None, 6, 18, 256)
    conv11 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv11) # (None, 6, 18, 256)
    conv12 = Conv2D(256, (3, 3), activation='relu', padding='same')(conv11) # (None, 6, 18, 256)
    # 1 3x3 transpose convolution and concat conv6 on the depth dim
    # (ZeroPadding2D restores the odd width lost by the stride-2 upsample)
    concat2 = concatenate(
        [ZeroPadding2D(((0, 0), (1, 0)))(Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv12)), conv6],
        axis=3) # (None, 6, 18, 256) -> (None, 12, 36, 128) -> (None, 12, 37, 128) -> (None, 12, 37, 256)
    # 2 3x3 convolutions
    conv13 = Conv2D(128, (3, 3), activation='relu', padding='same')(concat2) # (None, 12, 37, 128)
    conv13 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv13) # (None, 12, 37, 128)
    conv13 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv13) # (None, 12, 37, 128)
    conv14 = Conv2D(128, (3, 3), activation='relu', padding='same')(conv13) # (None, 12, 37, 128)
    # 1 3x3 transpose convolution and concat conv4 on the depth dim
    concat3 = concatenate(
        [ZeroPadding2D(((1, 0), (1, 0)))(Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv14)), conv4],
        axis=3) # (None, 12, 37, 128) -> (None, 24, 74, 64) -> (None, 25, 75, 64) -> (None, 25, 75, 128)
    # 2 3x3 convolutions
    conv15 = Conv2D(64, (3, 3), activation='relu', padding='same')(concat3) # (None, 25, 75, 64)
    conv15 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv15) # (None, 25, 75, 64)
    conv15 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv15) # (None, 25, 75, 64)
    conv16 = Conv2D(64, (3, 3), activation='relu', padding='same')(conv15) # (None, 25, 75, 64)
    # 1 3x3 transpose convolution and concat conv2 on the depth dim
    concat4 = concatenate(
        [Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv16), conv2],
        axis=3) # (None, 25, 75, 64) -> (None, 50, 150, 32) -> (None, 50, 150, 64)
    # 2 3x3 convolutions
    conv17 = Conv2D(32, (3, 3), activation='relu', padding='same')(concat4) # (None, 50, 150, 32)
    conv17 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv17) # (None, 50, 150, 32)
    conv17 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv17) # (None, 50, 150, 32)
    conv18 = Conv2D(32, (3, 3), activation='relu', padding='same')(conv17) # (None, 50, 150, 32)
    # Final 1x1 convolutions to get to the correct depth dim 1, for now we only take magnitude of velocity
    # TODO Will check the output with (x, y) velocity, pressure and other available results from simulation
    conv19 = Conv2D(1, (1, 1), activation='linear')(conv18)
    # TODO Check the activation for out model
    model = Model(inputs=[inputs], outputs=[conv19])
    if save_model:
        plot_model(model, to_file='model_architecture.png', show_shapes=True, show_layer_names=True)
    return model
# The deeper the model, the higher the accuracy.
# TODO Now try the Inception model.
# TODO In the official Keras documentation, residual networks are shown by adding two layers
# instead of concatenating. Find out why.
def keras_model(inputs, save_model=True):
    """Build a U-Net-style Keras model whose encoder/decoder stages are Inception modules.

    NOTE(review): the ``inputs`` parameter is immediately shadowed by a fresh
    Input tensor built from the global ``x_train`` — the argument is effectively
    unused; confirm whether callers rely on passing it.

    :param inputs: ignored (see note above)
    :param save_model: when True, write an architecture diagram to
        'model_architecture_with_inception.png'
    :return: an uncompiled Keras ``Model`` producing a single-channel output map
    """
    inputs = Input(x_train.shape[1:])  # Input(shape), return a tensor
    # Initial shape: (None, 50, 150, 1). Here, the channel number / depth might get changed, but don't worry!
    # 2 Conv. layer
    conv1 = Conv2D(16, (3, 3), padding='same', activation='relu')(inputs)  # (None, 50, 150, 16)
    conv2 = Conv2D(16, (5, 5), padding='same', activation='relu')(conv1)  # (None, 50, 150, 16)
    # 4 Inception module followed by a max pooling
    inception1 = Inception(filters=32, inputs=conv2)  # (None, 50, 150, 32)
    inception1 = Inception(filters=32, inputs=inception1)
    inception1 = Inception(filters=32, inputs=inception1)
    pool1 = MaxPooling2D(pool_size=(2, 2))(inception1)  # (None, 25, 75, 32)
    inception2 = Inception(filters=64, inputs=pool1)  # (None, 25, 75, 64)
    inception2 = Inception(filters=64, inputs=inception2)
    inception2 = Inception(filters=64, inputs=inception2)
    pool2 = MaxPooling2D(pool_size=(2, 2))(inception2)  # (None, 12, 37, 64)
    inception3 = Inception(filters=128, inputs=pool2)  # (None, 12, 37, 128)
    inception3 = Inception(filters=128, inputs=inception3)
    pool3 = MaxPooling2D(pool_size=(2, 2))(inception3)  # (None, 6, 18, 128)
    inception4 = Inception(filters=256, inputs=pool3)  # (None, 6, 18, 256)
    inception4 = Inception(filters=256, inputs=inception4)
    inception4 = Inception(filters=256, inputs=inception4)
    pool4 = MaxPooling2D(pool_size=(2, 2))(inception4)  # (None, 3, 9, 256)
    # 2 Inception module
    inception5 = Inception(filters=512, inputs=pool4)  # (None, 3, 9, 512)
    inception6 = Inception(filters=512, inputs=inception5)  # (None, 3, 9, 512)
    #inception6 = Inception(filters=512, inputs=inception6)
    # 4 Residual connection (decoder path: upsample with Conv2DTranspose, then
    # concatenate the matching encoder stage on the channel axis. ZeroPadding2D
    # compensates for the odd spatial sizes 37 and 25 that halving loses.)
    add1 = concatenate([Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(inception6),
                        inception4], axis=3)  # (None, 3, 9, 512) -> (None, 6, 18, 256)
    inception7 = Inception(filters=256, inputs=add1)
    inception7 = Inception(filters=256, inputs=inception7)
    # inception7 = Inception(filters=256, inputs=inception7)
    add2 = concatenate(
        [ZeroPadding2D(((0, 0), (1, 0)))(Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(inception7)),
         inception3], axis=3)  # (None, 6, 18, 256) -> (None, 12, 36, 128) -> (None, 12, 37, 128)
    inception8 = Inception(filters=128, inputs=add2)
    inception8 = Inception(filters=128, inputs=inception8)
    # inception8 = Inception(filters=128, inputs=inception8)
    add3 = concatenate(
        [ZeroPadding2D(((1, 0), (1, 0)))(Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(inception8)),
         inception2], axis=3)  # (None, 12, 37, 128) -> (None, 24, 74, 64) -> (None, 25, 75, 64)
    inception9 = Inception(filters=64, inputs=add3)
    inception9 = Inception(filters=64, inputs=inception9)
    # inception9 = Inception(filters=64, inputs=inception9)
    add4 = concatenate(
        [Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(inception9),
         inception1], axis=3)  # (None, 25, 75, 64) -> (None, 50, 150, 32)
    # 1 Conv. for output with linear activation
    outputs = Conv2D(1, (1, 1), activation='linear')(add4)
    model = Model(inputs=[inputs], outputs=[outputs])
    if save_model:
        plot_model(model, to_file='model_architecture_with_inception.png', show_shapes=True, show_layer_names=True)
    return model
class Leverage:
    """Leverage bracket for a trading symbol, parsed from an exchange JSON payload."""

    def __init__(self):
        # Neutral defaults; real values are filled in by json_parse().
        self.leverage = 0.0
        self.maxQty = 0.0
        self.symbol = ""

    @staticmethod
    def json_parse(json_data):
        """Build a Leverage from a JSON wrapper exposing get_float()/get_string()."""
        parsed = Leverage()
        parsed.leverage = json_data.get_float("leverage")
        parsed.maxQty = json_data.get_float("maxQty")
        parsed.symbol = json_data.get_string("symbol")
        return parsed
from mongoengine import Document, StringField
from mongoengine_plus.models import BaseModel
class TestModel(BaseModel, Document):
    # Minimal document used to exercise BaseModel's to_dict() field hiding.
    id = StringField()
    secret_field = StringField()
    __test__ = False  # tell pytest not to collect this class as a test case
    _hidden = ['secret_field']  # field names masked in to_dict() output
def test_hide_field():
    """Fields listed in _hidden must be masked (not dropped) by to_dict()."""
    instance = TestModel(id='12345', secret_field='secret')
    serialized = instance.to_dict()
    assert serialized['id'] == '12345'
    assert serialized['secret_field'] == '********'
|
from django.apps import AppConfig
from suit.apps import DjangoSuitConfig
from suit.menu import ParentItem, ChildItem
class AuctionConfig(AppConfig):
    """Django application configuration for the auction app."""
    name = 'auction.auction'
    verbose_name = "Auction"

    def ready(self):
        """Override this to put in:
            Users system checks
            Users signal registration
        """
        # No startup hooks needed yet.
        pass
class SuitConfig(DjangoSuitConfig):
    """django-suit admin skin: horizontal layout with an Auction menu section."""
    layout = 'horizontal'
    # Admin sidebar: one parent entry linking to the donation and lot models.
    menu = (
        ParentItem('Auction', children=[
            ChildItem(model='auction.donation'),
            ChildItem(model='auction.lot'),
        ]),
    )
|
#!/usr/bin/env python3
# Copyright (c) 2017, John Skinner
import argparse
import bson
import numpy as np
import matplotlib.pyplot as pyplot
import matplotlib.animation as animation
import functools
from arvet.config.global_configuration import load_global_config
import arvet.database.connection as dbconn
import arvet.database.image_manager as im_manager
from arvet.core.image_collection import ImageCollection
def make_display_image(image, stereo=False):
    """Build a numpy array suitable for matplotlib display from an image object.

    When ``stereo`` is True the result is a side-by-side composite: the left
    half is ``image.data``; the right half is the right stereo frame when
    available, otherwise a visualization of the depth map (depth / 4 scaled to
    0-255 — assumes depth is roughly in [0, 4]; TODO confirm units), otherwise
    left black.

    :param image: object exposing ``data`` (uint8 array) and optionally
        ``right_data`` / ``depth_data``
    :param stereo: whether to produce the two-panel composite
    :return: numpy uint8 array
    """
    if not stereo:
        return image.data

    shape = list(image.data.shape)
    width = shape[1]
    shape[1] *= 2  # double the width so both panels fit side by side
    composite_image = np.zeros(shape, dtype=np.uint8)
    composite_image[:, 0:width] = image.data

    # Use getattr for both optional attributes: the original guarded
    # right_data with hasattr but crashed with AttributeError when
    # depth_data was missing entirely.
    right_data = getattr(image, 'right_data', None)
    depth_data = getattr(image, 'depth_data', None)
    if right_data is not None:
        composite_image[:, width:shape[1]] = right_data
    elif depth_data is not None:
        uint_depth = np.asarray(np.floor(255 * depth_data / 4), dtype=np.uint8)
        if len(shape) >= 3:
            # Replicate the single-channel depth across every color channel
            for i in range(shape[2]):
                composite_image[:, width:shape[1], i] = uint_depth
        else:
            composite_image[:, width:shape[1]] = uint_depth
    return composite_image
def update_figure(idx, image_source, matplotlib_im, stereo=False, *_, **__):
    """FuncAnimation callback: render frame ``idx`` of ``image_source`` onto ``matplotlib_im``."""
    _, frame = image_source[idx]
    matplotlib_im.set_array(make_display_image(frame, stereo))
    # blit=True requires an iterable of the artists that changed
    return (matplotlib_im,)
def visualize_dataset(dataset_id: bson.ObjectId):
    """Play back every frame of the stored image collection with the given id."""
    collection = ImageCollection.objects.get({'_id': dataset_id})
    show_composite = collection.is_stereo_available or collection.is_depth_available

    figure = pyplot.figure()
    _, first_image = collection[0]
    artist = pyplot.imshow(make_display_image(first_image, show_composite))

    # Keep a reference to the animation object — if it is garbage-collected
    # the playback silently never starts.
    animation_ref = animation.FuncAnimation(
        figure,
        functools.partial(update_figure, image_source=collection,
                          matplotlib_im=artist, stereo=show_composite),
        frames=collection.timestamps,
        blit=True,
    )
    pyplot.show()
def main():
    """Entry point: parse a dataset id from the command line and visualize it.

    Used as a sanity check that generated datasets look right.
    """
    arg_parser = argparse.ArgumentParser(
        description='Visualize a dataset, specified by ID on the command line.')
    arg_parser.add_argument('dataset_id', help='The ID of the dataset to visualize.')
    parsed = arg_parser.parse_args()

    # Load configuration, then wire up the database and image manager
    config = load_global_config('config.yml')
    dbconn.configure(config['database'])
    im_manager.configure(config['image_manager'])

    visualize_dataset(bson.ObjectId(parsed.dataset_id))
# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()
|
import traceback
import sys
import os
from functools import partial
os.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = '1' #hiding pygame greeting console output
import pygame
import json
from engine import *
import tkinter
import tkinter.filedialog
import tkinter.messagebox as mb
from tkinter import scrolledtext as stxt
from tkinter import ttk
import random
import time
import shutil
import zipfile
import threading
import copy
def show_exception_and_exit(exc_type, exc_value, tb):
    """Global exception hook: show the traceback in a GUI error box.

    Formats the traceback from the hook's own arguments. The original used
    traceback.format_exc(), which inspects the *currently handled* exception —
    inside sys.excepthook there is none, so it printed 'NoneType: None'
    instead of the real traceback.
    """
    text = ''.join(traceback.format_exception(exc_type, exc_value, tb))
    show_error(text = text)
# Route uncaught exceptions into the GUI error box instead of a silent crash.
sys.excepthook = show_exception_and_exit
# NOTE(review): sys.unraisablehook is called with a single args object, not
# (type, value, tb) — this handler's signature may not match; verify.
sys.unraisablehook = show_exception_and_exit
args = sys.argv  # command-line arguments; args[1] may be a map file to open (see bottom of file)
def show_error(title = 'Ooops!', text = 'ERROR'):
    """Pop up a modal tkinter error box (used to surface Python tracebacks)."""
    root = tkinter.Tk()
    root.title(title)
    root.withdraw()  # hide the empty root window; only the message box shows
    mb.showerror(title, text)
    root.destroy()
# Load all editor data files at startup; any failure is shown in a GUI error
# box (the script then continues with whichever globals were already set).
try:
    a = open('map_editor/config.json', 'r')
    setts = json.loads(a.read())  # editor settings (paths, sizes, UI options)
    a.close()
    a = open(setts['blk_file_path'], 'r')
    blk_data = json.loads(a.read())  # block definitions
    a.close()
    a = open(setts['ent_file_path'], 'r', encoding = "UTF-8")
    ent_data = json.loads(a.read())  # entity definitions
    a.close()
    a = open(setts['help_file_path'], 'r', encoding = "UTF-8")
    TXT_HELP = a.read()  # help text shown by help_window()
    a.close()
    a = open(setts['lang_file'], 'r', encoding = "UTF-8")
    lang = json.loads(a.read())  # localized UI strings
    a.close()
except:
    text = traceback.format_exc()
    show_error(text = text)
IDMAP = {}  # numeric block id -> block definition (numeric ids keep world data compact)
ENTMAP = {}  # numeric entity id -> entity definition
TEXTURES = {}  # texture path -> loaded pygame Surface (blocks)
ENT_TEXTURES = {}  # texture path -> loaded pygame Surface (entities)
ENTITY = []  # entities placed on the map
WIRES = []  # wiring connections between entity attributes
for key, value in blk_data.items():
    IDMAP[int(value['id'])] = value
for key, value in ent_data.items():
    ENTMAP[int(value['id'])] = value
WIDTH = setts['WIDTH']
HEIGHT = setts['HEIGHT']  # screen size in pixels
W_w = setts['world_size']  # map size (square: width == height)
W_h = W_w
ENT_COUNTER = 0  # counter used to auto-name newly placed entities
T_SIZE = setts['texture_size']  # current texture size; changes with zoom, unlike T_SIZE_ORIG
T_SIZE_ORIG = setts['texture_size']
X0, Y0 = 0,0  # rendering origin offset in pixels
CAM_X = 0
CAM_Y = 0  # camera position in map cells
def merge_dicts(dictOne, dictTwo):
    """Return a new dict combining both arguments; dictTwo's entries win on conflict."""
    merged = dict(dictOne)
    merged.update(dictTwo)
    return merged
def open_file(title = 'Открытие'):
    """Show a native file-open dialog and return the chosen path ('' when cancelled)."""
    root = tkinter.Tk()
    root.title(title)
    root.withdraw()  # only the dialog should be visible, not the root window
    chosen = tkinter.filedialog.askopenfilename(parent=root)
    root.destroy()
    return chosen
def save_file(title = 'Открытие'):
    """Show a native save-as dialog and return the chosen path ('' when cancelled)."""
    root = tkinter.Tk()
    root.title(title)
    root.withdraw()  # only the dialog should be visible, not the root window
    chosen = tkinter.filedialog.asksaveasfilename(parent=root)
    root.destroy()
    return chosen
def compile_map(name):
    """Compile the current map into a .cmf package (a zip archive).

    Stages: recreate maps_raw/<name>, mirror the directory hierarchy of every
    referenced asset, copy the assets, write blocks.json and world.json, then
    zip everything into maps/<name>.cmf.
    """
    print(lang['compiler']['start'].format(time.ctime()[11:19]))
    try:
        os.mkdir('maps_raw/' + name)
    except:
        shutil.rmtree('maps_raw/' + name)  # a folder with this name exists: wipe and recreate it
        os.mkdir('maps_raw/' + name)
    prefix = 'maps_raw/' + name + '/'  # all packaged paths live under this prefix
    FILES = []
    print(lang['compiler']['dir_copy'].format(time.ctime()[11:19]))
    # Recreate the directory hierarchy for every block texture
    for key, value in TEXTURES.items():
        path = key
        try:
            if path != False:
                os.makedirs(prefix + '/'.join(path.split('/')[0:-1]))
                print('\t' + prefix + '/'.join(path.split('/')[0:-1]))
        except:
            pass
    # Recreate directories for every file referenced by a placed entity
    for x in range(W_w):
        for y in range(W_h):
            j = ent.get(x, y)
            if type(j) != int:
                if j['name'] != 'ent_noname':
                    for key, value in j.items():
                        try:
                            if os.path.isfile(str(value)):
                                os.makedirs(prefix + '/'.join(value.split('/')[0:-1]))
                                print('\t' + prefix + '/'.join(value.split('/')[0:-1]))
                        except:
                            pass
                    for key, value in j['attributes'].items():
                        try:
                            if os.path.isfile(str(value)):
                                os.makedirs(prefix + '/'.join(value.split('/')[0:-1]))
                                print('\t' + prefix + '/'.join(value.split('/')[0:-1]))
                        except:
                            pass
    # Recreate directories for entity images from the entity definition file
    for key, value in ent_data.items():
        try:
            os.makedirs(prefix + '/'.join(value['image'].split('/')[0:-1]))
        except:
            pass
    print(lang['compiler']['file_copy'].format(time.ctime()[11:19]))
    # Copy block textures into the package
    for key, value in TEXTURES.items():
        path = key
        try:
            shutil.copy(str(path), prefix + path)
            print('\t' + path + ' > ' + prefix + path)
            FILES.append(path)
        except:
            print(lang['compiler']['failed_copy'] + prefix + path)
    # Copy every file referenced by placed entities
    for x in range(W_w):
        for y in range(W_h):
            j = ent.get(x, y)
            if type(j) != int:
                if j['name'] != 'ent_noname':
                    for key, value in j.items():
                        try:
                            shutil.copy(str(value), prefix + str(value))
                            print('\t' + str(value) + ' > ' + prefix + str(value))
                            FILES.append(prefix + str(value))
                        except:
                            pass
                    for key, value in j['attributes'].items():
                        try:
                            shutil.copy(str(value), prefix + str(value))
                            print('\t' + str(value) + ' > ' + prefix + str(value))
                            FILES.append(prefix + str(value))
                        except:
                            pass
    for key, value in ent_data.items():
        if os.path.isfile(value['image']):
            FILES.append(value['image'])
            shutil.copy(str(value['image']), prefix + str(value['image']))
    print(lang['compiler']['blocks_copy'].format(time.ctime()[11:19]))
    shutil.copy(setts['blk_file_path'], prefix + 'blocks.json')  # package the block definitions
    FILES.append(prefix + 'blocks.json')
    print(lang['compiler']['map_patch'].format(time.ctime()[11:19]))
    a = open(prefix + 'world.json', 'w')
    FILES.append(prefix + 'world.json')
    PLPS = []
    x, y = 0, 0
    # Collect player spawn positions from spawnpoint entities
    for x in range(W_w):
        for y in range(W_h):
            if type(ent.get(x, y)) != int:
                if ent.get(x, y)['func'] != False:
                    if ent.get(x, y)['func'] == 'spawnpoint':
                        PLPS.append([x, y])
    ENTITY = []
    # Collect every placed (non-placeholder) entity
    for x in range(W_w):
        for y in range(W_h):
            unit = ent.get(x,y)
            if type(unit) != int:
                if unit['name'] != 'ent_noname':
                    ENTITY.append(unit)
    idmap_ = {}
    for key, value in IDMAP.items():
        idmap_[int(key)] = value
    # NOTE(review): 'player_pos' appears twice — the second (PLPS) silently
    # overwrites the first ('none') in the literal.
    data = {
        'player_pos':'none',
        'world':world.arr,
        'idmap':idmap_,
        'world_size':W_w,
        'entity':ENTITY,
        'player_pos':PLPS,
        'ent_data':ent_data,
        'wire':WIRES,
        'cam':[CAM_X, CAM_Y]
        }
    json.dump(data, a, separators=(',', ':'))  # compact JSON dump of the whole map
    a.close()
    # Zip everything under maps_raw/<name> into maps/<name>.cmf
    arh = zipfile.ZipFile('maps/' + name + '.cmf', 'w')
    for root, dirs, files in os.walk('maps_raw/'+name):
        for file in files:
            print(lang['compiler']['added'] + ''.join(os.path.join(root, file).split(name)[1][1:]))
            arh.write(prefix + ''.join(os.path.join(root, file).split(name)[1][1:]), arcname=''.join(os.path.join(root, file).split(name)[1][1:]))
    arh.close()
    print(lang['compiler']['done'].format(time.ctime()[11:19]))
def load_map(out = False):
    """Ask the user for a .cmf file and load it into the editor state.

    A .cmf is a zip archive; it is extracted to maps_raw/<name>/ and the
    contained world.json / blocks.json repopulate the module-level globals.

    :param out: when True, print each extracted file name
    """
    global world, IDMAP, TEXTURES, blk_data, W_w, W_h, WIRES, CAM_X, CAM_Y, ent
    ent.fill(0)  # clear all placed entities
    path = open_file()  # ask the user for the .cmf path
    if path != '':
        name = path.split('/')[-1].split('.')[0]  # map name = file name without extension
        arh = zipfile.ZipFile(path, 'r')  # a .cmf is actually a zip archive
        try:
            os.mkdir('maps_raw/' + name)
        except:
            shutil.rmtree('maps_raw/' + name)  # folder already exists: wipe and recreate
            os.mkdir('maps_raw/' + name)
        prefix = 'maps_raw/' + name + '/'  # prefix for all packaged file paths
        for file_info in arh.infolist():
            if out:
                print(lang['loader']['exctracted']+file_info.filename)  # extracting map files into the temp folder
            try:
                arh.extract(file_info.filename, 'maps_raw/'+name)
            except:
                pass
        a = open(prefix + 'world.json', 'r')  # main map data
        data = json.loads(a.read())
        a.close()
        try:
            W_w, W_h = data['world_size'], data['world_size']
        except:
            print(lang['loader']['world_size_failed'])
            W_w, W_h = 128, 128  # fall back to a default size for old map files
        world = arr2.arr2(W_w, W_h, 0)  # recreate the world grid
        world.arr = data['world']
        ent = arr2.arr2(W_w, W_h, ent_data['ent_noname'])
        for j in data['entity']:
            ent.put(j['pos_x'], j['pos_y'], j)  # restore placed entities
        a = open(prefix + 'blocks.json', 'r')  # map block definitions
        blk_data = json.loads(a.read())
        a.close()
        for key, value in blk_data.items():
            if value['image'] != False:
                # NOTE(review): keyed by the prefixed path but loaded from the
                # un-prefixed one — verify which location the texture lives in.
                TEXTURES[prefix + value['image']] = image(value['image'])
        for key, value in blk_data.items():
            if value['image'] != False:
                unit = value
                unit['image'] = prefix + value['image']  # rewrite image path to the extracted copy
                IDMAP[int(value['id'])] = unit
            else:
                IDMAP[int(value['id'])] = value
        try:
            WIRES = data['wire']
        except:
            print(lang['loader']['wire_failed'])  # older maps have no wiring data
            WIRES = []
        try:
            CAM_X, CAM_Y = data['cam']  # restore saved camera position
        except:
            print(lang['loader']['camera_failed'])
        pygame.display.set_caption(setts['TITLE'] + ' - ' + name + '.cmf')
def load_map_by_file(path):
    """Load a .cmf map from an explicit path (used for command-line startup).

    Outdated variant of load_map(): no wiring/camera restore and it reuses the
    existing ``world`` object instead of recreating it.

    Fixes: the original ended with ``a.close`` (missing parentheses), so the
    blocks.json handle was never closed; both reads now use ``with``.
    """
    global world, IDMAP, TEXTURES, blk_data, W_w, W_h, ent
    if os.path.isfile(path):
        # NOTE(review): str.join here concatenates path fragments with the cwd
        # as separator — unusual but kept as-is; verify against actual inputs.
        name = 'maps_raw/' + os.getcwd().join(path.split(os.getcwd())[1:])[1:].split('.')[0]
        arh = zipfile.ZipFile(path, 'r')
        try:
            os.mkdir(name)
        except:
            shutil.rmtree(name)  # folder already exists: wipe and recreate
            os.mkdir(name)
        prefix = 'maps_raw/' + os.getcwd().join(path.split(os.getcwd())[1:])[1:].split('.')[0] + '/'
        for file_info in arh.infolist():
            try:
                arh.extract(file_info.filename, name)
            except:
                pass
        with open(prefix + 'world.json', 'r') as a:
            data = json.loads(a.read())
        W_w, W_h = data['world_size'], data['world_size']
        world.arr = data['world']
        ent = arr2.arr2(W_w, W_h, ent_data['ent_noname'])
        for j in data['entity']:
            ent.put(j['pos_x'], j['pos_y'], j)  # restore placed entities
        with open(prefix + 'blocks.json', 'r') as a:
            blk_data = json.loads(a.read())
        for key, value in blk_data.items():
            if value['image'] != False:
                TEXTURES[prefix + value['image']] = image(value['image'])
        for key, value in blk_data.items():
            if value['image'] != False:
                unit = value
                unit['image'] = prefix + value['image']  # rewrite image path to the extracted copy
                IDMAP[int(value['id'])] = unit
            else:
                IDMAP[int(value['id'])] = value
def cam(x, y):
    """Console command: jump the camera to grid position (x, y)."""
    global CAM_X, CAM_Y
    CAM_X = x
    CAM_Y = y
def fg(ID):
    """Console command: select block ``ID`` as the foreground drawing block."""
    global fg_block
    fg_block = ID
def bg(ID):
    """Console command: select block ``ID`` as the background drawing block."""
    global bg_block
    bg_block = ID
def console():
    # Legacy developer-console thread: reads lines from stdin and executes them.
    # SECURITY NOTE: exec() on raw input — acceptable for a local dev console,
    # but never expose this to untrusted input.
    while c_loop:
        com = str(input('Python >>> '))
        try:
            exec(com, globals())
        except Exception as ex:
            text = traceback.format_exc()
            show_error(text = text)
def block(x, y, ID):
    # Console command: place block ID at world cell (x, y).
    try:
        world.put(x, y, ID)
    except:
        # best-effort: out-of-range or invalid placements are silently ignored
        pass
def image(path):
    """Load the image at ``path`` as a pygame Surface; log and return None on failure."""
    try:
        surface = pygame.image.load(path)
    except:
        print(lang['misc']['image_failed'].format(path))
        return None
    return surface
def invert(var):
    """Return the boolean negation of ``var`` (truthy -> False, falsy -> True)."""
    return not var
def get_block(ID):
    # Resolve a numeric block id to its definition via the global IDMAP;
    # logs a localized message and returns None for unknown ids.
    try:
        return IDMAP[ID]
    except:
        print(lang['misc']['block_failed'].format(ID))
def get_image(ID):
    # Texture lookup for a block id; any failure (unknown id, missing
    # texture) falls back to the EMO "missing texture" surface.
    try:
        return TEXTURES[get_block(ID)['image']]
    except:
        return EMO
def exist(key, dic):
    """Return True when subscripting ``dic`` with ``key`` succeeds, else False."""
    try:
        _ = dic[key]
    except:
        return False
    return True
def get_ent_image(ID):
    # Texture lookup for an entity id; falls back to the EMO "missing
    # texture" surface on any failure.
    try:
        return ENT_TEXTURES[get_entity(ID)['image']]
    except:
        return EMO
def get_entity(ID):
    """Resolve a numeric entity id via the global ENTMAP; None when unknown."""
    try:
        return ENTMAP[ID]
    except:
        return None
def draw():
    # Main render pass: blit the texture of every visible, drawable world
    # block (scaled to the current zoom) onto map_layer.
    x, y = 0, 0
    for x in range(SCR_X):
        for y in range(SCR_Y):
            # only cells inside the world bounds and not marked no-draw
            if CAM_X + x >= 0 and CAM_X + x < W_w and CAM_Y + y >= 0 and CAM_Y + y < W_h:
                if world.get(CAM_X + x, CAM_Y + y) != setts['nodraw_id']:
                    map_layer.blit(pygame.transform.scale(get_image(world.get(CAM_X + x, CAM_Y + y)), (T_SIZE, T_SIZE)), (X0 + x*T_SIZE, Y0 + y*T_SIZE))  # render block texture
            y += 1  # NOTE(review): redundant — the for statement rebinds y each iteration
        x += 1  # NOTE(review): redundant — see above
def draw_entity():
    # Entity render pass: draw each visible entity plus per-type overlays
    # (name labels, selection boxes, sound radii) onto map_layer/info_layer.
    x, y = 0, 0
    for x in range(SCR_X):
        for y in range(SCR_Y):
            try:
                unit = ent.get(CAM_X + x, CAM_Y + y)
                if unit['name'] != 'ent_noname':
                    # entity name label above the cell
                    info_layer.blit(Consolas.render(unit['attributes']['name'], False, (255,255,255)), (X0 + x*T_SIZE, Y0 + y*T_SIZE - 20))
                    if unit['func'] == 'texture_resizable':
                        # scaled decal with a white outline marking its anchor cell
                        map_layer.blit(pygame.transform.scale(image(unit['attributes']['image']), (T_SIZE*unit['attributes']['size_x'], T_SIZE*unit['attributes']['size_y'])),(unit['attributes']['pad_x'] + X0 + x*T_SIZE, unit['attributes']['pad_y'] + Y0 + y*T_SIZE))
                        pygame.draw.rect(info_layer, (255,255,255), (X0 + x*T_SIZE, Y0 + y*T_SIZE, T_SIZE, T_SIZE), 2)
                    elif unit['func'] == 'npc':
                        map_layer.blit(pygame.transform.scale(image(unit['attributes']['skin']), (T_SIZE, T_SIZE)), (X0 + x*T_SIZE, Y0 + y*T_SIZE))
                    elif unit['func'] == 'snd_ambient':
                        # blue circle shows the audible radius; label shows the sound file
                        pygame.draw.circle(info_layer, (0,38,255), (X0 + x*T_SIZE + T_SIZE//2, Y0 + y*T_SIZE + T_SIZE//2), unit['attributes']['radius']*T_SIZE, 2)
                        map_layer.blit(pygame.transform.scale(get_ent_image(ent.get(CAM_X + x, CAM_Y + y)['id']), (T_SIZE, T_SIZE)), (X0 + x*T_SIZE, Y0 + y*T_SIZE))
                        info_layer.blit(Consolas.render(unit['attributes']['sound_file'].split('/')[-1], False, (0, 38, 255)), (X0 + x*T_SIZE, Y0 + y*T_SIZE - 40))
                    elif unit['func'] == 'snd_point':
                        pygame.draw.circle(info_layer, (0,38,255), (X0 + x*T_SIZE + T_SIZE//2, Y0 + y*T_SIZE + T_SIZE//2), unit['attributes']['radius']*T_SIZE, 2)
                        map_layer.blit(pygame.transform.scale(get_ent_image(ent.get(CAM_X + x, CAM_Y + y)['id']), (T_SIZE, T_SIZE)), (X0 + x*T_SIZE, Y0 + y*T_SIZE))
                        info_layer.blit(Consolas.render(unit['attributes']['sound_file'].split('/')[-1], False, (0, 38, 255)), (X0 + x*T_SIZE, Y0 + y*T_SIZE - 40))
                    else:
                        map_layer.blit(pygame.transform.scale(get_ent_image(ent.get(CAM_X + x, CAM_Y + y)['id']), (T_SIZE, T_SIZE)), (X0 + x*T_SIZE, Y0 + y*T_SIZE))
            except:
                # best-effort: skip cells that fail to render (e.g. out of bounds)
                continue
            y += 1  # NOTE(review): redundant — the for statement rebinds y each iteration
        x += 1  # NOTE(review): redundant — see above
def draw_blk_choose():
    # Draw the block-picker grid; the hovered cell gets a highlight box and a
    # tooltip with the block's name and id at the mouse position (mx, my).
    x, y = 0, 0
    for x in range(WIDTH//(T_SIZE_ORIG+5)):
        for y in range(HEIGHT//(T_SIZE_ORIG+5)):
            if blk_arr.get(x, y) != False:
                pygame.draw.rect(screen, (50,50,50), (x*(T_SIZE_ORIG+5), 45 + y*(T_SIZE_ORIG+5), 30,30))
                screen.blit(get_image(blk_arr.get(x, y)), (x*(T_SIZE_ORIG+5)+5, 45 + y*(T_SIZE_ORIG+5)+5))
                if [ax, ay] == [x, y]:
                    # hovered cell: highlight frame plus name/id tooltip
                    pygame.draw.rect(screen, (200,200,200), (x*(T_SIZE_ORIG+5)+5, 50 + y*(T_SIZE_ORIG+5), 20,20), 2)
                    m_uni = get_block(m_unit)
                    pygame.draw.rect(screen, (70,70,70), (mx+8, my+8, len(m_uni['name'] + '({})'.format(m_uni['id']))*setts['font_size']//1.8+2, setts['font_size']+2))
                    name_text = Consolas.render(m_uni['name'] + '({})'.format(m_uni['id']), False, (255,255,255))
                    screen.blit(name_text, (mx+10, my+10))
            y += 1  # NOTE(review): redundant — the for statement rebinds y each iteration
        x += 1  # NOTE(review): redundant — see above
def draw_ent_choose():
    # Draw the entity-picker grid; the hovered cell gets a highlight box plus a
    # tooltip with the entity's name/id and (when present) its description.
    x, y = 0, 0
    for x in range(WIDTH//(T_SIZE_ORIG+5)):
        for y in range(HEIGHT//(T_SIZE_ORIG+5)):
            if ent_arr.get(x, y)['name'] != 'ent_noname':
                pygame.draw.rect(screen, (50,50,50), (x*(T_SIZE_ORIG+5), 45 + y*(T_SIZE_ORIG+5), 30,30))
                screen.blit(get_ent_image(ent_arr.get(x, y)['id']), (x*(T_SIZE_ORIG+5)+5, 45 + y*(T_SIZE_ORIG+5)+5))
                if [ax, ay] == [x, y]:
                    # hovered cell: highlight frame plus tooltip
                    pygame.draw.rect(screen, (200,200,200), (x*(T_SIZE_ORIG+5)+5, 50 + y*(T_SIZE_ORIG+5), 20,20), 2)
                    m_uni = m_unit
                    pygame.draw.rect(screen, (70,70,70), (mx+8, my+8, len(m_uni['name'] + '({})'.format(m_uni['id']))*setts['font_size']//1.8+2, setts['font_size']+2))
                    if exist("desc", m_uni):
                        # optional second tooltip line with the description
                        pygame.draw.rect(screen, (70,70,70), (mx+8, my+24, (len(m_uni['desc']) + 1.5)*setts['font_size']//1.8+2, setts['font_size']+2))
                        desc_text = Consolas.render(m_uni['desc'], False, (255,255,255))
                        screen.blit(desc_text, (mx+10, my+10+setts['font_size']))
                    name_text = Consolas.render(m_uni['name'] + '({})'.format(m_uni['id']), False, (255,255,255))
                    screen.blit(name_text, (mx+10, my+10))
            y += 1  # NOTE(review): redundant — the for statement rebinds y each iteration
        x += 1  # NOTE(review): redundant — see above
def help_window():
    """Open a read-only scrollable window showing the editor help text."""
    root = tkinter.Tk()
    root.title(lang['windows']['help'])
    root.geometry('600x500')
    txt = stxt.ScrolledText(root, width = 70, height = 30, wrap = 'word')
    txt.place(x = 10, y = 10)
    txt.insert(1.0, TXT_HELP)
    # Tk's Text 'state' option only accepts 'normal' or 'disabled';
    # the original 'disable' raises a TclError.
    txt['state'] = 'disabled'
    root.mainloop()
#help_window()
def property_editor(unit):
    """Open a tkinter window editing the attributes of a placed entity.

    Widgets are created dynamically per attribute via globals()/exec
    (one Entry per attribute, or red labels when the attribute is wired).
    'Apply' writes the edited values back into ENTITY and the ent grid;
    'Default' restores the attribute defaults from ent_data.
    NOTE(review): the exec/globals approach leaks widget names into module
    globals and breaks for attribute names that are not valid identifiers.
    """
    global ax1, ay1, ent
    def appl():
        # Collect edited values, coerce to int where possible, strip the
        # 'var_entry_' prefix back off the keys, and store a deep copy of
        # the updated entity at the selected cell (ax1, ay1).
        for var in ENTRYS:
            dic = unit
            DATA[var] = globals()[var].get()
        for key, value in DATA.items():
            try:
                DATA[key] = int(value)
            except:
                DATA[key] = str(value)
        for key, value in DATA.items():
            DATA1['_'.join(key.split('_')[2:])] = value
        unit2 = copy.deepcopy(unit)
        unit2['attributes'] = copy.deepcopy(DATA1)
        for a in ENTITY:
            if a['pos_x'] == unit['pos_x'] and a['pos_y'] == unit['pos_y'] and a['name'] == unit['name']:
                ENTITY.remove(a)
        ENTITY.append(unit2)
        ent.put(ax1, ay1, unit2)
        root.destroy()
    def default():
        # Reset every entry widget to the entity type's default attributes.
        attrs = copy.deepcopy(ent_data[unit['name']]['attributes'])
        for key, value in attrs.items():
            exec('var_entry_{}.delete(0, tkinter.END)'.format(key))
            exec('var_entry_{}.insert(0, value)'.format(key))
    dic = {}
    attrs = unit['attributes']
    LEN = 0
    TXTS = [0]
    DATA = {}
    DATA1 = {}
    # Window size is derived from the attribute count and longest key name.
    for key, value in attrs.items():
        LEN += 1
        TXTS.append(len(key))
    MAX = max(TXTS)
    axx1, ayy1 = ax1, ay1
    root = tkinter.Tk()
    root.geometry('{}x{}'.format(10 + MAX*11 + 10 + 200 + 30,LEN*40 + 60))
    root.title(unit['name'] + ' ({},{})'.format(axx1, ayy1))
    root.resizable(width=False, height=False)
    if LEN == 0:
        # Entity has no editable attributes — show an informational label.
        root.geometry('300x200')
        a = tkinter.Label(text = lang['misc']['ent_noattrs'])
        a.place(x = 50, y = 50)
    else:
        ENTRYS = []
        y = 20
        for key, value in attrs.items():
            # Wired attributes are read-only: show where the value comes from.
            wired = False
            wired_to = {'pos_x':0, 'pos_y':0, "attr":'none'}
            for wire in WIRES:
                if wire[0]['pos_x'] == unit['pos_x'] and wire[0]['pos_y'] == unit['pos_y'] and wire[0]['attr'] == key:
                    wired = True
                    wired_to = wire
            if not wired:
                globals()['var_entry_' + key] = tkinter.Entry(width = 40)
                exec('var_entry_{}.place(x={},y={})'.format(key, MAX*7 + 20, y))
                exec('var_entry_{}.insert(0, value)'.format(key))
            else:
                globals()['var_wirelabel_{}'.format(key)] = tkinter.Label(root, fg = 'red', text='Wired to {} ({}, {})({})'.format(ent.get(wired_to[1]['pos_x'], wired_to[1]['pos_y'])['attributes']['name'], wired_to[1]['pos_x'], wired_to[1]['pos_y'], wired_to[1]['attr']))
                exec('var_wirelabel_{}.place(x = {}, y = {})'.format(key, MAX*7+20, y))
                globals()['var_wirelabel_{}_1'.format(key)] = tkinter.Label(root, fg = 'red', text = '({})'.format(ent.get(wired_to[1]['pos_x'], wired_to[1]['pos_y'])['attributes'][wired_to[1]['attr']]))
                exec('var_wirelabel_{}_1.place(x = {}, y = {})'.format(key, MAX*7+20, y+15))
            globals()['var_label_' + key] = tkinter.Label(text = key + ':')
            exec('var_label_{}.place(x={},y={})'.format(key, 10, y))
            ENTRYS.append('var_entry_{}'.format(key))
            y += 40
        # Auto-name freshly placed entities (default name is '0').
        if unit['attributes']['name'] == '0':
            exec('var_entry_name.delete(0, tkinter.END)')
            exec('var_entry_name.insert(0, "{}")'.format('entity_{}'.format(ENT_COUNTER)))
        apply = tkinter.Button(root, text = 'Apply', command = appl)
        apply.place(x = 10, y = y)
        cancel = tkinter.Button(root, text = 'Default', command = default)
        cancel.place(x = 60, y = y)
    root.mainloop()
def view_vars():
    # Console command: pretty-print the VARS dict via the project's pretty_out helper.
    print(pretty_out.box(pretty_out.listing(VARS)))
# ---- pygame / editor startup: window, surfaces, fonts and textures ----
pygame.init()
pygame.mixer.init()
if setts['fullscreen']:
    screen = pygame.display.set_mode((0,0),pygame.RESIZABLE)
else:
    screen = pygame.display.set_mode((WIDTH, HEIGHT))
map_layer = pygame.Surface((WIDTH, HEIGHT))  # world/blocks are drawn here
pygame.display.set_caption(setts['TITLE'])
pygame.display.set_icon(image('map_editor/icon.png'))
clock = pygame.time.Clock()
Consolas = pygame.font.Font('map_editor/consolas.ttf', setts['font_size'])
info_layer = pygame.Surface((WIDTH, HEIGHT))  # overlays (labels, outlines, radii)
info_layer.set_colorkey(setts['colorkey'])
# Preload every block and entity texture declared in the definition files
for key, value in blk_data.items():
    if value['image'] != False:
        TEXTURES[value['image']] = image(value['image'])
for key, value in ent_data.items():
    if value['image'] != False:
        ENT_TEXTURES[value['image']] = image(value['image'])
SCR_X = WIDTH//T_SIZE  # screen size in blocks
SCR_Y = HEIGHT//T_SIZE
world = worldgen.CaveChunk(W_w, W_h, W_w*W_h*3, 3, 1, 2, 3)  # generate the default world
blk_arr = arr2.arr2(WIDTH//(T_SIZE_ORIG+5), HEIGHT//(T_SIZE_ORIG+5), False)  # block-picker grid
ent_arr = arr2.arr2(WIDTH//(T_SIZE_ORIG+5), HEIGHT//(T_SIZE_ORIG+5), ent_data['ent_noname'])  # entity-picker grid
ent = arr2.arr2(W_w, W_h, ent_data['ent_noname'])  # entity placement grid
EMO = image('map_editor/missing.png')  # fallback texture for missing images
# Fill the block/entity picker grids column-first, wrapping at screen bounds
x, y = 0, 0
for key, value in blk_data.items():
    x += 1
    if x == WIDTH // (T_SIZE+10):
        x = 0
        y += 1
        if y == HEIGHT // (T_SIZE+10):
            y = 0
            break
    blk_arr.put(x, y, value['id'])
x, y = 0, 0
for key, value in ent_data.items():
    x += 1
    if x == WIDTH // (T_SIZE+10):
        x = 0
        y += 1
        if y == HEIGHT // (T_SIZE+10):
            y = 0
            break
    ent_arr.put(x, y, value)
# ---- editor state flags ----
running = True
c_loop = True  # keeps the (disabled) console thread alive
fg_drawing = False
bg_drawing = False
blk_choose = False  # picker/overlay visibility flags
ent_choose = False
tool_choose = False
full_view = False
fg_block = 3  # currently selected foreground block id
bg_block = 2  # currently selected background block id
brackets = image('map_editor/block_br_black.png')  # cursor bracket sprites
brackets_wrong = image('map_editor/wrong.png')
ax, ay = CAM_X, CAM_Y  # hovered cell (picker coordinates)
ax1, ay1 = ax, ay  # hovered cell (world coordinates)
mx, my = 0, 0  # mouse position in pixels
MOVE = ''
move_right = False
move_left = False  # camera movement key states
move_up = False
move_down = False
WMOD = False  # wiring mode flag
# Primitive-drawing state (line, rectangle, circle tools)
LINE_POS1 = [0,0]
LINE_POS2 = [0,0]
line_c = 0
RECT_POS1 = [0,0]
RECT_POS2 = [0,0]
rect_c = 0
C_CENTER = [0,0]
C_RADIUS = 0
C_WIDTH = 1
c_c = 0
C_POS_ORIG = []
T_SIZE_1 = 20
SCR_SCALE_1 = []
move_counter = 0
move_speed = setts['move_speed']
m_unit = 0  # block/entity currently under the mouse in a picker
fg_ent = False
VARS = {}
ON_WIRE = False  # holds the wiring source endpoint while a wire is in progress
TEST_ARR = [[4,4,4,4],
            [4,2,2,4],
            [4,2,2,4],
            [4,4,4,4]]
full_counter = 0
map_rect = pygame.rect.Rect((0,0, SCR_X*T_SIZE, SCR_Y*T_SIZE))
map_r = pygame.rect.Rect((0,0,WIDTH,HEIGHT-T_SIZE_ORIG*2))
#threading.Thread(target=console).start()
# Open a map passed on the command line, if any
if len(args) > 1:
    if os.path.isfile(args[1]):
        load_map_by_file(args[1])
        #T_SIZE = WIDTH//W_w
        #full_view = True
def comp():
    """Show a small tkinter window asking for a map name, then compile it."""
    def f():
        # 'Compile' button: read the name, close the window, run the compiler.
        global NAM
        NAM = a.get()
        root.destroy()
        compile_map(NAM)
    NAM = 'noname'
    root = tkinter.Tk()
    root.geometry("300x60")
    root.title(lang['windows']['compiler'])
    root.resizable(width=False, height=False)
    a = tkinter.Entry(root)
    a.place(x = 20, y = 21)
    b = tkinter.Label(root, text = lang['gui']['map_name'])
    b.place(x = 20, y = 0)
    c = tkinter.Button(root, command = f, text = lang['gui']['compile'])
    c.place(x = 150, y = 20)
    root.mainloop()
def set_wire_mode():
    """Toggle the global wiring-mode flag on/off."""
    global WMOD
    WMOD = not WMOD
def wire_editor(unit):
    """Open the wiring menu for a placed entity.

    Two-step flow driven by the global ON_WIRE: when ON_WIRE is False the
    window offers 'Wire (input)' buttons that select the source attribute;
    otherwise it offers 'Wire (output)' buttons that complete the connection
    and append it to WIRES. Already-wired attributes get an 'Unwire' button.
    NOTE(review): widgets are created via globals()/exec per attribute name,
    which leaks names into module globals (same pattern as property_editor).
    """
    global ON_WIRE, ax1, ay1
    def on_wire(unit, key):
        # Step 1: remember this attribute as the wire's input endpoint.
        global ON_WIRE
        ON_WIRE = copy.deepcopy({'pos_x':ax1, 'pos_y':ay1, 'attr':key})
        root.destroy()
    def on_wire2(unit, key):
        # Step 2: connect the stored input endpoint to this output attribute.
        global ON_WIRE
        WIRES.append([ON_WIRE, {'pos_x':ax1, 'pos_y':ay1, 'attr':key}])
        ON_WIRE = False
        root.destroy()
    def unwire(wired_to):
        # Remove an existing connection.
        WIRES.remove(wired_to)
        root.destroy()
    if ON_WIRE == False:
        # No wire in progress: offer input-side wiring.
        dic = {}
        attrs = unit['attributes']
        LEN = 0
        TXTS = [0]
        DATA = {}
        DATA1 = {}
        # Window size derived from attribute count and longest key name.
        for key, value in attrs.items():
            LEN += 1
            TXTS.append(len(key))
        MAX = max(TXTS)
        axx1, ayy1 = ax1, ay1
        root = tkinter.Tk()
        root.geometry('{}x{}'.format(10 + MAX*11 + 10 + 200 + 30,LEN*40+30))
        root.title(unit['name'] + ' ({},{})'.format(axx1, ayy1))
        root.resizable(width=False, height=False)
        if LEN == 0:  # entity has no attributes to wire — show a message
            root.geometry('300x200')
            a = tkinter.Label(text = lang['misc']['ent_noattrs'])
            a.place(x = 50, y = 50)
        else:
            BUTTONS = []
            y = 20
            for key, value in attrs.items():
                # Check whether this attribute is already a wire source.
                wired = False
                wired_to = {'pos_x':0, 'pos_y':0, "attr":'none'}
                for wire in WIRES:
                    if wire[0]['pos_x'] == unit['pos_x'] and wire[0]['pos_y'] == unit['pos_y'] and wire[0]['attr'] == key:
                        wired = True
                        wired_to = wire
                if not wired:
                    globals()['var_button_' + key] = tkinter.Button(width = 30, text = 'Wire (input)',command = partial(on_wire, unit, key))
                    exec('var_button_{}.place(x={},y={})'.format(key, MAX*7 + 20, y))
                else:
                    globals()['var_button_' + key + '_1'] = tkinter.Button(width = 30,fg = 'red', text = 'Unwire',command = partial(unwire, wired_to))
                    exec('var_button_{}_1.place(x={},y={})'.format(key, MAX*7 + 20, y))
                globals()['var_label_' + key] = tkinter.Label(text = key)
                exec('var_label_{}.place(x={},y={})'.format(key, 10, y))
                BUTTONS.append('var_button_{}'.format(key))
                y += 40
        root.mainloop()
    else:
        # A wire is in progress: offer output-side wiring to complete it.
        dic = {}
        attrs = unit['attributes']
        LEN = 0
        TXTS = [0]
        DATA = {}
        DATA1 = {}
        for key, value in attrs.items():
            LEN += 1
            TXTS.append(len(key))
        MAX = max(TXTS)
        axx1, ayy1 = ax1, ay1
        root = tkinter.Tk()
        root.geometry('{}x{}'.format(10 + MAX*11 + 10 + 200 + 30,LEN*40+30))
        root.title(unit['name'] + ' ({},{})'.format(axx1, ayy1))
        root.resizable(width=False, height=False)
        if LEN == 0:
            root.geometry('300x200')
            a = tkinter.Label(text = lang['gui']['ent_noattrs'])
            a.place(x = 50, y = 50)
        else:
            BUTTONS = []
            y = 20
            for key, value in attrs.items():
                wired = False
                wired_to = {'pos_x':0, 'pos_y':0, "attr":'none'}
                for wire in WIRES:
                    if wire[0]['pos_x'] == unit['pos_x'] and wire[0]['pos_y'] == unit['pos_y'] and wire[0]['attr'] == key:
                        wired = True
                        wired_to = wire
                if not wired:
                    globals()['var_button_' + key] = tkinter.Button(width = 30, text = 'Wire (output)',command = partial(on_wire2, unit, key))
                    exec('var_button_{}.place(x={},y={})'.format(key, MAX*7 + 20, y))
                else:
                    globals()['var_button_' + key + '_1'] = tkinter.Button(width = 30, fg = 'red',text = 'Unwire',command = partial(unwire, wired_to))
                    exec('var_button_{}_1.place(x={},y={})'.format(key, MAX*7 + 20, y))
                globals()['var_label_' + key] = tkinter.Label(text = key)
                exec('var_label_{}.place(x={},y={})'.format(key, 10, y))
                BUTTONS.append('var_button_{}'.format(key))
                y += 40
        root.mainloop()
def map_menu(): #generator properties window
    """Open the world-generator dialog (a Tk window with one tab per generator).

    Each tab collects its parameters and, on "generate", replaces the global
    block grid `world`, updates the global dimensions `W_w`/`W_h`, and resets
    the global entity layer `ent` to an empty grid of matching size.
    """
    global world
    def f():
        # Generate a cave-style chunk from the Cave tab's fields.
        global world, W_w, W_h, ent
        blk = list(map(int, ent_grass.get().split(',')))  # three comma-separated block ids
        world = worldgen.CaveChunk(int(ent_size.get()), int(ent_size.get()), int(ent_moves.get()), blk[0], blk[1], blk[2], int(ent_smt.get()))
        W_w = int(ent_size.get())
        W_h = int(ent_size.get())
        ent = arr2.arr2(W_w, W_h, ent_data['ent_noname'])  # fresh, empty entity layer
        root.destroy()
    def g():
        # Generate a tunnel-style chunk from the Tunnel tab's fields.
        global world, W_w, W_h, ent
        blk = list(map(int, ent_grass2.get().split(',')))
        world = worldgen.TunnelChunk(int(ent_size2.get()), int(ent_size2.get()), int(ent_moves2.get()), int(ent_tun.get()), blk[0], blk[1], blk[2], int(ent_smt2.get()))
        W_w = int(ent_size2.get())
        W_h = int(ent_size2.get())
        ent = arr2.arr2(W_w, W_h, ent_data['ent_noname'])
        root.destroy()
    def h():
        # Generate a flat world filled with a single block id.
        global world, W_w, W_h, ent
        world = arr2.arr2(int(ent_size3.get()), int(ent_size3.get()), int(ent_grass3.get()))
        W_w = int(ent_size3.get())
        W_h = int(ent_size3.get())
        ent = arr2.arr2(W_w, W_h, ent_data['ent_noname'])
        root.destroy()
    root = tkinter.Tk()
    root.geometry("300x230")
    root.title(lang['windows']['generator'])
    root.resizable(width=False, height=False)
    # One notebook tab per generator type.
    tabs = ttk.Notebook(root)
    tab_cave = ttk.Frame(tabs)
    tabs.add(tab_cave, text = lang['generator']['cavechunk'])
    tab_tunnel = ttk.Frame(tabs)
    tabs.add(tab_tunnel, text=lang['generator']['tunnelchunk'])
    tab_flat = ttk.Frame(tabs)
    tabs.add(tab_flat, text=lang['generator']['flat'])
    tabs.pack(expand=1, fill="both")
    # --- Cave tab widgets ---
    l_size = tkinter.Label(tab_cave, text = lang['generator']['world_size'])
    l_size.place(x = 20, y = 10)
    ent_size = tkinter.Entry(tab_cave)
    ent_size.place(x = 130, y = 10)
    l_moves = tkinter.Label(tab_cave, text = lang['generator']['gen_moves'])
    l_moves.place(x = 20, y = 40)
    ent_moves = tkinter.Entry(tab_cave)
    ent_moves.place(x = 130, y = 40)
    l_grass = tkinter.Label(tab_cave, text = lang['generator']['gen_blocks'])
    l_grass.place(x = 20, y = 70)
    ent_grass = tkinter.Entry(tab_cave)
    ent_grass.place(x = 130, y = 70)
    l_smt= tkinter.Label(tab_cave, text = lang['generator']['smooth'])
    l_smt.place(x = 20, y = 100)
    ent_smt = tkinter.Entry(tab_cave)
    ent_smt.place(x = 130, y = 100)
    gen = tkinter.Button(tab_cave, text = lang['generator']['generate'], command = f)  # triggers f()
    gen.place(x = 20, y = 130)
    # --- Tunnel tab widgets ---
    l_size2 = tkinter.Label(tab_tunnel, text = lang['generator']['world_size'])
    l_size2.place(x = 20, y = 10)
    ent_size2 = tkinter.Entry(tab_tunnel)
    ent_size2.place(x = 130, y = 10)
    l_moves2 = tkinter.Label(tab_tunnel, text = lang['generator']['gen_moves'])
    l_moves2.place(x = 20, y = 40)
    ent_moves2 = tkinter.Entry(tab_tunnel)
    ent_moves2.place(x = 130, y = 40)
    l_grass2 = tkinter.Label(tab_tunnel, text = lang['generator']['gen_blocks'])
    l_grass2.place(x = 20, y = 70)
    ent_grass2 = tkinter.Entry(tab_tunnel)
    ent_grass2.place(x = 130, y = 70)
    l_tun= tkinter.Label(tab_tunnel, text = lang['generator']['tunnel_num'])
    l_tun.place(x = 20, y = 100)
    ent_tun = tkinter.Entry(tab_tunnel)
    ent_tun.place(x = 130, y = 100)
    l_smt2= tkinter.Label(tab_tunnel, text = lang['generator']['smooth'])
    l_smt2.place(x = 20, y = 130)
    ent_smt2 = tkinter.Entry(tab_tunnel)
    ent_smt2.place(x = 130, y = 130)
    gen2 = tkinter.Button(tab_tunnel, text = lang['generator']['generate'], command = g)
    gen2.place(x = 20, y = 160)
    # --- Flat tab widgets ---
    l_size3 = tkinter.Label(tab_flat, text = lang['generator']['world_size'])
    l_size3.place(x = 20, y = 10)
    ent_size3 = tkinter.Entry(tab_flat)
    ent_size3.place(x = 130, y = 10)
    l_grass3 = tkinter.Label(tab_flat, text = lang['generator']['fill_block'])
    l_grass3.place(x = 20, y = 40)
    ent_grass3 = tkinter.Entry(tab_flat)
    ent_grass3.place(x = 130, y = 40)
    gen3 = tkinter.Button(tab_flat, text = lang['generator']['generate'], command = h)
    gen3.place(x = 20, y = 70)
    root.mainloop()
# Build the bottom toolbar: a transparent GUI layer with the four main buttons.
# (69,69,69) is used as the colorkey, i.e. the "transparent" color.
gui_layer = pygame.Surface((WIDTH, HEIGHT))
gui_layer.set_colorkey((69,69,69))
gui = GUI(gui_layer, 60, screen)
gui.colorkey = (69,69,69)
gui.draw.button(1, pos = (20,HEIGHT-30), scale=(100,20), size = 15, text = lang['gui']['load_map'], border_width = 2, function = load_map)
gui.draw.button(2, pos = (140,HEIGHT-30), scale=(100,20), size=15, text=lang['gui']['compile'], border_width=2, function = comp)
gui.draw.button(3, pos = (260,HEIGHT-30), scale=(100,20), size=15, text=lang['gui']['worldgen'], border_width=2, function = map_menu)
gui.draw.button(4, pos = (380, HEIGHT-30), scale=(100,20), size=15, text=lang['gui']['wiremode'], border_width=2, function= set_wire_mode)
# Main editor loop: handle input events, update camera/tools state, then render
# the map, entities, overlays (line/rect/circle previews, wires) and the GUI.
while running:
    clock.tick(setts['FPS'])
    move_counter += 1
    screen.fill((0,0,0))
    info_layer.fill(setts['colorkey'])
    SCR_X = WIDTH//T_SIZE
    SCR_Y = HEIGHT//T_SIZE - 2 #recalculating these variables to allow dynamic resizing
    map_rect_ = pygame.Rect((0,0,SCR_X*T_SIZE,SCR_Y*T_SIZE))
    # Prefix of the current foreground block's name (category before '_').
    blk_prefix = get_block(fg_block)['name'].split('_')[0]
    if WMOD:
        gui.units[4]['fg'] = (255,0,0) #coloring "wiring mode" button
    else:
        gui.units[4]['fg'] = (0,0,0)
    # ---------------- event handling ----------------
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        if event.type == pygame.MOUSEMOTION:
            if map_rect.collidepoint(event.pos):
                mx, my = event.pos #defining mouse position
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_a:
                move_left = True
            elif event.key == pygame.K_d:
                move_right = True
            elif event.key == pygame.K_w: #camera movement
                move_up = True
            elif event.key == pygame.K_s:
                move_down = True
            if event.key == pygame.K_b:
                blk_choose = invert(blk_choose)
            if event.key == pygame.K_m:
                # Toggle full-map view: zoom out to fit the whole world,
                # remembering camera position and tile size to restore later.
                full_view = invert(full_view)
                if full_view:
                    C_POS_ORIG = [CAM_X, CAM_Y]
                    T_SIZE_1 = T_SIZE
                    CAM_X, CAM_Y = 0, 0
                    T_SIZE = min([HEIGHT//W_h, WIDTH//W_w]) #global map mode
                    SCR_SCALE_1 = [SCR_X, SCR_Y]
                    if T_SIZE == 0:
                        T_SIZE += 1
                    draw()
                    draw_entity()
                elif not full_view:
                    CAM_X, CAM_Y = C_POS_ORIG
                    T_SIZE = T_SIZE_1
            if event.key == pygame.K_l:
                # Line tool: first press anchors the start, second press draws.
                line_c += 1
                if line_c == 1:
                    LINE_POS1 = [ax1, ay1]
                elif line_c == 2:
                    line_c = 0
                    LINE_POS2 = [ax1, ay1]
                    points = point_engine.get_line(LINE_POS1[0], LINE_POS1[1], LINE_POS2[0], LINE_POS2[1]) #line
                    for a in points:
                        try:
                            if a[0] >= 0 and a[0] < W_w and a[1] >= 0 and a[1] < W_h:
                                world.put(a[0], a[1], fg_block)
                        except:
                            pass
            if event.key == pygame.K_r:
                # Rectangle tool: two presses define opposite corners.
                rect_c += 1
                if rect_c == 1:
                    RECT_POS1 = [ax1, ay1]
                elif rect_c == 2:
                    rect_c = 0
                    RECT_POS2 = [ax1, ay1]
                    points = point_engine.get_rect(RECT_POS1[0], RECT_POS1[1], RECT_POS2[0], RECT_POS2[1]) #rect
                    for a in points:
                        try:
                            if a[0] >= 0 and a[0] < W_w and a[1] >= 0 and a[1] < W_h:
                                world.put(a[0], a[1], fg_block)
                        except:
                            pass
            if event.key == pygame.K_c:
                # Circle tool: first press sets center, second fixes the radius.
                c_c += 1
                if c_c == 1:
                    C_CENTER = [ax1, ay1]
                elif c_c == 2:
                    C_RADIUS = point_engine.way(C_CENTER, (ax1, ay1))
                    c_c = 0
                    points = point_engine.get_hollow_circle(C_CENTER[0], C_CENTER[1], C_RADIUS, C_WIDTH) #circle
                    for a in points:
                        try:
                            if a[0] >= 0 and a[0] < W_w and a[1] >= 0 and a[1] < W_h:
                                world.put(int(a[0]), int(a[1]), fg_block)
                        except:
                            pass
            if event.key == pygame.K_e:
                ent_choose = invert(ent_choose) #switching to entity list
            if event.key == pygame.K_DOWN:
                fg_block, bg_block = bg_block, fg_block #swapping blocks
            if event.key == pygame.K_p:
                # Print info for the hovered entity (or block) to the console.
                if ent.get(ax1, ay1)['name'] != 'ent_noname':
                    try:
                        if fg_ent['attributes'] == {}:
                            print()
                            print(pretty_out.box(pretty_out.listing(ent.get(ax1, ay1)))) #block or entity info output
                        else:
                            dic = copy.deepcopy(ent.get(ax1, ay1))
                            del dic['attributes']
                            print()
                            print(pretty_out.box(pretty_out.listing(dic)))
                            print(lang['misc']['attributes'])
                            print(pretty_out.box(pretty_out.listing(ent.get(ax1, ay1)['attributes'])))
                    except:
                        pass
                else:
                    try:
                        print()
                        print(pretty_out.box(pretty_out.listing(get_block(m_unit))))
                    except:
                        pass
            if event.key == pygame.K_j:
                TEST_ARR = worldgen.room(30, 20, walls = [{12:100}, {10:100}, {11:100}, {9:100}], floor = [{8:50}, {13:1}]) #broken room generator
            if event.key == pygame.K_INSERT:
                world.paste(ax1, ay1, TEST_ARR)
        if event.type == pygame.KEYUP:
            if event.key == pygame.K_a:
                move_left = False
            if event.key == pygame.K_d:
                move_right = False
            if event.key == pygame.K_w: #camera movement
                move_up = False
            if event.key == pygame.K_s:
                move_down = False
        if event.type == pygame.MOUSEBUTTONDOWN:
            #print(fg_ent)
            # Left click: pick from chooser screens, or draw block / place entity.
            if event.button == 1 and map_r.collidepoint(event.pos):
                if blk_choose:
                    fg_block = m_unit
                    fg_ent = False
                    blk_choose = False
                elif ent_choose:
                    fg_ent = copy.deepcopy(ent_data[m_unit['name']])
                    ent_choose = False
                elif tool_choose:
                    pass
                elif full_view:
                    C_POS_ORIG = ax, ay
                else:
                    if fg_ent == False:
                        fg_drawing = True
                    else:
                        if ax1 >= 0 and ax1 < W_w and ay1 >= 0 and ay1 < W_h:
                            ENT_COUNTER += 1
                            enti = copy.deepcopy(ent_data[fg_ent['name']])
                            enti['pos_x'] = ax1
                            enti['pos_y'] = ay1
                            ent.put(ax1, ay1, enti)
            # Middle click: pick hovered block, or open the entity editor
            # (property editor normally, wire editor in wiring mode).
            if event.button == 2 and map_r.collidepoint(event.pos):
                try:
                    if ent.get(ax1, ay1)['name'] == 'ent_noname':
                        fg_block = m_unit
                    else:
                        if not WMOD:
                            property_editor(ent.get(ax1, ay1))
                        else:
                            wire_editor(ent.get(ax1, ay1))
                            #print(WIRES)
                except:
                    pass
            # Right click: pick background block, cancel wiring, draw bg
            # block, or delete the hovered entity.
            if event.button == 3 and map_r.collidepoint(event.pos):
                if blk_choose:
                    bg_block = m_unit
                    blk_choose = False
                elif ent_choose:
                    pass
                elif tool_choose:
                    pass
                elif ON_WIRE != False:
                    ON_WIRE = False
                else:
                    if fg_ent == False:
                        bg_drawing = True
                    else:
                        if ax1 >= 0 and ax1 < W_w and ay1 >= 0 and ay1 < W_h:
                            ent.put(ax1, ay1, ent_data['ent_noname'])
            # Wheel: adjust circle-tool width while placing a circle,
            # otherwise zoom in/out by changing the tile size.
            if event.button == 4 and map_r.collidepoint(event.pos):
                if c_c == 1:
                    C_WIDTH += 1
                else:
                    if T_SIZE > 1:
                        T_SIZE -= 1
                    #X0 = (WIDTH - T_SIZE*SCR_X)
                    #Y0 = (HEIGHT - T_SIZE*SCR_Y)
            if event.button == 5 and map_r.collidepoint(event.pos):
                if c_c == 1:
                    C_WIDTH -= 1
                else:
                    T_SIZE += 1
                    #X0 = (WIDTH - T_SIZE*SCR_X)
                    #Y0 = (HEIGHT - T_SIZE*SCR_Y)
        if event.type == pygame.MOUSEBUTTONUP:
            if event.button == 1:
                fg_drawing = False
            if event.button == 3:
                bg_drawing = False
    # ---------------- cursor -> world coordinates ----------------
    if blk_choose:
        ax, ay = (mx)//(T_SIZE_ORIG+5), (my-50)//(T_SIZE_ORIG+5) #grid cell under the cursor in the chooser
        m_unit = blk_arr.get(ax, ay)
        pygame.draw.rect(screen, (255,255,255), (ax,ay,30,30), 2)
    elif ent_choose:
        ax, ay = (mx)//(T_SIZE_ORIG+5), (my-50)//(T_SIZE_ORIG+5)
        m_unit = ent_arr.get(ax, ay)
        pygame.draw.rect(screen, (255,255,255), (ax,ay,30,30), 2)
    elif tool_choose:
        pass
    else:
        # ax/ay: screen tile; ax1/ay1: world tile (camera offset applied).
        ax, ay = mx//T_SIZE, my//T_SIZE
        ax1, ay1 = CAM_X + mx//T_SIZE, CAM_Y + my//T_SIZE
        try:
            m_unit = world.get(ax1, ay1)
        except:
            pass
    # Camera movement speed scales with the current zoom level.
    if move_counter*round(T_SIZE_ORIG/T_SIZE, 2) >= move_speed:
        move_counter = 0
        if move_up:
            CAM_Y -= 1
        if move_down:#camera movement
            CAM_Y += 1
        if move_right:
            CAM_X += 1
        if move_left:
            CAM_X -= 1
    # Continuous painting while a mouse button is held down.
    if fg_drawing:
        try:
            if ax1 >= 0 and ax1 < W_w and ay1 >= 0 and ay1 < W_h:
                world.put(ax1, ay1, fg_block)
        except:
            pass
    if bg_drawing:
        try:
            if ax1 >= 0 and ax1 < W_w and ay1 >= 0 and ay1 < W_h:
                world.put(ax1, ay1, bg_block)
        except:
            pass
    # ---------------- rendering ----------------
    if blk_choose:
        screen.fill((128,128,128))
        pygame.draw.rect(screen, (64,64,64), (0,0,WIDTH,40))
        screen.blit(Consolas.render(lang['misc']['blk_choose'], False, (255,255,255)), (20,15))
        draw_blk_choose()
    elif ent_choose:
        screen.fill((128,128,128))
        pygame.draw.rect(screen, (64,64,64), (0,0,WIDTH,40))
        screen.blit(Consolas.render(lang['misc']['ent_choose'], False, (255,255,255)), (20,15))
        draw_ent_choose()
    elif tool_choose:
        pass
    else:
        # Normal editor view: map, entities, HUD and tool previews.
        map_layer.fill((50,50,50))
        draw()
        draw_entity()
        pygame.draw.rect(info_layer, (50,50,50), (WIDTH - 50 - 10, HEIGHT - (HEIGHT-50-10), 45, 35))
        info_layer.blit(get_image(bg_block), (WIDTH - 40, HEIGHT - (HEIGHT-70)))
        if full_view:
            # Red frame marking the normal-view camera area on the full map.
            pygame.draw.rect(info_layer, (255,0,0), (C_POS_ORIG[0]*T_SIZE, C_POS_ORIG[1]*T_SIZE, SCR_SCALE_1[0]*T_SIZE, SCR_SCALE_1[1]*T_SIZE), 2)
        if bg_drawing:
            if not blk_choose and not ent_choose and not tool_choose:
                pygame.draw.rect(info_layer, (200,200,200), (WIDTH - 40, HEIGHT - (HEIGHT-70), T_SIZE_ORIG, T_SIZE_ORIG), 2)
        info_layer.blit(get_image(fg_block), (WIDTH - 55, HEIGHT - (HEIGHT-65)))
        if fg_drawing:
            if not blk_choose and not ent_choose and not tool_choose:
                pygame.draw.rect(info_layer, (200,200,200), (WIDTH - 55, HEIGHT - (HEIGHT-65), T_SIZE_ORIG, T_SIZE_ORIG), 2)
        pygame.draw.rect(info_layer, (30,30,30), (WIDTH - 50 - 10, HEIGHT - (HEIGHT-50-10), 45, 35), 2)
        cx, cy = CAM_X + SCR_X//2, CAM_Y + SCR_Y//2
        scal = round(T_SIZE_ORIG/T_SIZE, 2)
        info_layer.blit(Consolas.render(lang['gui']['cam_pos'].format(cx, cy), False, (255,255,255)), (20,20))
        info_layer.blit(Consolas.render(lang['gui']['map_size'].format(W_w, W_h), False, (255,255,255)), (20,35))
        info_layer.blit(Consolas.render(lang['gui']['scale'].format(scal), False, (255,255,255)), (20,50))
        if fg_ent != False:
            info_layer.blit(Consolas.render(lang['gui']['ent_mode'], False, (255,0,0)), (WIDTH//2,35))
        if full_view != False:
            info_layer.blit(Consolas.render(lang['gui']['full_view'], False, (255,0,0)), (WIDTH//3,35))
        if line_c == 1:
            # Preview of the pending line from the first anchor to the cursor.
            pygame.draw.rect(info_layer, (255,0,0), ((LINE_POS1[0] - CAM_X)*T_SIZE, (LINE_POS1[1] - CAM_Y)*T_SIZE, T_SIZE, T_SIZE), 3)
            pygame.draw.line(info_layer, (255,255,255), ((LINE_POS1[0] - CAM_X)*T_SIZE + T_SIZE//2, (LINE_POS1[1] - CAM_Y)*T_SIZE + T_SIZE//2),
                             ((ax)*T_SIZE + T_SIZE//2, (ay)*T_SIZE + T_SIZE//2), 2)
            pygame.draw.rect(info_layer, (255,0,0), ((ax)*T_SIZE, (ay)*T_SIZE, T_SIZE, T_SIZE), 3)
        if rect_c == 1:
            # Preview of the pending rectangle; normalize corner order and
            # pick a label position depending on drag direction.
            delta_x = abs(ax1 - RECT_POS1[0])
            delta_y = abs(ay1 - RECT_POS1[1])
            x1, y1 = RECT_POS1
            x2, y2 = ax1, ay1
            tx, ty = (ax1-CAM_X)*T_SIZE, (ay1-CAM_Y)*T_SIZE
            if x2 > x1 and y2 > y1:
                x1, y1, x2, y2 = x1, y1, x2+1, y2+1
                tx, ty = (ax1-CAM_X+1)*T_SIZE, (ay1-CAM_Y+1)*T_SIZE
            if x2 > x1 and y2 < y1:
                x1, y1, x2, y2 = x1, y2, x2+1, y1
                tx, ty = (ax1-CAM_X+1)*T_SIZE, (ay1-CAM_Y)*T_SIZE
            if x2 < x1 and y2 < y1:
                x1, y1, x2, y2 = x2, y2, x1, y1
                tx, ty = (ax1-CAM_X)*T_SIZE, (ay1-CAM_Y-1)*T_SIZE
            if y2 > y1 and x2 < x1:
                x1, y1, x2, y2 = x2, y1, x1, y2+1
                tx, ty = (ax1-CAM_X+1)*T_SIZE, (ay1-CAM_Y-1)*T_SIZE
            pygame.draw.rect(info_layer, (255,0,0), ((x1-CAM_X)*T_SIZE, (y1-CAM_Y)*T_SIZE, (x2-x1)*T_SIZE, (y2-y1)*T_SIZE), 3)
            info_layer.blit(Consolas.render(f'{delta_x+1},{delta_y+1}', False, (255,0,0)), (tx, ty))
        if c_c == 1:
            # Preview of the pending circle: center, outer and inner radius.
            pygame.draw.circle(info_layer, (255,0,0), ((C_CENTER[0] - CAM_X)*T_SIZE, (C_CENTER[1] - CAM_Y)*T_SIZE), 4, 2)
            pygame.draw.circle(info_layer, (255,0,0), ((C_CENTER[0] - CAM_X)*T_SIZE, (C_CENTER[1] - CAM_Y)*T_SIZE), C_RADIUS*T_SIZE, 2)
            pygame.draw.circle(info_layer, (255,0,0), ((C_CENTER[0] - CAM_X)*T_SIZE, (C_CENTER[1] - CAM_Y)*T_SIZE), (C_RADIUS - C_WIDTH)*T_SIZE, 2)
        if ON_WIRE != False:
            # Rubber-band line while a wire is being attached.
            pygame.draw.line(info_layer, (255,0,0), ((ON_WIRE['pos_x'] - CAM_X)*T_SIZE + T_SIZE//2, (ON_WIRE['pos_y'] - CAM_Y)*T_SIZE + T_SIZE//2), (ax*T_SIZE+T_SIZE//2, ay*T_SIZE+T_SIZE//2), 2)
        for wire in WIRES:
            pos1 = ((wire[0]['pos_x'] - CAM_X)*T_SIZE + T_SIZE//2, (wire[0]['pos_y'] - CAM_Y)*T_SIZE + T_SIZE//2)
            pos2 = ((wire[1]['pos_x'] - CAM_X)*T_SIZE + T_SIZE//2, (wire[1]['pos_y'] - CAM_Y)*T_SIZE + T_SIZE//2)
            pygame.draw.line(info_layer, (200,0,0), pos1, pos2, 2)
            pygame.draw.circle(info_layer, (200,0,0), pos1, 3)
            pygame.draw.circle(info_layer, (200,0,0), pos2, 3)
            if ent.get(wire[0]['pos_x'], wire[0]['pos_y'])['name'] == 'ent_noname' or ent.get(wire[1]['pos_x'], wire[1]['pos_y'])['name'] == 'ent_noname':
                WIRES.remove(wire) #check if one of wired entities is invalid, then remove the wire
                continue
            else:
                # Propagate the source attribute value through the wire.
                ent_in = copy.deepcopy(ent.get(wire[0]['pos_x'], wire[0]['pos_y']))
                ent_out = copy.deepcopy(ent.get(wire[1]['pos_x'], wire[1]['pos_y']))
                ent_in['attributes'][wire[0]['attr']] = ent_out['attributes'][wire[1]['attr']]
                ent.put(wire[0]['pos_x'], wire[0]['pos_y'], ent_in)
        #pygame.draw.rect(info_layer, (255,255,255), (ax*T_SIZE, ay*T_SIZE, get_image(fg_block).get_width(), get_image(fg_block).get_height()), 1)
        screen.blit(map_layer, (0,0))
        # Cursor brackets: normal inside the world bounds, "wrong" outside.
        if ax1 >= 0 and ax1 < W_w and ay1 >= 0 and ay1 < W_h:
            screen.blit(pygame.transform.scale(brackets, (T_SIZE, T_SIZE)), (ax*T_SIZE, ay*T_SIZE))
        else:
            screen.blit(pygame.transform.scale(brackets_wrong, (T_SIZE, T_SIZE)), (ax*T_SIZE, ay*T_SIZE))
        pygame.draw.rect(info_layer, (255,0,0), ((0-CAM_X)*T_SIZE, (0-CAM_Y)*T_SIZE, W_w*T_SIZE, W_h*T_SIZE), 2)
        screen.blit(info_layer, (0,0))
    pygame.draw.rect(screen, (64,64,64), (0, HEIGHT-40, WIDTH, 40))
    gui.render()
    C_RADIUS = point_engine.way(C_CENTER, (ax1, ay1))
    map_rect.bottomright = (SCR_X*T_SIZE, SCR_Y*T_SIZE)
    pygame.display.flip()
pygame.quit()
|
# Copyright 2018 Michael DeHaan LLC, <michael@michaeldehaan.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from opsmop.core.field import Field
from opsmop.core.fields import Fields
from opsmop.facts.platform import Platform
from opsmop.types.type import Type
from opsmop.core.errors import ValidationError, NoSuchProviderError
class Package(Type):
    """
    Type describing an OS package to be installed, upgraded, or removed.

    The concrete package manager (brew/yum/apt) is selected per platform via
    default_provider(), or explicitly through get_provider().
    """

    def __init__(self, name=None, **kwargs):
        self.setup(name=name, **kwargs)

    def fields(self):
        # Declares the user-settable fields for this type; setup() (from the
        # Type base class) consumes these definitions.
        return Fields(
            self,
            name = Field(kind=str, help="the name of the package to install"),
            version = Field(kind=str, default=None, help="what version to install"),
            latest = Field(kind=bool, default=False, help="if true, upgrade the package regardless of version"),
            absent = Field(kind=bool, default=False, help="if true, remove the package"),
            update_cache = Field(kind=bool, default=False, help="if true, update the package cache")
        )

    def validate(self):
        # 'absent' removes the package, so combining it with a pinned version
        # or an upgrade-to-latest request is contradictory (was a FIXME stub).
        if self.absent and self.latest:
            raise ValidationError(self, "'latest' and 'absent' are mutually exclusive")
        if self.absent and self.version is not None:
            raise ValidationError(self, "'version' and 'absent' are mutually exclusive")

    def get_provider(self, method):
        """Return the provider class implementing the given package manager."""
        if method == 'brew':
            from opsmop.providers.package.brew import Brew
            return Brew
        elif method == 'yum':
            from opsmop.providers.package.yum import Yum
            return Yum
        elif method == 'apt':
            from opsmop.providers.package.apt import Apt
            return Apt
        raise NoSuchProviderError(self, method)

    def default_provider(self):
        # Ask the platform facts layer which package manager this host uses.
        return Platform.default_package_manager()
|
# Author: Jochen Gast <jochen.gast@visinf.tu-darmstadt.de>
import os
import numpy as np
import torch
from utils.matlab import load, save
from . import types
def default_collate(inputs):
    """
    Recursively collate a list of (possibly nested) dicts into a dict of lists,
    e.g. [{"a": 1}, {"a": 2}] -> {"a": [1, 2]}.

    Scalars pass through unchanged. A list whose elements are not dicts is
    already fully collated and is returned as-is; the original implementation
    fell through to the dict branch here and crashed trying to iterate a
    scalar element.
    """
    if np.isscalar(inputs):
        return inputs
    if isinstance(inputs, (list, tuple)) and (not inputs or not isinstance(inputs[0], dict)):
        return inputs
    return {key: default_collate([d[key] for d in inputs]) for key in inputs[0]}
class MATRecordWriter(types.RecordWriter):
    """RecordWriter that persists image and scalar records as MATLAB .mat files.

    Layout under `root`: <root>/<dataset>/epoch-XXX/<name>.mat for images and
    <root>/<dataset>_<name>.mat for scalar dictionaries.
    """

    def __init__(self, args, root):
        super().__init__()
        self.args = args
        self.root = root
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs(root, exist_ok=True)

    def handle_image(self, record):
        """Write each example of the batched image record to its own .mat file."""
        record = types.record2list(record)
        batch_size = record.data.size(0)
        np_data = types.tensor2numpy(record.data)
        for i in range(batch_size):
            filename = "{}/{}/epoch-{:03d}/{}.mat".format(
                self.root, record.dataset, record.epoch, record.example_basename[i])
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            # Refuse to overwrite existing results.
            if os.path.isfile(filename):
                raise ValueError("Error in MATRecordWriter. '%s' already exists." % filename)
            np_image = np_data[i, ...].squeeze()
            # FIX: the original called the undefined names _save_mat/_load_mat;
            # this module imports save/load from utils.matlab (which the
            # sibling MATExhaustiveRecordWriter already uses).
            save(filename, vardict={"data": np_image})

    def handle_scalar_dict(self, record):
        """Append the record's scalars (plus epoch/step/example_index) to a .mat file."""
        filename = "%s/%s_%s.mat" % (self.root, record.dataset, record.example_basename)
        dict_of_values = dict(record.data)
        dict_of_values["epoch"] = record.epoch
        if record.step is not None:
            dict_of_values["step"] = record.step
        if record.example_index is not None:
            dict_of_values["example_index"] = record.example_index
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        if not os.path.isfile(filename):
            save(filename, vardict=dict_of_values)
        else:
            # Merge with the on-disk file: keep only keys we still track and
            # append the new value to each stored entry.
            old_record = load(filename)
            new_record = {key: value for key, value in old_record.items()
                          if key in dict_of_values}
            for key, value in dict_of_values.items():
                if np.isscalar(old_record[key]):
                    new_record[key] = [old_record[key], value]
                else:
                    new_record[key] = np.append(old_record[key], value)
            save(filename, new_record)

    def handle_record(self, record):
        """Dispatch on the record type; other record kinds are ignored."""
        if isinstance(record, types.ImageRecord):
            return self.handle_image(record)
        elif isinstance(record, types.ScalarDictRecord):
            return self.handle_scalar_dict(record)
class MATExhaustiveRecordWriter(types.RecordWriter):
    """RecordWriter that appends every example/output dictionary to one
    <dataset>_exhaustive.mat file under `root`."""

    def __init__(self, args, root):
        # FIX: call the base-class constructor (the sibling MATRecordWriter does).
        super().__init__()
        self.args = args
        self.root = root
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs(root, exist_ok=True)

    def handle_record(self, record):
        """Only DictionaryRecord instances are handled; others are ignored."""
        if isinstance(record, types.DictionaryRecord):
            return self.handle_dictionary(record)

    def handle_dictionary(self, record):
        """Convert all tensors to numpy and append them to the exhaustive file."""
        example_dict = record.example_dict
        output_dict = record.output_dict
        filename = "%s/%s_exhaustive.mat" % (self.root, record.dataset)

        def _convert_to_numpy(x):
            # torch.LongTensor is a subclass of torch.Tensor and Variable is
            # an alias of Tensor in modern torch, so one tuple check suffices.
            if isinstance(x, (torch.autograd.Variable, torch.Tensor)):
                return types.tensor2numpy(x)
            return x

        dict_of_values = {}
        for key, value in example_dict.items():
            dict_of_values[key] = _convert_to_numpy(value)
        for key, value in output_dict.items():
            dict_of_values[key] = _convert_to_numpy(value)

        os.makedirs(os.path.dirname(filename), exist_ok=True)
        if not os.path.isfile(filename):
            save(filename, vardict=dict_of_values)
        else:
            # Merge with the on-disk file: keep only keys we still track and
            # append the new values to each stored entry.
            old_record = load(filename)
            new_record = {key: value for key, value in old_record.items()
                          if key in dict_of_values}
            for key, value in dict_of_values.items():
                if np.isscalar(old_record[key]):
                    new_record[key] = [old_record[key], value]
                else:
                    if isinstance(value, list) or (isinstance(value, np.ndarray) and value.ndim == 1):
                        new_record[key] = np.append(old_record[key], value)
                    else:
                        new_record[key] = np.concatenate((old_record[key], value.squeeze()), axis=0)
            save(filename, new_record)
|
"""BB2 api agent modul"""
import requests
from .adapter import adapter
class BB2APINotAvailable(Exception):
    """Raised when the BB2 API cannot be reached (e.g. a request timeout)."""
class Agent:
    """Thin client for the Cyanide BB2 web service.

    Each public method wraps one API endpoint: it merges caller-supplied
    query parameters with that endpoint's defaults, performs the GET via
    call() and returns the decoded JSON payload.
    """

    DEFAULT_VERSION = 2
    DEFAULT_PLATFORM = 'pc'
    BASE_URL = "http://web.cyanide-studio.com/ws/bb2/"

    def __init__(self, api_key):
        self.api_key = api_key
        http = requests.Session()
        # Mount the shared adapter (retry/timeout policy) for both schemes.
        http.mount("https://", adapter)
        http.mount("http://", adapter)
        self.http = http

    @staticmethod
    def _apply_defaults(kwargs, **defaults):
        """Fill `kwargs` in place with every default the caller did not supply."""
        for key, value in defaults.items():
            kwargs.setdefault(key, value)

    def team(self, name, **kwargs):
        """Pulls team data (extra params: stats=0|1)."""
        kwargs['team'] = name
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM)
        return self.call("team", **kwargs).json()

    def match(self, id, **kwargs):
        """Pulls data for one match id."""
        kwargs['match_id'] = id
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM, bb=self.DEFAULT_VERSION)
        return self.call("match", **kwargs).json()

    def league(self, name, **kwargs):
        """Pulls league data.

        @param name: League name or id.
        """
        kwargs['league'] = name
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM, bb=self.DEFAULT_VERSION)
        return self.call("league", **kwargs).json()

    def leagues(self, name, **kwargs):
        """Pulls leagues data."""
        kwargs['league'] = name
        # teams: min no. of registered teams; limit: max no. of leagues returned
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM, bb=self.DEFAULT_VERSION,
                             teams=0, limit=100)
        return self.call("leagues", **kwargs).json()

    def competitions(self, leagues):
        """Pulls competitions data."""
        return self.call("competitions", league=leagues).json()

    def contests(self, league, **kwargs):
        """Pulls contests data.

        Extra params: competition=%, status=scheduled|in_progress|played, round.
        """
        kwargs['league'] = league
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM, bb=self.DEFAULT_VERSION,
                             limit=100, exact=0)
        return self.call("contests", **kwargs).json()

    def matches(self, league, **kwargs):
        """Pulls matches for a league."""
        kwargs['league'] = league
        self._apply_defaults(kwargs, limit=10000, bb=self.DEFAULT_VERSION,
                             exact=0, start='2016-01-01')
        return self.call("matches", **kwargs).json()

    def teammatches(self, team_id, **kwargs):
        """Pulls matches for one team. NB: the upstream endpoint does not honour team_id!"""
        kwargs['team_id'] = team_id
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM, limit=10000,
                             bb=self.DEFAULT_VERSION, start='2016-01-01')
        return self.call("matches", **kwargs).json()

    def player(self, id, **kwargs):
        """Pulls player data."""
        kwargs['player'] = id
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM)
        return self.call("player", **kwargs).json()

    def ladder(self, league, competition, **kwargs):
        """Pulls a competition ladder."""
        kwargs['league'] = league  # league name
        kwargs['competition'] = competition  # competition name
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM)
        return self.call("ladder", **kwargs).json()

    def teams(self, league, **kwargs):
        """Pulls teams for a league (a competition filter may also be passed)."""
        kwargs['league'] = league
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM, limit=10000, sensitive=1)
        return self.call("teams", **kwargs).json()

    def halloffame(self, league, **kwargs):
        """Pulls hall-of-fame data for a league."""
        kwargs['league'] = league
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM, limit=10000, exact=1)
        return self.call("halloffame", **kwargs).json()

    def coaches(self, league, competition='%', **kwargs):
        """Pulls coaches for a league/competition."""
        kwargs['league'] = league
        kwargs['competition'] = competition
        self._apply_defaults(kwargs, platform=self.DEFAULT_PLATFORM, limit=10000)
        return self.call("coaches", **kwargs).json()

    def call(self, method, **kwargs):
        """Call the given api method with kwargs as query parameters.

        Raises BB2APINotAvailable when the request times out.
        """
        url = self.__class__.BASE_URL + method + "/"
        kwargs['key'] = self.api_key
        kwargs['order'] = 'CreationDate'
        try:
            return self.http.get(url=url, params=kwargs)
        except requests.exceptions.Timeout as exc:
            # Chain the original timeout for easier debugging.
            raise BB2APINotAvailable("Service down") from exc
|
#
# This file is part of AceQL Python Client SDK.
# AceQL Python Client SDK: Remote SQL access over HTTP with AceQL HTTP.
# Copyright (C) 2021, KawanSoft SAS
# (http://www.kawansoft.com). All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
class UserLoginStore(object):
    """ Stores the session Id per server_url/username/database triplet in order to
    get new AceQL Connection with /get_connection without new login action."""

    # Class-level cache shared by all instances: key -> session_id.
    __logged_users = {}

    def __init__(self, server_url: str, username: str, database: str):
        """Constructor. All three parts of the key are required (non-None)."""
        if server_url is None:
            raise TypeError("serverUrl is null!")
        if username is None:
            raise TypeError("username is null!")
        if database is None:
            raise TypeError("database is null!")
        self.__server_url = server_url
        self.__username = username
        self.__database = database

    def build_key(self) -> str:
        """Builds the Dict key for the (server_url, username, database) triplet."""
        return self.__server_url + "/" + self.__username + "/" + self.__database

    def is_already_logged(self) -> bool:
        """Says if user is already logged, aka key exists in Dict."""
        # Single membership test instead of get-then-compare-to-None.
        return self.build_key() in UserLoginStore.__logged_users

    def get_session_id(self) -> str:
        """Returns the session Id of logged user with (server_url, username, database) triplet.

        Raises KeyError if the user is not logged; check is_already_logged() first.
        """
        return UserLoginStore.__logged_users[self.build_key()]

    def set_session_id(self, session_id: str):
        """Stores the session Id of a logged user with (server_url, username, database) triplet."""
        UserLoginStore.__logged_users[self.build_key()] = session_id

    def remove_store(self):
        """Removes (server_url, username, database) triplet. This is to be called at /logout API.

        Idempotent: removing an absent key is a no-op (the original raised KeyError).
        """
        UserLoginStore.__logged_users.pop(self.build_key(), None)
|
import argparse
import json
import pickle
#
import ray
from ray import tune
import gym
#
from utils.config import read_tune_params
def train(env: str,
          run: str,
          stop: dict = None,
          config: dict = None,
          results_dir: str = None,
          verbose: int = 2,
          checkpoint_freq: int = 20,
          checkpoint_at_end: bool = True,
          num_samples: int = 1,
          restore: str = None):
    """Launch a Ray Tune training run on a (PyBullet) gym environment.

    Args:
        env: Gym environment id; also registered as a custom env factory.
        run: Tune trainable / RLlib algorithm name (e.g. "PPO").
        stop: Stopping criteria dict passed to tune.run.
        config: Trainable config; its "env" key is overwritten with `env`.
        results_dir: Output directory (defaults to a cluster-specific path).
        verbose: Tune verbosity level (0-3).
        checkpoint_freq: Checkpoint every N training iterations.
        checkpoint_at_end: Whether to write a final checkpoint.
        num_samples: Number of times to sample/repeat the experiment.
        restore: Optional checkpoint path to resume from.
    """
    if not config:
        config = {"env": env}
    else:
        config["env"] = env
    #
    # Restart Ray so repeated calls in one process start from a clean state.
    ray.shutdown()
    ray.init(ignore_reinit_error=True)
    #
    def make_env(env_config):
        # Imported inside the factory so pybulletgym's envs get registered
        # in every Ray worker process, not just the driver.
        import pybulletgym
        return gym.make(env)
    tune.register_env(env, make_env)
    #
    analysis = tune.run(
        run,
        config=config,
        stop=stop or {},
        local_dir = results_dir or (r'/home/bsc31/bsc31874/ray-results/' + env),
        verbose = verbose,
        checkpoint_freq = checkpoint_freq,
        checkpoint_at_end = checkpoint_at_end,
        num_samples= num_samples or 1,
        restore = restore
    )
    #
    ray.shutdown()
def _str2bool(value: str) -> bool:
    """Parse a CLI boolean flag value.

    argparse's `type=bool` treats ANY non-empty string (including 'False')
    as True, so the text must be parsed explicitly.
    """
    return str(value).lower() not in ('false', '0', 'no', '')

parser = argparse.ArgumentParser(description='Training script for PyBullet environments.')
parser.add_argument('--params-file', type=str, required=True, help='The JSON params file for PyBullet training.')
parser.add_argument('--results-dir', type=str, help='The directory to store the results.')
# FIX: these two used type=str, but train() expects integers.
parser.add_argument('--verbose', type=int, default=2, help='Tune verbosity level (0-3).')
parser.add_argument('--checkpoint-freq', type=int, default=20, help='Checkpoint every N training iterations.')
# FIX: type=bool parsed 'False' as True; use an explicit boolean parser.
parser.add_argument('--checkpoint-at-end', type=_str2bool, default=True, help='Whether to checkpoint at the end of training.')
parser.add_argument('--num-samples', type=int, help='Number of times to repeat the experiment.')
parser.add_argument('--restore', type=str, help='Checkpoint path to restore from.')
if __name__ == '__main__':
    args = parser.parse_args()
    # The params file defines the experiment (env, algorithm, stopping
    # criteria, trainable config); CLI flags control run bookkeeping.
    params = json.load(open(args.params_file, 'r'))
    print(params, flush=True)
    train(env=params['env'],
          run=params['run'],
          stop=params.get('stop'),
          config=params.get('config'),
          results_dir=args.results_dir,
          verbose=args.verbose,
          checkpoint_freq=args.checkpoint_freq,
          checkpoint_at_end=args.checkpoint_at_end,
          num_samples=args.num_samples,
          restore=args.restore)
from unittest.mock import MagicMock
from snowflake.connector.cursor import SnowflakeCursor, DictCursor
import pytest
import snowflake.connector as sf
from prefect.tasks.snowflake import SnowflakeQuery
class TestSnowflakeQuery:
    def test_construction(self):
        """A task built from credentials alone leaves autocommit unset (None)."""
        task = SnowflakeQuery(
            account="test", user="test", password="test", warehouse="test"
        )
        assert task.autocommit is None
    def test_query_string_must_be_provided(self):
        """run() without a query argument must raise a descriptive ValueError."""
        task = SnowflakeQuery(
            account="test", user="test", password="test", warehouse="test"
        )
        with pytest.raises(ValueError, match="A query string must be provided"):
            task.run()
    def test_execute_error_must_pass_through(self, monkeypatch):
        """Database errors raised by the cursor must propagate out of run()."""
        snowflake_module_connect_method = MagicMock()
        connection = MagicMock(spec=sf.SnowflakeConnection)
        cursor = MagicMock(spec=sf.DictCursor)
        # link all the mocks together appropriately
        snowflake_module_connect_method.return_value = connection
        connection.cursor = cursor
        # database cursors can be ugly to mock given the use of __enter__
        cursor.return_value.__enter__.return_value.execute.side_effect = (
            sf.DatabaseError("Invalid query")
        )
        snowflake_connector_module = MagicMock(connect=snowflake_module_connect_method)
        monkeypatch.setattr(
            "prefect.tasks.snowflake.snowflake.sf", snowflake_connector_module
        )
        task = SnowflakeQuery(
            account="test", user="test", password="test", warehouse="test"
        )
        with pytest.raises(sf.errors.DatabaseError, match="Invalid query"):
            task.run(query="SELECT * FROM foo")
def test_execute_fetchall(self, monkeypatch):
"""
Tests that the SnowflakeQuery Task calls the fetchall method on the
cursor. This is to prevent future code edits from returning the cursor
object because that cursors are not pickleable.
"""
snowflake_module_connect_method = MagicMock()
connection = MagicMock(spec=sf.SnowflakeConnection)
cursor = MagicMock(spec=sf.DictCursor)
# link all the mocks together appropriately
snowflake_module_connect_method.return_value = connection
connection.cursor = cursor
# setting fetchall return
cursor.return_value.__enter__.return_value.execute.return_value.fetchall.return_value = [
"TESTDB"
]
snowflake_connector_module = MagicMock(connect=snowflake_module_connect_method)
monkeypatch.setattr(
"prefect.tasks.snowflake.snowflake.sf", snowflake_connector_module
)
query = "SHOW DATABASES"
output = SnowflakeQuery(
account="test", user="test", password="test", query=query
).run()
assert output == ["TESTDB"]
def test_run_method_accepts_alternate_cursor(self, monkeypatch):
"""
Tests that the default cursor used by the Snowflake connection
is of the type `SnowflakeCursor`, and that the cursor type can
be overriden by providing a different cursor type to the
`cursor_type` keyword argument.
"""
snowflake_module_connect_method = MagicMock()
connection = MagicMock(spec=sf.SnowflakeConnection)
default_cursor = MagicMock(spec=sf.cursor.SnowflakeCursor)
dict_cursor = MagicMock(spec=sf.cursor.DictCursor)
# link all the mocks together appropriately
snowflake_module_connect_method.return_value = connection
# setting fetchall return
default_cursor.__enter__.return_value.execute.return_value.fetchall.return_value = [
"1",
"2",
"3",
]
dict_cursor.__enter__.return_value.execute.return_value.fetchall.return_value = {
"one": "1",
"two": "2",
"three": "3",
}
snowflake_connector_module = MagicMock(connect=snowflake_module_connect_method)
def mock_cursor(cursor_class: SnowflakeCursor) -> SnowflakeCursor:
if cursor_class == DictCursor:
return dict_cursor
else:
return default_cursor
connection.cursor = mock_cursor
monkeypatch.setattr(
"prefect.tasks.snowflake.snowflake.sf", snowflake_connector_module
)
query = "select * from numbers"
task = SnowflakeQuery(account="test", user="test", password="test", query=query)
default_cursor_output = task.run(cursor_type=SnowflakeCursor)
dict_cursor_output = task.run(cursor_type=DictCursor)
assert default_cursor_output == ["1", "2", "3"]
assert dict_cursor_output == {"one": "1", "two": "2", "three": "3"}
|
from matplotlib import ticker
# Shared axis formatter: renders tick values as whole thousands, e.g. 12000 -> "12k".
thousands_ticker_formatter = ticker.FuncFormatter(lambda x, p: "%dk" % int(x / 1000))
def fmt_thousands(val: int) -> str:
    """Render *val* with comma thousands separators, e.g. 1234567 -> '1,234,567'."""
    return f"{val:,}"
def fmt_percentage(total: int):
    """Return a formatter mapping a count to its one-decimal share of *total*.

    Example: fmt_percentage(100)(25) -> '25.0%'.
    """
    def _as_percent(val):
        return "%.1f%%" % (100 * val / total)
    return _as_percent
def fmt_barplot(ax, values, total):
    """Annotate bars with percentages of *total* and pick a y-axis tick format.

    Axes whose max is below 2000 keep the default tick labels; mid-range
    axes (< 4500) get one-decimal 'k' labels; larger axes whole-'k' labels.
    """
    labels = [fmt_percentage(total)(v) for v in values]
    ax.bar_label(ax.containers[0], labels)
    peak = values.max()
    if peak < 2000:
        return
    if peak < 4500:
        formatter = ticker.FuncFormatter(lambda x, p: "%.1fk" % (x / 1000))
    else:
        formatter = thousands_ticker_formatter
    ax.get_yaxis().set_major_formatter(formatter)
|
import numpy as np
import itertools as it
import scipy
import statsmodels.api as sm
import os
from sys import argv
def read_chrom(chrom_file):
    """
    Read a list of chromosomes.
    Args:
        chrom_file (str): name of the file (ex: 'dm3.chrom.sizes'); each line
            is tab-separated with the chromosome name in the first column
    Returns:
        chrom_list (list): list of chromosome names, in file order
    """
    with open(chrom_file) as handle:
        return [row.strip().split("\t")[0] for row in handle]
def read_tad(directory, file_name, chrom_list):
    """
    Read TAD coordinates from a BED-like file.
    Args:
        directory (str): directory of the file location (ex: '/Users/kimm/')
        file_name (str): name of the file (ex: 'tad.bed')
        chrom_list (list): chromosome names used to build per-chromosome totals
    Returns:
        in_tad (list): [chrom, start, end, tad_id, tad_len, tot_tad_in_chrom, tot_tad_len_in_chrom]
        tad_dict (dictionary): dictionary of 'chrom:start-end' coordinate strings by tad_id
    """
    in_tad = []
    tad_dict = {}
    with open(directory + file_name) as f:
        for line in f:
            tmplist = line.strip().split("\t")
            tmplist[1] = int(tmplist[1])
            tmplist[2] = int(tmplist[2])
            # Normalize the TAD id to a zero-padded form, e.g. 'T7' -> 'T007',
            # so ids sort and compare consistently downstream.
            tmplist[3] = 'T'+tmplist[3].split('T')[1].zfill(3)
            # Append the TAD length (end - start).
            tmplist.append(tmplist[2]-tmplist[1])
            in_tad.append(tmplist)
            tad_dict[tmplist[3]] = tmplist[0]+':'+str(tmplist[1])+'-'+str(tmplist[2])
    # Per-chromosome summary: [chrom, number of TADs, total TAD length].
    tad_summ = [[x] for x in chrom_list]
    for k in range(len(tad_summ)):
        subset = [x for x in in_tad if x[0]==tad_summ[k][0]]
        tad_summ[k].append(len(subset))
        tad_summ[k].append(sum([x[4] for x in subset]))
    # Extend every TAD row with its chromosome's totals (columns 5 and 6).
    for i in range(len(in_tad)):
        for m in range(len(tad_summ)):
            if in_tad[i][0]==tad_summ[m][0]:
                in_tad[i].extend(tad_summ[m][1:])
    return in_tad, tad_dict
def read_interactions(directory, annot_file, tad_coord):
    """
    Read a master file with TAD annotation and collect multi-TAD complexes.
    Args:
        directory (str): directory of the file location (ex: '/Users/kimm/')
        annot_file (str): name of the file; column 14 (index 13) carries a
            ';'-separated TAD annotation, '-' marking unannotated fragments
        tad_coord (list): tad coordinates list (used to look up a combination's chromosome)
    Returns:
        tad_comb_list (list): unique tad combinations with counts, as
            [comma-joined tad ids, complex count, chrom, number of TADs]
    """
    with open(directory + annot_file) as f:
        next(f)  # skip the header line
        tad_comb = []
        for line in f:
            tad_ann = line.strip().split("\t")[13]
            # Zero-pad ids the same way read_tad does, e.g. 'T7' -> 'T007'.
            tad_list = ['T'+x.split('T')[1].zfill(3) for x in tad_ann.split(";") if x != '-']
            tad_dict = {x:tad_list.count(x) for x in tad_list}
            # Keep only TADs hit more than once within this complex.
            sub_dict = dict((k,v) for k,v in tad_dict.items() if v > 1)
            if len(sub_dict) > 1:
                # At least two distinct TADs -> record the combination key.
                tad_str = ','.join([key for key, value in sub_dict.items()])
                tad_comb.append(tad_str)
    # Collapse to unique combinations with occurrence counts.
    tad_comb_dict = {x:tad_comb.count(x) for x in tad_comb}
    tad_comb_list = []
    for key, value in tad_comb_dict.items():
        # Chromosome is taken from the first TAD id in the combination.
        tad_indx = int(key.split(',')[0][1:])-1
        chrom = tad_coord[tad_indx][0]
        num_tad = len(key.split(','))
        tad_comb_list.append([key, value, chrom, num_tad])
    return tad_comb_list
def _binom_test_greater(k, n, p):
    """One-sided (greater) binomial test p-value.

    scipy.stats.binom_test was deprecated and removed in SciPy 1.12;
    prefer the replacement binomtest, falling back for old SciPy versions.
    """
    try:
        return scipy.stats.binomtest(k, n=n, p=p, alternative='greater').pvalue
    except AttributeError:
        return scipy.stats.binom_test(k, n=n, p=p, alternative='greater')


def compute_sig(intr_list, tad_coord, intr_num, chrom_list, pval_thresh):
    """
    Compute significance for <intr_num> TADs.
    Mutates the matching rows of *intr_list* in place, appending four fields:
    [..., tot. norm. counts in chrom, tot. # combs in chrom, p-value, 'PASS'/'FAIL'].
    Args:
        intr_list (list): list of interactions; rows are
            [key, cnt, chrom, num_tads, ..., norm_cnt] (norm_cnt at index 6)
        tad_coord (list): tad coordinates list (unused here; kept for API symmetry)
        intr_num (int): number of TADs in interactions
        chrom_list (list): list of chromosome names
        pval_thresh (float): significance cut-off threshold (ex: 0.05)
    Returns:
        None
    """
    subset_tads = [x for x in intr_list if x[3] == intr_num]
    # Per-chromosome totals: normalized counts (gem) and combination counts.
    gem_summ = {key: 0 for key in chrom_list}
    comb_summ = {key: 0 for key in chrom_list}
    for row in subset_tads:
        gem_summ[row[2]] += row[6]
        comb_summ[row[2]] += 1
    for row in subset_tads:
        row.append(gem_summ[row[2]])   # index 7: chrom total of norm. counts
        row.append(comb_summ[row[2]])  # index 8: chrom total of combinations
    for row in subset_tads:
        # Is this combination observed more often than a uniform draw over
        # the chromosome's combinations would predict?
        pval_uniobs = _binom_test_greater(row[6], row[7], 1 / row[8])
        row.append(pval_uniobs)
        row.append('PASS' if pval_uniobs < pval_thresh else 'FAIL')
if __name__ == '__main__':
    ### Set directory and input file name ###
    directory = argv[1]  # ex: '/Users/kimm/Documents/MultiChIA/'
    chrom_file = argv[2]  # ex: 'dm3.chrom.sizes'
    tad_file = argv[3]  # ex: 'GSM3347523_FDR_0.1_ratiothresh_2_pseudoGEM_100000_distTest_PASS_subgem_cov_gem_wgt.1000bp_binned_TAD.bed'
    annot_file = argv[4]  # ex: 'GSM3347523_FDR_0.1_ratiothresh_2_pseudoGEM_100000_distTest_master_PASS_annot.txt'
    prefix = argv[5]  # ex: 'GSM3347523'
    pval_thresh = float(argv[6])  # ex: 0.05
    out_file = prefix + '_interTAD_BinomTest_sig.tsv'
    #### Log file ####
    # NOTE(review): opened in append mode — re-runs accumulate log entries.
    out = open(directory + prefix + "_BinomTest_logFile.txt", "a")
    out.write("Software version: v0.1 (2019-05-21, Kim)" + "\n")
    out.write("Directory: " + directory + "\n")
    out.write("Chrom file name: " + chrom_file + "\n")
    out.write("TAD file name: " + tad_file + "\n")
    out.write("Library name: " + prefix + "\n")
    out.write("p-value threshold: " + str(pval_thresh) + "\n")
    out.write("Started processing frequency-based binomial test for inter-TAD contacts. \n")
    out.write("================================= \n")
    ### Read input GEM file ###
    chrom_list = read_chrom(chrom_file)
    tad_coord, tad_dictionary = read_tad(directory, tad_file, chrom_list)
    tad_intrx = read_interactions(directory, annot_file, tad_coord)
    out.write("Finished reading files. \n")
    out.write("Chromosomes: " + ','.join(map(str, chrom_list)) + '\n')
    out.write("Total " + str(len(tad_coord)) + " TADs." + "\n")
    out.write("Total " + str(len(tad_intrx)) + " combinations of TAD interactions.\n")
    out.write("================================= \n")
    # NOTE(review): this is an alias, not a copy — the loops below mutate
    # tad_intrx as well; confirm that is intended.
    tad_intrx2 = tad_intrx
    for i in range(len(tad_intrx2)):
        tads = tad_intrx2[i][0].split(",")
        all_pairs = list(it.combinations(tads,2))
        # Column 4: complex counts of same-size combinations containing each TAD pair.
        gem_cnts = []
        for k in range(len(all_pairs)):
            gem_cnts.extend([x[1] for x in tad_intrx if all_pairs[k][0] in x[0] and all_pairs[k][1] in x[0] and x[3]==len(tads)])
        tad_intrx2[i].append(sum(gem_cnts))
        # Column 5: counts of strictly larger combinations that contain this one.
        gem_count2 = [x[1] for x in tad_intrx if x[3]>len(tads) and set(tads) < set(x[0].split(","))]
        tad_intrx2[i].append(sum(gem_count2))
    # Column 6: normalized count = pairs in same class + combs-in-higher-class
    # weighted by the number of TAD pairs, n*(n-1)/2.
    for x in tad_intrx2:
        x.append(int(x[4]+x[5]*x[3]*(x[3]-1)/2))
    # Run the binomial test separately for each combination size.
    tad_num = [x[3] for x in tad_intrx2]
    for i in list(set(tad_num)):
        compute_sig(tad_intrx2, tad_coord, i, chrom_list, pval_thresh)
        subset = [x for x in tad_intrx2 if x[3]==i]
        subset_pass = [x for x in subset if x[10] == 'PASS']
        subset_fail = [x for x in subset if x[10] == 'FAIL']
        out.write("== Interactions among " + str(i) + " TADs == \n")
        out.write("Total: " + str(len(subset)) + ' combinations. \n')
        out.write("Pass: " + str(len(subset_pass)) + ' combinations. \n')
        if len(subset_pass) > 0:
            out.write("Pass avg. complex cnt: " + str(round(np.mean([x[1] for x in subset_pass]), 2)) + ' \n')
            out.write("Pass avg. combs in higher class: " + str(round(np.mean([x[5] for x in subset_pass]), 2)) + ' \n')
            out.write("Pass avg. norm. complex cnt : " + str(round(np.mean([x[6] for x in subset_pass]), 2)) + ' \n')
        out.write("Fail: " + str(len(subset_fail)) + ' combinations. \n')
        if len(subset_fail) > 0:
            out.write("Fail avg. complex cnt: " + str(round(np.mean([x[1] for x in subset_fail]), 2)) + ' \n')
            out.write("Fail avg. combs in higher class: " + str(round(np.mean([x[5] for x in subset_fail]), 2)) + ' \n')
            out.write("Fail avg. norm. complex cnt: " + str(round(np.mean([x[6] for x in subset_fail]), 2)) + ' \n')
    # Final column: human-readable coordinates for every TAD in the combination.
    for j in range(len(tad_intrx2)):
        tadlist = tad_intrx2[j][0].split(',')
        tad_intrx2[j].append(','.join([tad_dictionary[t] for t in tadlist]))
    ### Write output file ###
    header = ['TAD combination', 'ComplexCnt', 'Chrom', '# of TADs', 'Pairs in same class', 'Combs in higher class', 'Norm. cnt', 'Tot. norm. cnts by tadn in chrom', 'Tot. # of combs by tadn in chrom', 'p-val (uniform obs.)', 'Decision', 'TAD coord.']
    # NOTE(review): append mode again — re-runs duplicate the header and rows.
    with open(directory + out_file, 'a') as file1:
        file1.write('\t'.join(map(str,header)) + '\n')
        for i in range(len(tad_intrx2)):
            file1.write('\t'.join(map(str,tad_intrx2[i])) + '\n')
        file1.close()  # redundant inside `with`, but harmless
    out.write("================================= \n")
    out.write("DONE. \n")
    out.close()
|
"""An Implementation of Binary Tree."""
from typing import Any
class Node:
    """Binary search tree node.

    Holds a payload and optional left/right children; all BST operations
    (search/insert/delete/traversals) are implemented recursively on the
    node chain.  `BinarySearchTree` below is a thin wrapper around the root.
    """

    # Fixed attribute set: avoids a per-instance __dict__.
    __slots__ = "_data", "_left", "_right"

    def __init__(self, data: Any, left: "Node"=None, right: "Node"=None):
        self._data = data
        self._left = left
        self._right = right

    def display(self, indent=0) -> None:
        """Print this node and its children.

        Rotated-90° view: right subtree above, this node, then left below.
        """
        if self._right:
            self._right.display(indent+3)
        print(f"{(' '*indent)} {self._data}")
        if self._left:
            self._left.display(indent+3)

    def search(self, data: Any) -> bool:
        """Search data in the chain of nodes; True if found."""
        if data == self._data:
            return True
        if data < self._data:
            if self._left:
                return self._left.search(data)
            else:
                return False
        else:
            if self._right:
                return self._right.search(data)
            else:
                return False

    def insert(self, data: Any) -> None:
        """Insert a data into the tree at appropriate node.

        Duplicates are rejected with a printed message.
        """
        if data != self._data:
            if data < self._data:
                if self._left:
                    self._left.insert(data)
                else:
                    self._left = Node(data)
            else:
                if self._right:
                    self._right.insert(data)
                else:
                    self._right = Node(data)
        else:
            print("No duplicate in a binary search tree")

    def maxval(self) -> Any:
        """Return the largest data in this subtree (rightmost node)."""
        if self._right:
            return self._right.maxval()
        else:
            return self._data

    def minval(self) -> Any:
        """Return the smallest data in this subtree (leftmost node)."""
        if self._left:
            return self._left.minval()
        else:
            return self._data

    @property
    def data(self) -> Any:
        return self._data

    @data.setter
    def data(self, value):
        self._data = value

    @property
    def left(self) -> "Node":
        return self._left

    @property
    def right(self) -> "Node":
        return self._right

    def num_nodes(self) -> int:
        """Return the number of node in the BST."""
        n = 1
        if self._left:
            n = n + self._left.num_nodes()
        if self._right:
            n = n + self._right.num_nodes()
        return n

    def search_parent(self, data) -> "Node":
        """Return the parent node of *data*, or None if *data* is this node.

        NOTE(review): assumes *data* exists in the subtree — a missing key
        dereferences a None child and raises AttributeError.
        """
        if data == self._data:  # root node
            return None
        if data < self._data:  # Maybe in the left subtree
            if self._left.data == data:
                return self  # this node is the parent node
            else:
                return self._left.search_parent(data)  # continue the search
        else:  # Maybe in the right subtree
            if self._right.data == data:
                return self  # this node is the parent node
            else:
                return self._right.search_parent(data)  # continue the search

    def delete_node(self, data, parent: "Node"):
        """Remove the node holding *data*, rewiring *parent* as needed."""
        if data < self.data:
            self.left.delete_node(data, parent)
        else:
            if data > self.data:
                self.right.delete_node(data, parent)
            else:
                if self.left is None and self.right is None:  # no children: unlink from parent
                    if parent.left is self:
                        parent._left = None
                    else:
                        parent._right = None
                else:
                    if self.left is None or self.right is None:  # one child: splice it in
                        if self.left:
                            node = self.left
                        else:
                            node = self.right
                        if parent.left is self:
                            temp = parent.left
                            parent._left = node
                        else:
                            temp = parent.right
                            parent._right = node
                        # Detach the removed node's links.
                        temp._left = None
                        temp._right = None
                    else:  # two children: replace data with a neighbor value
                        # Pick the larger subtree's extreme to keep the tree balanced-ish.
                        if self.left.num_nodes() > self.right.num_nodes():
                            x = self.left.maxval()
                        else:
                            x = self.right.minval()
                        self.delete(x)
                        self.data = x

    def delete(self, data):
        """Delete a data from the BST."""
        parent = self.search_parent(data)
        self.delete_node(data, parent)

    def display_preorder(self):
        # NOTE(review): despite the name, this prints an IN-ORDER traversal
        # (left, node, right) — i.e. sorted order for a BST.
        if self.left:
            self.left.display_preorder()
        print(self.data)
        if self.right:
            self.right.display_preorder()

    def _preorder(self, container:list) -> None:
        # In-order accumulation (left, node, right), despite the name.
        if self.left:
            self.left._preorder(container)
        container.append(self.data)
        if self.right:
            self.right._preorder(container)

    def preorder(self) -> list:
        """Return the values collected by _preorder (ascending for a BST)."""
        container = list()
        self._preorder(container)
        return container

    def display_postorder(self) -> None:
        # NOTE(review): this is a REVERSE in-order traversal (right, node, left),
        # not a true postorder — descending order for a BST.
        if self.right:
            self.right.display_postorder()
        print(self.data)
        if self.left:
            self.left.display_postorder()

    def _postorder(self, container:list) -> None:
        # Reverse in-order accumulation (right, node, left), despite the name.
        if self.right:
            self.right._postorder(container)
        container.append(self.data)
        if self.left:
            self.left._postorder(container)

    def postorder(self) -> list:
        """Return the values collected by _postorder (descending for a BST)."""
        container = list()
        self._postorder(container)
        return container
class BinarySearchTree:
    """Binary Search Tree class.

    Thin wrapper holding the root Node; empty-tree cases are handled here
    and everything else delegates to Node's recursive methods.
    """

    def __init__(self):
        self._root: Node = None

    def display(self) -> None:
        """Print this BST."""
        if self._root:
            self._root.display()
        else:
            print("Empty BST")

    def search(self, data: Any) -> bool:
        """Search data in the BST."""
        if self._root:
            return self._root.search(data)
        else:
            return False

    def insert(self, data: Any) -> None:
        """Insert data into the BST."""
        if self._root:
            self._root.insert(data)
        else:
            self._root = Node(data)

    def delete(self, data: Any) -> None:
        """Delete a data from BST.

        The root is a special case because Node.delete needs a parent:
        the root's value is replaced by a neighbor value from a subtree,
        then that neighbor is deleted from the subtree.
        """
        if self.search(data):
            if self._root.data == data:
                if self._root.left:
                    child = self._root.left
                else:
                    child = self._root.right
                if child is None:
                    # Root is the only node: the tree becomes empty.
                    self._root = None
                else:
                    # Replace the root's data with its in-order neighbor.
                    if self._root.left:
                        x = child.maxval()
                    else:
                        x = child.minval()
                    self._root.delete(x)
                    self._root.data = x
            else:
                self._root.delete(data)
        else:
            print("Data not in the BST.")

    def display_preorder(self):
        # See Node.display_preorder: actually an in-order (sorted) print.
        if self._root:
            self._root.display_preorder()
        else:
            print("Empty BST.")

    def display_postorder(self):
        # See Node.display_postorder: actually a reverse in-order print.
        if self._root:
            self._root.display_postorder()
        else:
            print("Empty BST.")

    def preorder(self) -> list:
        """Return values in ascending order (see Node._preorder)."""
        if self._root:
            return self._root.preorder()
        else:
            return []

    def postorder(self) -> list:
        """Return values in descending order (see Node._postorder)."""
        if self._root:
            return self._root.postorder()
        else:
            return []

    def num_nodes(self):
        """Return the number of nodes in the BST (0 when empty)."""
        if self._root:
            return self._root.num_nodes()
        else:
            return 0
def _test():
    """Interactive driver: seed a random BST, then process user commands."""
    import random

    tree = BinarySearchTree()
    for _ in range(10):
        tree.insert(random.randrange(100))
    tree.display()
    print("---", tree.preorder())
    print("---", tree.postorder())
    print(f"{tree.num_nodes()} nodes in the BST")

    done = False
    while not done:
        print("*" * 50)
        print("(a) Add, or (i) Preorder or (d) Postorder ")
        print("or (r) Remove or (q) Quit?")
        print("*" * 50)
        cmd = input(">> ")[0].lower()
        if cmd in ("a", "r"):
            # Both add and remove need a value from the user first.
            value = int(input(">> "))
            print("-" * 50)
            if cmd == "a":
                tree.insert(value)
            else:
                tree.delete(value)
        elif cmd == "i":
            print(tree.preorder())
        elif cmd == "d":
            print(tree.postorder())
        elif cmd == "q":
            done = True
        elif cmd not in "aidrq":
            print("Valid input commands are: 'a', 'i', 'd', 'r' or 'q'")
# Run the interactive demo only when executed as a script.
if __name__ == "__main__":
    _test()
|
import requests
from itertools import product
class ForceAttackModule(object):
    """
    Module finds links on the website. It uses brute-force method:
    every alphabetic path of length `start`..`stop` is requested, and
    URLs answering HTTP 200 are recorded in the result dict.
    """
    def __init__(self, proxies=None, start=1, stop=2, capital=False):
        """
        Constructor of the class.
        :param proxies: Proxy information, passed through to `requests`.
        :param start: Shortest path length to try.
        :param stop: Longest path length to try (inclusive).
        :param capital: Also try capital letters A-Z when True.
        """
        self.start = start
        self.stop = stop
        self.capital = capital
        self._result = {}
        self._id_mod = "b"
        self._proxies = proxies
    def scan(self, main_url):
        """
        Scanning Method
        :param main_url: Main url of the scanned website
        :return: None. Found links accumulate in the result dict under
            'link_array_positive'.
        """
        for sets_num in range(self.start, self.stop + 1):
            # BUG FIX: original used Python 2 `xrange` and `range(...) + range(...)`,
            # which raise NameError/TypeError on Python 3.
            letters_range = (list(range(65, 91)) if self.capital else []) + list(range(97, 123))
            letters = ''.join([chr(x) for x in letters_range])
            products = product(letters, repeat=sets_num)
            for subdir in products:
                subdir = '/' + ''.join(subdir)
                attacked_url = main_url + subdir
                response = requests.get(attacked_url, proxies=self._proxies)
                if response.status_code == 200:
                    # Lazily create the bucket on the first hit.
                    if "link_array_positive" not in self._result:
                        self._result["link_array_positive"] = []
                    self._result["link_array_positive"].append(attacked_url)
    def get_id(self):
        """
        Method which returns id of the module
        :return: Id of module
        """
        return self._id_mod
    def get_result(self):
        """
        Method which returns results of the module
        :return: Dict with results.
        """
        return self._result
|
""" This module stores the default config settings for the server.
It is optionally overriden by a local_config module in the same directory.
The local configs are for dev debugging/testing.
"""
import importlib
# Flask app settings
APP_HOST = "0.0.0.0"
APP_PORT = 5000
FLASK_DEBUG = False
VERBOSE = False
CLEANUP_SNS = True
DOWNLOAD_GRDS = True
RUN_ML = True
UPDATE_ML = True
COUNTRIES_GEOJSON = "Countries.geojson"
EEZ_GEOJSON = "eez_v11_simplified_noholes.geojson"
OCEAN_GEOJSON = "OceanGeoJSON_lowres.geojson"
BLOCK_REPEAT_SNS = True
SQS_URL = "https://sqs.eu-central-1.amazonaws.com/162277344632/New_Machinable"
SQS_DEADLETTER_URL = "https://sqs.eu-central-1.amazonaws.com/162277344632/Dead_Machinable"
UPLOAD_OUTPUTS = True
WKT_ROUNDING = 5 # -1 means don't round
# Database connection settings
DB_HOST = "db-slick.cboaxrzskot9.eu-central-1.rds.amazonaws.com"
DB_USER = "postgres"
DB_PASSWORD = "postgres"
DB_DATABASE = "cerulean"
DB_PORT = "5432"
DB_TYPE = "postgresql"
ECHO_SQL = False
# DEBUG_DB_ACCESS switches on/off all DB functionality if in debug mode
DEBUG_DB_ACCESS = True
# SciHub login settings
SH_USER = "jonaraph"
SH_PWD = "fjjEwvMDHyJH9Fa"
# If the local_config module is found, import all those settings, overriding any here that overlap.
if importlib.util.find_spec("configs.local_config") is not None:
from configs.local_config import * # pylint: disable=unused-wildcard-import
|
import pytest
from intcode import solve
# Each case pairs an initial intcode program with the expected final memory
# state after the machine halts (opcode 99).
@pytest.mark.parametrize('opcodes,solution', [
    ([1,0,0,0,99],[2,0,0,0,99]),
    ([2,3,0,3,99], [2,3,0,6,99]),
    ([2,4,4,5,99,0], [2,4,4,5,99,9801]),
    ([1,1,1,4,99,5,6,0,99], [30,1,1,4,2,5,6,0,99])
])
def test_solve(opcodes, solution):
    """solve() must run the program to completion and return the final memory."""
    assert solve(opcodes) == solution
|
"""Autocomplete urls."""
from django.urls import path
from . import views
# Autocomplete endpoint: routes 'get_names' to views.get_names.
urlpatterns = [
    path('get_names', views.get_names, name='get_names'),
]
|
# Copyright 2020 Tabacaru Eric
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Server script for a multi-threaded chat application, using sockets.
# Server.py
# Necessary library imports.
import socket
from threading import Thread
# Client and IP dicts.
clients = {}   # socket -> nickname, filled once the client sends a name
ip_addrs = {}  # socket -> (host, port) tuple from accept()
###### SERVER DATA ######
# Constants
SV_HOST = '127.0.0.1'  # loopback only; not reachable from other machines
SV_PORT = 33000
BUFSIZ = 4096  # max bytes per recv()
# Server
# NOTE: the socket is created and bound at import time (module-level side effect).
sv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sv.bind((SV_HOST, SV_PORT))
###### SERVER FUNCTIONALITY ######
# Function that accepts new connections to the server.
def accept_connections():
    """Accept sockets forever; greet each client and spawn a handler thread."""
    welcome = ("Welcome to Saint's Chatroom! He also goes by the name of dsnk! "
               "Please type your nickname and press Enter!")
    while True:
        client, addr = sv.accept()
        print(f"New connection from {addr}!")
        client.send(welcome.encode('utf-8'))
        ip_addrs[client] = addr
        worker = Thread(target=handle_client, args=(client,))
        worker.start()
# Function used to handle client messages and connection.
def handle_client(client):
    """Per-client thread body: register the client, then relay messages until '[quit]'."""
    # First receives the client's name in response to the previous server message.
    name = client.recv(BUFSIZ).decode('utf-8')
    # Send a message about how to properly disconnect from the server.
    greet_msg = f"Nice to meet you {name}! If you ever want to quit the chatroom, type '[quit]'.".encode('utf-8')
    client.send(greet_msg)
    # Broadcast a message informing people that a new person joined the chat.
    broadcast(f"{name} has joined the chat! Everybody give him a warm welcome!".encode('utf-8'))
    clients[client] = name
    # Start of main communication loop. First it stores the message received from the client inside the 'msg' variable,
    # Then it checks if the msg is '[quit]', if true it closes the socket connection to that client and then echos
    # '[quit]' to the client to shut down the client as well. If it's not true, then it simply broadcasts the message
    # to all the clients.
    # NOTE(review): an abrupt disconnect makes recv() raise (or return b''),
    # which is unhandled here — the thread dies with the client still in
    # `clients`; confirm whether that is acceptable.
    while True:
        msg = client.recv(BUFSIZ)
        if msg != "[quit]".encode('utf-8'):
            broadcast(msg, f"{name}: ".encode('utf-8'))
        else:
            client.send("[quit]".encode('utf-8'))
            client.close()
            del clients[client]
            broadcast(f"{name} has left the chat.".encode('utf-8'))
            break
# Function used to easily send a data packet to all the clients connected on the server.
# Note: Optional argument 'prefix' which is used to let the other users know who sent the message. If none
# is provided, it will simply send the message with no prefix, letting the users know this is a system message.
def broadcast(msg, prefix=" ".encode('utf-8')):
    """Send *prefix* + *msg* to every connected client.

    *prefix* identifies the sender; the default single space marks a
    system message.
    """
    for sock in clients:
        sock.send(prefix + msg)
if __name__ == "__main__":
sv.listen(5)
print(f"Listening on {SV_HOST}:{SV_PORT}")
accept_thread = Thread(target=accept_connections)
accept_thread.start()
accept_thread.join()
sv.close() |
from nose.tools import *
from .generator import generate_resource
import math
import udaru_anomaly_detection.check_gramma as check_gramma
# Shorthand for one third, used throughout the expected-probability math below.
fr3 = 1/3
def test_loss_calculation():
    """Check hand-computed prior and sequence log-probabilities on tiny graphs."""
    # Test loss on:
    # ^ -> A -> $
    model = check_gramma.CheckGrammaModel()
    node_a = model.add_node()
    node_a.increment_emission('A')
    model.root.increment_transition(node_a)
    node_a.increment_transition(model.end)
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(3 ** -3))
    assert_almost_equal(model.compute_sequence_log_prop(['A']),
                        math.log(1 * 1 * 1))
    # Test loss on (self-loop added, so 'A' can repeat):
    # ^ -> [A] -> $
    node_a.increment_transition(node_a)
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(3 ** -4))
    assert_almost_equal(model.compute_sequence_log_prop(['A']),
                        math.log(1 * 0.5))
    assert_almost_equal(model.compute_sequence_log_prop(['A', 'A']),
                        math.log(1 * 0.5 * 0.5))
    # Test loss on (parallel branch through B):
    # /-> B -\
    # ^ -> [A] -> $
    node_b = model.add_node()
    node_b.increment_emission('B')
    model.root.increment_transition(node_b)
    node_b.increment_transition(model.end)
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(4 ** -7))
    assert_almost_equal(model.compute_sequence_log_prop(['A']),
                        math.log(0.5 * 0.5))
    assert_almost_equal(model.compute_sequence_log_prop(['A', 'A']),
                        math.log(0.5 * 0.5 * 0.5))
    assert_almost_equal(model.compute_sequence_log_prop(['B']),
                        math.log(0.5 * 1 * 1))
    # Test loss on (mixed emissions in the looped node):
    # /--> B --\
    # ^ -> [A = 75% | B = 25%] -> $
    node_a.increment_emission('B')
    node_a.increment_emission('A')
    node_a.increment_emission('A')
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(4 ** -(5 + 5)))
    assert_almost_equal(model.compute_sequence_log_prop(['A']),
                        math.log(0.5 * 0.75 * 0.5))
    assert_almost_equal(model.compute_sequence_log_prop(['A', 'A']),
                        math.log(0.5 * 0.75 * 0.5 * 0.75 * 0.5))
    # 'B' can now be produced by two paths; probabilities are summed.
    assert_almost_equal(model.compute_sequence_log_prop(['B']),
                        math.log(0.5 * 1 * 1 + 0.5 * 0.25 * 0.5))
    assert_almost_equal(model.compute_sequence_log_prop(['B', 'B']),
                        math.log(0.5 * 0.25 * 0.5 * 0.25 * 0.5))
def test_merge_node():
    """merge_node(D, E) must reroute all transitions through the merged node."""
    # Build the graph
    #
    #    -A-      -F-
    #   /   \    /   \
    #  /     D       \
    # /     / \       \
    # ^ +-- B  +- G -+ $
    # \      \ /     /
    #  \      E     /
    #   \    / \   /
    #    -C-     -H-
    #
    model = check_gramma.CheckGrammaModel()

    # Small helper: fresh node emitting a single character.
    def create_node(char):
        node = model.add_node()
        node.increment_emission(char)
        return node
    node_a = create_node('A')
    node_b = create_node('B')
    node_c = create_node('C')
    node_d = create_node('D')
    node_e = create_node('E')
    node_f = create_node('F')
    node_g = create_node('G')
    node_h = create_node('H')
    model.root.increment_transition(node_a)
    model.root.increment_transition(node_b)
    model.root.increment_transition(node_c)
    node_a.increment_transition(node_d)
    node_b.increment_transition(node_d)
    node_b.increment_transition(node_e)
    node_c.increment_transition(node_e)
    node_d.increment_transition(node_f)
    node_d.increment_transition(node_g)
    node_e.increment_transition(node_g)
    node_e.increment_transition(node_h)
    node_f.increment_transition(model.end)
    node_g.increment_transition(model.end)
    node_h.increment_transition(model.end)
    # Check cost of structure
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(10 ** -(8 + 14)))
    assert_almost_equal(model.compute_sequence_log_prop(['A', 'D', 'F']),
                        math.log(fr3 * 1.0 * 0.5 * 1.0))
    assert_almost_equal(model.compute_sequence_log_prop(['B', 'D', 'G']),
                        math.log(fr3 * 0.5 * 0.5 * 1.0))
    assert_almost_equal(model.compute_sequence_log_prop(['B', 'E', 'G']),
                        math.log(fr3 * 0.5 * 0.5 * 1.0))
    assert_almost_equal(model.compute_sequence_log_prop(['C', 'E', 'H']),
                        math.log(fr3 * 1.0 * 0.5 * 1.0))
    #
    # Merge node E into D
    #
    #    A           F
    #   / \         / \
    #  /   \       /   \
    # /     \     /     \
    # ^ +- B -[D|E]= G -+ $
    # \     /     \     /
    #  \   /       \   /
    #   \ /         \ /
    #    C           H
    #
    model.merge_node(node_d, node_e)
    # Check cost of structure after the merge: fewer nodes, re-weighted paths.
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(9 ** -(10 + 12)))
    assert_almost_equal(model.compute_sequence_log_prop(['A', 'D', 'F']),
                        math.log(fr3 * 1.0 * 0.5 * 0.25 * 1.0))
    assert_almost_equal(model.compute_sequence_log_prop(['B', 'D', 'G']),
                        math.log(fr3 * 1.0 * 0.5 * 0.5 * 1.0))
    assert_almost_equal(model.compute_sequence_log_prop(['B', 'E', 'G']),
                        math.log(fr3 * 1.0 * 0.5 * 0.5 * 1.0))
    assert_almost_equal(model.compute_sequence_log_prop(['C', 'E', 'H']),
                        math.log(fr3 * 1.0 * 0.5 * 0.25 * 1.0))
def test_merge_self_recursive_node():
    """Merging nodes that form self-loops must keep transition mass consistent."""
    # NOTE: create_node closes over `model`, which is (re)bound *after* this
    # def — Python's late-binding closures make it pick up the current model
    # at call time.
    def create_node(char):
        node = model.add_node()
        node.increment_emission(char)
        return node
    # Graph:
    # ^ -> [A] -> $
    #  \--> B --/
    model = check_gramma.CheckGrammaModel()
    node_a = create_node('A')
    node_b = create_node('B')
    model.root.increment_transition(node_a)
    model.root.increment_transition(node_b)
    node_a.increment_transition(node_a)
    node_a.increment_transition(model.end)
    node_b.increment_transition(model.end)
    model.merge_node(node_a, node_b)
    # After merging B into A: one self-loop, two exits to the end node.
    assert_almost_equal(node_a.transition, {
        node_a.index: 1*fr3,
        model.end.index: 2*fr3
    })
    # Graph:
    # ^ -> A -> B -> $
    #       \-<-/
    model = check_gramma.CheckGrammaModel()
    node_a = create_node('A')
    node_b = create_node('B')
    model.root.increment_transition(node_a)
    node_a.increment_transition(node_b)
    node_b.increment_transition(node_a)
    node_b.increment_transition(model.end)
    model.merge_node(node_a, node_b)
    # The A<->B cycle collapses to a double-weighted self-loop on A.
    assert_almost_equal(node_a.transition, {
        node_a.index: 2*fr3,
        model.end.index: 1*fr3
    })
def test_merge_sequence_self_merge():
    """A repeated subsequence should collapse into a loop the model can generalize."""
    model = check_gramma.CheckGrammaModel()
    dataset = [
        ['A', 'B', 'C', 'A', 'B', 'C', 'D']
    ]
    # Expect
    #
    #      /---<---\
    # ^ -> A -> B -> C -> D -> $
    #
    model.merge_sequence(dataset[0], dataset[0:1])
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(6 ** -10))
    # Test that the graph generalizes to ABC repeating more than the dataset
    assert_almost_equal(
        model.compute_sequence_log_prop(
            ['A', 'B', 'C', 'A', 'B', 'C', 'A', 'B', 'C', 'D']
        ),
        math.log(1.0 * 1.0 * 1.0 * 0.5 *
                 1.0 * 1.0 * 1.0 * 0.5 *
                 1.0 * 1.0 * 1.0 * 0.5 *
                 1.0 * 1.0)
    )
def test_merge_sequence():
    """Incrementally merging sequences should grow the expected shared structure."""
    model = check_gramma.CheckGrammaModel()
    dataset = [
        ['A', 'A', 'C', 'D'],
        ['B', 'B', 'C', 'D'],
        ['C', 'C', 'C', 'D']
    ]
    # Expect (repeated 'A' collapses into a self-loop node):
    # ^ -> [A] --> C -> D -> $
    #
    model.merge_sequence(dataset[0], dataset[0:1])
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(5 ** -(3 + 5)))
    assert_almost_equal(
        model.compute_sequence_log_prop(
            ['A', 'A', 'A', 'C', 'D']
        ),
        math.log(1.0 * 0.5 * 0.5 * 0.5 * 1.0 * 1.0)
    )
    # Expect (second sequence adds a parallel B branch):
    # ^ -> [A] -+> C -> D -> $
    #  \-> [B] -/
    model.merge_sequence(dataset[1], dataset[0:2])
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(6 ** -(4 + 8)))
    assert_almost_equal(
        model.compute_sequence_log_prop(
            ['A', 'A', 'A', 'C', 'D']
        ),
        math.log(0.5 * 0.5 * 0.5 * 0.5 * 1.0 * 1.0)
    )
    assert_almost_equal(
        model.compute_sequence_log_prop(
            ['B', 'B', 'B', 'C', 'D']
        ),
        math.log(0.5 * 0.5 * 0.5 * 0.5 * 1.0 * 1.0)
    )
    # Expect (third sequence reuses the shared C node with a self-loop):
    #  /-> [A] -\
    # ^ --------+> [C] -> D -> $
    #  \-> [B] -/
    model.merge_sequence(dataset[2], dataset[0:3])
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(6 ** -(4 + 10)))
    assert_almost_equal(
        model.compute_sequence_log_prop(
            ['A', 'A', 'A', 'C', 'D']
        ),
        math.log(fr3 * 0.5 * 0.5 * 0.5 * 1.0 * 0.6 * 1.0)
    )
    assert_almost_equal(
        model.compute_sequence_log_prop(
            ['B', 'B', 'B', 'C', 'D']
        ),
        math.log(fr3 * 0.5 * 0.5 * 0.5 * 1.0 * 0.6 * 1.0)
    )
    assert_almost_equal(
        model.compute_sequence_log_prop(
            ['C', 'C', 'C', 'C', 'D']
        ),
        math.log(fr3 * 0.4 * 0.4 * 0.4 * 0.6 * 1.0)
    )
def test_merge_sequence_fast():
    """The fast-merge heuristic should leave probabilities in the expected (suboptimal) state."""
    model = check_gramma.CheckGrammaModel()
    dataset = [
        ['A', 'B', 'A', 'B', 'C', 'C'],
        ['B', 'C'],
        ['B', 'C']
    ]
    # Build Graph
    #
    #   /---> B  ---> C(1)  ---\
    # ^ -> [A|B] -> [C](2)  --> $
    #
    node_b = model.add_node()
    node_b.increment_emission('B')
    node_ab = model.add_node()
    node_ab.increment_emission('A')
    node_ab.increment_emission('B')
    node_ab.increment_transition(node_ab)
    node_c1 = model.add_node()
    node_c1.increment_emission('C')
    node_c2 = model.add_node()
    node_c2.increment_emission('C')
    node_c2.increment_transition(node_c2)
    model.root.increment_transition(node_b)
    model.root.increment_transition(node_ab)
    node_b.increment_transition(node_c1)
    node_ab.increment_transition(node_c2)
    node_c1.increment_transition(model.end)
    node_c2.increment_transition(model.end)
    # Assert that correct paths are taken
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(6 ** -(8 + 7)))
    assert_almost_equal(
        model.compute_sequence_log_prop(dataset[0]),
        math.log(0.5 * 0.5 * 0.5 * 0.5 * 0.5 * 0.5 * 0.5 * 0.5 *
                 0.5 * 1.0 * 0.5 * 1.0 * 0.5 * 1.0)
    )
    assert_almost_equal(
        model.compute_sequence_log_prop(dataset[1]),
        math.log(0.5 * 0.5 * 0.5 * 1.0 * 0.5 * 1.0 +
                 0.5 * 1.0 * 1.0 * 1.0 * 1.0 * 1.0)
    )
    # Perform a fast merge
    # The heuristic actually produces something suboptimal in this case
    model.merge_sequence(dataset[2], dataset)
    # Assert that correct paths are taken (structure cost unchanged,
    # but the root transition weights shifted toward the B branch).
    assert_almost_equal(model.compute_prior_log_prop(),
                        math.log(6 ** -(8 + 7)))
    assert_almost_equal(
        model.compute_sequence_log_prop(dataset[0]),
        math.log(fr3 * 0.5 * 0.5 * 0.5 * 0.5 * 0.5 * 0.5 * 0.5 *
                 0.5 * 1.0 * 0.5 * 1.0 * 0.5 * 1.0)
    )
    assert_almost_equal(
        model.compute_sequence_log_prop(dataset[1]),
        math.log(1*fr3 * 0.5 * 0.5 * 1.0 * 0.5 * 1.0 +
                 2*fr3 * 1.0 * 1.0 * 1.0 * 1.0 * 1.0)
    )
def test_train_and_validate():
    """Train on generated data, then check that every test sequence validates."""
    trained_model = check_gramma.train(generate_resource(10, 'train'))
    for seq in generate_resource(2, 'test'):
        assert check_gramma.validate(trained_model, seq)
|
# -*- coding: utf-8 -*-
from . import data_processor as dp
from . import data_simulator as ds
from . import train, evaluate, optimize, fcnet, hpmodel, nodeframe, utils
import torch
from torch.autograd import Variable
import torch.multiprocessing as mp
import numpy as np
#%% multilayer perceptron (MLP)
class MLP(train.Train):
    r"""Multilayer perceptron (MLP) that is used to predict cosmological parameters with one set of datasets.
    Parameters
    ----------
    spectra : array-like
        The simulated observational spectra (data) with shape (N, spectra_length).
    parameters : array-like
        The simulated parameters of a specific cosmological (or theoretical) model.
    param_names : list
        A list which contains the parameter names, e.g. ['H0','Omega_m','ombh2','omch2','tau','As','ns'].
    params_dict : dict or None, optional
        Information of cosmological parameters that include the labels, the base values, the minimum values,
        and the maximum values. See :func:`~.cosmic_params.params_dict_zoo`. Default: None
    obs_errors : array-like, optional
        Observational errors with shape (spectra_length,). Default: None
    cov_matrix : array-like or None, optional
        Covariance matrix of the observational data. Default: None
    hidden_layer : int, optional
        The number of the hidden layer of the network. Default: 3
    hp_model : str, optional
        Hyperparameter model that contains hyperparameters (such as activation function, batch normalization, dropout, etc.) used in the network.
        It can be 'eco_1', 'eco_2', 'eco_3', 'eco_4', 'eco_5', 'eco_6', or 'eco_7' (see :func:`~.hpmodel.models`). Default: 'eco_3'
    loss_func : str, optional
        The loss function used in the network. Default: 'L1'
    noise_type : str, optional
        The type of Gaussian noise added to the training set, 'singleSigma' or 'multiSigma'. Default: 'multiSigma'
    noise_sigma : float, optional
        For the case of 'singleSigma', it is the standard deviation of the Gaussian noise, while for the case of 'multiSigma' it is
        a coefficient of the standard deviation. Default: 0.5
    multi_noise : int, optional
        The number of realization of noise added to a spectrum. Default: 5
    Attributes
    ----------
    lr : float, optional
        The learning rate setting of the network. Default: 1e-2
    lr_min : float, optional
        The minimum of the learning rate. Default: 1e-8
    batch_size : int, optional
        The batch size setting of the network. Default: 750
    auto_batchSize : bool, optional
        If True, the batch size will be set automatically in the training process, otherwise, use the setting of ``batch_size``. Default: True
    epoch : int, optional
        The number of epoch of the training process. Default: 2000
    base_epoch : int, optional
        The base number (or the minimum number) of epoch. Default: 1000
    auto_epoch : bool, optional
        If True, the epoch will be set automatically in the training process, otherwise, use the setting of ``epoch``. Default: True
    norm_inputs : bool, optional
        If True, the input data of the network will be normalized. Default: True
    norm_target : bool, optional
        If True, the target data (cosmological parameters) will be normalized. Default: True
    norm_type : str, optional
        The method of normalization, 'z_score', 'minmax', or 'mean' (see :class:`~.data_processor.Normalize`). Default: 'z_score'
    spaceSigma_min : int, optional
        The minimum parameter space to be learned, e.g. for spaceSigma_min=5, the parameter space to be learned is :math:`[-5\sigma, +5\sigma]`. Default: 5
    transfer_learning : bool, optional
        If True, the network will be initialized using the well-trained network of the previous step. Default: False
    """
    def __init__(self, spectra, parameters, param_names, params_dict=None, obs_errors=None, cov_matrix=None,
                 hidden_layer=3, hp_model='eco_3', loss_func='L1',
                 noise_type='multiSigma', noise_sigma=0.5, multi_noise=5):
        #data
        self.spectra = spectra
        # parameters are scaled to the network-friendly range up front
        self.params = dp.ParamsScaling(parameters, param_names, params_dict=params_dict).scaling()
        self.param_names = param_names
        self.params_dict = params_dict
        self.obs_errors = obs_errors
        self.cov_matrix = cov_matrix
        #ANN model
        self.hidden_layer = hidden_layer
        self.hp_model = hp_model
        self.loss_func = train.loss_funcs(name=loss_func)
        self.lr = 1e-2
        self.lr_min = 1e-8
        self.batch_size = 750
        self.auto_batchSize = True
        self.epoch = 2000
        self.base_epoch = 1000
        self.auto_epoch = True
        self.fix_initialize = False
        self.print_info = False
        #data preprocessing
        self.noise_type = noise_type
        self.noise_sigma = noise_sigma
        self.multi_noise = multi_noise
        self.norm_inputs = True
        self.norm_target = True
        self.norm_type = 'z_score'
        #training
        self.spaceSigma_min = 5
        self.auto_repeat_n = False
        self.burn_in = False
        self.burnIn_step = None
        self.transfer_learning = False
        # random identifier embedded in all saved file names
        self.randn_num = round(abs(np.random.randn()/5.), 5)
    def _nodes(self):
        """Return the per-layer node counts (input width from the spectra,
        output width from the parameters, decreasing hidden layers)."""
        self.node_in = self.spectra.shape[1]
        self.node_out = self.params.shape[1]
        return nodeframe.decreasingNode(node_in=self.node_in,node_out=self.node_out,hidden_layer=self.hidden_layer)
    def _net(self):
        """Build ``self.net``, the fully connected network."""
        if self.fix_initialize:
            torch.manual_seed(1000) #Fixed parameter initialization
        self.nodes = self._nodes()
        self.hparams = hpmodel.models(self.hp_model)
        self.net = fcnet.get_FcNet(nodes=self.nodes, hparams=self.hparams)
        if self.print_info:
            print(self.net)
    def _check_batchSize(self):
        """Clamp the batch size to the size of the training set."""
        if self.batch_size > len(self.params):
            self.batch_size = len(self.params)
            print('The batch size is set to %s'%(self.batch_size))
    def _auto_batchSize(self):
        """Choose a batch size automatically from training-set size and stage."""
        if self.burn_in:
            #here <=2.5 is based on experiments
            if self.spaceSigma_min<=2.5:
                self.batch_size = 500
            else:
                self.batch_size = len(self.params)//4
        else:
            self.batch_size = len(self.params)//2
        #make sure batch size will not too large
        if self.batch_size>1250:
            self.batch_size = 1250
    def _auto_epoch(self):
        """Outside the burn-in stage, fall back to the base epoch number."""
        if not self.burn_in:
            self.epoch = self.base_epoch
    def _auto_repeat_n(self, repeat_n):
        """Repeat batches only during burn-in; otherwise a single pass."""
        if self.burn_in:
            return repeat_n
        else:
            return 1
    def statistic(self):
        """Compute and cache the normalization statistics of inputs and targets."""
        self.spectra_statistic = dp.Statistic(self.spectra).statistic()
        self.params_statistic = dp.Statistic(self.params).statistic()
    def load_wellTrainedNet(self, path='ann', randn_num='0.123'):
        """Load a previously trained network (identified by ``randn_num``)
        and enable transfer learning from it."""
        randn_num = str(randn_num)
        print('\nLoading the well trained network that has random number {}'.format(randn_num))
        file_path = evaluate.FilePath(filedir=path, randn_num=randn_num).filePath()
        # saved object is indexed with [0] -- presumably a sequence whose first
        # element is the net; TODO confirm against the save format
        self.trained_net = torch.load(file_path)[0]#
        self.transfer_learning = True
    def copyLayers_fromTrainedNet(self):
        """Initialize ``self.net`` with the weights of the loaded network."""
        print('\nCopying hyperparameters of a well trained network to the network')
        self.net.load_state_dict(self.trained_net.state_dict())
    def transfer_data(self):
        """Move the training data (and errors/covariance) to torch tensors,
        on GPU when ``self.use_GPU`` is set (attribute provided by the base class)."""
        if self.use_GPU:
            self.spectra = dp.numpy2cuda(self.spectra)
            self.params = dp.numpy2cuda(self.params)
            if self.cov_matrix is None:
                self.obs_errors = dp.numpy2cuda(self.obs_errors)
            else:
                self.cov_matrix = dp.numpy2cuda(self.cov_matrix)
        else:
            self.spectra = dp.numpy2torch(self.spectra)
            self.params = dp.numpy2torch(self.params)
            if self.cov_matrix is None:
                self.obs_errors = dp.numpy2torch(self.obs_errors)
            else:
                self.cov_matrix = dp.numpy2torch(self.cov_matrix)
    def train(self, repeat_n=3, showIter_n=100):
        """Train the network and return ``(net, loss)``.

        Parameters
        ----------
        repeat_n : int, optional
            Multiplier on the number of minibatch iterations per epoch. Default: 3
        showIter_n : int, optional
            Print progress every ``showIter_n`` epochs. Default: 100
        """
        self._net()
        if self.transfer_learning:
            self.copyLayers_fromTrainedNet()
        self.transfer_net()
        self.optimizer = self._optimizer(name='Adam')
        if self.auto_batchSize:
            self._auto_batchSize()
        self._check_batchSize()
        # print('batch size: %s'%self.batch_size)
        if self.auto_epoch:
            self._auto_epoch()
        if self.auto_repeat_n:
            repeat_n = self._auto_repeat_n(repeat_n)
        self.iteration = self.multi_noise*len(self.spectra)//self.batch_size * repeat_n
        self.statistic()
        self.transfer_data()
        self.loss = []
        # np.random.seed(1000)#
        print('randn_num: %s'%self.randn_num)
        # "subsample_num" is really the epoch counter; each epoch draws a fresh
        # noisy realization of the whole training sample
        for subsample_num in range(1, self.epoch+1):
            self.inputs, self.target = ds.AddMultiGaussianNoise(self.spectra,self.params,obs_errors=self.obs_errors,cov_matrix=self.cov_matrix,multi_noise=self.multi_noise,use_GPU=self.use_GPU).multiNoisySample(noise_type=self.noise_type,sigma=self.noise_sigma,reorder=True)
            if self.norm_inputs:
                self.inputs = dp.Normalize(self.inputs, self.spectra_statistic, norm_type=self.norm_type).norm()
            if self.norm_target:
                self.target = dp.Normalize(self.target, self.params_statistic, norm_type=self.norm_type).norm()
            _, loss_ = self.train_1(self.inputs, self.target, repeat_n=1, set_seed=False, lr_decay=False, print_info=False)
            self.loss += loss_
            if subsample_num%showIter_n==0:
                print('(epoch:%s/%s; loss:%.5f; lr:%.8f)'%(subsample_num, self.epoch, loss_[-1], self.optimizer.param_groups[0]['lr']))
            # exponential learning-rate decay over the full training run
            lrdc = optimize.LrDecay(subsample_num,iteration=self.epoch,lr=self.lr,lr_min=self.lr_min)
            self.optimizer.param_groups[0]['lr'] = lrdc.exp()
        self.net = self.net.cpu()
        self.loss = np.array(self.loss)
        return self.net, self.loss
    def predict(self, spectra, in_type='numpy'):
        """Predict (inverse-scaled) parameters for one spectrum or a batch."""
        if len(spectra.shape)==1:
            spectra = spectra.reshape(1, -1) #for one spectrum
        if self.norm_inputs:
            spectra = dp.Normalize(spectra, self.spectra_statistic, norm_type=self.norm_type).norm()
        self.pred_params = evaluate.predict(self.net, spectra, in_type=in_type)
        if self.norm_target:
            self.pred_params = dp.InverseNormalize(self.pred_params, self.params_statistic, norm_type=self.norm_type).inverseNorm()
        self.pred_params = dp.ParamsScaling(self.pred_params, self.param_names, params_dict=self.params_dict).inverseScaling()
        return self.pred_params
    def predict_chain(self, obs_spectra, cov_matrix=None, chain_leng=10000):
        """Build an ANN chain by predicting on ``chain_leng`` noisy copies of
        the observed spectrum; ``obs_spectra`` columns are assumed to be
        (x, best value, error) -- column 0 is unused here."""
        # torch.manual_seed(1000)#
        obs_spectra = dp.numpy2torch(obs_spectra)
        obs_best, obs_errors = obs_spectra[:,1], obs_spectra[:,2]
        obs_best_multi = torch.ones((chain_leng, len(obs_best))) * obs_best
        if cov_matrix is not None:
            cov_matrix = dp.numpy2torch(cov_matrix)
        obs_best_multi = ds.AddGaussianNoise(obs_best_multi, obs_errors=obs_errors, cov_matrix=cov_matrix, use_GPU=False).singleSigma(sigma=1)
        self.chain = self.predict(obs_best_multi, in_type='torch')
        return self.chain
    def save_net(self, path='ann', sample=None):
        """Save the trained network to ``<path>/net`` as a ``.pt`` file."""
        if sample is None:
            fileName = 'net_nodes%s_train%s_batch%s_epoch%s_%s.pt'%(str(self.nodes),len(self.params),self.batch_size,self.epoch,self.randn_num)
        else:
            fileName = 'net-%s_nodes%s_train%s_batch%s_epoch%s_%s.pt'%(sample,str(self.nodes),len(self.params),self.batch_size,self.epoch,self.randn_num)
        utils.saveTorchPt(path+'/net', fileName, self.net)
    def save_loss(self, path='ann', sample=None):
        """Save the training-loss history to ``<path>/net`` as ``.npy``."""
        if sample is None:
            fileName = 'loss_nodes%s_train%s_batch%s_epoch%s_%s'%(str(self.nodes),len(self.params),self.batch_size,self.epoch,self.randn_num)
        else:
            fileName = 'loss-%s_nodes%s_train%s_batch%s_epoch%s_%s'%(sample,str(self.nodes),len(self.params),self.batch_size,self.epoch,self.randn_num)
        utils.savenpy(path+'/net', fileName, self.loss)
    def save_chain(self, path='ann', sample=None):
        """Save the predicted chain to ``<path>/chains`` as ``.npy``."""
        if sample is None:
            fileName = 'chain_nodes%s_train%s_batch%s_epoch%s_%s'%(str(self.nodes),len(self.params),self.batch_size,self.epoch,self.randn_num)
        else:
            fileName = 'chain-%s_nodes%s_train%s_batch%s_epoch%s_%s'%(sample,str(self.nodes),len(self.params),self.batch_size,self.epoch,self.randn_num)
        utils.savenpy(path+'/chains', fileName, self.chain)
    def save_hparams(self, path='ann', sample=None):
        """Save the statistics and metadata needed to re-run predictions later."""
        if sample is None:
            fileName = 'hparams_nodes%s_train%s_batch%s_epoch%s_%s'%(str(self.nodes),len(self.params),self.batch_size,self.epoch,self.randn_num)
        else:
            fileName = 'hparams-%s_nodes%s_train%s_batch%s_epoch%s_%s'%(sample,str(self.nodes),len(self.params),self.batch_size,self.epoch,self.randn_num)
        utils.savenpy(path+'/hparams', fileName, [self.spectra_statistic, self.params_statistic, self.param_names, self.burnIn_step])
    def plot_loss(self):
        """Plot the recorded training loss."""
        evaluate.plot_loss(self.loss)
class RePredictMLP(MLP):
    """Repredict cosmological parameters using the saved networks.
    Parameters
    ----------
    path : str
        The path of the results saved. Default: 'ann'
    randn_num : str or int
        A random number that identifies the saved results.
    params_dict : dict or None, optional
        Information of cosmological parameters that include the labels, the base values, the minimum values,
        and the maximum values. See :func:`~.cosmic_params.params_dict_zoo`. Default: None
    Attributes
    ----------
    norm_inputs : bool, optional
        If True, the input data of the network will be normalized. Default: True
    norm_target : bool, optional
        If True, the target data (cosmological parameters) will be normalized. Default: True
    norm_type : str, optional
        The method of normalization, 'z_score', 'minmax', or 'mean' (see :class:`~.data_processor.Normalize`). Default: 'z_score'
    """
    def __init__(self, path='ann', randn_num='0.123', params_dict=None):
        # NOTE: MLP.__init__ is deliberately not called -- the data attributes
        # are restored from disk via the load_* methods instead.
        self.path = path
        self.randn_num = str(randn_num)
        self.params_dict = params_dict
        # these must match the settings used when the network was trained
        self.norm_inputs = True
        self.norm_target = True
        self.norm_type = 'z_score'
    def load_net(self):
        """Load the saved network identified by ``self.randn_num``."""
        file_path = evaluate.FilePath(filedir=self.path+'/net', randn_num=self.randn_num).filePath()
        self.net = torch.load(file_path)
    def load_loss(self):
        """Load the saved training-loss history (``.npy``)."""
        file_path = evaluate.FilePath(filedir=self.path+'/net', randn_num=self.randn_num, suffix='.npy').filePath()
        self.loss = np.load(file_path)
    def load_chain(self):
        """Load the saved ANN chain (``.npy``)."""
        file_path = evaluate.FilePath(filedir=self.path+'/chains', randn_num=self.randn_num, suffix='.npy').filePath()
        self.chain = np.load(file_path)
    def load_hparams(self):
        """Load the saved statistics/metadata needed for prediction."""
        file_path = evaluate.FilePath(filedir=self.path+'/hparams', randn_num=self.randn_num, suffix='.npy').filePath()
        self.spectra_statistic, self.params_statistic, self.param_names, self.burnIn_step = np.load(file_path, allow_pickle=True)
#%% multibranch network
class MultiBranchNet(MLP):
"""Multibranch network that is used to predict cosmological parameters with multiple sets of datasets.
Parameters
----------
spectra : list
Simulated observational spectra (data), it is a list spectra with shape [(N,spectra_length_1), (N,spectra_length_2), ...].
parameters : array-like
The simulated parameters of a specific cosmological (or theoretical) model.
param_names : list
A list which contains the parameter names, e.g. ['H0','Omega_m','ombh2','omch2','tau','As','ns'].
params_dict : dict or None, optional
Information of cosmological parameters that include the labels, the base values, the minimum values,
and the maximum values. See :func:`~.cosmic_params.params_dict_zoo`. Default: None
obs_errors : list, optional
Observational errors, it is a list of errors with shape [(spectra_length_1,), (spectra_length_2,), ...]. Default: None
cov_matrix : list or None, optional
A list of covariance matrix with shape [(spectra_length_1, spectra_length_1), (spectra_length_2, spectra_length_2), ...].
If there is no covariance for some observations, the covariance matrix should be set to None. e.g. [cov_matrix_1, None, cov_matrix_3]. Default: None
branch_hiddenLayer : int, optional
The number of the hidden layer for the branch part of the network. Default: 2
trunk_hiddenLayer : int, optional
The number of the hidden layer for the trunk part of the network. Default: 1
hp_model : str, optional
Hyperparameter model that contains hyperparameters (such as activation function, batch normalization, dropout, etc.) used in the network.
It can be 'eco_1', 'eco_2', 'eco_3', 'eco_4', 'eco_5', 'eco_6', or 'eco_7' (see :func:`~.hpmodel.models`). Default: 'eco_3'
loss_func : str, optional
The loss function used in the network. Default: 'L1'
noise_type : str, optional
The type of Gaussian noise added to the training set, 'singleSigma' or 'multiSigma'. Default: 'multiSigma'
noise_sigma : float, optional
For the case of 'singleSigma', it is the standard deviation of the Gaussian noise, while for the case of 'multiSigma' it is
a coefficient of the standard deviation. Default: 0.5
multi_noise : int, optional
The number of realization of noise added to a spectrum. Default: 5
Attributes
----------
lr : float, optional
The learning rate setting of the network. Default: 1e-2
lr_branch : float, optional
The learning rate setting of the branch part. Default: 1e-2
lr_min : float, optional
The minimum of the learning rate. Default: 1e-8
batch_size : int, optional
The batch size setting of the network. Default: 750
auto_batchSize : bool, optional
If True, the batch size will be set automatically in the training process, otherwise, use the setting of ``batch_size``. Default: True
epoch : int, optional
The number of epoch of the training process. Default: 2000
epoch_branch : int, optional
The number of epoch for the branch part. This only works when training the branch part. Default: 2000
base_epoch : int, optional
The base number (or the minimum number) of epoch. Default: 1000
auto_epoch : bool, optional
If True, the epoch will be set automatically in the training process, otherwise, use the setting of ``epoch``. Default: True
norm_inputs : bool, optional
If True, the input data of the network will be normalized. Default: True
norm_target : bool, optional
If True, the target data (cosmological parameters) will be normalized. Default: True
norm_type : str, optional
The method of normalization, 'z_score', 'minmax', or 'mean' (see :class:`~.data_processor.Normalize`). Default: 'z_score'
spaceSigma_min : int, optional
The minimum parameter space to be learned, e.g. for spaceSigma_min=5, the parameter space to be learned is :math:`[-5\sigma, +5\sigma]`. Default: 5
transfer_learning : bool, optional
If True, the network will be initialized using the well-trained network of the previous step. Default: False
Note
----
It is suggested to set lr and lr_branch the same value.
"""
    def __init__(self, spectra, parameters, param_names, params_dict=None, obs_errors=None, cov_matrix=None,
                 branch_hiddenLayer=2, trunk_hiddenLayer=1, hp_model='eco_3', loss_func='L1',
                 noise_type='multiSigma', noise_sigma=0.5, multi_noise=5):
        """Store the data and default hyperparameter settings; see the class
        docstring for the meaning of each parameter."""
        #data
        self.spectra = spectra
        # parameters are scaled to the network-friendly range up front
        self.params = dp.ParamsScaling(parameters, param_names, params_dict=params_dict).scaling()
        self.param_names = param_names
        self.params_dict = params_dict
        # per-branch errors/covariances; None entries are expanded to a list
        self.obs_errors = self._obs_errors(obs_errors)
        self.cov_matrix = self._cov_matrix(cov_matrix)
        #ANN model
        self.branch_n = len(spectra)
        self.branch_hiddenLayer = branch_hiddenLayer
        self.trunk_hiddenLayer = trunk_hiddenLayer
        self.hp_model = hp_model
        self.loss_func = train.loss_funcs(name=loss_func)
        self.lr = 1e-2
        self.lr_branch = 1e-2
        self.lr_min = 1e-8
        self.batch_size = 750
        self.auto_batchSize = True
        self.epoch = 2000
        self.epoch_branch = 2000
        self.base_epoch = 1000
        self.auto_epoch = True
        self.fix_initialize = False
        self.print_info = False
        #data preprocessing
        self.noise_type = noise_type
        self.noise_sigma = noise_sigma
        self.multi_noise = multi_noise
        self.norm_inputs = True
        self.norm_target = True
        self.norm_type = 'z_score'
        #training
        self.spaceSigma_min = 5
        self.auto_repeat_n = False
        self.burn_in = False
        self.burnIn_step = None
        self.transfer_learning = False
        # random identifier embedded in all saved file names
        self.randn_num = round(abs(np.random.randn()/5.), 5)
def _obs_errors(self, errors):
if errors is None:
return [None for i in range(len(self.spectra))]
else:
return errors
def _cov_matrix(self, matrix):
if matrix is None:
return [None for i in range(len(self.spectra))]
else:
return matrix
def _nodes(self):
self.nodes_in = []
self.node_out = self.params.shape[1]
for i in range(self.branch_n):
self.nodes_in.append(self.spectra[i].shape[1])
    def _net(self):
        """Build one fully connected net per branch (used for pre-training)
        plus the combined multibranch network ``self.net``."""
        if self.fix_initialize:
            torch.manual_seed(1000) #Fixed parameter initialization
        self._nodes()
        self.hparams = hpmodel.models(self.hp_model)
        for i in range(self.branch_n):
            # exec is used to create the numbered attributes self.branch_net1..N
            exec('self.branch_net%s=fcnet.get_FcNet(node_in=self.nodes_in[i], node_out=self.node_out,\
                 hidden_layer=2*self.branch_hiddenLayer+1, hparams=self.hparams)'%(i+1))
        self.net = fcnet.get_MultiBranchFcNet(nodes_in=self.nodes_in, node_out=self.node_out, branch_hiddenLayer=self.branch_hiddenLayer,
                                              trunk_hiddenLayer=self.trunk_hiddenLayer, nodes_all=None, hparams=self.hparams)
        if self.print_info:
            print(self.net)
    def transfer_branchNet(self, device=None):
        """Move every branch network to the GPU (no-op when not using GPU);
        exec works here because attribute assignment on self is not a local binding."""
        if self.use_GPU:
            for i in range(1, self.branch_n+1):
                exec('self.branch_net%s = self.branch_net%s.cuda(device)'%(i,i))
    def _copyLayer_fromBranch(self, branch_index=None):
        """Copy trained Linear/BatchNorm weights from the standalone branch
        net(s) into the corresponding branch of the multibranch net.

        With ``branch_index=None`` all branches are copied; otherwise only the
        1-based branch ``branch_index``.  ``eval`` is used only to resolve the
        numbered attribute names; ``load_state_dict`` mutates in place, so the
        discarded eval result is fine.  The ``j*3``/``j*3+1`` indexing assumes
        each hidden layer is a (Linear, BN, activation) triple -- TODO confirm
        against fcnet's layer layout.
        """
        if branch_index is None:
            print('\nCopying hyperparameters of the branch networks to the multibranch network')
            for i in range(1, self.branch_n+1):
                for j in range(self.branch_hiddenLayer+1):
                    eval('self.net.branch%s[j*3].load_state_dict(self.branch_net%s.fc[j*3].state_dict())'%(i, i))#copy Linear
                    eval('self.net.branch%s[j*3+1].load_state_dict(self.branch_net%s.fc[j*3+1].state_dict())'%(i, i))#copy BN
        else:
            print('Copying hyperparameters of the branch network {} to the multibranch network\n'.format(branch_index))
            for j in range(self.branch_hiddenLayer+1):
                eval('self.net.branch%s[j*3].load_state_dict(self.branch_net%s.fc[j*3].state_dict())'%(branch_index, branch_index))#copy Linear
                eval('self.net.branch%s[j*3+1].load_state_dict(self.branch_net%s.fc[j*3+1].state_dict())'%(branch_index, branch_index))#copy BN
    def statistic(self):
        """Compute and cache normalization statistics, one per branch for the
        spectra and one shared set for the parameters."""
        self.spectra_statistic = [dp.Statistic(self.spectra[i]).statistic() for i in range(self.branch_n)]
        self.params_statistic = dp.Statistic(self.params).statistic()
    def transfer_data(self):
        """Move the per-branch data (and errors/covariances) to torch tensors,
        on GPU when ``self.use_GPU`` is set (attribute provided by the base class)."""
        if self.use_GPU:
            self.spectra = [dp.numpy2cuda(self.spectra[i]) for i in range(self.branch_n)]
            self.params = dp.numpy2cuda(self.params)
            for i in range(self.branch_n):
                # each branch moves either its error vector or its covariance
                if self.cov_matrix[i] is None:
                    self.obs_errors[i] = dp.numpy2cuda(self.obs_errors[i])
                else:
                    self.cov_matrix[i] = dp.numpy2cuda(self.cov_matrix[i])
        else:
            self.spectra = [dp.numpy2torch(self.spectra[i]) for i in range(self.branch_n)]
            self.params = dp.numpy2torch(self.params)
            for i in range(self.branch_n):
                if self.cov_matrix[i] is None:
                    self.obs_errors[i] = dp.numpy2torch(self.obs_errors[i])
                else:
                    self.cov_matrix[i] = dp.numpy2torch(self.cov_matrix[i])
# def transfer_subData(self, device=None):
# if self.use_GPU:
# self.inputs = dp.numpy2cuda(self.inputs, device=device)
# self.target = dp.numpy2cuda(self.target, device=device)
# self.error = dp.numpy2cuda(self.error, device=device)
# else:
# self.inputs = dp.numpy2torch(self.inputs)
# self.target = dp.numpy2torch(self.target)
# self.error = dp.numpy2torch(self.error)
    def _train_branch(self, rank, repeat_n, showIter_n, device):
        """Pre-train the branch network of index ``rank`` on its own dataset.

        Runs inside a ``torch.multiprocessing`` subprocess when training in
        parallel; afterwards the learned layer weights are copied into the
        corresponding branch of the multibranch network.
        """
        optimizer = torch.optim.Adam(eval('self.branch_net%s.parameters()'%(rank+1)), lr=self.lr_branch)
        iteration = self.multi_noise*len(self.spectra[0])//self.batch_size * repeat_n
        self.inputs = self.spectra[rank]
        self.target = self.params
        self.error = self.obs_errors[rank]
        self.cov_m = self.cov_matrix[rank]
        # self.transfer_subData(device=device)
        print('Training the branch network %s'%(rank+1))
        for subsample_num in range(1, self.epoch_branch+1):
            # each epoch draws a fresh noisy realization of the training sample
            _inputs, _target = ds.AddMultiGaussianNoise(self.inputs,self.target,obs_errors=self.error,cov_matrix=self.cov_m,multi_noise=self.multi_noise,use_GPU=self.use_GPU).multiNoisySample(noise_type=self.noise_type,sigma=self.noise_sigma,reorder=True)
            if self.norm_inputs:
                _inputs = dp.Normalize(_inputs, self.spectra_statistic[rank], norm_type=self.norm_type).norm()
            if self.norm_target:
                _target = dp.Normalize(_target, self.params_statistic, norm_type=self.norm_type).norm()
            for iter_mid in range(1, iteration+1):
                batch_index = np.random.choice(len(_inputs), self.batch_size, replace=False)
                xx = _inputs[batch_index]
                yy = _target[batch_index]
                xx = Variable(xx)
                yy = Variable(yy, requires_grad=False)
                _predicted = eval('self.branch_net%s(xx)'%(rank+1))
                _loss = self.loss_func(_predicted, yy)
                optimizer.zero_grad()
                _loss.backward()
                optimizer.step()
            if subsample_num%showIter_n==0:
                print('(epoch:%s/%s; loss:%.5f; lr:%.8f)'%(subsample_num, self.epoch_branch, _loss.item(), optimizer.param_groups[0]['lr']))
            # exponential learning-rate decay over the branch training run
            lrdc = optimize.LrDecay(subsample_num,iteration=self.epoch_branch,lr=self.lr_branch,lr_min=self.lr_min)
            optimizer.param_groups[0]['lr'] = lrdc.exp()
        #############################################################################
        # Note: hyperparameters must be transferred in the subprocess.
        #
        # Variables defined in the subprocess can not be called by the main process,
        # but, the hyperparameters of "self.branch_net%s"%i can be copied to "self.net",
        # the reason may be that hyperparameters of the network shared the memory.
        #############################################################################
        #print(eval("self.branch_net%s.fc[3].state_dict()['bias'][:5]"%(rank+1)))
        self._copyLayer_fromBranch(branch_index=rank+1)
    def _train_branchNet(self, repeat_n=3, showIter_n=10):
        """Train all branch networks in parallel subprocesses via ``mp.spawn``."""
        #############################################################################
        # Note: variables used in the subprocess (in the function self._train_branch)
        # should be defined before using "mp.spawn", and variables defined in the
        # subprocess can not be called by the main process.
        #
        # # the following lines have the same function as "mp.spawn"
        # mp.set_start_method('spawn') #this is important
        # processes = []
        # for rank in range(self.branch_n):
        #     p = mp.Process(target=self._train_branch, args=(rank, repeat_n, showIter_n, device))
        #     p.start()
        #     processes.append(p)
        # for p in processes:
        #     p.join()
        #############################################################################
        #this means that the branch networks can only be trained on 1 GPU, how to train them on muliple GPUs?
        device = None
        #Note: all networks should be transfered to GPU when using "mp.spawn" to train the branch networks
        self.transfer_branchNet(device=device)
        mp.spawn(self._train_branch, nprocs=self.branch_n, args=(repeat_n, showIter_n, device), join=True)
    def _train_trunk(self, repeat_n=3, showIter_n=100, fix_lr=1e-4, reduce_fix_lr=False):
        """Train the trunk of the multibranch net while the branch parts are
        held at a small (optionally decaying) learning rate ``fix_lr``.

        ``repeat_n`` is accepted for a uniform signature but unused here; the
        iteration count comes from ``self.iteration`` set in :meth:`train`.
        """
        branch_p = []
        for i in range(1, self.branch_n+1):
            branch_p.append(eval("{'params':self.net.branch%s.parameters(), 'lr':fix_lr}"%i)) #lr=fix_lr
        optimizer = torch.optim.Adam(branch_p + [{'params':self.net.trunk.parameters()}], lr=self.lr)
        print('Training the trunk network')
        for subsample_num in range(1, self.epoch_branch+1):
            self.inputs, self.target = ds.AddMultiGaussianNoise(self.spectra,self.params,obs_errors=self.obs_errors,cov_matrix=self.cov_matrix,multi_noise=self.multi_noise,use_GPU=self.use_GPU).multiNoisySample(noise_type=self.noise_type,sigma=self.noise_sigma,reorder=True)
            if self.norm_inputs:
                self.inputs = [dp.Normalize(self.inputs[i], self.spectra_statistic[i], norm_type=self.norm_type).norm() for i in range(self.branch_n)]
            if self.norm_target:
                self.target = dp.Normalize(self.target, self.params_statistic, norm_type=self.norm_type).norm()
            for iter_mid in range(1, self.iteration+1):
                batch_index = np.random.choice(len(self.inputs[0]), self.batch_size, replace=False)
                xx = [self.inputs[i][batch_index] for i in range(self.branch_n)]
                yy = self.target[batch_index]
                xx = [Variable(xx[i]) for i in range(self.branch_n)]
                yy = Variable(yy, requires_grad=False)
                _predicted = self.net(xx)
                _loss = self.loss_func(_predicted, yy)
                optimizer.zero_grad()
                _loss.backward()
                optimizer.step()
            if subsample_num%showIter_n==0:
                print('(epoch:%s/%s; loss:%.5f; lr_branch:%.8f; lr_trunk:%.8f)'%(subsample_num, self.epoch_branch, _loss.item(), optimizer.param_groups[0]['lr'], optimizer.param_groups[-1]['lr']))
            # last param group is the trunk; decay its lr every epoch
            lrdc_t = optimize.LrDecay(subsample_num,iteration=self.epoch_branch,lr=self.lr,lr_min=self.lr_min)
            optimizer.param_groups[-1]['lr'] = lrdc_t.exp()
            #test
            if reduce_fix_lr:
                lrdc_b = optimize.LrDecay(subsample_num,iteration=self.epoch_branch,lr=fix_lr,lr_min=self.lr_min)#change to lr=self.lr ?
                for i in range(self.branch_n):
                    optimizer.param_groups[i]['lr'] = lrdc_b.exp()
    def train(self, repeat_n=3, showIter_n=100, train_branch=True, parallel=True, train_trunk=False, fix_lr=1e-4, reduce_fix_lr=False):
        """Train the multibranch network and return ``(net, loss)``.

        Parameters
        ----------
        repeat_n : int, optional
            Multiplier on the number of minibatch iterations per epoch. Default: 3
        showIter_n : int, optional
            Print progress every ``showIter_n`` epochs. Default: 100
        train_branch : bool, optional
            If True, pre-train the branch networks first. Default: True
        parallel : bool, optional
            Pre-train branches in parallel subprocesses. Default: True
        train_trunk : bool, optional
            If True, also train the trunk with frozen-ish branches. Default: False
        fix_lr, reduce_fix_lr :
            Passed through to :meth:`_train_trunk`.
        """
        self._net()
        if self.transfer_learning==True and train_branch==False:
            self.copyLayers_fromTrainedNet()
        self.transfer_net(prints=self.print_info)
        #branch_p = [eval("{'params':self.net.branch%s.parameters(), 'lr':self.lr_branch}"%i) for i in range(1,self.branch_n+1)] #this will raise an error in python3.X
        #however, the following lines run well for both python2.X and python3.X, why?
        branch_p = []
        for i in range(1, self.branch_n+1):
            branch_p.append(eval("{'params':self.net.branch%s.parameters(), 'lr':self.lr_branch}"%i))
        self.optimizer = torch.optim.Adam(branch_p + [{'params':self.net.trunk.parameters()}], lr=self.lr)
        #added
        if self.auto_batchSize:
            self._auto_batchSize()
        self._check_batchSize()
        # print('batch size: %s'%self.batch_size)
        if self.auto_epoch:
            self._auto_epoch()
        if self.auto_repeat_n:
            repeat_n = self._auto_repeat_n(repeat_n)
        self.iteration = self.multi_noise*len(self.spectra[0])//self.batch_size * repeat_n
        # print('repeat_n:%s'%repeat_n)
        self.statistic()
        self.transfer_data()
        # np.random.seed(1000)#
        print('randn_num: {}'.format(self.randn_num))
        if train_branch:
            if parallel:
                self._train_branchNet(repeat_n=repeat_n, showIter_n=showIter_n)
            else:
                self.transfer_branchNet()
                for rank in range(self.branch_n):
                    self._train_branch(rank, repeat_n, showIter_n, None)
        if train_trunk:
            self._train_trunk(repeat_n=repeat_n, showIter_n=showIter_n, fix_lr=fix_lr, reduce_fix_lr=reduce_fix_lr)
        self.loss = []
        print('\nTraining the multibranch network')
        # "subsample_num" is really the epoch counter; each epoch draws a fresh
        # noisy realization of the whole training sample
        for subsample_num in range(1, self.epoch+1):
            self.inputs, self.target = ds.AddMultiGaussianNoise(self.spectra,self.params,obs_errors=self.obs_errors,cov_matrix=self.cov_matrix,multi_noise=self.multi_noise,use_GPU=self.use_GPU).multiNoisySample(noise_type=self.noise_type,sigma=self.noise_sigma,reorder=True)
            if self.norm_inputs:
                self.inputs = [dp.Normalize(self.inputs[i], self.spectra_statistic[i], norm_type=self.norm_type).norm() for i in range(self.branch_n)]
            if self.norm_target:
                self.target = dp.Normalize(self.target, self.params_statistic, norm_type=self.norm_type).norm()
            for iter_mid in range(1, self.iteration+1):
                batch_index = np.random.choice(len(self.inputs[0]), self.batch_size, replace=False)
                xx = [self.inputs[i][batch_index] for i in range(self.branch_n)]
                yy = self.target[batch_index]
                xx = [Variable(xx[i]) for i in range(self.branch_n)]
                yy = Variable(yy, requires_grad=False)
                _predicted = self.net(xx)
                _loss = self.loss_func(_predicted, yy)
                self.optimizer.zero_grad()
                _loss.backward()
                self.optimizer.step()
                self.loss.append(_loss.item())
            if subsample_num%showIter_n==0:
                if self.lr==self.lr_branch:
                    print('(epoch:%s/%s; loss:%.5f; lr:%.8f)'%(subsample_num, self.epoch, self.loss[-1], self.optimizer.param_groups[0]['lr']))
                else:
                    print('(epoch:%s/%s; loss:%.5f; lr_branch:%.8f; lr_trunk:%.8f)'%(subsample_num, self.epoch, self.loss[-1], self.optimizer.param_groups[0]['lr'], self.optimizer.param_groups[-1]['lr']))
            # decay trunk lr (last param group) and branch lrs (first branch_n groups)
            lrdc_t = optimize.LrDecay(subsample_num,iteration=self.epoch,lr=self.lr,lr_min=self.lr_min)
            self.optimizer.param_groups[-1]['lr'] = lrdc_t.exp()
            lrdc_b = optimize.LrDecay(subsample_num,iteration=self.epoch,lr=self.lr_branch,lr_min=self.lr_min)#change to lr=self.lr ?
            for i in range(self.branch_n):
                self.optimizer.param_groups[i]['lr'] = lrdc_b.exp()
        self.net = self.net.cpu()
        self.loss = np.array(self.loss)
        return self.net, self.loss
    def _predict(self, net, inputs, in_type='numpy'):
        """Run ``net`` on a list of per-branch inputs and return a numpy array.

        ``in_type`` is 'numpy' or 'torch'; numpy inputs are converted first.
        """
        net = net.eval() #this works for the batch normalization layers
        if in_type=='numpy':
            inputs = [dp.numpy2torch(inputs[i]) for i in range(len(inputs))]
        inputs = [Variable(inputs[i]) for i in range(len(inputs))]
        pred = net(inputs)
        pred = dp.torch2numpy(pred.data)
        return pred
def predict(self, spectra, in_type='numpy'):
# spectra: [spectra1, spectra2, ...]
if len(spectra[0].shape)==1:
spectra = [spectra[i].reshape(1, -1) for i in range(len(spectra))] #for one spectrum
if self.norm_inputs:
spectra = [dp.Normalize(spectra[i], self.spectra_statistic[i], norm_type=self.norm_type).norm() for i in range(len(spectra))]
pred_params = self._predict(self.net, spectra, in_type=in_type)
if self.norm_target:
self.pred_params = dp.InverseNormalize(pred_params, self.params_statistic, norm_type=self.norm_type).inverseNorm()
self.pred_params = dp.ParamsScaling(self.pred_params, self.param_names, params_dict=self.params_dict).inverseScaling()
return self.pred_params
    def predict_chain(self, obs_spectra, cov_matrix=None, chain_leng=10000):
        """Produce an ANN "chain" by noising the observed spectra and predicting.

        Each observed spectrum is replicated `chain_leng` times, Gaussian noise
        (per-point 1-sigma errors, or a full covariance when given) is added,
        and the network predicts parameters for every noisy realization.

        obs_spectra : list of arrays, each of shape (N, 3); columns are
            presumably (x, best-fit value, 1-sigma error) -- confirm upstream.
        cov_matrix : list of covariance matrices, or None (element-wise None
            means use the per-point errors for that spectrum).
        chain_leng : int, number of noisy realizations per spectrum.
        """
        # obs_spectra: observational spectrum in a list [spectra1, spectra2, ...], each element has shape (N, 3)
        # torch.manual_seed(1000)#
        if cov_matrix is None:
            cov_matrix = [None for i in range(len(obs_spectra))]
        obs_spectra = [dp.numpy2torch(obs_spectra[i]) for i in range(len(obs_spectra))]
        obs_best = [obs_spectra[i][:,1] for i in range(len(obs_spectra))]
        obs_errors = [obs_spectra[i][:,2] for i in range(len(obs_spectra))]
        # replicate each best-fit vector chain_leng times for batch prediction
        obs_best_multi = [torch.ones((chain_leng, len(obs_best[i]))) * obs_best[i] for i in range(len(obs_spectra))]
        for i in range(len(obs_spectra)):
            if cov_matrix[i] is not None:
                cov_matrix[i] = dp.numpy2torch(cov_matrix[i])
        obs_best_multi = ds.AddGaussianNoise(obs_best_multi, obs_errors=obs_errors, cov_matrix=cov_matrix, use_GPU=False).singleSigma(sigma=1)
        self.chain = self.predict(obs_best_multi, in_type='torch')
        return self.chain
def save_net(self, path='ann', sample='TT'):
if sample is None:
fileName = 'net_branch%s_train%s_batch%s_epoch%s_epochBranch%s_%s.pt'%(self.branch_n,len(self.params),self.batch_size,self.epoch,self.epoch_branch,self.randn_num)
else:
fileName = 'net-%s_branch%s_train%s_batch%s_epoch%s_epochBranch%s_%s.pt'%(sample,self.branch_n,len(self.params),self.batch_size,self.epoch,self.epoch_branch,self.randn_num)
utils.saveTorchPt(path+'/net', fileName, self.net)
def save_loss(self, path='ann', sample='TT'):
if sample is None:
fileName = 'loss_branch%s_train%s_batch%s_epoch%s_epochBranch%s_%s'%(self.branch_n,len(self.params),self.batch_size,self.epoch,self.epoch_branch,self.randn_num)
else:
fileName = 'loss-%s_branch%s_train%s_batch%s_epoch%s_epochBranch%s_%s'%(sample,self.branch_n,len(self.params),self.batch_size,self.epoch,self.epoch_branch,self.randn_num)
utils.savenpy(path+'/net', fileName, self.loss)
def save_chain(self, path='ann', sample='TT'):
if sample is None:
fileName = 'chain_branch%s_train%s_batch%s_epoch%s_epochBranch%s_%s'%(self.branch_n,len(self.params),self.batch_size,self.epoch,self.epoch_branch,self.randn_num)
else:
fileName = 'chain-%s_branch%s_train%s_batch%s_epoch%s_epochBranch%s_%s'%(sample,self.branch_n,len(self.params),self.batch_size,self.epoch,self.epoch_branch,self.randn_num)
utils.savenpy(path+'/chains', fileName, self.chain)
def save_hparams(self, path='ann', sample='TT'):
if sample is None:
fileName = 'hparams_branch%s_train%s_batch%s_epoch%s_epochBranch%s_%s'%(self.branch_n,len(self.params),self.batch_size,self.epoch,self.epoch_branch,self.randn_num)
else:
fileName = 'hparams-%s_branch%s_train%s_batch%s_epoch%s_epochBranch%s_%s'%(sample,self.branch_n,len(self.params),self.batch_size,self.epoch,self.epoch_branch,self.randn_num)
utils.savenpy(path+'/hparams', fileName, [self.spectra_statistic, self.params_statistic, self.param_names, self.burnIn_step])
class RePredictMBNet(MultiBranchNet):
    """Repredict cosmological parameters using previously saved networks.

    Parameters
    ----------
    path : str
        The path where the results were saved. Default: 'ann'
    randn_num : str or int
        The random number that identifies the saved results.
    params_dict : dict or None, optional
        Information of cosmological parameters that include the labels, the base values,
        the minimum values, and the maximum values.
        See :func:`~.cosmic_params.params_dict_zoo`. Default: None

    Attributes
    ----------
    norm_inputs : bool, optional
        If True, the input data of the network will be normalized. Default: True
    norm_target : bool, optional
        If True, the target data (cosmological parameters) will be normalized. Default: True
    norm_type : str, optional
        The method of normalization, 'z_score', 'minmax', or 'mean'
        (see :class:`~.data_processor.Normalize`). Default: 'z_score'
    """
    def __init__(self, path='ann', randn_num='0.123', params_dict=None):
        self.path = path
        self.randn_num = str(randn_num)
        self.params_dict = params_dict
        # defaults mirror those used when the saved network was trained
        self.norm_inputs = True
        self.norm_target = True
        self.norm_type = 'z_score'

    def _saved_file(self, subdir, suffix=None):
        # Locate a saved file under self.path/<subdir> by its random number.
        kwargs = {} if suffix is None else {'suffix': suffix}
        return evaluate.FilePath(filedir=self.path + '/' + subdir,
                                 randn_num=self.randn_num, **kwargs).filePath()

    def load_net(self):
        """Load the saved torch network."""
        self.net = torch.load(self._saved_file('net'))

    def load_loss(self):
        """Load the saved training losses."""
        self.loss = np.load(self._saved_file('net', suffix='.npy'))

    def load_chain(self):
        """Load the saved parameter chain."""
        self.chain = np.load(self._saved_file('chains', suffix='.npy'))

    def load_hparams(self):
        """Load the saved normalization statistics and metadata."""
        loaded = np.load(self._saved_file('hparams', suffix='.npy'), allow_pickle=True)
        self.spectra_statistic, self.params_statistic, self.param_names, self.burnIn_step = loaded
#!!!
# TODO: 1. investigate how to reduce training time when using multi_noise
# 2. results are poor for one-sided (unilateral) distributions -- find a fix
# 3. evaluate whether transfer learning would help
|
import unittest
from amonlite.web.template import *
from nose.tools import eq_
class TestTemplateFilters(unittest.TestCase):
    """Unit tests for the template helper filters in amonlite.web.template.

    NOTE(review): the date/time expectations below encode a fixed rendering of
    epoch 1319737106 -- presumably local-time dependent; confirm the filters
    pin a timezone before running these on another machine.
    """
    def test_dateformat(self):
        # epoch seconds -> 'DD-MM-YYYY-HH:MM'
        date = dateformat(1319737106)
        eq_('27-10-2011-17:38', date)
    def test_timeformat(self):
        # epoch seconds -> 'HH:MM'
        time = timeformat(1319737106)
        eq_('17:38', time)
    def test_date_to_js(self):
        # JavaScript Date components (month is 0-based, hence 9 for October)
        date = date_to_js(1319737106)
        eq_('2011,9, 27, 17, 38', date)
    def test_to_int(self):
        # extracts the integer part of a mixed string
        _int = to_int('testme2')
        eq_(_int, 2)
    def test_clean_string(self):
        # strips the unit suffix, keeps the numeric value
        string = clean_string('24.5MB')
        eq_(string, 24.5)
    def test_progress_width_percent(self):
        # full container is 330px wide; 0% collapses the bar entirely
        full_container = progress_width_percent(100, container_type='full' )
        eq_(full_container, '330px')
        full_container = progress_width_percent(50, container_type='full' )
        eq_(full_container, '165px')
        full_container = progress_width_percent(0, container_type='full' )
        eq_(full_container, '0px; border:2px solid transparent; background: none;')
        # medium container is 158px wide
        container = progress_width_percent(100, container_type='medium' )
        eq_(container, '158px')
        container = progress_width_percent(50, container_type='medium' )
        eq_(container, '79px')
        container = progress_width_percent(0, container_type='medium' )
        eq_(container, '0px; border:2px solid transparent; background: none;')
        # small container is 100px wide
        container = progress_width_percent(100, container_type='small' )
        eq_(container, '100px')
        container = progress_width_percent(50, container_type='small' )
        eq_(container, '50px')
    def test_progress_width(self):
        # value/total pairs mapped onto the same pixel widths as the percent variant
        full_container = progress_width(300, 300, container_type='full' )
        eq_(full_container, '330px')
        full_container_50 = progress_width(150, 300, container_type='full' )
        eq_(full_container_50, '165px')
        full_container_0 = progress_width(0, 300, container_type='full' )
        eq_(full_container_0, '0px; border:2px solid transparent; background: none;')
        medium_container = progress_width(300, 300, container_type='medium' )
        eq_(medium_container, '158px')
        medium_container_50 = progress_width(150, 300, container_type='medium' )
        eq_(medium_container_50, '79px')
        medium_container_0 = progress_width(0, 300, container_type='medium' )
        eq_(medium_container_0, '0px; border:2px solid transparent; background: none;')
        small_container = progress_width(300, 300, container_type='small' )
        eq_(small_container, '100px')
        small_container_50 = progress_width(150, 300, container_type='small' )
        eq_(small_container_50, '50px')
        small_container_0 = progress_width(0, 300, container_type='small' )
        eq_(small_container_0, '0px; border:2px solid transparent; background: none;')
    def test_progress_width_with_zeroes(self):
        # 0/0 must not divide by zero -- renders an empty bar
        empty_container_full = progress_width(0,0, container_type='full' )
        eq_(empty_container_full, '0px; border:2px solid transparent; background: none;')
        empty_container_medium = progress_width(0,0, container_type='medium' )
        eq_(empty_container_medium, '0px; border:2px solid transparent; background: none;')
        empty_container_small = progress_width(0,0, container_type='small' )
        eq_(empty_container_small, '0px; border:2px solid transparent; background: none;')
    def test_value_bigger_than_total(self):
        # value exceeding total clamps to a full bar
        container_full = progress_width(600,0, container_type='full' )
        eq_(container_full, '330px')
    def test_with_big_numbers(self):
        container_full = progress_width(12332323600,3434344, container_type='full')
        eq_(container_full, '330px') # Value bigger than total - container is 100%
        container = progress_width(9,12233332, container_type='full')
        eq_(container, '0px; border:2px solid transparent; background: none;')
        container_full = progress_width(1232,34343, container_type='full')
        eq_(container_full, '9px')
    def test_url(self):
        # joins path segments with '/'
        _url = url('more', 'and', 'even', 'more')
        eq_(_url, 'more/and/even/more')
    def test_base_url(self):
        _base_url = base_url()
        assert isinstance(_base_url, str)
    def test_check_additional_data(self):
        # dicts containing only 'occurrence' carry no extra data -> None
        ignored_dicts = [{'occurrence': 12223323}, {'occurrence': 1212121221}]
        check_ignored_dicts = check_additional_data(ignored_dicts)
        eq_(check_ignored_dicts, None)
        # any dict with an extra key -> True
        true_dicts = [{'occurrence': 12223323, 'test': 'me'}, {'occurrence': 1212121221}]
        check_true_dicts = check_additional_data(true_dicts)
        eq_(check_true_dicts, True)
    def test_cleanup_string(self):
        # strips slashes and dashes from both ends
        string = '//test---/'
        clean = clean_slashes(string)
        eq_(clean, 'test')
    def test_query_dict(self):
        base_url = 'local'
        # Single parameter with single value
        uri = query_dict(base_url, {'tags': ['test']},1)
        eq_(uri, 'local?tags=test&page=1')
        # Single parameter with multiple values
        uri = query_dict(base_url, {'tags': ['test', 'more']},1)
        eq_(uri, 'local?tags=test&tags=more&page=1')
        # Single parameter, no page, no values
        uri = query_dict(base_url, {'tags': []})
        eq_(uri, 'local')
        # Single parameter, page , no values
        uri = query_dict(base_url, {'tags': []},1)
        eq_(uri, 'local?page=1')
        # Multiple paramaters with multiple values
        # NOTE(review): the expected string joins the second group with '?'
        # rather than '&' -- looks like a bug being pinned; verify intent.
        uri = query_dict(base_url, {'tags': ['test', 'more'], 'query': ['test','me']},1)
        eq_(uri, 'local?query=test&query=me?tags=test&tags=more&page=1')
        # Single parameter, page , value is None
        uri = query_dict(base_url, {'tags': None },1)
        eq_(uri, 'local?page=1')
|
from django.shortcuts import render
from stock.models import Stock, StockPrice, StockNews, StockTweet
from datetime import datetime, date
from stock.utils.ticker import Ticker
import json
from stock.utils.signals import Signals
import math
from collections import Counter
import pandas as pd
from stock.utils.load_data import import_news_data, import_tweets_data
# Create your views here.
def home(request):
    """Render the home page with the list of all known stock tickers."""
    all_tickers = Stock.objects.all().values_list('ticker', flat=True)
    context = {"tickers": all_tickers}
    return render(request, "stock/home.html", context)
def stock_detail(request, ticker):
    """Render the detail page for one ticker.

    Builds the price-history table, the indicator chart payload, and the
    combined technical / news / twitter signals. Unknown tickers render the
    same template with empty context values.
    """
    # NOTE(review): hard-coded absolute path -- should come from settings or
    # app configuration so the project runs on other machines.
    with open("/Users/chetangupta/Documents/College/BTP/stock analysis code/analysis-dashboard/StockAnalyser/stock/utils/INDICATOR_CONFIG.json") as file:
        config = json.load(file)
    try:
        stock_obj = Stock.objects.get(ticker__iexact=ticker)
        key_executives = stock_obj.key_executives.all()
        historical_price = StockPrice.objects.filter(ticker=stock_obj, date__gte=date(2014, 12, 4))
        historical_price_data = historical_price.order_by("-date")[:10]
        ticker_obj = Ticker(ticker_string=ticker, start_date="2014-12-04")
        for indicator_name in config["INDICATORS_CONFIG"]:
            indicator_obj = ticker_obj.get_indicator(indicator_name=indicator_name,
                                                     indicator_config=config["INDICATORS_CONFIG"][indicator_name])
            indicator_obj.calculate()
        # only chart the most recent part of the history
        indicator_starting_point = 850
        chart_payload = {
            "date": [x.date.strftime("%Y-%m-%d") for x in historical_price][indicator_starting_point:],
            "close": ["{}".format(x.close_price) for x in historical_price][indicator_starting_point:],
            "volume": [x.total_traded_quantity for x in historical_price][indicator_starting_point:]
        }
        for col in list(ticker_obj.data.columns.values):
            # BUG FIX: `col is not "Date"` compared object identity of a str
            # literal (CPython interning artifact); use equality.
            if col != "Date":
                # NaNs are replaced with 0 so the JS chart can consume the series
                chart_payload[col.replace("|", "_")] = [0 if math.isnan(x) else x for x in list(
                    ticker_obj.data[col].values)][indicator_starting_point:]
        technical_analysis_signals = Signals(ticker=ticker, data_df=ticker_obj.data)
        stock_news = StockNews.objects.filter(ticker=stock_obj).order_by("-date")[:10]
        news_analysis_signal = Counter([x.sentiment for x in stock_news]).most_common(1)[0][0]
        stock_tweets = StockTweet.objects.filter(ticker=stock_obj).order_by("-date")[:10]
        # BUG FIX: the tweets signal previously recounted stock_news sentiments.
        tweets_analysis_signal = Counter([x.sentiment for x in stock_tweets]).most_common(1)[0][0]
        # majority vote across the three signal sources
        overall_signal = Counter([technical_analysis_signals.overall_signal,
                                  news_analysis_signal, tweets_analysis_signal]).most_common(1)[0][0]
        signals = {
            "overall": overall_signal,
            "technical_indicators": technical_analysis_signals.overall_signal,
            "all_technical_indicators": technical_analysis_signals.overall_signal_data,
            "news_analysis": news_analysis_signal,
            "twitter_analysis": tweets_analysis_signal
        }
    except Stock.DoesNotExist:
        stock_obj = None
        key_executives = None
        signals = None
        historical_price_data = None
        chart_payload = None
        # BUG FIX: a trailing comma made this the tuple (None,).
        stock_news = None
        stock_tweets = None
    return render(request, "stock/stock.html", {
        "stock": stock_obj,
        "key_executives": key_executives,
        "signals": signals,
        "historical_data": historical_price_data,
        "chart_payload": chart_payload,
        "stock_news": stock_news,
        "stock_tweets": stock_tweets,
    })
|
#!/usr/bin/env python
import boto3
import json
TABLE_NAME = 'vehicle'

# Delete the record for device DR003 and print whatever was removed.
dynamodb = boto3.client('dynamodb')
deleted = dynamodb.delete_item(
    TableName=TABLE_NAME,
    Key={'device': {'S': 'DR003'}},
    ReturnValues='ALL_OLD',
)
print(json.dumps(deleted, indent=4))
from pathlib import Path
from secret_santa.util.path import PathUtils
def test_get_project_root(project_root_directory: Path):
    """Double validation: the helper and the fixture must agree on the root."""
    code_project_root = PathUtils.get_project_root()
    mismatch_message = (
        f"The PathUtils.get_project_root() and project_root_directory do not point to the same directory as they "
        f"should: PathUtils.get_project_root(): {code_project_root}, project_root_directory: {project_root_directory}."
    )
    assert code_project_root == project_root_directory, mismatch_message
|
"""
References:
https://github.com/TheAlgorithms/Python/blob/master/digital_image_processing/filters/bilateral_filter.py
"""
import cv2
import numpy as np
def vec_gaussian(img, sigma):
    """Element-wise zero-mean Gaussian weight of each value in `img`."""
    norm_const = 1 / (sigma * np.sqrt(2 * np.pi))
    scaled = img / sigma
    return norm_const * np.exp(-0.5 * scaled ** 2)
def get_window(img, center, k_size=3):
    """Return the k_size x k_size patch of `img` centred on (y, x) = center."""
    y, x = center
    half = k_size // 2
    return img[y - half:y + half + 1, x - half:x + half + 1]
def get_space_gaussian(k_size=3, sigma=1):
    """Gaussian weights over pixel-to-centre Euclidean distances in a window."""
    half = k_size // 2
    rows, cols = np.meshgrid(np.arange(k_size), np.arange(k_size), indexing='ij')
    distance = np.sqrt((rows - half) ** 2 + (cols - half) ** 2)
    return vec_gaussian(distance, sigma)
def bilateral_filter(img, space_sigma=1, intensity_sigma=1, k_size=3):
    """Apply an edge-preserving bilateral filter to a 2-D grayscale image.

    Each output pixel is a weighted mean of its k_size x k_size neighbourhood,
    where weights combine spatial closeness (space_sigma) with intensity
    similarity (intensity_sigma). The image is edge-padded so the output has
    the same shape as the input.
    """
    height, width = img.shape[0], img.shape[1]
    pad_size = k_size // 2
    padding_img = np.pad(img, pad_size, mode="edge")
    space_gaussian = get_space_gaussian(k_size, space_sigma)
    bilateral_img = np.zeros((height, width))
    for i in range(pad_size, padding_img.shape[0] - pad_size):
        for j in range(pad_size, padding_img.shape[1] - pad_size):
            window_intensity = get_window(padding_img, (i, j), k_size)
            differ_intensity = window_intensity - padding_img[i][j]
            intensity_gaussian = vec_gaussian(differ_intensity, intensity_sigma)
            current_gaussian = np.multiply(intensity_gaussian, space_gaussian)
            values = np.multiply(window_intensity, current_gaussian)
            current_value = np.sum(values) / np.sum(current_gaussian)
            # BUG FIX: the original indexed `bilateral_filter` (the function
            # object itself) here, raising TypeError; the result belongs in
            # the output image.
            bilateral_img[i - pad_size][j - pad_size] = current_value
    return bilateral_img
if __name__ == "__main__":
    # Demo: filter a local test image and display the result.
    test_path = r"./test.jpg"
    img = cv2.imread(test_path)
    gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # scale intensities to [0, 1] floats before filtering
    gray_img = gray_img / 255
    gray_img = gray_img.astype("float32")
    bilateral_img = bilateral_filter(gray_img)
    # back to displayable 8-bit range
    bilateral_img = bilateral_img * 255
    bilateral_img = bilateral_img.astype("uint8")
    # show result images
    cv2.imshow("bilateral filter", bilateral_img)
    cv2.waitKey(0)
|
""" func.py - Additional tools for working with functions and callable objects
"""
__all__ = ['persistent_cache']
import functools
from .collections import PersistentDict
def _map_wrapper(user_func, m):
@functools.wraps(user_func)
def wrapper(*args, **kwargs):
key = args + tuple(kwargs.items())
if key in m:
return m[key]
response = user_func(*args, **kwargs)
m[key] = response
return response
return wrapper
def persistent_cache(identifier=None, **kwargs):
    """Decorator factory: memoize a function into a PersistentDict.

    `identifier` names the persistent store and defaults to the decorated
    function's __name__; extra kwargs are forwarded to PersistentDict.
    """
    def decorator(user_func):
        store_name = identifier if identifier else user_func.__name__
        store = PersistentDict(store_name, **kwargs)
        return _map_wrapper(user_func, store)
    return decorator
|
"""
The model is adapted from the tensorflow tutorial:
https://www.tensorflow.org/get_started/mnist/pros
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
class Model(object):
    """CNN for MNIST (28x28 grayscale) with a cosine (large-margin) loss head.

    Architecture: two 5x5 conv + 2x2 max-pool stages (32 then 64 channels),
    one fully connected layer of width `fea_dim`, then a cosine-loss output
    over 10 classes.
    """
    def __init__(self, fea_dim=1024):
        self.x_input = tf.placeholder(tf.float32, shape=[None, 784])
        self.y_input = tf.placeholder(tf.int64, shape=[None])
        self.x_image = tf.reshape(self.x_input, [-1, 28, 28, 1])
        # first convolutional layer
        W_conv1 = self._weight_variable([5, 5, 1, 32])
        b_conv1 = self._bias_variable([32])
        self.h_conv1 = h_conv1 = tf.nn.relu(self._conv2d(self.x_image, W_conv1) + b_conv1)
        h_pool1 = self._max_pool_2x2(h_conv1)
        # second convolutional layer
        W_conv2 = self._weight_variable([5, 5, 32, 64])
        b_conv2 = self._bias_variable([64])
        self.h_conv2 = h_conv2 = tf.nn.relu(self._conv2d(h_pool1, W_conv2) + b_conv2)
        h_pool2 = self._max_pool_2x2(h_conv2)
        # first fully connected layer
        # BUG FIX: W_fc1 was hard-coded to width 256 while b_fc1 used fea_dim
        # (default 1024), and the 4-D pool output was fed to matmul without
        # flattening -- both fail at graph-construction time.
        W_fc1 = self._weight_variable([7 * 7 * 64, fea_dim])
        b_fc1 = self._bias_variable([fea_dim])
        h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
        self.fc1 = h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
        # output layer: cosine loss over the learned features
        from cosine_loss import cos_loss
        self.fea_variables = [W_conv1, b_conv1, W_conv2, b_conv2, W_fc1, b_fc1]
        self.fea = h_fc1
        labels = self.y_input
        NUM_CLASSES = 10
        total_loss, logits, tmp = cos_loss(self.fea, labels, NUM_CLASSES, alpha=0.05)
        self.xent = total_loss
        # tf.argmax replaces the deprecated tf.arg_max (identical semantics)
        self.y_pred = tf.argmax(tf.matmul(tmp['x_feat_norm'], tmp['w_feat_norm']), 1)
        correct_prediction = tf.cast(tf.equal(self.y_pred, labels), tf.float32)
        self.num_correct = tf.reduce_sum(correct_prediction)
        self.accuracy = tf.reduce_mean(correct_prediction)
    @staticmethod
    def _weight_variable(shape):
        """Truncated-normal (stddev 0.1) initialized weight tensor."""
        initial = tf.truncated_normal(shape, stddev=0.1)
        return tf.Variable(initial)
    @staticmethod
    def _bias_variable(shape):
        """Constant-0.1 bias tensor (keeps ReLUs initially active)."""
        initial = tf.constant(0.1, shape=shape)
        return tf.Variable(initial)
    @staticmethod
    def _conv2d(x, W):
        """Stride-1, SAME-padded 2-D convolution."""
        return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
    @staticmethod
    def _max_pool_2x2(x):
        """2x2 max pooling with stride 2 (halves spatial dimensions)."""
        return tf.nn.max_pool(x,
                              ksize=[1, 2, 2, 1],
                              strides=[1, 2, 2, 1],
                              padding='SAME')
|
import requests
def _f(path):
return 'http://localhost:8080/' + path
def create_user(params):
    """POST a new user; `params` must contain 'sid', 'name' and 'pass'."""
    for required in ('sid', 'name', 'pass'):
        assert required in params
    res = requests.post(_f('users'), json=params)
    return res.status_code == 200
def search_user(sid):
    """Look up a user by student id; the API signals a hit with HTTP 302."""
    res = requests.get(_f('search/users/' + sid))
    if res.status_code != 302:
        return None
    return res.json()
def get_user(uid):
    """Fetch a user record by id, or None when the lookup fails."""
    res = requests.get(_f('users/' + uid))
    if res.status_code != 200:
        return None
    return res.json()
def get_token(params):
    """Obtain a bearer token for the credentials ('sid', 'pass') in params."""
    for required in ('sid', 'pass'):
        assert required in params
    res = requests.post(_f('token'), json=params)
    if res.status_code != 200:
        return None
    return res.json()['token']
def delete_user(token, uid):
    """DELETE a user by id using a bearer token; True on HTTP 200."""
    auth_header = {'Authorization': 'Bearer ' + token}
    res = requests.delete(_f('users/' + uid), headers=auth_header)
    return res.status_code == 200
def test_user_registration():
    """End-to-end flow: register, search, authenticate, then delete."""
    credentials = {
        'sid': 'proelbtn',
        'name': 'proelbtn',
        'pass': 'proelbtn',
    }
    assert create_user(credentials)
    found = search_user(credentials['sid'])
    assert found is not None
    token = get_token(credentials)
    assert token is not None
    assert delete_user(token, found['id'])
|
from ..emailer import sendemail
import subprocess
# Czech-language confirmation e-mail template; placeholders are filled via
# str.format_map with the form data (each field is a list, hence the [0]).
notifytext = """
Děkujeme za registraci na LinuxDays. Ještě je potřeba registraci potvrdit.
Jméno: {name[0]}
Odebírat oznámení: {announces[0]}
Účast: {days[0]}
Oběd sobota: {mealsat[0]}
Oběd neděle: {mealsun[0]}
Svou registraci prosím potvrďte kliknutím na následující odkaz:
https://www.linuxdays.cz/cgi-bin/ldform.py?formid=conf2016&r={regid}&n={nonce}
Na stejném odkazu je možno registraci kdykoli později zrušit.
Děkujeme,
váš tým LinuxDays
"""
# English-language variant of the same confirmation template.
notifytexten = """
Thank you for registration to LinuxDays. You need to confirm it now.
Name: {name[0]}
Get announces: {announces[0]}
Days: {days[0]}
Meal Saturday: {mealsat[0]}
Meal Sunday: {mealsun[0]}
Please confirm your registration by clicking this link:
https://www.linuxdays.cz/cgi-bin/ldform.py?formid=conf2016&r={regid}&n={nonce}&l=en
You can also cancel your registration using the same link.
Thank You,
LinuxDays team
"""
def respond(data):
    """Send e-mail notification.

    `data` looks like a CGI form-style mapping (it supports getfirst and item
    assignment); sends a language-appropriate confirmation e-mail when an
    address is present, then prints an HTML "registration recorded" page.
    NOTE(review): format_map(data) relies on `data` supporting key lookup for
    the template placeholders -- confirm the caller passes such an object.
    """
    email = data.getfirst('email', '').strip()
    lang = data.getfirst('lang', 'cs').strip()
    # registration starts unconfirmed; flags flipped later via the e-mailed link
    data['confirmed'] = False
    data['validated'] = False
    if '@' in email:
        name = data.getfirst('name', '')
        if lang == 'cs':
            emailtext = notifytext.format_map(data)
            subject = "LinuxDays registrace"
        else:
            emailtext = notifytexten.format_map(data)
            subject = "LinuxDays registration"
        recipients = [(name, email)]
        sendemail(emailtext, subject, recipients)
    # CGI response: header, then an auto-redirecting confirmation page
    print("Content-type: text/html; charset=UTF-8")
    if lang == 'cs':
        print("""
<html>
<head>
<meta http-equiv="refresh" content="10;/">
</head>
<body>
<h1>Vaše registrace byla zaznamenána</h1>
<p>Registraci je nutné potvrdit v e-mailu, který byl odeslán na vaši adresu.</p>
<p>Přesměrujeme vás <a href="/">zpět</a> za 10 sekund…</p>
</body>
</html>""")
    else:
        print("""
<html>
<head>
<meta http-equiv="refresh" content="10;/2016/en/">
</head>
<body>
<h1>Your registration has been recorded</h1>
<p>You need to confirm the registration by clicking the link in the e-mail you're just going to receive.</p>
<p>Redirecting you <a href="/2016/en/">back</a> in 10 seconds…</p>
</body>
</html>""")
|
#!/usr/bin/env python2
# -*- encoding:utf-8 -*-
"""
使用 Python 计算 Fibonacci 数:0, 1, 1, 2, 3, 5, ...
1. fibonacci_simple, 最简单的写法,直接利用递归调用
2. fibonacci_list,使用列表缓存中间结果
3. fibonacci_formula,使用公式推导
"""
from __future__ import print_function
import signal
import time
def signle_handler(signal_n, frame):
    """
    SIGALRM handler: implements a simple timer by raising an exception.
    (Name keeps the original spelling; it is referenced at module level.)
    :param signal_n: signal number delivered by the kernel
    :param frame: stack frame interrupted by the signal
    """
    raise Exception('end of time')
def timer(func):
    """
    Timing decorator: print the wrapped function's result and wall-clock cost.
    :param func: the function to time
    """
    import functools  # local import: this Py2-era file has no functools import at top

    # BUG FIX: the original did not apply functools.wraps, so the wrapper hid
    # the wrapped function's __name__/__doc__ from introspection.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        _start = time.time()
        _result = func(*args, **kwargs)
        print("%s's result is %s, time cost: %s" % (func, _result, time.time() - _start))
        return _result
    return wrapper
def fibonacci_simple(n):
    """
    Plain recursion: f(n) = f(n-1) + f(n-2).
    :param n: n>0 (non-positive inputs return 0)
    :return the n-th Fibonacci number
    """
    if n <= 0:
        return 0
    if n == 1:
        return 1
    return fibonacci_simple(n - 1) + fibonacci_simple(n - 2)
@timer
def fibonacci_list(n):
    """
    Build a memo table, extending it until index n is available.
    :param n: n>0 (negative inputs return 0)
    :return the n-th Fibonacci number
    """
    table = [0, 1, 1]
    while len(table) <= n:
        table.append(table[-1] + table[-2])
    return table[0] if n < 0 else table[n]
def fibonacci_formula(n):
    """
    Fast doubling, pure arithmetic derivation:
        f_{2m + 1} = f_{m} ** 2 + f_{m+1} ** 2
        f_{2m} = (f_{m-1} + f_{m+1}) * f_{m} = (2 * f_{m + 1} - f_{m}) * f_{m}
    Odd case, n == 2m + 1:
        (f_{n}, f_{n+1})
        = (f_{2m+1}, f_{2m+2})
        = (f_{m} ** 2 + f_{m+1} ** 2, (f_{m} + f_{m+2}) * f_{m+1})
        = (f_{m} ** 2 + f_{m+1} ** 2, (2 * f_{m} + f_{m+1}) * f_{m+1})
    Even case, n == 2m:
        (f_{n}, f_{n+1})
        = (f_{2m}, f_{2m+1})
        = ((2 * f_{m+1} - f_{m}) * f_{m}, f_{m} ** 2 + f_{m+1} ** 2)
    Example, n = 1, m = 0:
        fibonacci_formula(1)
        = (f_{0} ** 2 + f_{1} ** 2, (2 * f_{0} + f_{1}) * f_{1})
        = (0 + 1, (0 + 1) * 1)
        = (1, 1)
    Example, n = 2, m = 1:
        fibonacci_formula(2)
        = ((2 * f_{2} - f_{1}) * f_{1}, f_{1} ** 2 + f_{2} ** 2)
        = ((2 * 1 - 1)* 1, 1 + 1)
        = (1, 2)
    :param n: n>0
    :return (f_{n}, f_{n+1})
    """
    if n <= 0:
        return (0, 1)
    # BUG FIX (portability): `n / 2` is float division under Python 3 and
    # would recurse forever; `//` is identical for ints under Python 2.
    # m = n // 2, fibonacci_formula(m) gets (f_{m}, f_{m+1})
    f_m, f_m1 = fibonacci_formula(n // 2)
    if n % 2:
        return f_m ** 2 + f_m1 ** 2, (2 * f_m + f_m1) * f_m1
    return (2 * f_m1 - f_m) * f_m, f_m ** 2 + f_m1 ** 2
if __name__ == '__main__':
    # Abort any computation running longer than 10 seconds (see signle_handler).
    signal.signal(signal.SIGALRM, signle_handler)
    signal.alarm(10)
    # 1. fibonacci_simple, 30th Fibonacci number, time cost: 0.852499961853
    # start = time.time()
    # result = fibonacci_simple(30)
    # print("%s's result is %s, time cost: %s" % ("fibonacci_simple", result, time.time() - start))
    # 2. fibonacci_list, 200th Fibonacci number, time cost: 0.000149011611938
    # fibonacci_list(200)
    # 3. fibonacci_formula, 200th Fibonacci number, time cost: 0.000126123428345
    start = time.time()
    result = fibonacci_formula(200)
    print("%s's result is %s, time cost: %s" % ("fibonacci_formula", result, time.time() - start))
import numpy as np
from . import _pynqs_gpu
def argchecker(kwargs, ArgCheckList):
    """Validate that every name in ArgCheckList is present in kwargs.

    Raises Exception (same type as before) but now names the missing
    argument(s) instead of only echoing the full required list.
    """
    missing = [arg for arg in ArgCheckList if arg not in kwargs]
    if missing:
        raise Exception('You omit essential argument(s) %s; required arguments: %s'
                        % (missing, ArgCheckList))
class RBM:
    def __init__(self, **kwargs):
        """
        floatType : 'float32' or 'float64'
        symmType : 'None' --> No symmetry is considerd.
                   'tr' --> Translational symmetry is considered.
                   'z2pr' --> Z2 and parity symmetries are considered.
        """
        argchecker(kwargs, ['floatType', 'symmType'])
        self._floatType = kwargs['floatType']
        self._symmType = kwargs['symmType']
        # Build the CUDA sampler class name from the two options; getattr is
        # performed only for the selected pair, matching the original chain's
        # lazy attribute access.
        prefix = {'float32': 's', 'float64': 'd'}.get(self._floatType)
        body = {'None': 'RBM', 'tr': 'RBMTrSymm', 'z2pr': 'RBMZ2PrSymm'}.get(self._symmType)
        if prefix is None or body is None:
            raise Exception(' --hint: floatType: float32 or float64 / symmType: None, tr, z2pr')
        self._sampler = getattr(_pynqs_gpu, prefix + body + 'Sampler')

    def init(self, **kwargs):
        """Instantiate the sampler, load saved weights, and warm up the chains."""
        argchecker(kwargs, ['nInputs', 'nHiddens', 'nChains', 'seedNumber',
                            'seedDistance', 'path_to_load', 'init_mcmc_steps'])
        self._rbm = self._sampler(kwargs)
        self._nInputs = int(kwargs['nInputs'])
        self._nChains = int(kwargs['nChains'])
        self._rbm.load(str(kwargs['path_to_load']))
        self._rbm.warm_up(int(kwargs['init_mcmc_steps']))

    def do_mcmc_steps(self, mcmc_steps):
        """Advance every Markov chain by `mcmc_steps` sweeps."""
        self._rbm.do_mcmc_steps(mcmc_steps)

    def get_spinStates(self):
        """Current spin configurations, shaped (nChains, nInputs)."""
        return self._rbm.get_spinStates().reshape([-1, self._nInputs])

    def get_lnpsi(self):
        """log psi for the sampler's current spin states."""
        return self._rbm.get_lnpsi()

    def get_lnpsi_for_fixed_spins(self, spinStates):
        """log psi for externally supplied spin states (nChains, nInputs)."""
        shaped = (np.array(spinStates)
                  .astype(self._floatType)
                  .reshape([self._nChains, self._nInputs]))
        return self._rbm.get_lnpsi_for_fixed_spins(shaped)
|
from datetime import timedelta
from fastapi import APIRouter, Body, HTTPException, status
from app.core.auth import authenticate_user, create_access_token
from app.core.config import config
from app.models.v1.auth import Token, UserAuthenticate
router = APIRouter()
# NOTE(review): named "...MINUTES" but passed to timedelta(seconds=...) below --
# either the name or the unit is wrong; confirm what unit config.JWT_EXPIRY uses.
ACCESS_TOKEN_EXPIRE_MINUTES = config.JWT_EXPIRY
@router.post("/token", response_model=Token)
async def login_for_access_token(user_auth: UserAuthenticate = Body(...)):
    """Issue a JWT access token for valid credentials.

    Responds 401 with a WWW-Authenticate header when authentication fails.
    """
    user = await authenticate_user(user_auth.username, user_auth.password)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )
    # token lifetime -- see the unit-mismatch note above
    access_token_expires = timedelta(seconds=ACCESS_TOKEN_EXPIRE_MINUTES)
    access_token = create_access_token(
        data={"sub": user.username}, expires_delta=access_token_expires
    )
    return {"access_token": access_token, "token_type": "bearer"}
|
# Generated by Django 3.2.12 on 2022-03-22 18:08
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: marks Order.total_price as non-editable
    # (DecimalField, max 5 digits, 2 decimal places).
    dependencies = [
        ('orders', '0003_auto_20220322_1759'),
    ]
    operations = [
        migrations.AlterField(
            model_name='order',
            name='total_price',
            field=models.DecimalField(decimal_places=2, editable=False, max_digits=5),
        ),
    ]
|
from dust_mie import calc_mie
import numpy as np
import matplotlib.pyplot as plt
# Wavelength grid (microns) over which to evaluate the Mie coefficients.
wave = np.linspace(0.5, 2.5, 50)
# Size-distribution-averaged Mie efficiencies for 1-micron SiO2 grains;
# s is presumably the width of the size distribution -- confirm in dust_mie docs.
qext, qsca, qback, g = calc_mie.get_mie_coeff_distribution(wave, r=1.0, material='SiO2', s=0.5)
plt.plot(wave, qext)
# BUG FIX: raw strings -- '\m' is an invalid escape sequence (DeprecationWarning
# today, a SyntaxError in future Python); the rendered labels are unchanged.
plt.xlabel(r'Wavelength ($\mu$m)')
plt.ylabel(r'Q$_{ext}$')
plt.savefig('extinct_func_distribution.png')
# -*- coding:utf-8 -*-
import os
import logging
import math as m
import numpy as np
import scipy.io as sio
import scipy.signal as signal
from typing import AnyStr
# DON'T DELETE THIS! This is used by other modules.
from sklearn.model_selection._split import check_random_state
def cart2sph(x, y, z):
    '''
    Transform Cartesian coordinates to spherical
    Parameters
    ----------
    ```txt
    x: float, X coordinate
    y: float, Y coordinate
    z: float, Z coordinate
    ```
    Returns
    -------
    ```txt
    radius, elevation, azimuth: float -> tuple, Transformed polar coordinates
    ```
    '''
    xy_sq = x**2 + y**2
    radius = m.sqrt(xy_sq + z**2)
    elevation = m.atan2(z, m.sqrt(xy_sq))
    azimuth = m.atan2(y, x)
    return radius, elevation, azimuth
def pol2cart(theta, rho):
    '''
    Transform polar coordinates to Cartesian
    Parameters
    ----------
    ```txt
    theta : float, angle value
    rho : float, radius value
    ```
    Returns
    -------
    ```txt
    X, Y : float -> tuple, projected coordinates
    ```
    '''
    x = rho * m.cos(theta)
    y = rho * m.sin(theta)
    return x, y
def azim_proj(pos):
    '''
    Computes the Azimuthal Equidistant Projection of input point in
    3D Cartesian Coordinates. Imagine a plane being placed against
    (tangent to) a globe. If a light source inside the globe projects
    the graticule onto the plane the result would be a planar, or
    azimuthal, map projection.
    Parameters
    ----------
    ```txt
    pos : list or tuple, position in 3D Cartesian coordinates [x, y, z]
    ```
    Returns
    -------
    ```txt
    X, Y : float -> tuple, projected coordinates using Azimuthal Equidistant Projection
    ```
    '''
    _, elevation, azimuth = cart2sph(pos[0], pos[1], pos[2])
    # polar angle = pi/2 - elevation; radius in the plane equals that angle
    return pol2cart(azimuth, m.pi / 2 - elevation)
def riemannian_dis(pos, center):
    # TODO: unimplemented stub -- presumably meant to compute a Riemannian
    # distance between `pos` and `center`; confirm intended manifold/metric.
    pass
def load_data(datafile, label=True):
    '''
    Loads the data from MAT file.
    MAT file would be two kinds. `'*.mat'` which contains the feature
    matrix in the shape of `[nTrials, nChannels, nSamples]` and
    `'*_label.mat'` which contains the output labels as a vector.
    Label numbers are assumed to start from 0.
    Parameters
    ----------
    ```txt
    datafile : str, load data or label from *.mat file (* in '*.mat'
               and '*_label.mat' are the same, pls let datafile = '*.mat')
    label    : bool, if True: load label, else: load data
    ```
    Returns
    -------
    ```txt
    data or label : ndarray
    ```
    '''
    if label:
        datafile = datafile[:-4] + '_label.mat'
        print('Loading data from %s' % (datafile))
        dataMat = sio.loadmat(datafile, mat_dtype=True)
        print('Data loading complete. Shape is %r' %
              (dataMat['classlabel'].shape, ))
        # Class labels should start from 0
        return dataMat['classlabel'] - 1
    print('Loading data from %s' % (datafile))
    dataMat = sio.loadmat(datafile, mat_dtype=True)
    # stored as [nChannels, nSamples, nTrials] -> [nTrials, nChannels, nSamples]
    signals = np.transpose(dataMat['s'], (2, 0, 1))
    print('Data loading complete. Shape is %r' % (signals.shape, ))
    return signals
def filterbank(data, srate=250, start=4, stop=38, window=4, step=2):
'''
Process raw data with filter-bank.
Parameters
----------
```txt
data : ndarray, raw data, shapes as [nTrials, nChannels, nSamples]
srate : int, the sample rate of raw data, default is 250
start : int, frequency where the filter-bank begins, default is 4
stop : int, frequency where the filter-bank ends, default is 38
window : int, the bandwidth of one filter in the filter-bank, default is 4
step : int, the interval of each neighbouring filter in the filter-bank, default is 2
```
Returns
-------
```txt
FBdata : ndarray, data after filter-bank, shapes (nTrials, nChannels, nSamples, nColors)
```
'''
nTrials, nChannels, nSamples = data.shape
FBdata = []
for beg in range(start, stop - window + 1, step):
end = beg + window
b, a = signal.butter(4, [beg / srate * 2, end / srate * 2], 'bandpass')
FBdata.append(signal.filtfilt(b, a, data, axis=-1))
#now np.array(FBdata) shapes as[nColors, nTrials, nChannels, nSamples]
FBdata = np.swapaxes(np.array(FBdata), 0, 1)
FBdata = np.swapaxes(FBdata, 1, 2)
FBdata = np.swapaxes(FBdata, 2, 3)
print('Data filterbank complete. Shape is %r.' % (FBdata.shape, ))
return FBdata
def load_or_gen_filterbank_data(filepath,
                                beg=0.,
                                end=4.,
                                srate=250,
                                start=4,
                                stop=38,
                                window=4,
                                step=4):
    '''
    Load cached filter-bank data, or generate it from the raw file and cache it.
    Parameters
    ----------
    ```txt
    filepath: str, path of raw data file, and data shape is [nTrials, nChannels, nSamples]
    beg     : float, second when imegery tasks begins
    end     : float, second when imegery tasks ends
    srate   : int, the sample rate of raw data, default is 250
    start   : int, frequency where the filter-bank begins, default is 4
    stop    : int, frequency where the filter-bank ends, default is 38
    window  : int, the bandwidth of one filter in the filter-bank, default is 4
    step    : int, the interval of each neighbouring filter in the filter-bank, default is 4
    ```
    Returns
    -------
    ```txt
    FBdata : ndarray, data after filter-bank, shapes as [nTrials, nChannels, nSamples, nColors]
    ```
    Side effects: writes the generated data next to `filepath` as '*_fb.mat'
    (key 'fb'), so subsequent calls load the cache. Note the beg/end time
    cropping is applied only when generating; the cached file is already
    cropped with whatever beg/end was used at generation time.
    '''
    if os.path.exists(filepath[:-4] + '_fb.mat'):
        # Cache hit: beg/end/start/stop/... arguments are ignored here.
        print('Loading data from %s' % (filepath[:-4] + '_fb.mat'))
        data = sio.loadmat(filepath[:-4] + '_fb.mat')['fb']
        print('Load filterbank data complete. Shape is %r.' % (data.shape, ))
    else:
        data = filterbank(load_data(filepath, label=False),
                          srate=srate,
                          start=start,
                          stop=stop,
                          window=window,
                          step=step)
        # Crop to the imagery window [beg, end) seconds.
        data = data[:, :, round(beg * srate):round(end * srate), :]
        print('Load filterbank data complete. Shape is %r.' % (data.shape, ))
        sio.savemat(filepath[:-4] + '_fb.mat', {'fb': data})
        print('Save filterbank data[\'fb\'] complete. To %s' %
              (filepath[:-4] + '_fb.mat'))
    return data
def load_locs(filepath=None):
    '''
    Load electrodes' 3D locations and names from a .mat file.
    Parameters
    ----------
    ```txt
    filepath: str, path of electrodes' 3D location data file, default is None
              (falls back to data/22scan_locs.mat)
    ```
    Returns
    -------
    ```txt
    locs : ndarray, data of electrodes' 3D location, shapes (nChannels, 3)
    names : ndarray, name of electrodes, shapes (nChannels, )
    ```
    Raises
    ------
    FileNotFoundError if `filepath` does not exist.
    '''
    if filepath is None:
        filepath = os.path.join('data', '22scan_locs.mat')
    if not os.path.exists(filepath):
        raise FileNotFoundError('File not found in {}'.format(filepath))
    # Parse the file once; the original called sio.loadmat twice on the
    # same path, reading and decoding the file a second time for no gain.
    mat = sio.loadmat(filepath)
    return mat['locs'], mat['name']
def interestingband(data,
                    bands=('2-8', '8-12', '12-20', '20-30', '30-60', '80-100'),
                    axis=-1,
                    srate=250,
                    swapaxes=True):
    '''
    Filter raw signal into a set of interesting frequency bands, e.g.
    theta: 2-8Hz
    alpha: 8-12Hz
    low beta: 12-20Hz
    high beta: 20-30Hz
    gamma: 30-60Hz
    high gamma: 80-100Hz
    Parameters
    ----------
    ```txt
    data    : ndarray, raw data, shapes (nTrials, nChannels, nSamples)
    bands   : sequence of str, each 'low-high' in Hz, e.g. '8-12'; the
              default covers the six bands listed above (new backward-
              compatible default -- the parameter used to be required)
    axis    : int, which axis should be filtered, default is -1
    srate   : int, the sample rate of raw data, default is 250
    swapaxes: bool, deciding the interestingband axis at the end or the begining, default is True
    ```
    Returns
    -------
    ```txt
    IBdata : ndarray, shapes (nTrials, nChannels, nSamples, nColors) when
             swapaxes is True, else (nColors, nTrials, nChannels, nSamples)
    ```
    '''
    IBdata = []
    for _band in bands:
        # Parse 'low-high' into two integer cutoff frequencies.
        low, high = map(int, _band.split('-', 1))
        # 5th-order zero-phase Butterworth band-pass, cutoffs in Hz (fs=).
        b, a = signal.butter(5, [low, high], 'bandpass', fs=srate)
        IBdata.append(signal.filtfilt(b, a, data, axis=axis))
    # now np.array(IBdata) shapes as [nColors, nTrials, nChannels, nSamples]
    IBdata = np.array(IBdata)
    if swapaxes:
        # Move the band axis to the end.
        IBdata = np.swapaxes(IBdata, 0, 1)
        IBdata = np.swapaxes(IBdata, 1, 2)
        IBdata = np.swapaxes(IBdata, 2, 3)
    print('Data filterbank complete. Shape is %r.' % (IBdata.shape, ))
    return IBdata
def load_or_gen_interestingband_data(filepath, beg=0., end=4., srate=250,
                                     bands=('2-8', '8-12', '12-20', '20-30',
                                            '30-60', '80-100')):
    '''
    Load cached interesting-band data, or generate it and cache it.
    Parameters
    ----------
    ```txt
    filepath: str, path of raw data file, and data shapes (nTrials, nChannels, nSamples)
    beg     : float, second when imegery tasks begins
    end     : float, second when imegery tasks ends
    srate   : int, the sample rate of raw data, default is 250
    bands   : sequence of str, 'low-high' bands forwarded to
              interestingband(); new parameter with a backward-compatible
              default. The original code omitted the required `bands`
              argument entirely, so every cache miss raised TypeError.
    ```
    Returns
    -------
    ```txt
    IBdata : ndarray, data after interesting-band filters, shapes (nTrials, nChannels, nSamples, nColors)
    ```
    Side effects: caches the generated data next to `filepath` as '*_ib.mat'
    (key 'ib'); beg/end cropping is applied only when generating.
    '''
    cachefile = filepath[:-4] + '_ib.mat'
    if os.path.exists(cachefile):
        print('Loading data from %s' % (cachefile))
        data = sio.loadmat(cachefile)['ib']
        print('Load interestingband data complete. Shape is %r.' %
              (data.shape, ))
    else:
        # Pass `bands` explicitly -- it is a required positional parameter
        # of interestingband() in the original signature.
        data = interestingband(load_data(filepath, label=False), bands,
                               srate=srate)
        # Crop to the imagery window [beg, end) seconds.
        data = data[:, :, round(beg * srate):round(end * srate), :]
        print('Load interestingband data complete. Shape is %r.' %
              (data.shape, ))
        sio.savemat(cachefile, {'ib': data})
        print('Save interestingband data[\'ib\'] complete. To %s' %
              (cachefile))
    return data
def highpassfilter(data, Wn=4, srate=250):
    '''Zero-phase 4th-order Butterworth high-pass filter, applied per trial.
    `Wn` is the cutoff frequency in Hz, `srate` the sample rate.'''
    b, a = signal.butter(4, Wn=Wn, btype='highpass', fs=srate)
    # Filter each trial independently along its last axis.
    return np.asarray([signal.filtfilt(b, a, trial) for trial in data])
def bandpassfilter(data, Wn=(.5, 100), srate=250):
    '''Zero-phase 4th-order Butterworth band-pass filter, applied per trial.
    `Wn` is the (low, high) cutoff pair in Hz, `srate` the sample rate.'''
    # Tuple default instead of the original mutable list default
    # (shared-mutable-default pitfall); butter accepts any sequence.
    b, a = signal.butter(4, Wn=Wn, btype='bandpass', fs=srate)
    # Filter each trial independently along its last axis.
    return np.asarray([signal.filtfilt(b, a, trial) for trial in data])
def detrend(data, axis=-1):
    '''Remove a linear trend from `data` along `axis` (thin wrapper around
    scipy.signal.detrend).'''
    return signal.detrend(data, axis=axis)
def moving_average(a, n=3):
    '''Simple moving average with window `n` over a 1-D sequence.
    Returns len(a) - n + 1 window means as a float ndarray.'''
    totals = np.cumsum(a, dtype=float)
    # window_sums[k] = totals[n-1+k] - totals[k-1], i.e. the sum of each
    # length-n window; the copy keeps the arithmetic out-of-place.
    window_sums = totals[n - 1:].copy()
    window_sums[1:] -= totals[:-n]
    return window_sums / n
def unlinearDetrend(data, axis=-1):
    '''Remove a 5th-degree polynomial trend from `data` along `axis`.
    Parameters:
        data : array-like; any shape.
        axis : int, axis along which the trend is fitted and removed.
    Returns an ndarray of the same shape with the fitted polynomial
    subtracted from each 1-D slice.
    '''
    data = np.asarray(data)
    x = np.arange(data.shape[axis])

    def _detrend_1d(y):
        # Fit a degree-5 polynomial to this slice and subtract it.
        coeffs = np.polyfit(x, y, 5)
        return y - np.poly1d(coeffs)(x)

    # The original ignored `axis` and only worked for 1-D input (polyfit
    # received a mismatched y otherwise); applying the fit slice-wise
    # honours the parameter while keeping the 1-D behaviour identical.
    return np.apply_along_axis(_detrend_1d, axis, data)
def normalization(data, axis=-1):
    '''Min-max scale along `axis` so values map into [0, 1] (NaN-aware).'''
    lo = np.nanmin(data, axis=axis, keepdims=True)
    hi = np.nanmax(data, axis=axis, keepdims=True)
    return (data - lo) / (hi - lo)
def standardization(data, axis=-1):
    '''Z-score along `axis`: zero mean and unit standard deviation (NaN-aware).'''
    centred = data - np.nanmean(data, axis=axis, keepdims=True)
    return centred / np.nanstd(data, axis=axis, keepdims=True)
def confusionMatrix(predict, groundTruth):
    '''Confusion matrix with marginal totals in an extra last row/column.
    cm[i, j] counts predictions of class i whose true class is j;
    cm[i, -1] / cm[-1, j] hold row/column sums, cm[-1, -1] the grand total.
    The class count is taken from the distinct values of groundTruth.'''
    n = len(set(groundTruth)) + 1
    cm = np.zeros((n, n))
    for pred, truth in zip(predict, groundTruth):
        cm[int(pred), int(truth)] += 1
    # Grand total goes in the corner before the margins are filled.
    cm[-1, -1] = cm.sum()
    for k in np.arange(n - 1):
        cm[k, -1] = cm[k, :-1].sum()
        cm[-1, k] = cm[:-1, k].sum()
    return cm
def computeKappa(predict, groundTruth, probpred=False):
    '''
    Compute Cohen's kappa from predictions and ground truth.
    ***Deprecated***
    We have module `tensorflow-addons` to compute kappa.
    But this function will be retained.
    If `probpred` is True, `predict` holds per-class probabilities and is
    reduced to hard labels first.
    '''
    logging.warning('computeKappa: Deprecated: We have module '
                    '`tensorflow-addons`to compute kappa. But '
                    'this function will be retained.')
    if probpred:
        # Convert class probabilities to hard labels.
        predict = np.argmax(predict, axis=1)
    predict = np.squeeze(predict)
    groundTruth = np.squeeze(groundTruth)
    cm = confusionMatrix(predict, groundTruth)
    # Observed agreement.
    observed = np.mean(predict == groundTruth)
    # Chance agreement from the marginal totals of the confusion matrix.
    expected = np.sum(cm[-1, :-1] * cm[:-1, -1]) / cm[-1, -1] ** 2
    return (observed - expected) / (1 - expected)
def walk_files(path, emt: AnyStr):
    '''Recursively collect paths of files under `path` whose extension
    (text after the last dot) equals `emt`, e.g. emt='mat'.'''
    matched = []
    for root, _dirs, names in os.walk(path):
        matched.extend(
            os.path.join(root, name)
            for name in names
            if name.split('.')[-1] == emt
        )
    return matched
# -*- coding: utf-8 -*-
"""
Created on Saturday March 24 at 9:04 am 2018
@author: milesws
"""
import pandas as pd
import numpy as np
from os import listdir
import re
# Establish directory to files of false name matches comparing 1900 to 1910 censuses
files = listdir(r"R:\JoePriceResearch\record_linking\projects\deep_learning\census")
# Convert the directory listing to one space-separated string for regex matching
files_string = " ".join(str(x) for x in files)
# Select the *T2.dta files via regex.
# NOTE(review): '\w' in a non-raw string is a deprecated escape and '.' also
# matches any character before 'dta' -- consider r'...T2\.dta'.
files1 = re.findall('(?:[a-zA-Z])\w+T2.dta', files_string)
# Create a blank dataframe
# NOTE(review): `df` is never used below; the loop writes straight to the
# output CSV -- confirm whether it was meant to accumulate results.
df = pd.DataFrame()
# Loop over the selected files, cleaning each for later vectorization
for x in files1:
    directory = r'R:\JoePriceResearch\record_linking\projects\deep_learning\census\\' + x
    # Read in each file, keeping only the name/id/match columns
    data = pd.read_stata(directory, columns=['gname1900', 'lname1900', 'gname1910', 'lname1910', 'ark1900', 'true'])
    # Rename columns
    data1 = data.rename(index=str, columns={"true": "match", "gname1900":"first_name", "lname1900":"last1", "gname1910":"first2", "lname1910":"last2", "ark1900":"fsid"})
    # Keep only the non-matches (match == 0), i.e. the false name pairs
    fn = data1[data1.match == 0]
    # Create columns for first, middle and full names
    # NOTE(review): full2/mid2/first2 are all derived from the 1900 given
    # name (first_name); first2 even overwrites the renamed 1910 given name.
    # Presumably the 1910 name was intended -- verify before relying on these.
    fn['full1'] = fn.first_name + " " +fn.last1
    fn['full2'] = fn.first_name + " " +fn.last2
    fn['mid1'] = fn.first_name.str.split('\s+').str[1]
    fn['mid2'] = fn.first_name.str.split('\s+').str[1]
    fn['first1'] = fn.first_name.str.split(' ', 1).str[0]
    fn['first2'] = fn.first_name.str.split(' ', 1).str[0]
    # Fill in missing values with an empty string for individuals missing a name
    fn = fn.replace(np.NaN, '', regex=True)
    # Take a random sample of 10000 per file (with replacement)
    fn1 = fn.sample(n=10000, replace=True)
    # Append to the output .csv
    # NOTE(review): header=True re-writes the header on every append;
    # confirm whether the header should be written only once.
    fn1.to_csv(r'R:\JoePriceResearch\LIFE-M\Miles Strother\Machine Learning\false_names.csv', mode='a', header=True)
|
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
# This file is based on an earlier version of pretix which was released under the Apache License 2.0. The full text of
# the Apache License 2.0 can be obtained at <http://www.apache.org/licenses/LICENSE-2.0>.
#
# This file may have since been changed and any changes are released under the terms of AGPLv3 as described above. A
# full history of changes and contributors is available at <https://github.com/pretix/pretix>.
#
# This file contains Apache-licensed contributions copyrighted by: Jakob Schnell, jasonwaiting@live.hk, pajowu
#
# Unless required by applicable law or agreed to in writing, software distributed under the Apache License 2.0 is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under the License.
import dateutil.parser
from django.contrib import messages
from django.db import transaction
from django.db.models import Exists, Max, OuterRef, Prefetch, Subquery
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.utils.functional import cached_property
from django.utils.timezone import is_aware, make_aware, now
from django.utils.translation import gettext_lazy as _
from django.views.generic import DeleteView, ListView
from pytz import UTC
from pretix.base.channels import get_all_sales_channels
from pretix.base.models import Checkin, Order, OrderPosition
from pretix.base.models.checkin import CheckinList
from pretix.base.signals import checkin_created
from pretix.control.forms.checkin import CheckinListForm
from pretix.control.forms.filter import CheckInFilterForm, CheckinFilterForm
from pretix.control.permissions import EventPermissionRequiredMixin
from pretix.control.views import CreateView, PaginationMixin, UpdateView
from pretix.helpers.models import modelcopy
class CheckInListShow(EventPermissionRequiredMixin, PaginationMixin, ListView):
    """Detail view of one check-in list.

    GET renders the list's order positions annotated with their latest
    entry/exit check-in; POST performs bulk check-in, check-out or revert
    for the selected positions.
    """
    model = Checkin
    context_object_name = 'entries'
    template_name = 'pretixcontrol/checkin/index.html'
    permission = 'can_view_orders'
    def get_queryset(self, filter=True):
        """Order positions relevant for this list, annotated with check-in
        state.

        :param filter: if True, additionally apply the user's filter form.
        """
        # Subquery: latest ENTRY check-in datetime per position on this list.
        cqs = Checkin.objects.filter(
            position_id=OuterRef('pk'),
            list_id=self.list.pk,
            type=Checkin.TYPE_ENTRY
        ).order_by().values('position_id').annotate(
            m=Max('datetime')
        ).values('m')
        # Subquery: latest EXIT check-in datetime per position on this list.
        cqs_exit = Checkin.objects.filter(
            position_id=OuterRef('pk'),
            list_id=self.list.pk,
            type=Checkin.TYPE_EXIT
        ).order_by().values('position_id').annotate(
            m=Max('datetime')
        ).values('m')
        # Pending orders are only included if the list is configured so.
        qs = OrderPosition.objects.filter(
            order__event=self.request.event,
            order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING] if self.list.include_pending else [Order.STATUS_PAID],
        ).annotate(
            last_entry=Subquery(cqs),
            last_exit=Subquery(cqs_exit),
            auto_checked_in=Exists(
                Checkin.objects.filter(position_id=OuterRef('pk'), list_id=self.list.pk, auto_checked_in=True)
            )
        ).select_related(
            'item', 'variation', 'order', 'addon_to'
        ).prefetch_related(
            Prefetch('subevent', queryset=self.request.event.subevents.all())
        )
        # Restrict to the list's configured subevent and product selection.
        if self.list.subevent:
            qs = qs.filter(
                subevent=self.list.subevent
            )
        if not self.list.all_products:
            qs = qs.filter(item__in=self.list.limit_products.values_list('id', flat=True))
        if filter and self.filter_form.is_valid():
            qs = self.filter_form.filter_qs(qs)
        return qs
    @cached_property
    def filter_form(self):
        # Bound filter form over the current GET parameters.
        return CheckInFilterForm(
            data=self.request.GET,
            event=self.request.event,
            list=self.list
        )
    def dispatch(self, request, *args, **kwargs):
        # Resolve the check-in list from the URL before any handler runs;
        # 404 if it does not belong to this event.
        self.list = get_object_or_404(self.request.event.checkin_lists.all(), pk=kwargs.get("list"))
        return super().dispatch(request, *args, **kwargs)
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['checkinlist'] = self.list
        # 'seats': whether seating information should be shown for this view.
        if self.request.event.has_subevents:
            ctx['seats'] = (
                self.list.subevent.seating_plan_id if self.list.subevent
                else self.request.event.subevents.filter(seating_plan__isnull=False).exists()
            )
        else:
            ctx['seats'] = self.request.event.seating_plan_id
        ctx['filter_form'] = self.filter_form
        # Normalise the annotated last_entry/last_exit values to
        # timezone-aware datetimes; different database backends return
        # the subquery results in different forms.
        for e in ctx['entries']:
            if e.last_entry:
                if isinstance(e.last_entry, str):
                    # Apparently only happens on SQLite
                    e.last_entry_aware = make_aware(dateutil.parser.parse(e.last_entry), UTC)
                elif not is_aware(e.last_entry):
                    # Apparently only happens on MySQL
                    e.last_entry_aware = make_aware(e.last_entry, UTC)
                else:
                    # This would be correct, so guess on which database it works… Yes, it's PostgreSQL.
                    e.last_entry_aware = e.last_entry
            if e.last_exit:
                if isinstance(e.last_exit, str):
                    # Apparently only happens on SQLite
                    e.last_exit_aware = make_aware(dateutil.parser.parse(e.last_exit), UTC)
                elif not is_aware(e.last_exit):
                    # Apparently only happens on MySQL
                    e.last_exit_aware = make_aware(e.last_exit, UTC)
                else:
                    # This would be correct, so guess on which database it works… Yes, it's PostgreSQL.
                    e.last_exit_aware = e.last_exit
        return ctx
    def post(self, request, *args, **kwargs):
        """Bulk check-in/check-out (or revert) of the POSTed position ids."""
        if "can_change_orders" not in request.eventpermset:
            messages.error(request, _('You do not have permission to perform this action.'))
            return redirect(reverse('control:event.orders.checkins', kwargs={
                'event': self.request.event.slug,
                'organizer': self.request.event.organizer.slug
            }) + '?' + request.GET.urlencode())
        # Use the unfiltered queryset so the action also applies to ids that
        # the current filter form would hide.
        positions = self.get_queryset(filter=False).filter(
            pk__in=request.POST.getlist('checkin')
        )
        if request.POST.get('revert') == 'true':
            # Revert: delete all of the position's check-ins on this list.
            for op in positions:
                if op.order.status == Order.STATUS_PAID or (self.list.include_pending and op.order.status == Order.STATUS_PENDING):
                    Checkin.objects.filter(position=op, list=self.list).delete()
                    op.order.log_action('pretix.event.checkin.reverted', data={
                        'position': op.id,
                        'positionid': op.positionid,
                        'list': self.list.pk,
                        'web': True
                    }, user=request.user)
                    op.order.touch()
            messages.success(request, _('The selected check-ins have been reverted.'))
        else:
            for op in positions:
                if op.order.status == Order.STATUS_PAID or (self.list.include_pending and op.order.status == Order.STATUS_PENDING):
                    # 'checkout' POST flag selects an exit instead of an entry.
                    t = Checkin.TYPE_EXIT if request.POST.get('checkout') == 'true' else Checkin.TYPE_ENTRY
                    lci = op.checkins.filter(list=self.list).first()
                    # A fresh row is always created for exits, for repeated
                    # entries when the list allows them, or after an exit;
                    # otherwise get_or_create avoids duplicate entries.
                    if self.list.allow_multiple_entries or t != Checkin.TYPE_ENTRY or (lci and lci.type != Checkin.TYPE_ENTRY):
                        ci = Checkin.objects.create(position=op, list=self.list, datetime=now(), type=t)
                        created = True
                    else:
                        try:
                            ci, created = Checkin.objects.get_or_create(position=op, list=self.list, defaults={
                                'datetime': now(),
                            })
                        except Checkin.MultipleObjectsReturned:
                            ci, created = Checkin.objects.filter(position=op, list=self.list).first(), False
                    op.order.log_action('pretix.event.checkin', data={
                        'position': op.id,
                        'positionid': op.positionid,
                        'first': created,
                        'forced': False,
                        'datetime': now(),
                        'type': t,
                        'list': self.list.pk,
                        'web': True
                    }, user=request.user)
                    checkin_created.send(op.order.event, checkin=ci)
            messages.success(request, _('The selected tickets have been marked as checked in.'))
        return redirect(reverse('control:event.orders.checkinlists.show', kwargs={
            'event': self.request.event.slug,
            'organizer': self.request.event.organizer.slug,
            'list': self.list.pk
        }) + '?' + request.GET.urlencode())
class CheckinListList(EventPermissionRequiredMixin, PaginationMixin, ListView):
    """Overview of all check-in lists configured for the event."""
    model = CheckinList
    context_object_name = 'checkinlists'
    permission = 'can_view_orders'
    template_name = 'pretixcontrol/checkin/lists.html'
    def get_queryset(self):
        """The event's check-in lists, optionally filtered by ?subevent=<id>."""
        qs = self.request.event.checkin_lists.select_related('subevent').prefetch_related("limit_products")
        if self.request.GET.get("subevent", "") != "":
            s = self.request.GET.get("subevent", "")
            qs = qs.filter(subevent_id=s)
        return qs
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        clists = list(ctx['checkinlists'])
        sales_channels = get_all_sales_channels()
        for cl in clists:
            if cl.subevent:
                cl.subevent.event = self.request.event  # re-use same event object to make sure settings are cached
            # Replace the stored channel identifiers with the channel objects
            # so the template can render their labels.
            cl.auto_checkin_sales_channels = [sales_channels[channel] for channel in cl.auto_checkin_sales_channels]
        ctx['checkinlists'] = clists
        # Template flag: may the user jump to organizer-level settings?
        ctx['can_change_organizer_settings'] = self.request.user.has_organizer_permission(
            self.request.organizer,
            'can_change_organizer_settings',
            self.request
        )
        return ctx
class CheckinListCreate(EventPermissionRequiredMixin, CreateView):
    """Create a new check-in list, optionally pre-filled from an existing one
    (via the ?copy_from=<pk> query parameter)."""
    model = CheckinList
    form_class = CheckinListForm
    template_name = 'pretixcontrol/checkin/list_edit.html'
    permission = 'can_change_event_settings'
    context_object_name = 'checkinlist'
    def dispatch(self, request, *args, **kwargs):
        r = super().dispatch(request, *args, **kwargs)
        # NOTE(review): relaxes CSP for this page; presumably a script on the
        # edit template needs eval -- confirm before tightening.
        r['Content-Security-Policy'] = 'script-src \'unsafe-eval\''
        return r
    @cached_property
    def copy_from(self):
        # The check-in list referenced by ?copy_from=<pk>, or None if the
        # parameter is absent, invalid, or an object already exists.
        if self.request.GET.get("copy_from") and not getattr(self, 'object', None):
            try:
                return self.request.event.checkin_lists.get(pk=self.request.GET.get("copy_from"))
            except CheckinList.DoesNotExist:
                pass
    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        if self.copy_from:
            # Start from an unsaved copy of the template list.
            i = modelcopy(self.copy_from)
            i.pk = None
            kwargs['instance'] = i
        else:
            kwargs['instance'] = CheckinList(event=self.request.event)
        return kwargs
    def get_success_url(self) -> str:
        return reverse('control:event.orders.checkinlists', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug,
        })
    @transaction.atomic
    def form_valid(self, form):
        # Save and write an audit-log entry in one transaction.
        form.instance.event = self.request.event
        messages.success(self.request, _('The new check-in list has been created.'))
        ret = super().form_valid(form)
        form.instance.log_action('pretix.event.checkinlist.added', user=self.request.user,
                                 data=dict(form.cleaned_data))
        return ret
    def form_invalid(self, form):
        messages.error(self.request, _('We could not save your changes. See below for details.'))
        return super().form_invalid(form)
class CheckinListUpdate(EventPermissionRequiredMixin, UpdateView):
    """Edit an existing check-in list of the event."""
    model = CheckinList
    form_class = CheckinListForm
    template_name = 'pretixcontrol/checkin/list_edit.html'
    permission = 'can_change_event_settings'
    context_object_name = 'checkinlist'
    def dispatch(self, request, *args, **kwargs):
        r = super().dispatch(request, *args, **kwargs)
        # NOTE(review): relaxes CSP for this page; presumably a script on the
        # edit template needs eval -- confirm before tightening.
        r['Content-Security-Policy'] = 'script-src \'unsafe-eval\''
        return r
    def get_object(self, queryset=None) -> CheckinList:
        # Scoped to this event's lists; 404 instead of leaking other events'.
        try:
            return self.request.event.checkin_lists.get(
                id=self.kwargs['list']
            )
        except CheckinList.DoesNotExist:
            raise Http404(_("The requested list does not exist."))
    @transaction.atomic
    def form_valid(self, form):
        messages.success(self.request, _('Your changes have been saved.'))
        # Only log when something actually changed, and only the changed keys.
        if form.has_changed():
            self.object.log_action(
                'pretix.event.checkinlist.changed', user=self.request.user, data={
                    k: form.cleaned_data.get(k) for k in form.changed_data
                }
            )
        return super().form_valid(form)
    def get_success_url(self) -> str:
        return reverse('control:event.orders.checkinlists.edit', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug,
            'list': self.object.pk
        })
    def form_invalid(self, form):
        messages.error(self.request, _('We could not save your changes. See below for details.'))
        return super().form_invalid(form)
class CheckinListDelete(EventPermissionRequiredMixin, DeleteView):
    """Delete a check-in list together with all of its check-ins."""
    model = CheckinList
    template_name = 'pretixcontrol/checkin/list_delete.html'
    permission = 'can_change_event_settings'
    context_object_name = 'checkinlist'
    def get_object(self, queryset=None) -> CheckinList:
        # Scoped to this event's lists; 404 instead of leaking other events'.
        try:
            return self.request.event.checkin_lists.get(
                id=self.kwargs['list']
            )
        except CheckinList.DoesNotExist:
            raise Http404(_("The requested list does not exist."))
    @transaction.atomic
    def delete(self, request, *args, **kwargs):
        self.object = self.get_object()
        success_url = self.get_success_url()
        # Remove the dependent check-ins first, then log, then delete the list.
        self.object.checkins.all().delete()
        self.object.log_action(action='pretix.event.checkinlists.deleted', user=request.user)
        self.object.delete()
        messages.success(self.request, _('The selected list has been deleted.'))
        return HttpResponseRedirect(success_url)
    def get_success_url(self) -> str:
        return reverse('control:event.orders.checkinlists', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug,
        })
class CheckinListView(EventPermissionRequiredMixin, PaginationMixin, ListView):
    """Paginated log of all check-ins (entries and exits) of the event."""
    model = Checkin
    context_object_name = 'checkins'
    permission = 'can_view_orders'
    template_name = 'pretixcontrol/checkin/checkins.html'
    def get_queryset(self):
        # Checkin.all rather than Checkin.objects -- presumably a manager
        # that does not hide any rows; confirm against the model definition.
        qs = Checkin.all.filter(
            list__event=self.request.event,
        ).select_related(
            # 'position' was listed twice in the original; once is enough.
            'position', 'position__item', 'position__variation', 'position__subevent'
        ).prefetch_related(
            'list', 'gate'
        )
        if self.filter_form.is_valid():
            qs = self.filter_form.filter_qs(qs)
        return qs
    @cached_property
    def filter_form(self):
        return CheckinFilterForm(data=self.request.GET, event=self.request.event)
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data()
        ctx['filter_form'] = self.filter_form
        return ctx
|
import os
from qtpy import QtGui, QtWidgets, QtCore
import typhos.utils
class Splash(QtWidgets.QDialog):
    """Frameless splash dialog: logo on top, status text plus a loading
    spinner below."""

    def __init__(self, *args, **kwargs):
        super(Splash, self).__init__(*args, **kwargs)
        # Borderless top-level window, as expected for a splash screen.
        self.setWindowFlags(QtCore.Qt.Window | QtCore.Qt.FramelessWindowHint)
        self._base_path = os.path.dirname(os.path.abspath(__file__))
        layout = QtWidgets.QVBoxLayout()
        self.setLayout(layout)
        logo_pixmap = QtGui.QPixmap(os.path.join(self._base_path, 'logo.png'))
        # Named enum instead of the magic literal 1
        # (QtCore.Qt.KeepAspectRatio has the value 1).
        logo_pixmap = logo_pixmap.scaled(400, 100, QtCore.Qt.KeepAspectRatio)
        logo = QtWidgets.QLabel(self)
        logo.setPixmap(logo_pixmap)
        layout.addWidget(logo)
        self.status_display = QtWidgets.QLabel()
        tout = typhos.utils.TyphosLoading.LOADING_TIMEOUT_MS
        # No Timeout! Temporarily disable the class-level timeout while the
        # spinner is constructed, then restore the previous value.
        typhos.utils.TyphosLoading.LOADING_TIMEOUT_MS = -1
        loading = typhos.utils.TyphosLoading(self)
        typhos.utils.TyphosLoading.LOADING_TIMEOUT_MS = tout
        status_layout = QtWidgets.QHBoxLayout()
        status_layout.addWidget(self.status_display)
        status_layout.addWidget(loading)
        layout.addLayout(status_layout)

    def update_status(self, msg):
        """Show `msg` in the status label and pump the event loop so the
        label repaints while the caller keeps loading."""
        self.status_display.setText(f"Loading: {msg}")
        QtWidgets.QApplication.instance().processEvents()
|
# Adjacency list of a small directed tree; keys are node labels, values are
# the children explored by bfs() below.
graph = {
    '1': ['2', '3', '4'],
    '2': ['5', '6'],
    '5': ['9', '10'],
    '4': ['7', '8'],
    '7': ['11', '12']
}
def bfs(graph, start):
    """Breadth-first traversal of `graph` from `start`, printing every path
    discovered (one list per line). Returns None.

    NOTE: there is no visited-set, so the graph must be acyclic (a tree),
    otherwise this loops forever -- matching the original behaviour.
    """
    from collections import deque  # O(1) popleft vs. list.pop(0)
    queue = deque([[start]])
    while queue:
        path = queue.popleft()
        node = path[-1]
        for adjacent in graph.get(node, []):
            new_path = path + [adjacent]
            queue.append(new_path)
            # print() function call: the original used the Python 2 print
            # statement, which is a SyntaxError on Python 3.
            print(new_path)
# Run the demo; bfs() prints each path and returns None (also printed).
# Parenthesised call works on both Python 2 and 3.
print(bfs(graph, '1'))
|
class Node:
    """Single element of a singly linked list."""
    def __init__(self, data=None, next=None):
        self.data = data  # payload
        self.next = next  # following Node, or None at the tail


class LinkedList:
    """Minimal singly linked list with positional and value-based editing."""

    def __init__(self):
        # The first node is called the head; None means the list is empty.
        self.head = None

    def insert_at_beginning(self, data):
        """Prepend `data`; O(1)."""
        self.head = Node(data, self.head)

    def insert_at_end(self, data):
        """Append `data`; O(n) walk to the tail."""
        if self.head is None:
            self.head = Node(data, None)
            return
        itr = self.head
        while itr.next:
            itr = itr.next
        itr.next = Node(data, None)

    def insert_values(self, data_list):
        """Replace the whole list content with the items of `data_list`."""
        self.head = None
        for data in data_list:
            self.insert_at_end(data)

    def get_length(self):
        """Number of nodes; O(n)."""
        count = 0
        itr = self.head
        while itr:
            count += 1
            itr = itr.next
        return count

    def remove_at(self, index):
        """Remove the node at `index`; raises for out-of-range indices."""
        if index < 0 or index >= self.get_length():
            raise Exception("Invalid index")
        if index == 0:
            self.head = self.head.next
            # Bug fix: the original fell through here and pointlessly
            # scanned the whole list after removing the head.
            return
        count = 0
        itr = self.head
        while itr:
            if count == index - 1:
                itr.next = itr.next.next
                break
            itr = itr.next
            count += 1

    def insert_at(self, index, data):
        """Insert `data` before position `index` (0 makes it the new head).

        NOTE: as in the original, index == length is rejected, so this
        cannot append, and inserting into an empty list raises.
        """
        if index < 0 or index >= self.get_length():
            raise Exception("Invalid index")
        if index == 0:
            self.insert_at_beginning(data)
            return
        count = 0
        itr = self.head
        while itr:
            if count == index - 1:
                itr.next = Node(data, itr.next)
                break
            itr = itr.next
            count += 1

    def insert_after_value(self, data_after, data_to_insert):
        """Insert after the first node holding `data_after`; no-op if absent."""
        if self.head is None:
            return
        if self.head.data == data_after:
            self.head.next = Node(data_to_insert, self.head.next)
            return
        itr = self.head
        while itr:
            if itr.data == data_after:
                itr.next = Node(data_to_insert, itr.next)
                break
            itr = itr.next

    def remove_by_value(self, data):
        """Remove the first node holding `data`; no-op if absent."""
        if self.head is None:
            return
        if self.head.data == data:
            self.head = self.head.next
            return
        itr = self.head
        while itr.next:
            if itr.next.data == data:
                itr.next = itr.next.next
                break
            itr = itr.next

    def reverse(self):
        """Reverse the list in place by re-linking the nodes; O(n)."""
        prev = None
        current = self.head
        while current is not None:
            nxt = current.next
            current.next = prev
            prev = current
            current = nxt
        self.head = prev

    def print(self):
        """Print the node values on one line, space-separated."""
        if self.head is None:
            print('Linked list is empty.')
            return
        itr = self.head
        while itr:
            print(itr.data, end=' ')
            itr = itr.next
if __name__ == "__main__":
ll = LinkedList()
ll.insert_at_beginning(10)
ll.insert_at_beginning(20)
# ll.insert_values(["banana", "mango", "grapes", "orange"])
# ll.print()
# ll.insert_after_value("mango", "apple")
# ll.print()
# ll.remove_by_value("orange")
# ll.print()
# ll.remove_by_value("figs")
# ll.print()
# ll.remove_by_value("banana")
# ll.remove_by_value("mango")
# ll.remove_by_value("apple")
# ll.remove_by_value("grapes")
# ll.reverse()
ll.print() |
import scrapy
from tutorial.items import FileDemoItem
class FiledemoSpider(scrapy.Spider):
    """Scrapes quotes.toscrape.com, yielding one FileDemoItem per quote and
    following the pagination links."""
    name = 'FileDemo'
    allowed_domains = ['quotes.toscrape.com']
    start_urls = [
        'http://quotes.toscrape.com/',
    ]
    def parse(self, response):
        """Default callback: extract quotes via XPath, then follow 'next'."""
        for quote in response.xpath('//div[@class="quote"]'):
            item = FileDemoItem()
            item['text'] = quote.xpath('./span[@class="text"]/text()').extract_first()
            item['author'] = quote.xpath('.//small[@class="author"]/text()').extract_first()
            item['tags'] = quote.xpath('.//div[@class="tags"]/a[@class="tag"]/text()').extract()
            yield item
        next_page_url = response.xpath('//li[@class="next"]/a/@href').extract_first()
        if next_page_url is not None:
            # No explicit callback, so Scrapy falls back to parse().
            yield scrapy.Request(response.urljoin(next_page_url))
    def parse_css(self, response):
        """CSS-selector variant of parse(); yields plain dicts, not items.
        NOTE(review): not referenced by any Request in this spider --
        confirm whether it is still needed."""
        for quote in response.css("div.quote"):
            yield {
                'text': quote.css("span.text::text").extract_first(),
                'author': quote.css("small.author::text").extract_first(),
                'tags': quote.css("div.tags > a.tag::text").extract()
            }
        next_page_url = response.css("li.next > a::attr(href)").extract_first()
        if next_page_url is not None:
            yield scrapy.Request(response.urljoin(next_page_url))
|
from .homspace import HeckeModuleHomspace, is_HeckeModuleHomspace
from .module import HeckeModule_free_module, HeckeModule_generic, is_HeckeModule
from .hecke_operator import HeckeOperator, is_HeckeOperator
from .degenmap import DegeneracyMap
from .algebra import HeckeAlgebra, is_HeckeAlgebra
from .morphism import (HeckeModuleMorphism, HeckeModuleMorphism_matrix,
is_HeckeModuleMorphism,
is_HeckeModuleMorphism_matrix)
from .element import HeckeModuleElement, is_HeckeModuleElement
from .submodule import HeckeSubmodule, is_HeckeSubmodule
from .ambient_module import AmbientHeckeModule, is_AmbientHeckeModule
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.test import TestCase
from system.models import Configuration
from physical.errors import NoDiskOfferingError, NoDiskOfferingLesserError, \
NoDiskOfferingGreaterError, DiskOfferingMaxAutoResize
class PhysicalErrorsTestCase(TestCase):
    """Checks the human-readable messages of the physical-layer errors."""
    def setUp(self):
        # DiskOfferingMaxAutoResize reads this configuration entry.
        self.auto_resize_max_size_in_gb = Configuration(
            name='auto_resize_max_size_in_gb', value=100
        )
        self.auto_resize_max_size_in_gb.save()
    def tearDown(self):
        self.auto_resize_max_size_in_gb.delete()
    def test_no_disk_offering(self):
        kb = 123
        kind = 'testing'
        expected = 'No disk offering {} than {}kb'.format(kind, kb)
        error = NoDiskOfferingError(typo=kind, size=kb)
        self.assertEqual(error.message, expected)
    def test_no_disk_offering_lesser(self):
        kb = 456
        expected = 'No disk offering lesser than {}kb'.format(kb)
        error = NoDiskOfferingLesserError(size=kb)
        self.assertEqual(error.message, expected)
    def test_no_disk_offering_greater(self):
        kb = 789
        expected = 'No disk offering greater than {}kb'.format(kb)
        error = NoDiskOfferingGreaterError(size=kb)
        self.assertEqual(error.message, expected)
    def test_disk_auto_resize_max_value(self):
        expected = 'Disk auto resize can not be greater than {}GB'.format(
            self.auto_resize_max_size_in_gb.value
        )
        error = DiskOfferingMaxAutoResize()
        self.assertEqual(error.message, expected)
|
import sys
import os
from time import sleep
from core.system import *
from modules.logo import *
class lt(object):
    """Starts a localtunnel ('lt') tunnel for a locally running server.
    Relies on globals from the star-imports above: `system`, `bpath`,
    `Mylogo` -- TODO confirm their definitions in core.system / modules.logo.
    """
    def notinl(self):
        # Report that the `lt` binary is missing, then pause briefly.
        Mylogo()
        print("\n\n\033[1;31m Error \033[01;33m\'localtunnel\' \033[01;31mis not installed in "+system+".\033[00m")
        sleep(2)
    def locals(self):
        """Prompt for subdomain/port and run `lt`; tries to install it first
        if no known install location contains the binary."""
        while True:
            os.system("python modules/.srvr.aex")
            Mylogo()
            # NOTE(review): the values entered below are concatenated into a
            # shell command passed to os.system without any sanitisation --
            # shell-injection risk; consider subprocess with an argument list.
            dn = input("\n\n\033[1;33m Enter your subdomain name (\033[01;32mex Myweb\033[01;33m) :- \033[1;36m")
            port = input("\033[1;33m Enter your port (\033[01;32mex 8080\033[01;33m) :- \033[1;36m")
            os.system("python modules/.srvr.aex")
            Mylogo()
            print("\n\033[01;33mStarting Server ......\033[00m\n")
            # Probe the known install locations for the `lt` executable;
            # os.system blocks until the tunnel process exits.
            if os.path.exists(bpath+"lt"):
                os.system("lt --port "+port+" --subdomain "+dn)
                print("\n\033[01;31m unfortunately server stopped\n\033[00m")
                sys.exit()
            elif os.path.exists("/usr/local/bin/lt"):
                os.system("lt --port "+port+" --subdomain "+dn)
                print("\033[01;31m unfortunately server stopped\n\033[00m")
                sys.exit()
            elif os.path.exists("/usr/sbin/lt"):
                os.system("lt --port "+port+" --subdomain "+dn)
                print("\n\033[01;31m unfortunately server stopped\n\033[00m")
                sys.exit()
            else:
                # Not installed: attempt installation, then retry or give up.
                ins().localin()
                if os.path.exists("/data/data/com.termux/files/usr/bin/lt") or os.path.exists("/usr/bin/lt") or os.path.exists("/usr/local/bin/lt") or os.path.exists("/usr/sbin/lt"):
                    self.locals()
                else:
                    self.notinl()
            break
class ins(object):
    """Installs localtunnel via npm for the detected platform (`system` and
    `pac` come from the star-imports -- TODO confirm their definitions)."""
    def localin(self):
        Mylogo()
        print("\n\033[01;32mInstalling Localtunnel .......\033[00m\n")
        if system=="termux":
            # No supported install path on Termux; just report it missing.
            lt().notinl()
        elif system=="ubuntu":
            # Ubuntu: node/npm from the distro repos, localtunnel via npm.
            os.system(pac+" update")
            os.system(pac+" upgrade -y")
            os.system(pac+" install nodejs -y")
            os.system(pac+" install npm -y")
            os.system("sudo npm install -g localtunnel")
            if os.path.exists("/data/data/com.termux/files/usr/bin/lt") or os.path.exists("/usr/bin/lt") or os.path.exists("/usr/local/bin/lt") or os.path.exists("/usr/sbin/lt"):
                lt().locals()
            else:
                lt().notinl()
        else:
            # Other Debian-like systems: bootstrap node 10.x from nodesource.
            os.system(pac+" install sudo -y")
            os.system(pac+" update")
            os.system(pac+" upgrade -y")
            os.system(pac+" install curl python-software-properties -y")
            os.system("curl -sL https://deb.nodesource.com/setup_10.x | sudo bash -")
            os.system(pac+" install nodejs -y")
            os.system(pac+" install npm -y")
            os.system("npm install -g localtunnel")
            if os.path.exists("/data/data/com.termux/files/usr/bin/lt") or os.path.exists("/usr/bin/lt") or os.path.exists("/usr/local/bin/lt") or os.path.exists("/usr/sbin/lt"):
                lt().locals()
            else:
                lt().notinl()
def chklt():
    """Check whether the localtunnel CLI is installed; run it if so,
    otherwise offer to install it (via ins().localin())."""
    known_lt_paths = (
        "/data/data/com.termux/files/usr/bin/lt",
        "/usr/bin/lt",
        "/usr/local/bin/lt",
        "/usr/sbin/lt",
    )
    if any(os.path.exists(path) for path in known_lt_paths):
        lt().locals()
    else:
        Mylogo()
        print("\n\n\033[01;31m [\033[01;33m+\033[01;31m] \033[01;36mLocaltunnel \033[01;31mis not installed in your "+system+".")
        opt=input("\n\033[01;33m Do you want to install Localtunnel [\033[01;32mY/n\033[01;33m] >>\033[01;36m ")
        if opt in ("y", "Y"):
            Mylogo()
            print("\n\033[01;33minstalling Localtunnel ......\033[00m\n")
            sleep(1)
            ins().localin()
def local():
    # Entry point for the "local" tunnel option: delegates to chklt(), which
    # either starts localtunnel or walks the user through installing it.
    chklt()
import os
import platform
import sys
import time
import traceback
#import sys
#sys.path.append("../..") # tell python to add top level directories so we can import from here as well.
from .build_tools import *
'''
Run cmake script is in charge of calling the cmake command for
every versions of maya. Basically it calls the following commands:
# mkdir build/folders
# path/to/visual/cvars64.bat // if needed
# cmake -DMAYA_VERSION=2014 (and a lot of other args ...)
# jom -j8 // compiles the code
# cmake -DMAYA_VERSION=2015 (and a lot of other args ...)
# jom -j8 // compiles the code
# etc.
'''
# -----------------------------------------------------------------------------
def append_define_args(command_list, list_defines):
    """Append cmake "-D SYMBOL=VALUE" pairs to a command list.

    Mutates *command_list* in place (preserving the original `+=` semantics)
    and returns it for convenience.

    FIX: the local alias previously shadowed the `list` builtin.
    """
    for symbol_name, value in list_defines:
        command_list += ["-D", symbol_name + "=" + value]
    return command_list
# -----------------------------------------------------------------------------
# @return the cmake command corresponding to cmake_args.
def get_cmake_command_list(cmake_args, list_defines):
    """Return the full cmake invocation (as an argv list) for one configure.

    cmake_args keys used: build_type, maya_version, generator, arch_type,
    vcs_path, current_dir. list_defines: extra (symbol, value) -D pairs.

    FIX: removed the permanently-dead `if False:` developer toggles
    (alternative MinGW/Clang compiler setups) and the builtin-shadowing
    `list` alias; behavior is unchanged.
    """
    cmd = ["cmake",
           "-D", "CMAKE_BUILD_TYPE=" + cmake_args['build_type'],
           "-D", "MAYA_VERSION=" + cmake_args['maya_version'],
           "-G", cmake_args['generator'],
           "-D", "CMAKE_BUILD_ARCH_X64=" + str(int(cmake_args['arch_type'] == "x64")),
           ]
    # On Windows (vcs_path set) force the MSVC compiler driver.
    if cmake_args['vcs_path'] != "":
        cmd += ["-D", "CMAKE_C_COMPILER=cl.exe",
                "-D", "CMAKE_CXX_COMPILER=cl.exe"]
    cmd = append_define_args(cmd, list_defines)
    cmd += [cmake_args['current_dir']]
    return cmd
# -----------------------------------------------------------------------------
# raw bash version of the command
# def get_cmake_command(cmake_args):
# return ("cmake " +
# "-DCMAKE_BUILD_TYPE=" + cmake_args['build_type'] + " " +
# "-DMAYA_VERSION=" + cmake_args['maya_version'] + " " +
# "-G^\"" + cmake_args['generator'] + "^\"" + " " +
# "-DCMAKE_BUILD_ARCH_X64=" + str(int(cmake_args['arch_type'] == "x64")) + " " +
# "-DCMAKE_RC_COMPILER=" + cmake_args['vcs_path'] + " "
# + cmake_args['current_dir'])
# -----------------------------------------------------------------------------
'''
arch_string : "x86" or "x64"
maya_version_list : ["All", "2014", "2015", "2016", "2016.5", "2017", "2018"]
build_type : ["Debug", "Release", "Both"]
use_separate_folder_build_folder_maya
'''
def launch(list_defines, arch_string, maya_version_list, build_type, use_separate_folder_build_folder_maya):
    """Configure and build the plugin for each requested Maya version.

    list_defines: (symbol, value) pairs forwarded to cmake as -D defines.
    arch_string: "x86" or "x64".
    maya_version_list: versions to build, e.g. ["2017", "2018"].
    build_type: "Debug", "Release" or "Both".
    use_separate_folder_build_folder_maya: one build tree per Maya version.

    FIXES: `sys.exit()` was called without `sys` being imported (it was
    commented out at the top of the file); the example Visual Studio path was
    printed from a non-raw literal, so "\\b" became a backspace character.
    """
    start_time = time.time()
    current_dir = os.getcwd()
    # NOTE: cmake picks whichever Qt matches the generator via PATH; force a
    # specific install with e.g. -D CMAKE_PREFIX_PATH=C:/Qt/5.12.3/msvc2017_64/
    cmake_args = {}
    cmake_args['build_type'] = "Release"
    cmake_args['maya_version'] = "UNDEF"
    # Alternatives: "NMake Makefiles JOM", "MinGW Makefiles".
    cmake_args['generator'] = "Ninja"
    cmake_args['arch_type'] = arch_string
    cmake_args['vcs_path'] = ""
    cmake_args['current_dir'] = current_dir
    # Build tool matching the generator ("ninja", "jom", "make", ...).
    build_command = ["jom"]
    if cmake_args['generator'] == "Ninja":
        build_command = ["ninja"]
    elif cmake_args['generator'] == "NMake Makefiles JOM":
        build_command = ["jom"]
    #
    # Set up Visual Studio environment variables if needed
    #
    vcs_vars_command = ""
    if platform.system() == "Windows" and cmake_args['generator'] != "MinGW Makefiles":
        # String sentinel 'None' means the env var is absent.
        vcs_path = os.environ.get('VISUAL_STUDIO_PATH', 'None')
        if vcs_path == "None":
            print("ERROR CAN'T FIND PATH TO VISUAL STUDIO")
            print("please define the environment variable:")
            print("VISUAL_STUDIO_PATH=path_to_visual_compiler_binaries")
            print("the path usually looks like:")
            print(r'C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\bin')
            sys.exit()
        print("Visual Studio Path: ")
        print(vcs_path)
        vcs_vars_command = find_bat_script(cmake_args['arch_type'], vcs_path)
        cmake_args['vcs_path'] = vcs_path.replace('\\', '/')
    # When a vcvars script was found, every build is prefixed with "call <bat> &".
    post_command = ["call"] + vcs_vars_command + ["&"] if (len(vcs_vars_command) > 0) else []
    for maya_version in maya_version_list:
        os.chdir(current_dir)
        print("=========================================================================")
        print("Prep for MAYA"+maya_version)
        print("=========================================================================")
        cmake_args['maya_version'] = maya_version
        if use_separate_folder_build_folder_maya:
            build_dir_debug = "build/maya"+maya_version+"/debug"
            build_dir_release = "build/maya"+maya_version+"/release"
        else:
            build_dir_debug = "build/debug"
            build_dir_release = "build/release"
        print("\n\n== CREATE BUILD DIRECTORIES ==")
        make_dir(build_dir_debug)
        make_dir(build_dir_release)
        if build_type == "Release" or build_type == "Both":
            print("\n\n=== LAUNCH CMAKE RELEASE ===\n")
            os.chdir(current_dir)
            os.chdir(build_dir_release)
            cmake_args['build_type'] = "Release"
            cmake_cmd_list = get_cmake_command_list(cmake_args, list_defines)
            call_command(post_command + cmake_cmd_list + ["&"] + build_command)
        if build_type == "Debug" or build_type == "Both":
            print("\n\n=== LAUNCH CMAKE DEBUG ===\n")
            os.chdir(current_dir)
            os.chdir(build_dir_debug)
            cmake_args['build_type'] = "Debug"
            cmake_cmd_list = get_cmake_command_list(cmake_args, list_defines)
            call_command(post_command + cmake_cmd_list + ["&"] + build_command)
    print("Total build time: " + str(time.time() - start_time) + " sec\n")
    print("\n========== FINISHED ==========\n")
# -----------------------------------------------------------------------------
def main(list_defines=None):
    """Interactively pick Maya version(s) and build type, then launch builds.

    list_defines: optional extra (symbol, value) cmake defines.

    FIX: the mutable default argument `[]` is replaced with the None sentinel
    (backward compatible — calling with no argument behaves identically).
    """
    list_defines = [] if list_defines is None else list_defines
    #
    # Architecture selection is currently fixed to x64 (interactive choice
    # kept here for reference):
    #   archs = ["x86", "x64"]; arch_string = archs[ask_choice(...)]
    #
    maya_version_list = ["All", "2017", "2018", "2019", "2020", "2021"]
    idx = ask_choice("Choose Maya version", maya_version_list)
    if idx == 0:
        # "All": build every listed version.
        maya_version_list = maya_version_list[1:]
    else:
        # Just the one picked.
        maya_version_list = [maya_version_list[idx]]
    builds = ["Debug", "Release", "Both"]
    build_type = builds[ask_choice("Build for", builds)]
    launch([("CMAKE_BUILD_ARCH_X64", "1")] + list_defines, "x64", maya_version_list, build_type, True)
# -----------------------------------------------------------------------------
if __name__ == "__main__":
    try:
        main()
    except Exception as err:
        # Top-level boundary: report and fall through to the pause below.
        print("Exception raised")
        print(err)
        print("Trace: ")
        # FIX: traceback.print_exc() prints the trace itself and returns
        # None; wrapping it in print() emitted a stray "None" line.
        traceback.print_exc()
    finally:
        # Keep the console window open (script is often double-clicked).
        input("Press ENTER to continue.")
|
from railroad import NonTerminal, Terminal, Choice, OneOrMore, ZeroOrMore, Diagram, Optional, Sequence, Group, MultipleChoice, Comment, Start, DEFAULT_STYLE
# Railroad-diagram description of the accepted "sklearn subset" grammar:
# a DAG of transformer stages that feeds exactly one final estimator.
# Reusable sub-diagram: one transformer choice, labelled "tfm".
tfm_diagram = Group(Choice(0, "Normalizer", "MinMaxScaler", "RobustScaler", "StandardScaler", "PCA"), "tfm")
# Reusable sub-diagram: one estimator choice, labelled "est".
est_diagram = Group(Choice(0, "LogisticRegression", "DecisionTreeClassifier", "RandomForestClassifier", "GradientBoostingClassifier", "ExtraTreesClassifier","GaussianNB", "KNeighbors", "QDA"),"est")
# Top-level diagram: a "DAG" group (single transformer, transformer followed
# by a nested DAG, or a Concat of multiple branches) followed by an estimator.
sklearn_diagram = Diagram("sklearn_subset", Sequence(
    Group(
        Choice(0,
            tfm_diagram,
            Sequence(tfm_diagram, NonTerminal("DAG")),
            Sequence(MultipleChoice(0, "all", tfm_diagram, NonTerminal("DAG")), "Concat" ),
            Sequence(MultipleChoice(0, "all", est_diagram, NonTerminal("DAG")), "Concat" ),
        ),
        "DAG"
    ),
    est_diagram
))
#!usr/bin/python3
import subprocess
from fabric import Connection, ThreadingGroup, SerialGroup
import getpass
import json
import time
def get_node_list(node_config):
    """Return every cluster host: all workers followed by the master."""
    return [*node_config["workers"], node_config["master"]]
def get_node_group(nodes):
    # Build a fabric ThreadingGroup over all hosts, as root, with SSH agent
    # forwarding so apt/curl on the nodes can reuse the operator's keys.
    return ThreadingGroup(*nodes, user="root", forward_agent=True)
def node_group_put(node_group, local_path, remote_path):
    # Upload one local file to the same remote path on every node. Group
    # objects have no bulk put(), so iterate the individual connections.
    for conn in node_group:
        conn.put(local_path, remote=remote_path)
### ALL NODES CODE ###
def prepare_kubeadm(config):
    """Prepare every node (workers + master) for kubeadm: sysctl settings,
    base packages, Docker and the kubeadm toolchain."""
    # Get all the nodes
    nodes = get_node_list(config["nodes"])
    node_group = get_node_group(nodes)
    # Smoke-test SSH connectivity before doing anything destructive.
    node_group.run("echo this works")
    # Configure IP Tables (bridge-nf-call settings required by Kubernetes).
    node_group_put(node_group, "kube_files/nodes/k8s.conf", "/etc/sysctl.d/k8s.conf")
    node_group.run("sysctl --system")
    # Install Required Everything (APT, Docker, Kube)
    install_packages(config["base_packages"], node_group)
    install_docker(config["docker_packages"], node_group)
    install_kubeadm(config["kube_packages"], node_group)
def install_packages(package_list, node_group, mark=False):
    """apt-get install the given packages on every node; optionally pin them
    with apt-mark hold so later upgrades don't move them."""
    packages = " ".join(package_list)
    node_group.run("apt-get update")
    node_group.run("apt-get install -y {}".format(packages))
    if mark:
        node_group.run("apt-mark hold {}".format(packages))
def install_docker(docker_packages, node_group):
    """Install Docker from the upstream apt repo and configure its daemon."""
    # Trust Docker's apt signing key and add the stable repo for this release.
    node_group.run("curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -")
    node_group.run('sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"')
    install_packages(docker_packages, node_group)
    # Push our daemon.json (cgroup driver etc.) before restarting the daemon.
    node_group_put(node_group, "kube_files/nodes/daemon.json", "/etc/docker/daemon.json")
    node_group.run("mkdir -p /etc/systemd/system/docker.service.d")
    node_group.run("sudo systemctl daemon-reload")
    node_group.run("sudo systemctl restart docker")
    node_group.run("sudo systemctl enable docker")
def install_kubeadm(kube_packages, node_group):
    """Add the Kubernetes apt repo and install + pin the kube packages."""
    # Trust the Google apt signing key and register the kubernetes repo.
    node_group.run("curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -")
    node_group_put(node_group, "kube_files/nodes/kubernetes.list", "/etc/apt/sources.list.d/kubernetes.list")
    # CONSISTENCY FIX: use install_packages' own mark= support instead of
    # duplicating the apt-mark hold command here (identical command sequence).
    install_packages(kube_packages, node_group, mark=True)
def init_kubeadm(config):
    """Initialize the control plane with kubeadm and join all workers."""
    # Get Connections
    init_conf = config["init"]
    master_conn = Connection(config["nodes"]["master"], user="root")
    worker_group = ThreadingGroup(*config["nodes"]["workers"], user="root")
    # BUG FIX: the binary is `kubeadm` (not `kube`) and the flag is
    # `--apiserver-advertise-address` (not `--api-server-advertise`).
    master_conn.run("kubeadm init --apiserver-advertise-address={} --pod-network-cidr={}".format(init_conf["api_server"], init_conf["pod_network"]))
    # Make kubectl usable for root on the master.
    master_conn.run("mkdir -p $HOME/.kube")
    master_conn.run("cp -i /etc/kubernetes/admin.conf $HOME/.kube/config")
    master_conn.run("chown $(id -u):$(id -g) $HOME/.kube/config")
    # Mint a fresh join command and run it on every worker.
    join_cmd = master_conn.run("kubeadm token create --print-join-command", hide=True).stdout
    worker_group.run(join_cmd)
    print("Waiting for Nodes to Join")
    time.sleep(30)
def install_plugins(config):
    """Install the cluster add-ons, in dependency order: CNI (Calico), Helm,
    MetalLB load balancer, then the NFS storage provisioner (needs Helm)."""
    plugin_conf = config["plugins"]
    master_conn = Connection(config["nodes"]["master"], user="root")
    install_calico(master_conn, plugin_conf['calico'])
    install_helm(master_conn, plugin_conf['helm'])
    install_metallb(master_conn, plugin_conf['metallb'])
    install_nfs_client_provisioner(master_conn, plugin_conf['nfs-client-provisioner'])
def install_calico(master_conn, calico_conf):
    """Install the Calico CNI via the tigera operator manifests.

    NOTE: `calico_conf` is currently unused — manifests come straight from
    the upstream URLs.
    """
    master_conn.run("kubectl create -f https://docs.projectcalico.org/manifests/tigera-operator.yaml")
    master_conn.run("kubectl create -f https://docs.projectcalico.org/manifests/custom-resources.yaml")
    master_conn.run("kubectl apply -f https://docs.projectcalico.org/manifests/calicoctl.yaml")
    # Convenience alias for the in-cluster calicoctl pod (only affects the
    # single shell fabric runs it in).
    master_conn.run('alias calicoctl="kubectl exec -i -n kube-system calicoctl -- /calicoctl"')
    print("Waiting for Calico to Initialize")
    time.sleep(30)
def install_helm(master_conn, helm_conf):
    """Install Helm on the master; version "2" or "3" per helm_conf.

    BUG FIX: `helm init` and Tiller exist only in Helm 2 — running them
    after a Helm 3 install fails. The Tiller setup is now gated on v2.
    """
    if helm_conf["version"] == "2":
        master_conn.run("curl https://raw.githubusercontent.com/kubernetes/helm/master/scripts/get | bash")
        # Helm 2 needs the in-cluster Tiller component with cluster-admin.
        master_conn.run("kubectl --namespace kube-system create serviceaccount tiller")
        master_conn.run("kubectl create clusterrolebinding tiller --clusterrole cluster-admin --serviceaccount=kube-system:tiller")
        master_conn.run("helm init --service-account tiller --history-max 100 --wait")
        # Restrict Tiller's gRPC port to localhost (basic hardening).
        master_conn.run('kubectl patch deployment tiller-deploy --namespace=kube-system --type=json --patch=\'[{"op": "add", "path": "/spec/template/spec/containers/0/command", "value": ["/tiller", "--listen=localhost:44134"]}]\'')
        print("Waiting for Helm and Tiller to Initialize")
        time.sleep(30)
    elif helm_conf["version"] == "3":
        # Helm 3 is client-only: nothing else to deploy.
        master_conn.run('curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash')
    else:
        print("No Helm Version Specified - Exiting")
        exit()
def install_metallb(master_conn, metallb_conf):
    """Install MetalLB v0.9.3 and apply our address-pool ConfigMap.

    NOTE: `metallb_conf` is currently unused — the pool comes from the local
    kube_files/plugins/metallb/config.yaml instead.
    """
    # Just Reading Config for now, Reading from Clusters
    master_conn.run("kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.9.3/manifests/namespace.yaml")
    master_conn.run("kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.9.3/manifests/metallb.yaml")
    # Speaker pods authenticate each other with this shared secret.
    master_conn.run('kubectl create secret generic -n metallb-system memberlist --from-literal=secretkey="$(openssl rand -base64 128)"')
    master_conn.run('mkdir -p /root/kube_plugins/metallb')
    master_conn.put('kube_files/plugins/metallb/config.yaml', remote="/root/kube_plugins/metallb/config.yaml")
    master_conn.run('kubectl apply -f /root/kube_plugins/metallb/config.yaml')
    print("Waiting for MetalLB to Initialize")
    time.sleep(30)
def install_nfs_client_provisioner(master_conn, nfs_client_conf):
    """Install the nfs-client-provisioner chart pointing at the configured
    NFS export (requires Helm to be installed first)."""
    nfs_ns = nfs_client_conf["namespace"]
    nfs_server = nfs_client_conf["nfs_server"]
    nfs_path = nfs_client_conf["export_path"]
    master_conn.run("helm install --namespace {} --set nfs.server={} --set nfs.path={} stable/nfs-client-provisioner".format(nfs_ns, nfs_server, nfs_path))
    # FIX: removed a stray trailing `pass` statement (dead code).
def read_cluster_conf():
    """Load the cluster topology/config from cluster/cluster.json."""
    with open("cluster/cluster.json", "r") as conf_file:
        return json.load(conf_file)
if __name__ == "__main__":
    # Full cluster bring-up: prep every node, init the control plane and join
    # the workers, then layer on CNI / Helm / LB / storage plugins.
    config = read_cluster_conf()
    prepare_kubeadm(config)
    init_kubeadm(config)
    install_plugins(config)
#!/usr/bin/env python
"""Generate `leds.inc`: a KiCad footprint snippet placing a 35 x 47 grid of
1.0mm RGB LED modules on a 2mm pitch, starting at (66, 2).

FIX: the original ended with `fd.close` (missing call parentheses), so the
file was never explicitly closed/flushed; a `with` block now guarantees it.
"""
print("Generating leds")

# One KiCad module per LED; format slots: x (mm), y (mm), column, row.
MODULE_TEMPLATE = '''
(module tom-opto:B1010RGBT-HG (layer F.Cu) (tedit 5FCE6CDB) (tstamp 5FD3D3A9)
(at {} {})
(descr "RGB 1.0mm x 1.0mm")
(path /5FD9C85A/5FF7642E)
(attr smd)
(fp_text reference D{:02}{:02} (at 10 10 90) (layer F.SilkS) hide
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_text value FC-B1010RGBT-HG (at 10 10 90) (layer F.Fab) hide
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_line (start -0.6 -0.6) (end 0.6 -0.6) (layer F.CrtYd) (width 0.12))
(fp_line (start 0.6 -0.6) (end 0.6 0.6) (layer F.CrtYd) (width 0.12))
(fp_line (start 0.6 0.6) (end -0.6 0.6) (layer F.CrtYd) (width 0.12))
(fp_line (start -0.6 0.6) (end -0.6 -0.6) (layer F.CrtYd) (width 0.12))
(fp_line (start 0.7 -0.7) (end 0.7 0) (layer F.SilkS) (width 0.12))
(fp_line (start 0.7 -0.7) (end 0 -0.7) (layer F.SilkS) (width 0.12))
(pad 1 smd rect (at 0.385 -0.385 90) (size 0.45 0.45) (layers F.Cu F.Paste F.Mask))
(pad 4 smd rect (at -0.385 -0.385 90) (size 0.45 0.45) (layers F.Cu F.Paste F.Mask))
(pad 3 smd rect (at -0.385 0.385 90) (size 0.45 0.45) (layers F.Cu F.Paste F.Mask))
(pad 2 smd rect (at 0.385 0.385 90) (size 0.45 0.45) (layers F.Cu F.Paste F.Mask))
)
'''

with open("leds.inc", "w") as fd:
    for ix in range(0, 35):
        for iy in range(0, 47):
            fd.write(MODULE_TEMPLATE.format(66 + ix * 2, 2 + iy * 2, ix + 1, iy + 1))
|
from thinksocket import ThinkSocket
import base64
import glob
# Manual test driver for ThinkSocket (Python 2): accept one client, read its
# registration message, then ask it to classify a test image sent as a
# base64-encoded blob inside a JSON "function" request.
with ThinkSocket() as sock:
    sock.bind()
    sock.listen(1)
    # Block until a client connects.
    connection, client_address = sock.accept()
    print "Got connection from client:", client_address
    with ThinkSocket(connection) as conn:
        # The client introduces itself first.
        registration = conn.receive_json()
        print "Got Registration", registration
        # Base64-encode every test image so binary data can travel in JSON.
        b64s = []
        for fl in glob.glob("test/imgs/*.jpg"):
            print fl
            with open(fl) as f:
                img = f.read()
                img64 = base64.b64encode(img)
                b64s.append(img64)
        # Batch variant kept for reference (classify all images in one call):
        # function_req = {
        #     "type":"function",
        #     "function": {
        #         "uid": "classify_images_v1.0.0",
        #         "args": [[ { "type":"base64-encoded-blob",
        #                      "blob": img64 } for img64 in b64s ]]
        #     }
        # }
        # Single-image request; assumes at least one jpg exists (b64s[0]).
        function_req = {
            "type":"function",
            "function": {
                "uid": "classify_image_v1.0.0",
                "args": [ { "type":"base64-encoded-blob",
                            "blob": b64s[0] } ]
            }
        }
        conn.send_json(function_req)
        answer = conn.receive_json()
        print "Got answer:", answer
|
from _sha256 import sha256
from common.serializers.serialization import domain_state_serializer
from plenum.common.constants import STEWARD, ROLE
LAST_SEQ_NO = "lsn"
VALUE = "val"
LAST_UPDATE_TIME = "lut"
def is_steward(state, nym, is_committed: bool = False):
    """True iff the DID `nym` currently holds the STEWARD role."""
    return get_role(state, nym, is_committed) == STEWARD
def get_role(state, nym, is_committed: bool = False):
    """Return the ROLE stored for `nym`, or {} when the DID is unknown
    (matching get_nym_details' empty-dict convention)."""
    nym_data = get_nym_details(state, nym, is_committed)
    return nym_data.get(ROLE) if nym_data else {}
def get_nym_details(state, nym, is_committed: bool = False):
    """Fetch and deserialize the state entry for `nym`; {} when absent."""
    raw = state.get(nym_to_state_key(nym), is_committed)
    if raw:
        return domain_state_serializer.deserialize(raw)
    return {}
def nym_to_state_key(nym: str) -> bytes:
    """Map a DID string to its state-trie key: the raw SHA-256 digest of its
    UTF-8 encoding."""
    hasher = sha256(nym.encode())
    return hasher.digest()
def encode_state_value(value, seqNo, txnTime):
    """Serialize a state value together with its ledger seqNo and txn time,
    using the short keys defined at module level (VALUE/lsn/lut)."""
    payload = {
        LAST_SEQ_NO: seqNo,
        LAST_UPDATE_TIME: txnTime,
        VALUE: value,
    }
    return domain_state_serializer.serialize(payload)
def decode_state_value(ecnoded_value):
    """Inverse of encode_state_value: return (value, last_seq_no,
    last_update_time); missing keys decode as None.

    NOTE: the misspelled parameter name ("ecnoded") is kept for backward
    compatibility with any keyword callers.
    """
    decoded = domain_state_serializer.deserialize(ecnoded_value)
    return (
        decoded.get(VALUE),
        decoded.get(LAST_SEQ_NO),
        decoded.get(LAST_UPDATE_TIME),
    )
|
class DeviceConfiguration():
    """Wrapper around the vManage "template" REST endpoints.

    `session` must expose `api_url` plus requests-style `get`/`post`/`put`/
    `delete` methods whose responses provide `.json()` and `.status_code`.

    FIXES: corrected the return annotation of
    add_feature_template_to_device_feature_template_by_name (it returns the
    update_device_template dict, not an int); renamed a misleading local in
    get_device_feature_templates; get_attached_devices_by_template_id now
    delegates to its identical twin instead of duplicating it.
    """

    def __init__(self, session):
        self.session = session

    def get_device_list_by_template_id(self, template_id):
        """Return the devices attached to the given device template."""
        url = f'{self.session.api_url}/template/device/config/attached/{template_id}'
        r = self.session.get(url)
        return r.json()

    def get_device_templates(self, params: dict = None) -> dict:
        """Return all device templates, optionally filtered by `params`."""
        url = f'{self.session.api_url}/template/device'
        r = self.session.get(url, params=params)
        return r.json()

    def get_device_template_id_by_name(self, name: str) -> str:
        """Return the templateId of the device template named `name`.

        Raises StopIteration when no template matches.
        """
        device_templates = self.get_device_templates()
        return next(tmpl['templateId'] for tmpl in device_templates['data'] if tmpl['templateName'] == name)

    def get_feature_templates(self, params: dict = None) -> dict:
        """Return all feature templates, optionally filtered by `params`."""
        url = f'{self.session.api_url}/template/feature'
        r = self.session.get(url, params=params)
        return r.json()

    def get_feature_templates_by_name(self, name: str) -> dict:
        """Return the first feature template named `name`.

        Raises StopIteration when no template matches.
        """
        feature_templates = self.get_feature_templates()
        return next(d for d in feature_templates['data'] if d['templateName'] == name)

    def get_feature_templates_by_type(self, type: str) -> list:
        """Return every feature template whose templateType equals `type`.

        NOTE: the parameter shadows the `type` builtin; the name is kept so
        existing keyword callers keep working.
        """
        feature_templates = self.get_feature_templates()
        return [d for d in feature_templates['data'] if d['templateType'] == type]

    def get_device_template_object(self, template_id: str) -> dict:
        """Return the full device-template object for `template_id`."""
        url = f'{self.session.api_url}/template/device/object/{template_id}'
        r = self.session.get(url)
        return r.json()

    def get_device_feature_templates(self) -> list:
        """Return the full template object for every device template."""
        device_templates = self.get_device_templates()
        template_objects = []  # was misnamed `device_template_ids`
        for template in device_templates['data']:
            template_objects.append(self.get_device_template_object(template['templateId']))
        return template_objects

    def get_attached_config(self, deviceid) -> dict:
        """Return the rendered configuration attached to one device."""
        params = {'deviceId': deviceid}
        url = f'{self.session.api_url}/template/device/config/attachedconfig'
        r = self.session.get(url, params=params)
        return r.json()

    def update_device_template(self, template_id: str, json: dict) -> dict:
        """PUT an updated device-template body; returns the API response.

        NOTE: parameter `json` shadows the stdlib module name; kept for
        backward compatibility.
        """
        url = f'{self.session.api_url}/template/device/{template_id}'
        r = self.session.put(url, json=json)
        return r.json()

    def add_device_feature_template(self, json: dict) -> dict:
        """POST a new device (feature-based) template."""
        url = f'{self.session.api_url}/template/device/feature'
        r = self.session.post(url, json=json)
        return r.json()

    def delete_device_feature_template(self, template_id: str) -> int:
        """DELETE a device template; returns the HTTP status code."""
        url = f'{self.session.api_url}/template/device/{template_id}'
        r = self.session.delete(url)
        return r.status_code

    def add_feature_template(self, json: dict = None) -> dict:
        """POST a new feature template."""
        url = f'{self.session.api_url}/template/feature'
        r = self.session.post(url, json=json)
        return r.json()

    def delete_feature_template(self, template_id: str) -> int:
        """DELETE a feature template; returns the HTTP status code."""
        url = f'{self.session.api_url}/template/feature/{template_id}'
        r = self.session.delete(url)
        return r.status_code

    def get_attached_devices_by_template_id(self, template_id: str) -> dict:
        """Alias of get_device_list_by_template_id (kept for compatibility)."""
        return self.get_device_list_by_template_id(template_id)

    def attach_feature_device_template(self, json: dict = None) -> int:
        """POST an attach request for a feature-based device template."""
        url = f'{self.session.api_url}/template/device/config/attachfeature'
        r = self.session.post(url, json=json)
        return r.status_code

    def clone_feature_template(self, template_name, suffix) -> dict:
        """Duplicate a feature template, suffixing its name/description."""
        feature_template = self.get_feature_templates_by_name(template_name)
        feature_template['templateName'] += suffix
        feature_template['templateDescription'] += suffix
        return self.add_feature_template(feature_template)

    def clone_device_feature_template(self, template_name, suffix):
        """Duplicate a device template, suffixing its name/description."""
        device_templates = self.get_device_feature_templates()
        device_template = next(d for d in device_templates if d['templateName'] == template_name)
        device_template['templateName'] += suffix
        device_template['templateDescription'] += suffix
        return self.add_device_feature_template(device_template)

    def add_feature_template_to_device_feature_template_by_name(self, feature_template_name: str, device_template_name: str) -> dict:
        """Attach an existing feature template to a device template (both
        looked up by name) and PUT the updated device template.

        FIX: return annotation corrected from int to dict — this returns
        update_device_template's JSON body.
        """
        device_template_id = self.get_device_template_id_by_name(device_template_name)
        device_template_obj = self.get_device_template_object(device_template_id)
        feature_template = self.get_feature_templates_by_name(feature_template_name)
        template_reference = {
            "templateId": feature_template['templateId'],
            "templateType": feature_template['templateType']
        }
        device_template_obj['generalTemplates'].append(template_reference)
        return self.update_device_template(device_template_id, device_template_obj)
# coding: utf8
from __future__ import unicode_literals
# Portuguese stop-word list. The triple-quoted literal is split on any
# whitespace, so line breaks and indentation inside it are irrelevant.
STOP_WORDS = set(
    """
à às área acerca ademais adeus agora ainda algo algumas alguns ali além ambas ambos antes
ao aos apenas apoia apoio apontar após aquela aquelas aquele aqueles aqui aquilo
as assim através atrás até aí
baixo bastante bem boa bom breve
cada caminho catorze cedo cento certamente certeza cima cinco coisa com como
comprida comprido conhecida conhecido conselho contra contudo corrente cuja
cujo custa cá
da daquela daquele dar das de debaixo demais dentro depois des desde dessa desse
desta deste deve devem deverá dez dezanove dezasseis dezassete dezoito diante
direita disso diz dizem dizer do dois dos doze duas dá dão
é és ela elas ele eles em embora enquanto entre então era essa essas esse esses esta
estado estar estará estas estava este estes esteve estive estivemos estiveram
estiveste estivestes estou está estás estão eu eventual exemplo
falta fará favor faz fazeis fazem fazemos fazer fazes fazia faço fez fim final
foi fomos for fora foram forma foste fostes fui
geral grande grandes grupo
inclusive iniciar inicio ir irá isso isto
já
lado lhe ligado local logo longe lugar lá
maior maioria maiorias mais mal mas me meio menor menos meses mesmo meu meus mil
minha minhas momento muito muitos máximo mês
na nada naquela naquele nas nem nenhuma nessa nesse nesta neste no nos nossa
nossas nosso nossos nova novas nove novo novos num numa nunca nuns não nível nós
número números
obrigada obrigado oitava oitavo oito onde ontem onze ora os ou outra outras outros
para parece parte partir pegar pela pelas pelo pelos perto pode podem poder poderá
podia pois ponto pontos por porquanto porque porquê portanto porém posição
possivelmente posso possível pouca pouco povo primeira primeiro próprio próxima
próximo puderam pôde põe põem
quais qual qualquer quando quanto quarta quarto quatro que quem quer querem quero
questão quieta quieto quinta quinto quinze quê
relação
sabe saber se segunda segundo sei seis sem sempre ser seria sete seu seus sexta
sexto sim sistema sob sobre sois somente somos sou sua suas são sétima sétimo só
tais tal talvez também tanta tanto tarde te tem temos tempo tendes tenho tens
tentar tentaram tente tentei ter terceira terceiro teu teus teve tipo tive
tivemos tiveram tiveste tivestes toda todas todo todos treze três tu tua tuas
tudo tão têm
um uma umas uns usa usar último
vai vais valor veja vem vens ver vez vezes vinda vindo vinte você vocês vos vossa
vossas vosso vossos vários vão vêm vós
zero
""".split()
)
|
def diff_map_info(W, verbose=False):
'''
Construct the information necessary to easily construct diffusion map for any t
Inputs:
W a numpy array of size n x n containing the affinities between points
Outputs:
diff_vec a numpy array of size n x n-1 containing the n-1 nontrivial eigenvectors of Markov matrix as columns
diff_eig a numpy array of size n-1 containing the n-1 nontrivial eigenvalues of Markov matrix
We assume the convention that the coordinates in the diffusion vectors are in descending order
according to eigenvalues.
'''
r = np.sum(W, axis=0) # row sum
D_right = np.diag((r)**-0.5)
D_left = np.diag((r)**-0.5)
M_s = np.matmul(D_right, np.matmul(W,D_left)) # Normalized Markov matrix
eigenValues, eigenVectors = np.linalg.eigh(M_s) # Compute eigendecomposition
if verbose:
print('eigenValues: {}'.format(eigenValues))
idx = eigenValues.argsort()[::-1] # Sort and invert order of eigenvalues
idx = idx[1:]
if verbose:
print('idx: {}'.format(idx))
diff_eig = eigenValues[idx]
if verbose:
print('diff_eig: {}'.format(diff_eig))
diff_vec = eigenVectors[:,idx]
if verbose:
print('diff_vec: {}'.format(diff_vec))
# Compute normalization
psi = np.matmul(D_left,diff_vec)
diff_vec = psi/np.linalg.norm(psi)
# return the info for diffusion maps
return diff_vec, diff_eig
def get_diff_map(diff_vec, diff_eig, t):
    """Diffusion map at diffusion time t.

    Inputs:
        diff_vec (n, n-1) nontrivial eigenvectors of the Markov matrix.
        diff_eig (n-1,) matching eigenvalues.
        t        diffusion time parameter.
    Outputs:
        (n, n-1) array: each eigenvector column scaled by eigenvalue**t.
    """
    scaling = np.diag(diff_eig) ** t
    return diff_vec @ scaling
from flask import Flask, request, jsonify
from flask_restful import Resource, Api
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
import slack
# Create webapp: Flask app wrapped with Flask-RESTful's routing layer.
app = Flask(__name__)
api = Api(app)
# Rate-limit every client (keyed by remote address) to 50 requests per day
# and 20 per hour, applied as the default for all endpoints.
limiter = Limiter(
    app,
    key_func=get_remote_address,
    default_limits=["50 per day", "20 per hour"])
class Computer(Resource):
    """REST resource for a single computer, keyed by serial number."""

    def get(self, serial):
        """Return stored inventory for `serial`; 204 when nothing is stored."""
        data = "FOO BAR!"  # placeholder until a real datastore is wired in
        if not data:
            # Nothing to return
            return ("", 204)
        return (data)

    def put(self, serial):
        """Accept an inventory payload for `serial` and announce it on Slack."""
        data = request.json
        if not data:
            # Nothing was passed to webservice
            return ("", 204)
        # TODO: Store data somewhere
        # FIX: corrected the spelling "recieved" -> "received" in the
        # user-facing Slack message.
        slack_info = 'Inventory received from *{}*'.format(serial)
        slack.post_message_to_slack(slack_info)
        return (jsonify(serialNumber=serial))
class HelloWorld(Resource):
    """Trivial liveness-check endpoint."""

    def get(self):
        """Return a constant greeting."""
        greeting = "Hello, World!"
        return greeting
# Route registrations.
api.add_resource(HelloWorld, "/hello")
api.add_resource(Computer, "/computer/<string:serial>")

if __name__ == "__main__":
    # Create certificates:
    # openssl req -x509 -newkey rsa:4096 -nodes -out cert.pem -keyout key.pem -days 365
    context = ('cert.pem', 'key.pem')
    # FIX: `context` was defined but an identical tuple literal was passed;
    # use the variable (behavior unchanged).
    app.run(host='0.0.0.0', debug=False, ssl_context=context)
|
import json
from easydata.data import DataBag
from easydata.managers import ModelManager
from tests.factory import data_dict
from tests.factory.models import ProductJsonModel
def load_data_bag_with_model():
    """Build a DataBag whose `main` payload is the serialized test item,
    bound to a ProductJsonModel-backed model manager."""
    manager = ModelManager(ProductJsonModel())
    bag = DataBag(main=json.dumps(data_dict.item_with_options))
    bag.init_model_manager(manager)
    return bag
def test_item_parser():
    """DataBag supports both constructor kwargs and add() like a mapping."""
    bag = DataBag(main="groove")
    assert bag["main"] == "groove"

    bag.add("main_new", "peach")
    assert bag["main_new"] == "peach"
def test_data_bag_get():
    """get() resolves model fields and caches them; copies start clean."""
    bag = load_data_bag_with_model()
    assert bag.get("name") == "EasyBook pro 15"
    # Results must be cached after the first lookup.
    assert bag.cached_results == {"name": "EasyBook pro 15"}

    # A copy of the bag must not inherit the cache.
    bag_copy = bag.copy()
    assert bag_copy.cached_results == {}
def test_data_bag_get_multi():
    """get_multi() returns all requested fields and caches each of them."""
    bag = load_data_bag_with_model()
    wanted = ["currency", "name", "price", "sale_price", "tags"]
    expected = {
        "currency": "USD",
        "name": "EasyBook pro 15",
        "price": 99.99,
        "sale_price": 49.99,
        "tags": ["notebook", "ecommerce"],
    }
    assert bag.get_multi(wanted) == expected
    # Every requested key must now be cached.
    assert all(key in bag.cached_results for key in wanted)
def test_data_bag_add():
    """add() exposes ad-hoc values through item access."""
    bag = load_data_bag_with_model()
    bag.add("brand_info", "Groove")
    assert bag["brand_info"] == "Groove"
|
import random
from .base import PublicTestCase
class SimulatorTestCase(PublicTestCase):
    """Smoke test for the public simulator endpoint."""

    def test_get_simulator(self):
        """A random valid amount must yield HTTP 200."""
        amount = random.randint(1000, 10000)
        response = self.client.simulator(amount=amount)
        self.assertEqual(response.status_code, 200)
|
# Exercise: declare name (str), age (int), height (float) and weight (float)
# plus the current year; derive the birth year from age, compute the BMI to
# two decimal places, and display everything with f-strings.
nome = 'Luiz'
idade = 32
altura = 1.70
peso = 82.5
ano_atual = 2019

# Birth year follows from the current year and the age.
nascimento = ano_atual - idade
# Body-mass index: weight / height squared.
imc = peso / altura ** 2

print(f'{nome} tem {idade} anos de idade e sua altura é {altura:.2F}.')
print(f'{nome} pesa {peso} e seu imc é de {imc:.2f}.')
print(f'{nome} nasceu em {nascimento}.')
|
from enum import Enum
# Column type definition (was: 型の定義)
class ColumnType(Enum):
    """Logical column types understood by the table-definition tooling."""
    INT = 1
    DOUBLE = 2
    STRING = 3
    DATE = 4
    DATETIME = 5
    BOOL = 6
from .read_definition import read_definition
from .write_for_mysql import write_for_mysql
|
from django.utils.translation import ugettext_lazy as _
class GrapheneDjangoJWTBaseException(Exception):
    """Root of the JWT exception hierarchy: a lazily-translated default
    message plus an HTTP-style status code."""
    default_message = _('You do not have permission to perform this action')
    code = 401
class JSONWebTokenError(GrapheneDjangoJWTBaseException):
    """Generic JWT failure; inherits the base message and 401 code."""
    pass
class PermissionDenied(GrapheneDjangoJWTBaseException):
    """Authenticated but not authorized.

    NOTE: redeclares the same message/code as the base class; kept as-is.
    """
    default_message = _('You do not have permission to perform this action')
    code = 401
class JSONWebTokenExpired(GrapheneDjangoJWTBaseException):
    """Raised when the access token's signature has expired."""
    default_message = _('Signature has expired')
    code = 401
class JSONRefreshTokenExpired(GrapheneDjangoJWTBaseException):
    """Raised when the refresh token itself has expired (re-login needed)."""
    default_message = _('Refresh token has expired')
    code = 401
|
# https://www.codingame.com/training/easy/morellets-random-lines
def gcd(a, b):
    """Euclidean greatest common divisor.

    Kept as the plain remainder loop (rather than math.gcd) because the
    sign behavior for negative inputs is relied on by reduce_equation's
    dedup normalization.
    """
    while b != 0:
        a, b = b, a % b
    return a
def is_point_on_line(x, y, line):
    """True when (x, y) satisfies a*x + b*y + c == 0 for line = (a, b, c)."""
    a, b, c = line
    return a * x + b * y + c == 0
def reduce_equation(a, b, c):
    """Scale the line coefficients (a, b, c) down by gcd(gcd(a, b), c) so
    that duplicate input lines collapse to one canonical tuple in a set."""
    divisor = gcd(gcd(a, b), c)
    return (a // divisor, b // divisor, c // divisor)
def get_line(x1, y1, x2, y2):
    """Slope/intercept (m, b) of the line through two points.

    Undefined (ZeroDivisionError) for vertical segments where x1 == x2.
    """
    dx = x1 - x2
    slope = (y1 - y2) / dx
    intercept = (x1 * y2 - x2 * y1) / dx
    return slope, intercept
def get_intersection_point_between_lines(sm, sb, la, lb, lc):
    """Intersection of y = sm*x + sb with la*x + lb*y + lc = 0.

    Undefined (ZeroDivisionError) when the two lines are parallel,
    i.e. la + lb*sm == 0.
    """
    px = (-lb * sb - lc) / (la + lb * sm)
    return px, sm * px + sb
def does_segment_intersect_line(sm, sb, la, lb, lc, xa, ya, xb, yb):
    """True when the segment (xa, ya)-(xb, yb), which lies on y = sm*x + sb,
    crosses the infinite line la*x + lb*y + lc = 0 (intersection point must
    fall inside the segment's bounding box)."""
    ix, iy = get_intersection_point_between_lines(sm, sb, la, lb, lc)
    inside_x = min(xa, xb) <= ix <= max(xa, xb)
    inside_y = min(ya, yb) <= iy <= max(ya, yb)
    return inside_x and inside_y
def solution():
    """Read a segment and a set of line equations from stdin; print
    'ON A LINE' when an endpoint lies on one of the lines, otherwise
    'YES' when the segment crosses an even number of lines and 'NO' when odd.
    """
    xa, ya, xb, yb = map(int, input().split())
    line_count = int(input())
    # Reduce every equation so duplicate lines collapse inside the set.
    lines = {reduce_equation(*map(int, input().split()))
             for _ in range(line_count)}
    # Endpoints sitting exactly on a line are a special case.
    for px, py in ((xa, ya), (xb, yb)):
        if any(is_point_on_line(px, py, ln) for ln in lines):
            print('ON A LINE')
            return
    sm, sb = get_line(xa, ya, xb, yb)
    crossings = 0
    for a, b, c in lines:
        if does_segment_intersect_line(sm, sb, a, b, c, xa, ya, xb, yb):
            crossings += 1
    print('YES' if crossings % 2 == 0 else 'NO')


solution()
|
import logging
from typing import NamedTuple, Union, Dict, Optional, Type
from pydantic.main import BaseModel
from hikaru.model.rel_1_16 import *
from .models import PrometheusKubernetesAlert, PrometheusAlert
from ..helper import exact_match, prefix_match
from ..kubernetes.custom_models import RobustaPod, RobustaDeployment, RobustaJob
from ...core.playbooks.base_trigger import BaseTrigger, TriggerEvent
from ...core.reporting.base import Finding
from ...core.model.events import ExecutionBaseEvent
class PrometheusTriggerEvent(TriggerEvent):
    """Trigger event wrapping a single Prometheus alert notification."""
    # The raw Prometheus alert that produced this event.
    alert: PrometheusAlert
    def get_event_name(self) -> str:
        """Return the name used to route this event to matching triggers."""
        return PrometheusTriggerEvent.__name__
class ResourceMapping(NamedTuple):
    """Maps a Prometheus alert label to the Kubernetes resource it names."""
    # Hikaru model class used to load the resource from the cluster.
    hikaru_class: Union[
        Type[RobustaPod], Type[RobustaDeployment], Type[Job], Type[DaemonSet]
    ]
    # Attribute on the execution event that receives the loaded resource.
    attribute_name: str
    # Alert label whose value is the resource's name (e.g. "pod").
    prometheus_label: str
# Label-to-resource mappings tried in order when enriching an alert with the
# Kubernetes objects its labels refer to.
MAPPINGS = [
    ResourceMapping(RobustaPod, "pod", "pod"),
    ResourceMapping(RobustaDeployment, "deployment", "deployment"),
    ResourceMapping(RobustaJob, "job", "job_name"),
    ResourceMapping(DaemonSet, "daemonset", "daemonset"),
]
class PrometheusAlertTrigger(BaseTrigger):
    """Trigger that fires on Prometheus alerts matching the configured filters.

    All filter fields are optional; a filter left as None matches everything.
    """
    # Exact-match filters (see should_fire).
    alert_name: str = None
    status: str = None
    # Prefix-match filters (see should_fire).
    pod_name_prefix: str = None
    namespace_prefix: str = None
    instance_name_prefix: str = None
    def get_trigger_event(self):
        """Name of the trigger-event type this trigger listens for."""
        return PrometheusTriggerEvent.__name__
    def should_fire(self, event: TriggerEvent):
        """Return True when `event` is a Prometheus alert passing every configured filter."""
        if not isinstance(event, PrometheusTriggerEvent):
            return False
        labels = event.alert.labels
        if not exact_match(self.alert_name, labels["alertname"]):
            return False
        if not exact_match(self.status, event.alert.status):
            return False
        if not prefix_match(self.pod_name_prefix, labels.get("pod")):
            return False
        if not prefix_match(self.namespace_prefix, labels.get("namespace")):
            return False
        if not prefix_match(self.instance_name_prefix, labels.get("instance")):
            return False
        return True
    @classmethod
    def __find_node_by_ip(cls, ip) -> Node:
        """Return the Node whose status addresses include `ip`; raise when none does."""
        nodes: NodeList = NodeList.listNode().obj
        for node in nodes.items:
            addresses = [a.address for a in node.status.addresses]
            logging.info(f"node {node.metadata.name} has addresses {addresses}")
            if ip in addresses:
                return node
        raise Exception(f"No node exists with IP '{ip}'")
    @classmethod
    def __load_node(cls, alert: PrometheusAlert, node_name: str) -> Node:
        """Load a Node by name, or by IP when `node_name` looks like IP:PORT.

        Returns None (after logging) when the node cannot be loaded.
        """
        node = None
        try:
            # sometimes we get an IP:PORT instead of the node name. handle that case
            if ":" in node_name:
                node = cls.__find_node_by_ip(node_name.split(":")[0])
            else:
                node = Node().read(name=node_name)
        except Exception as e:
            logging.info(f"Error loading Node kubernetes object {alert}. error: {e}")
        return node
    def build_execution_event(
        self, event: PrometheusTriggerEvent, findings: Dict[str, Finding]
    ) -> Optional[ExecutionBaseEvent]:
        """Build a PrometheusKubernetesAlert enriched with the Kubernetes
        resources (pod/deployment/job/daemonset/node) named by the alert labels.
        """
        labels = event.alert.labels
        execution_event = PrometheusKubernetesAlert(
            findings=findings,
            alert=event.alert,
            alert_name=labels["alertname"],
            alert_severity=labels.get("severity"),
        )
        namespace = labels.get("namespace", "default")
        for mapping in MAPPINGS:
            try:
                resource_name = labels.get(mapping.prometheus_label, None)
                if not resource_name:
                    continue
                resource = mapping.hikaru_class().read(
                    name=resource_name, namespace=namespace
                )
                setattr(execution_event, mapping.attribute_name, resource)
                logging.info(
                    f"Successfully loaded Kubernetes resource {resource_name} for alert {execution_event.alert_name}"
                )
            except Exception as e:
                # Enrichment is best-effort: a missing/unreadable resource must
                # not prevent the event from being built.
                logging.info(
                    f"Error loading {mapping.hikaru_class} kubernetes object {execution_event.alert}. error: {e}"
                )
        node_name = labels.get("node")
        if node_name:
            execution_event.node = self.__load_node(execution_event.alert, node_name)
        # we handle nodes differently than other resources
        node_name = labels.get("instance", None)
        job_name = labels.get(
            "job", None
        )  # a prometheus "job" not a kubernetes "job" resource
        # when the job_name is kube-state-metrics "instance" refers to the IP of kube-state-metrics not the node
        # If the alert has pod, the 'instance' attribute contains the pod ip
        if not execution_event.pod and not execution_event.node and node_name and job_name != "kube-state-metrics":
            execution_event.node = self.__load_node(execution_event.alert, node_name)
        return execution_event
    @staticmethod
    def get_execution_event_type() -> type:
        """Concrete event type produced by build_execution_event."""
        return PrometheusKubernetesAlert
class PrometheusAlertTriggers(BaseModel):
    """Pydantic container exposing the Prometheus trigger under its YAML key."""
    on_prometheus_alert: Optional[PrometheusAlertTrigger]
|
#__all__ = ["scraper", "logging_aux", "res_obj_manipulator","jdownloader"]
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from ducktape.mark import parametrize
from ducktape.mark.resource import cluster
from ducktape.tests.test import Test
from ducktape.utils.util import wait_until
from kafkatest.services.trogdor.degraded_network_fault_spec import DegradedNetworkFaultSpec
from kafkatest.services.trogdor.trogdor import TrogdorService
from kafkatest.services.zookeeper import ZookeeperService
class NetworkDegradeTest(Test):
    """
    These tests ensure that the network degrade Trogdor specs (which use "tc") are working as expected in whatever
    environment the system tests may be running in. The linux tools "ping" and "iperf" are used for validation
    and need to be available along with "tc" in the test environment.
    """
    def __init__(self, test_context):
        super(NetworkDegradeTest, self).__init__(test_context)
        self.zk = ZookeeperService(test_context, num_nodes=3)
        self.trogdor = TrogdorService(context=self.test_context, client_services=[self.zk])
    def setUp(self):
        self.zk.start()
        self.trogdor.start()
    def teardown(self):
        self.trogdor.stop()
        self.zk.stop()
    @cluster(num_nodes=5)
    @parametrize(task_name="latency-100", device_name="eth0", latency_ms=50, rate_limit_kbit=0)
    @parametrize(task_name="latency-100-rate-1000", device_name="eth0", latency_ms=50, rate_limit_kbit=1000)
    def test_latency(self, task_name, device_name, latency_ms, rate_limit_kbit):
        """Inject latency on every ZK node and verify ping times rise while the fault is active."""
        spec = DegradedNetworkFaultSpec(0, 10000)
        for node in self.zk.nodes:
            spec.add_node_spec(node.name, device_name, latency_ms, rate_limit_kbit)
        latency = self.trogdor.create_task(task_name, spec)
        zk0 = self.zk.nodes[0]
        zk1 = self.zk.nodes[1]
        # Capture the ping times from the ping stdout
        # 64 bytes from ducker01 (172.24.0.2): icmp_seq=1 ttl=64 time=0.325 ms
        r = re.compile(r".*time=(?P<time>[\d.]+)\sms.*")
        times = []
        for line in zk0.account.ssh_capture("ping -i 1 -c 20 %s" % zk1.account.hostname):
            self.logger.debug("Ping output: %s" % line)
            m = r.match(line)
            if m is not None and m.group("time"):
                times.append(float(m.group("time")))
                self.logger.info("Parsed ping time of %d" % float(m.group("time")))
        self.logger.debug("Captured ping times: %s" % times)
        # We expect to see some low ping times (before and after the task runs) as well as high ping times
        # (during the task). For the high time, it's twice the configured latency since both links apply the
        # rule, 80% for a little variance buffer
        high_time_ms = 0.8 * 2 * latency_ms
        low_time_ms = 10
        slow_times = [t for t in times if t > high_time_ms]
        fast_times = [t for t in times if t < low_time_ms]
        latency.stop()
        latency.wait_for_done()
        # We captured 20 ping times. Assert that at least 5 were "fast" and 5 were "slow".
        # Fix: the failure messages previously reported the wrong threshold for each
        # assertion (slow pings are those ABOVE high_time_ms, fast ones BELOW low_time_ms).
        assert len(slow_times) > 5, "Expected to see more slow ping times (higher than %d)" % high_time_ms
        assert len(fast_times) > 5, "Expected to see more fast ping times (lower than %d)" % low_time_ms
    @cluster(num_nodes=5)
    @parametrize(task_name="rate-1000", device_name="eth0", latency_ms=0, rate_limit_kbit=1000000)
    @parametrize(task_name="rate-1000-latency-50", device_name="eth0", latency_ms=50, rate_limit_kbit=1000000)
    def test_rate(self, task_name, device_name, latency_ms, rate_limit_kbit):
        """Rate-limit one ZK node and verify iperf throughput lands near the configured cap."""
        zk0 = self.zk.nodes[0]
        zk1 = self.zk.nodes[1]
        spec = DegradedNetworkFaultSpec(0, 60000)
        spec.add_node_spec(zk0.name, device_name, latency_ms, rate_limit_kbit)
        # start the task and wait
        rate_limit = self.trogdor.create_task(task_name, spec)
        wait_until(lambda: rate_limit.running(),
                   timeout_sec=10,
                   err_msg="%s failed to start within 10 seconds." % rate_limit)
        # Run iperf server on zk1, iperf client on zk0
        iperf_server = zk1.account.ssh_capture("iperf -s")
        # Capture the measured kbps between the two nodes.
        # [  3]  0.0- 1.0 sec  2952576 KBytes  24187503 Kbits/sec
        r = re.compile(r"^.*\s(?P<rate>[\d.]+)\sKbits/sec$")
        measured_rates = []
        for line in zk0.account.ssh_capture("iperf -i 1 -t 20 -f k -c %s" % zk1.account.hostname):
            self.logger.info("iperf output %s" % line)
            m = r.match(line)
            if m is not None:
                measured_rate = float(m.group("rate"))
                measured_rates.append(measured_rate)
                self.logger.info("Parsed rate of %d kbit/s from iperf" % measured_rate)
        # kill iperf server and consume the stdout to ensure clean exit
        zk1.account.kill_process("iperf")
        for _ in iperf_server:
            continue
        rate_limit.stop()
        rate_limit.wait_for_done()
        self.logger.info("Measured rates: %s" % measured_rates)
        # We expect to see measured rates within an order of magnitude of our target rate
        low_kbps = rate_limit_kbit // 10
        high_kbps = rate_limit_kbit * 10
        acceptable_rates = [r for r in measured_rates if low_kbps < r < high_kbps]
        msg = "Expected most of the measured rates to be within an order of magnitude of target %d." % rate_limit_kbit
        msg += " This means `tc` did not limit the bandwidth as expected."
        assert len(acceptable_rates) > 5, msg
|
# Variable definitions
# NOTE(review): `a` and `b` are not referenced anywhere below — presumably
# leftovers from an earlier exercise; confirm before removing.
a = 15
b = 5
# Funktionsdefinitionen
def trenner(anzahl):
    """Print a separator line of `anzahl` dashes followed by a newline."""
    print("-" * anzahl)
def restalkohol(promille, stundenschlaf):
    """Remaining blood alcohol (per mille) after sleeping.

    Assumes one tenth of the level is metabolised per hour of sleep;
    the result is rounded to two decimal places.
    """
    verbleibend = promille - (promille / 10) * stundenschlaf
    return round(verbleibend, 2)
def zahlenAbfrage(art_des_wertes):
    """Prompt until the user enters a valid number and return it as float.

    `art_des_wertes` names the requested value and is shown in the prompt.
    """
    richtige_eingabe = False
    while not richtige_eingabe:
        try:
            print("Bitte geben Sie einen Wert für", art_des_wertes, "ein.")
            zahl = float(input("Eingabe: "))
            richtige_eingabe = True
            return zahl
        except ValueError:
            # Fix: the bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit, making the prompt impossible to abort with Ctrl-C.
            # Only invalid number formats should be retried.
            print("Bitte geben Sie nur Zahlen ein.")
# Main program
# Arguments are evaluated left to right, so the user is asked for
# "Promille" first and "Schlaf" second.
restalkohol_calc = restalkohol(zahlenAbfrage("Promille"), zahlenAbfrage("Schlaf"))
if restalkohol_calc > 0:
    print("Ihr Restalkohol beträgt:", restalkohol_calc)
    print("Bitte kein Auto fahren.")
else:
    print("Ihr Restalkohol beträgt:", restalkohol_calc)
    print("Gute Fahrt.")
|
import logging
import numpy as np
from scipy.stats import beta
from scipy.ndimage import gaussian_filter
# Module-level logger; configuration is left to the importing application.
logger = logging.getLogger(__name__)
def make(n=101, r=25, ishift=0, jshift=0,
         sigma_noise=0.1, sigma_smooth=2,
         cut_b=0, cut_theta=0, cut_thickness=5, rs=None):
    """
    Make a bright circle with a thick line removed.
    Parameters
    ----------
    n: int, default=101
        The image and segmentation arrays will be shape (n,n).
    r: float, default=25
        Radius of the circle.
    ishift,jshift: int, default=0
        The center of the circle (index offsets from the center of the image).
    sigma_noise: float, default=0.1
        The additive noise amplitude to be added to the image.
    sigma_smooth: float, default=2
        The Gaussian smoothing factor to apply to the image.
    cut_b: float, default=0
        The offset parameter of the line through the circle.
    cut_theta: float, default=0
        The orientation of the line through the circle.
    cut_thickness: float, default=5
        The region a distance `cut_thickness` from the line through
        the circle will be set to the background image value.
    rs: numpy.random.RandomState
        RandomState object for reproducible results.
    Returns
    -------
    img, seg, meta : ndarray (dtype=float), ndarray (dtype=bool), dict
        The image and segmentation image are returned as well as
        a dictionary of the parameters used.
    Raises
    ------
    ValueError
        If the circle does not fit inside the image, or the cut parameters
        exceed the radius.
    """
    ci = n/2 + ishift
    cj = n/2 + jshift
    # Validate that the circle fits fully inside the image.
    if not (ci+r < n-1 and ci-r > 0):
        raise ValueError("Circle outside bounds, axis 0.")
    if not (cj+r < n-1 and cj-r > 0):
        raise ValueError("Circle outside bounds, axis 1.")
    if cut_b > r:
        raise ValueError("`cut_b` should be less than radius.")
    if cut_thickness > r:
        raise ValueError("`cut_thickness` should be less than radius.")
    rs = rs if rs is not None else np.random.RandomState()
    # Fix: the deprecated `np.float` alias (removed in NumPy 1.24) is replaced
    # by the builtin `float` throughout; behavior is identical.
    ii, jj = np.indices((n, n), dtype=float)
    # Segmentation mask: all pixels inside the circle.
    seg = np.sqrt((ii-ci)**2 + (jj-cj)**2) <= r
    # Cartesian coordinates centered on the circle (y axis pointing up).
    xx, yy = jj-cj, n-ii-1-(n-ci-1)
    # Take a cut out of the circle: distance from the line
    # cos(theta)*x + sin(theta)*y = cut_b.
    dist = np.abs(np.cos(cut_theta)*xx + np.sin(cut_theta)*yy - cut_b)
    inline = dist <= cut_thickness
    img = np.logical_and(seg, ~inline).astype(float)
    img = gaussian_filter(img, sigma_smooth)
    # Re-zero the cut region after smoothing so the cut edge stays sharp.
    img *= (~inline).astype(float)
    # Normalize to [0, 1] before adding noise.
    img = (img - img.min()) / (img.max() - img.min())
    img += sigma_noise*rs.randn(n, n)
    info = dict(
        ci=ci, cj=cj, r=r,
        sigma_noise=sigma_noise,
        sigma_smooth=sigma_smooth,
        cut_b=cut_b,
        cut_thickness=cut_thickness,
        cut_theta=cut_theta
    )
    return img, seg, info
def make_dataset(N, n=51, rad=[15,21], shift=[0,0],
                 nsig=[0.3,0.5], ssig=[0,0], cthick=[4,7],
                 ctheta=[0,2*np.pi], cb=[0,10], return_meta=False,
                 verbose=True, random_state=None, print_mistakes=False):
    """
    Make a randomly generated dataset of hamburger data.
    Parameters
    ----------
    N: int
        The number of examples.
    n: int
        The image size.
    rad: list, len=2
        Interval of radii from which to sample.
    shift: list, len=2
        The interval of shift values from which to sample.
    nsig: list, len=2
        The interval of values from which to sample `sigma_noise`.
    ssig: list, len=2
        The interval of values from which to sample `sigma_smooth`.
    cthick: list, len=2
        The interval of values from which to sample `cut_thickness`.
    ctheta: list, len=2
        The interval of values from which to sample `cut_theta`.
    cb: list, len=2
        The interval of values from which to sample `cut_b`.
    return_meta: bool, default=False
        Return a list of meta data attributes for each example if True.
    verbose: bool, default=True
        Log progress after each generated example if True.
    random_state: numpy.random.RandomState, default=None
        Include a RandomState for reproducible results.
    print_mistakes: bool, default=False
        Print the parameter-validation errors that cause a retry if True.
    Returns
    -------
    imgs, segs[, meta] : ndarray, ndarray[, list of dict]
        Arrays of shape (N,n,n); `meta` is returned only when `return_meta`.
    """
    random_state = random_state if random_state is not None else np.random.RandomState()
    def betarvs(**kwargs):
        # Beta(3,3) samples: symmetric, concentrated around the interval middle.
        return beta.rvs(3, 3, random_state=random_state, **kwargs)
    imgs = np.zeros((N,n,n))
    # Fix: `np.bool` alias (removed in NumPy 1.24) replaced by builtin `bool`.
    segs = np.zeros((N,n,n), dtype=bool)
    if verbose:
        q = len(str(N))
        pstr = "Creating dataset ... %%0%dd / %d" % (q,N)
    if return_meta:
        meta = []
    i = 0
    # Rejection sampling: retry whenever the sampled parameters make an
    # invalid circle (make() raises ValueError).
    while i < N:
        try:
            r = betarvs(loc=rad[0],scale=rad[1]-rad[0])
            ishift,jshift = beta.rvs(3, 3, loc=shift[0],
                                     scale=shift[1]-shift[0], size=2)
            sigma_noise = betarvs(loc=nsig[0], scale=nsig[1]-nsig[0])
            sigma_smooth = betarvs(loc=ssig[0], scale=ssig[1]-ssig[0])
            cut_b = betarvs(loc=cb[0], scale=cb[1]-cb[0])
            cut_thickness = betarvs(loc=cthick[0], scale=cthick[1]-cthick[0])
            cut_theta = betarvs(loc=ctheta[0], scale=ctheta[1]-ctheta[0])
            img,seg,info = make(n, r, ishift, jshift,
                                sigma_noise, sigma_smooth,
                                cut_b=cut_b, cut_theta=cut_theta,
                                cut_thickness=cut_thickness, rs=random_state)
            imgs[i] = img
            segs[i] = seg
            if return_meta:
                meta.append(info)
            i += 1
            if verbose: logger.info(pstr % i)
        except ValueError as e:
            if print_mistakes:
                print("Bad params:", e, "... Continuing to next iteration")
            continue
    if return_meta:
        return imgs, segs, meta
    else:
        return imgs, segs
|
from setuptools import setup, find_packages
import os
import sys
# Fail fast on unsupported interpreters.
# NOTE(review): asserts are stripped under `python -O`; an explicit check
# with a raise would survive optimization — confirm before changing.
assert sys.version_info >= (3, 4), (
    "Please use Python version 3.4 or higher, "
    "lower versions are not supported"
)
here = os.path.abspath(os.path.dirname(__file__))
# Get the long description from the README file
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()
# No compiled extensions at present; list kept for future native modules.
ext_modules = [
]
# As of Python 3.6, CCompiler has a `has_flag` method.
setup(
    name='pyrivet',
    # Version is derived from git tags via setuptools_scm.
    use_scm_version=True,
    setup_requires=[
        'numpy',
        'setuptools_scm',
    ],
    install_requires=[
        'numpy',
    ],
    author='The RIVET developers',
    author_email='xoltar@xoltar.org',
    url='https://github.com/rivettda/rivet-python',
    description='Python interface to RIVET (Rank Invariant Visualization and Exploration Tool)',
    license='BSD-3',
    keywords='topology, algebraic topology, topological data analysis',
    long_description=long_description,
    ext_modules=ext_modules,
    # cmdclass={'build_ext': BuildExt},
    packages=find_packages(),
    python_requires='>=3.4',
    zip_safe=False,
)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import curses
from math import floor
from datetime import datetime as date
import sys
import time
import signal
from exchanges.bitfinex import Bitfinex
# Last observed price; used to pick red/green colors on down/up ticks.
last_price = 0
# NOTE(review): initscr() runs at import time, so merely importing this
# module takes over the terminal.
screen = curses.initscr()
# Terminal dimensions and the top-left corner of the banner layout;
# recomputed by win_resize().
width = 0
height = 0
origin_x = 0
origin_y = 0
# 7-row banner font: each digit (and ':') maps to 7 strings of '#' cells.
glyph = {
    '0': [" ##### ", " ## ## ", "## ## ", "## ## ", "## ## ",
          " ## ## ", " ##### "],
    '1': [" ## ", " #### ", " ## ", " ## ", " ## ",
          " ## ", " ###### "],
    '2': [" ####### ", "## ## ", " ## ", " ####### ", "## ",
          "## ", "######### "],
    '3': [" ####### ", "## ## ", " ## ", " ####### ", " ## ",
          "## ## ", " ####### "],
    '4': ["## ", "## ## ", "## ## ", "## ## ", "######### ",
          " ## ", " ## "],
    '5': [" ######## ", " ## ", " ## ", " ####### ", " ## ",
          " ## ## ", " ###### "],
    '6': [" ####### ", "## ## ", "## ", "######## ", "## ## ",
          "## ## ", " ####### "],
    '7': [" ######## ", " ## ## ", " ## ", " ## ", " ## ",
          " ## ", " ## "],
    '8': [" ####### ", "## ## ", "## ## ", " ####### ", "## ## ",
          "## ## ", " ####### "],
    '9': [" ####### ", "## ## ", "## ## ", " ######## ", " ## ",
          "## ## ", " ####### "],
    ':': [" ", " ", " # ", " ", " # ", " ", " "]
}
def addstr(y, x, string, color):
    """Draw `string` at (y, x) relative to the layout origin, then refresh.

    curses raises curses.error when writing outside the window (e.g. mid
    resize); those writes are deliberately dropped so the ticker keeps running.
    """
    try:
        screen.addstr(origin_y + y, origin_x + x, string, color)
        screen.refresh()
    except curses.error:
        # Fix: was a bare `except:`, which also swallowed KeyboardInterrupt,
        # SystemExit and genuine bugs; only ignore curses drawing errors.
        return
def print_price(now):
    """Fetch the current price from Bitfinex and render it as a large banner."""
    global last_price
    try:
        price = Bitfinex().get_current_price()
    except Exception as e:
        # API/network hiccup: keep the previous display until the next tick.
        return
    price = int(round(price))
    # Green color pairs on an up-or-equal tick, red pairs on a down tick.
    tick_color = 4 if price >= last_price else 2
    usd_color = 5 if price >= last_price else 3
    last_price = price
    time_line = str(price)
    # Build the 7-row banner by concatenating each digit's glyph rows.
    time_array = ["" for i in range(0, 7)]
    for char in time_line:
        char_array = glyph[char]
        for row in range(0, len(char_array)):
            time_array[row] += char_array[row]
    # Center the banner within the 65-column layout.
    total_x = 65
    offset_x = int(round((total_x - len(time_array[0])) / 2))
    # Paint cell-by-cell: background color for blanks, tick color for '#'.
    for y in range(0, len(time_array)):
        for x in range(0, len(time_array[y])):
            char = time_array[y][x]
            color = 1 if char == " " else tick_color
            addstr(y, x + offset_x, " ", curses.color_pair(color))
    addstr(6, offset_x + len(time_array[0]), 'USD', curses.color_pair(usd_color))
def print_date(now):
    """Render the weekday on the left and the full date on the right."""
    weekday = now.strftime("%A").center(11, " ")
    full_date = now.strftime("%B %d, %Y")
    addstr(8, 0, weekday, curses.color_pair(0))
    addstr(8, len(weekday) + 40, full_date, curses.color_pair(0))
def gracefull_exit(signal=None, frame=None):
    """Restore the terminal and exit; prevents a messed up terminal.

    Fix: this function is registered via signal.signal(), which calls the
    handler with two arguments (signum, frame); the previous single-parameter
    signature made every SIGINT/SIGTERM raise TypeError before cleanup ran.
    The added `frame` parameter defaults to None so direct calls still work.
    """
    curses.endwin()
    sys.exit()
def win_resize():
    """Recompute the layout origin after a terminal resize and clear the screen."""
    global width, height, origin_x, origin_y, last_t
    screen.clear()
    height, width = screen.getmaxyx()
    # Center the banner layout (about 68 columns wide, 8 rows tall).
    origin_x = floor(width / 2) - 34
    origin_y = floor(height / 2) - 4
    # Forget the last tick so the next main-loop pass redraws immediately.
    last_t = None
# Terminal setup: enable keypad keys, hide the cursor, enable colors.
screen.keypad(1)
curses.curs_set(0)
curses.start_color()
curses.use_default_colors()
# init curses color pairs
# pair 1: black on the terminal's default background (blank cells)
curses.init_pair(1, 0, -1)
# black on red
curses.init_pair(2, 0, 1)
# red on black
curses.init_pair(3, 1, 0)
# black on green
curses.init_pair(4, 0, 2)
# green on black
curses.init_pair(5, 2, 0)
curses.noecho()
curses.cbreak()
# Non-blocking getch() so the main loop can poll the keyboard.
screen.timeout(0)
# Register signal handlers for graceful exit on for instance CTRL-C
signal.signal(signal.SIGINT, gracefull_exit)
signal.signal(signal.SIGTERM, gracefull_exit)
def main():
    """Main loop: draw once, then poll the keyboard and redraw each new second."""
    # Fix: `last_t` is assigned below, which made it local to main(), so the
    # read at the `if last_t ...` line raised UnboundLocalError on the first
    # loop iteration. Declaring it global also lets win_resize()'s reset of
    # last_t force an immediate redraw after a resize.
    global last_t
    win_resize()
    now = date.now()
    print_price(now)
    print_date(now)
    while True:
        char = screen.getch()
        if char == curses.KEY_RESIZE:
            win_resize()
        elif char in (ord('q'), ord('Q')):
            break
        now = date.now()
        # Redraw only when the wall-clock second has changed.
        if last_t and now.timetuple()[:6] != last_t.timetuple()[:6]:
            print_price(now)
            print_date(now)
            # Rate-limits the price polling; getch() above stays non-blocking.
            time.sleep(15)
        last_t = now
    gracefull_exit()
# Run the ticker only when executed as a script (note that the module-level
# initscr() call above already initialises curses at import time).
if __name__ == '__main__':
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.