content stringlengths 5 1.05M |
|---|
import os

# One entry per benchmark run: (dataset config, extra -p parameters).
# The resulting command strings are identical to the original call list.
_BENCHMARK_RUNS = [
    # BenchMark Bike
    ('bike_nyc.data.yml', 'graph:Distance-Correlation-Interaction'),
    ('bike_chicago.data.yml', 'graph:Distance-Correlation-Interaction'),
    ('bike_dc.data.yml', 'graph:Distance-Correlation-Interaction'),
    # BenchMark DiDi
    ('didi_xian.data.yml', 'graph:Distance-Correlation-Interaction'),
    ('didi_chengdu.data.yml', 'mark:V0'),
    # BenchMark Metro
    ('metro_chongqing.data.yml', 'graph:Distance'),
    ('metro_shanghai.data.yml', 'graph:Distance-Correlation-Line'),
    # BenchMark ChargeStation
    ('chargestation_beijing.data.yml', 'graph:Distance,mark:V0'),
]

for dataset, params in _BENCHMARK_RUNS:
    os.system('python STMeta_Obj.py -m STMeta_v0.model.yml '
              '-d {} -p {}'.format(dataset, params))
|
from base64 import b64decode
from Crypto.Cipher import AES
from S2C09 import pkcs7_pad, pkcs7_unpad
from S1C07 import aes_ecb_decrypt
def aes_ecb_encrypt(data, key):
    """Encrypts the given data with AES-ECB, using the given key.

    The data is always PKCS 7 padded before being encrypted.
    """
    # ECB needs no IV; each padded block is encrypted independently.
    cipher = AES.new(key, AES.MODE_ECB)
    return cipher.encrypt(pkcs7_pad(data, AES.block_size))
def xor_data(binary_data_1, binary_data_2):
    """Return the byte-wise XOR of two byte sequences.

    The result is truncated to the length of the shorter input,
    mirroring the behaviour of ``zip``.
    """
    out = bytearray()
    for left, right in zip(binary_data_1, binary_data_2):
        out.append(left ^ right)
    return bytes(out)
def aes_cbc_encrypt(data, key, iv):
    """Encrypts the given data with AES-CBC, using the given key and iv."""
    ciphertext = b''
    prev = iv
    # Process the encryption block by block
    for i in range(0, len(data), AES.block_size):
        # Always PKCS 7 pad the current plaintext block before proceeding.
        # NOTE(review): padding is applied to EVERY block here (not only the
        # last one), and aes_ecb_encrypt pads again internally; xor_data then
        # truncates via zip. This deviates from textbook CBC — its correctness
        # depends on the exact behaviour of pkcs7_pad and the matching
        # decrypt path, so confirm before restructuring.
        curr_plaintext_block = pkcs7_pad(data[i:i + AES.block_size], AES.block_size)
        # CBC chaining: XOR with the previous ciphertext block (IV first).
        block_cipher_input = xor_data(curr_plaintext_block, prev)
        encrypted_block = aes_ecb_encrypt(block_cipher_input, key)
        ciphertext += encrypted_block
        prev = encrypted_block
    return ciphertext
def aes_cbc_decrypt(data, key, iv, unpad=True):
    """Decrypts the given AES-CBC encrypted data with the given key and iv.

    Returns the unpadded decrypted message when unpad is true, or keeps the
    plaintext padded when unpad is false.
    """
    plaintext = b''
    prev = iv
    # Process the decryption block by block
    for i in range(0, len(data), AES.block_size):
        curr_ciphertext_block = data[i:i + AES.block_size]
        decrypted_block = aes_ecb_decrypt(curr_ciphertext_block, key)
        # CBC: XOR with the previous ciphertext block (IV for the first block).
        plaintext += xor_data(prev, decrypted_block)
        prev = curr_ciphertext_block
    # Return the plaintext either unpadded or left with the padding depending on the unpad flag
    return pkcs7_unpad(plaintext) if unpad else plaintext
def main():
    """Decrypt the challenge input with AES-CBC and sanity-check a round trip."""
    iv = b'\x00' * AES.block_size
    key = b'YELLOW SUBMARINE'
    with open("S2C10_input.txt") as input_file:
        binary_data = b64decode(input_file.read())
    # Compute and print the decrypted plaintext with the given input
    decrypted = aes_cbc_decrypt(binary_data, key, iv)
    print(decrypted.decode().rstrip())
    # Check that the encryption/decryption methods round-trip a custom input
    custom_input = b'Trying to decrypt something else to see if it works.'
    round_trip = aes_cbc_decrypt(aes_cbc_encrypt(custom_input, key, iv), key, iv)
    assert round_trip == custom_input
|
"""Configuration classes"""
import collections
import json
import typing
from dataclasses import dataclass, field
from pathlib import Path
from dataclasses_json import DataClassJsonMixin
@dataclass
class AudioConfig(DataClassJsonMixin):
    """Audio front-end settings (presumably STFT/mel extraction — confirm against consumer)."""

    filter_length: int = 1024  # FFT size
    hop_length: int = 256  # samples between successive frames
    win_length: int = 1024  # analysis window length in samples
    n_mel_channels: int = 80  # number of mel filterbank channels
    sampling_rate: int = 22050  # audio sample rate in Hz
    sample_bytes: int = 2  # bytes per PCM sample (2 -> 16-bit)
    channels: int = 1  # mono audio
    mel_fmin: float = 0.0  # lowest mel filterbank frequency (Hz)
    mel_fmax: float = 8000.0  # highest mel filterbank frequency (Hz)
    normalized: bool = True  # whether features are normalized — semantics defined by consumer
@dataclass
class ModelConfig(DataClassJsonMixin):
    """Model hyperparameters (field names suggest a Tacotron2-style
    encoder/decoder with attention and postnet — TODO confirm architecture)."""

    # Symbols
    n_symbols: int = 0  # 0 presumably means "set later from the symbol table" — confirm
    symbols_embedding_dim: int = 512
    mask_padding: bool = False
    # Encoding
    encoder_kernel_size: int = 5
    encoder_n_convolutions: int = 3
    encoder_embedding_dim: int = 512
    # Decoder
    n_frames_per_step: int = 1
    decoder_rnn_dim: int = 1024
    prenet_dim: int = 256
    max_decoder_steps: int = 2000  # hard cap on decoding length
    gate_threshold: float = 0.5
    p_attention_dropout: float = 0.1
    p_decoder_dropout: float = 0.1
    decoder_no_early_stopping: bool = False
    # Attention
    attention_rnn_dim: int = 1024
    attention_dim: int = 128
    attention_location_n_filters: int = 32
    attention_location_kernel_size: int = 31
    # Guided attention
    guided_attention_alpha: float = 5.0
    guided_attention_sigma: float = 0.4
    # Postnet
    postnet_embedding_dim: int = 512
    postnet_kernel_size: int = 5
    postnet_n_convolutions: int = 5
@dataclass
class TrainingConfig(DataClassJsonMixin):
    """Top-level training configuration with nested audio/model sections.

    JSON (de)serialization comes from DataClassJsonMixin; load_and_merge
    overlays one or more JSON files onto an existing config.
    """

    seed: int = 1234  # RNG seed for reproducibility
    epochs: int = 10000
    learning_rate: float = 1e-3
    weight_decay: float = 1e-6
    grad_clip_threshold: float = 1.0
    grad_clip: float = 5.0
    anneal_steps: typing.Optional[typing.Tuple[int, ...]] = None  # LR anneal step boundaries
    anneal_factor: float = 0.1  # choices: 0.1, 0.3
    dynamic_loss_scaling: bool = True
    disable_uniform_initialize_bn_weight: bool = False
    batch_size: int = 32
    fp16_run: bool = False
    audio: AudioConfig = field(default_factory=AudioConfig)  # nested audio section
    model: ModelConfig = field(default_factory=ModelConfig)  # nested model section
    version: int = 1  # config schema version
    git_commit: str = ""  # commit hash of the code that produced this config

    def save(self, config_file: typing.TextIO) -> None:
        """Save config as JSON to a file."""
        json.dump(self.to_dict(), config_file, indent=4)

    @staticmethod
    def load(config_file: typing.TextIO) -> "TrainingConfig":
        """Load config from a JSON file."""
        return TrainingConfig.from_json(config_file.read())

    @staticmethod
    def load_and_merge(
        config: "TrainingConfig",
        config_files: typing.Iterable[typing.Union[str, Path, typing.TextIO]],
    ) -> "TrainingConfig":
        """Load one or more JSON config files and overlay them on an existing config.

        Later files win over earlier ones; nested dicts are merged recursively.
        """
        base_dict = config.to_dict()
        for maybe_config_file in config_files:
            if isinstance(maybe_config_file, (str, Path)):
                # File path
                config_file = open(maybe_config_file, "r")
            else:
                # File object
                config_file = maybe_config_file
            with config_file:
                # Load new config and overlay on existing config
                new_dict = json.load(config_file)
                TrainingConfig.recursive_update(base_dict, new_dict)
        return TrainingConfig.from_dict(base_dict)

    @staticmethod
    def recursive_update(
        base_dict: typing.Dict[typing.Any, typing.Any],
        new_dict: typing.Mapping[typing.Any, typing.Any],
    ) -> None:
        """Recursively overwrite values in base dictionary with values from new dictionary."""
        for k, v in new_dict.items():
            # FIX: collections.Mapping was a deprecated alias removed in
            # Python 3.10; the ABC lives in collections.abc.
            if isinstance(v, collections.abc.Mapping) and (base_dict.get(k) is not None):
                TrainingConfig.recursive_update(base_dict[k], v)
            else:
                base_dict[k] = v
|
# Generated by Django 3.0.8 on 2020-07-17 03:25
from django.db import migrations
class Migration(migrations.Migration):
    """Set default ordering by primary key on the Skill model."""

    dependencies = [
        ('app_portfolio_skills', '0002_auto_20200717_0322'),
    ]

    operations = [
        # AlterModelOptions only changes model Meta options; it performs no
        # database schema change.
        migrations.AlterModelOptions(
            name='skill',
            options={'ordering': ['id']},
        ),
    ]
|
# Feature flags selecting which design variant the analysis should run;
# Analysis.py presumably reads these globals — confirm.
DesignA = False
DesignB = False
DesignC = True
N_DOMAINS=1
# NOTE(review): execfile is Python 2 only; a Python 3 port would need
# exec(open('Analysis.py').read()).
execfile('Analysis.py')
|
# https://leetcode.com/contest/weekly-contest-67/problems/partition-labels/
# 基本思路是首先找到每个字符的最后出现的位置
# 然后从字符串头开始遍历这个字符串,要保证当前的字符的位置要小于此字符出现的最后位置,在遍历的过程中,要动态更新最后的位置。
# 时间复杂度就是O(n)
class Solution:
    def partitionLabels(self, S):
        """
        :type S: str
        :rtype: List[int]

        Greedy partition: record the last occurrence index of each letter,
        then grow the current partition until it covers the last occurrence
        of every character seen so far. O(n) time, O(1) extra space.
        """
        # Last occurrence index of each of the 26 lowercase letters.
        # (Renamed from `map`, which shadowed the builtin.)
        last = [0] * 26
        for i, ch in enumerate(S):
            last[ord(ch) - ord('a')] = i
        output = []
        start = 0
        end = 0
        for i, ch in enumerate(S):
            # The partition must extend at least to this char's last occurrence.
            end = max(end, last[ord(ch) - ord('a')])
            if i == end:
                # No character in S[start:i+1] appears later; close the partition.
                output.append(end - start + 1)
                start = i + 1
        return output
# Smoke-test the solution on a long mixed input and on the LeetCode example
# (expected output for the second call: [9, 7, 8]).
solution=Solution()
print(solution.partitionLabels("ntswuqqbidunnixxpoxxuuupotaatwdainsotwvpxpsdvdbwvbtdiptwtxnnbtqbdvnbowqitudutpsxsbbsvtipibqpvpnivottsxvoqqaqdxiviidivndvdtbvadnxboiqivpusuxaaqnqaobutdbpiosuitdnopoboivopaapadvqwwnnwvxndpxbapixaspwxxxvppoptqxitsvaaawxwaxtbxuixsoxoqdtopqqivaitnpvutzchkygjjgjkcfzjzrkmyerhgkglcyffezmehjcllmlrjghhfkfylkgyhyjfmljkzglkklykrjgrmzjyeyzrrkymccefggczrjflykclfhrjjckjlmglrmgfzlkkhffkjrkyfhegyykrzgjzcgjhkzzmzyejycfrkkekmhzjgggrmchkeclljlyhjkchmhjlehhejjyccyegzrcrerfzczfelzrlfylzleefgefgmzzlggmejjjygehmrczmkrc"))
print(solution.partitionLabels("ababcbacadefegdehijhklij"))
|
import urllib
def digitalRead(port, url='http://127.0.0.1/arduino'):
    """Read digital pin *port* via the Arduino REST bridge; return the response body."""
    endpoint = url + "/digital/" + str(port)
    return urllib.urlopen(endpoint).read()
def digitalWrite(port, state, url='http://127.0.0.1/arduino'):
    """Set digital pin *port* to *state* via the bridge; return the response body."""
    endpoint = url + "/digital/" + str(port) + "/" + str(state)
    return urllib.urlopen(endpoint).read()
def analogRead(port, url='http://127.0.0.1/arduino'):
    """Read analog pin *port* via the bridge; return the response body."""
    endpoint = url + "/analog/" + str(port)
    return urllib.urlopen(endpoint).read()
def analogWrite(port, value, url='http://127.0.0.1/arduino'):
    """Write *value* to analog (PWM) pin *port* via the bridge; return the response body."""
    endpoint = url + "/analog/" + str(port) + "/" + str(value)
    return urllib.urlopen(endpoint).read()
def pinMode(port, mode, url='http://127.0.0.1/arduino'):
    """Configure pin *port* as input/output via the bridge; return the response body."""
    endpoint = url + "/mode/" + str(port) + "/" + str(mode)
    return urllib.urlopen(endpoint).read()
|
# Demonstration of the Python 2 functional built-ins map/reduce/filter.
# NOTE(review): this relies on Python 2 semantics throughout — `print` is a
# statement, map/filter return lists (or the input's type, for filter over
# str/tuple), reduce is a builtin, and map/filter accept None. It will not
# run under Python 3.

#map
seq = range(8)
newseq = map(lambda x : x*x, seq);
print newseq
#reduce
rseq = range(16)
rnewseq = reduce(lambda x,y: x + y, rseq)
print rnewseq
# reduce over an empty sequence returns the initializer (8 here)
rnewseq2 = reduce(lambda x,y: x + y, [], 8)
print rnewseq2
#filter
fseq = range(16)
fnewseq = filter(lambda x: x % 2 == 0, fseq)
print fnewseq
#mapoverstring
def f(x):
    # Return the ordinal (character code) of a single character.
    return ord(x)
print map(f, "abcdef")
#filter over string returns string
string = filter(lambda c: c != 'a', "abc")
print type(string)
print string
#filter over tuple returns tuple
tup = filter(lambda t: t % 2 == 0, (1,2,3,4,5,6,7,8,9,10))
print type(tup)
print tup
#filter with default identity func
print filter(None, [0,1,"","hello",False,True])
#map with two iterables
# Python 2 map() pads the shorter iterable with None.
b = range(8)
c = range(10)
def mapy(x, y):
    # Treat the padding Nones from map() as zero so the sum stays defined.
    if (x == None): x = 0
    if (y == None): y = 0
    return x + y
print map(mapy, b, c)
#map with default identity func
print map(None, [0, 1, {}, "", "hello", False, True]);
from panther import lookup_aws_account_name
def rule(event):
    """Fire when a ConsoleLogin attempt by the Root identity fails."""
    if event['eventName'] != 'ConsoleLogin':
        return False
    if event['userIdentity'].get('type') != 'Root':
        return False
    return event.get('responseElements', {}).get('ConsoleLogin') == 'Failure'
def title(event):
    """Build the alert title: source IP plus friendly account name."""
    source_ip = event['sourceIPAddress']
    account_name = lookup_aws_account_name(event.get('recipientAccountId'))
    return 'AWS root login failed from [{ip}] in account [{account}]'.format(
        ip=source_ip, account=account_name)
|
class Solution:
    def isStrobogrammatic(self, num: str) -> bool:
        """Return True if num reads the same after a 180-degree rotation.

        Every digit must have a valid rotation (0, 1, 8 map to themselves;
        6 and 9 map to each other), and the rotation of each digit must equal
        the digit at the mirrored position.
        """
        rotated = {'0': '0', '1': '1', '6': '9', '8': '8', '9': '6'}
        return all(
            digit in rotated and rotated[digit] == num[len(num) - idx - 1]
            for idx, digit in enumerate(num)
        )
|
# test/unit/sort_search/utils_test.py
#
# Author: Daniel Clark, 2016
'''
Unit test module to perform testing on utils module
'''
# Import packages
import unittest
class AMergeBTestCase(unittest.TestCase):
    '''
    TestCase for the a_merge_b function of the sorting utils module
    '''

    def setUp(self):
        '''
        No fixtures are needed; present for TestCase symmetry
        '''
        pass

    def test_a_merge_b(self):
        '''
        Test the a_merge_b function is working properly
        '''
        # Import packages
        from pytools.sort_search import utils

        # Case 1: b's elements interleave into the middle of a
        # (trailing Nones in a reserve space for b's elements)
        self.assertEqual(
            utils.a_merge_b([1, 3, 5, 7, None, None, None], [2, 4, 6]),
            [1, 2, 3, 4, 5, 6, 7])

        # Case 2: b's elements belong at the beginning of a
        self.assertEqual(
            utils.a_merge_b([2, 3, 5, 7, None, None, None], [0, 1, 6]),
            [0, 1, 2, 3, 5, 6, 7])
class RotArrSearchTestCase(unittest.TestCase):
    '''
    TestCase for the rot_arr_search function of the sorting utils module
    '''

    def setUp(self):
        '''
        No fixtures are needed; present for TestCase symmetry
        '''
        pass

    def test_rot_arr_search(self):
        '''
        Test the rot_arr_search function is working properly
        '''
        # Import packages
        from pytools.sort_search import utils

        # A sorted array rotated left by five positions
        rotated = [15, 16, 19, 20, 25, 1, 3, 4, 5, 7, 10, 14]
        target = 5
        # The function should find the same index a linear scan would
        self.assertEqual(utils.rot_arr_search(rotated, target),
                         rotated.index(target))
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
#! /usr/bin/python
# coding: utf-8
# Copyright 2018 IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Train assistant instance with intents and entities
Intent input schema (headerless):
| utterance | intent |
Entity input schema (headerless):
| entity | value | synonym/pattern 0 | synonym/pattern 1 | ...
"""
import json
from time import sleep
import csv
import pandas as pd
from argparse import ArgumentParser
from ibm_watson import AssistantV1
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
import sys
import traceback
from __init__ import UTF_8, DEFAULT_WA_VERSION,\
UTTERANCE_COLUMN, INTENT_COLUMN, \
TIME_TO_WAIT, WORKSPACE_ID_TAG
ENTITY_COLUMN = 'entity'
ENTITY_VALUE_COLUMN = 'value'
EXAMPLES_COLUMN = 'examples'
ENTITY_VALUES_ARR_COLUMN = 'values'
SLEEP_INCRE = 10
INTENT_CSV_HEADER = [UTTERANCE_COLUMN, INTENT_COLUMN]
ENTITY_CSV_HEADER = [ENTITY_COLUMN, ENTITY_VALUE_COLUMN]
class TrainTimeoutException(Exception):
    """ To be thrown if training is timeout
    """

    def __init__(self, message):
        # FIX: forward the message to Exception so str(e) and e.args are
        # populated; the original skipped super().__init__, leaving them empty.
        super().__init__(message)
        self.message = message
class TrainWorkspaceCountException(Exception):
    """ To be thrown if too many workspaces are in use
    """

    def __init__(self, message):
        # FIX: forward the message to Exception so str(e) and e.args are
        # populated; the original skipped super().__init__, leaving them empty.
        super().__init__(message)
        self.message = message
def to_examples(intent_group):
    """ Parse each row of intent group into a CreateIntent[]

    Rows with an empty utterance are skipped.
    """
    return [{'text': row['utterance']}
            for _, row in intent_group.iterrows()
            if row['utterance']]
def to_entity_values(entity_group):
    """ Parse current entity group content into a CreateEntity[]

    Each row is (entity, value, synonym/pattern 0, synonym/pattern 1, ...).
    Cells written as /regex/ are patterns; all other non-null cells are
    synonyms. A row with any pattern becomes a 'patterns' value; otherwise
    it becomes a 'synonyms' value. Rows with an empty value (reserved
    entities) are skipped.
    """
    values = []
    for _, row in entity_group.iterrows():
        value = row[ENTITY_VALUE_COLUMN]
        if not value:  # Handle reserved entities
            continue
        synonyms = []
        patterns = []
        # Drop the first two items and iterate the rest (synonym or pattern).
        # FIX: Series.iteritems() was removed in pandas 2.0; items() is the
        # long-standing equivalent.
        for _, val in row.drop([ENTITY_COLUMN, ENTITY_VALUE_COLUMN]).items():
            if not pd.isnull(val):
                if val.startswith('/'):  # is pattern?
                    # Strip the surrounding slashes (was val[:-1][1:]).
                    patterns.append(val[1:-1])
                else:
                    synonyms.append(val)
        # Construct CreateValue[]
        if len(patterns) != 0:
            values.append({'value': value, 'patterns': patterns,
                           'type': 'patterns'})
        else:
            values.append({'value': value, 'synonyms': synonyms,
                           'type': 'synonyms'})
    return values
def func(args):
    """Create and train a Watson Assistant workspace from CLI args.

    Workspace components are seeded from an optional base workspace JSON,
    then intents/entities built from CSV files (if given) override them.
    The workspace is created and its training status polled until it is
    'Available'. Raises TrainTimeoutException after TIME_TO_WAIT seconds.
    """
    entities = []
    workspace_name = ''
    workspace_description = ''
    intents = []
    language = 'en'
    dialog_nodes = []
    counterexamples = []
    metadata = {}
    learning_opt_out = False
    system_settings = {}
    # Optional: seed every workspace component from a base workspace export.
    if args.workspace_base_json is not None:
        with open(args.workspace_base_json, 'r') as f:
            workspace_json = json.load(f)
        if 'entities' in workspace_json:
            entities = workspace_json['entities']
        if 'intents' in workspace_json:
            intents = workspace_json['intents']
        if 'language' in workspace_json:
            language = workspace_json['language']
        if 'dialog_nodes' in workspace_json:
            dialog_nodes = workspace_json['dialog_nodes']
        if 'counterexamples' in workspace_json:
            counterexamples = workspace_json['counterexamples']
        if 'metadata' in workspace_json:
            metadata = workspace_json['metadata']
        if 'learning_opt_out' in workspace_json:
            learning_opt_out = workspace_json['learning_opt_out']
        if 'system_settings' in workspace_json:
            system_settings = workspace_json['system_settings']
    # Intents from CSV replace any intents taken from the base workspace.
    if args.intentfile is not None:
        # First, group utterances by INTENT_COLUMN. In each intent group,
        # construct the CreateIntent[] and return as a cell of the series.
        # Convert the series into dataframe and restore the intent column
        # from index to an explicit column.
        intent_df = pd.read_csv(filepath_or_buffer=args.intentfile, quoting=csv.QUOTE_ALL,
                                encoding=UTF_8, header=None,
                                names=INTENT_CSV_HEADER,
                                keep_default_na=False) \
            .groupby(by=[INTENT_COLUMN]).apply(to_examples) \
            .to_frame().reset_index(level=[INTENT_COLUMN]) \
            .rename(columns={0: EXAMPLES_COLUMN})
        # Construct the CreateIntent[]
        intents = [{'intent': row[INTENT_COLUMN],
                    'examples': row[EXAMPLES_COLUMN]}
                   for _, row in intent_df.iterrows()]
    # Entities from CSV likewise replace base-workspace entities.
    if args.entityfile is not None:
        # Read csv with unknown number of columns into dataframe
        rows = None
        with open(args.entityfile, 'r', encoding='utf-8') as f:
            reader = csv.reader(f, quoting=csv.QUOTE_ALL)
            rows = list(reader)
        entity_df = pd.DataFrame(rows)
        # Rename 1st, 2nd column to ENTITY_COLUMN, ENTITY_VALUE_COLUMN.
        # Group rows by entity name. In each entity group,
        # construct the CreateEntity[] and return as a cell of the series.
        # Convert the series into dataframe and restore
        # the intent column from index to an explicit column.
        entity_df = entity_df.rename(
            columns={0: ENTITY_COLUMN, 1: ENTITY_VALUE_COLUMN}) \
            .groupby(by=[ENTITY_COLUMN]).apply(to_entity_values).to_frame() \
            .reset_index(level=[ENTITY_COLUMN]) \
            .rename(columns={0: ENTITY_VALUES_ARR_COLUMN})
        # Construct the CreateEntity[]
        entities = [{'entity': row[ENTITY_COLUMN],
                     'values': row[ENTITY_VALUES_ARR_COLUMN]}
                    for _, row in entity_df.iterrows()]
    # Authenticate and target the Assistant service endpoint.
    authenticator = IAMAuthenticator(args.iam_apikey)
    conv = AssistantV1(
        version=args.version,
        authenticator=authenticator
    )
    conv.set_service_url(args.url)
    if args.workspace_name is not None:
        workspace_name = args.workspace_name
    if args.workspace_description is not None:
        workspace_description = args.workspace_description
    # Create workspace with provided content
    raw_resp = conv.create_workspace(name=workspace_name, language=language,
                                     description=workspace_description,
                                     intents=intents, entities=entities,
                                     dialog_nodes=dialog_nodes,
                                     counterexamples=counterexamples,
                                     metadata=metadata,
                                     learning_opt_out=learning_opt_out,
                                     system_settings=system_settings)
    try:
        # V2 API syntax
        resp = raw_resp.get_result()
    except:
        # V1 API syntax
        resp = raw_resp
    # Poke the training status every SLEEP_INCRE secs
    sleep_counter = 0
    while sleep_counter < TIME_TO_WAIT:
        raw_resp = conv.get_workspace(workspace_id=resp[WORKSPACE_ID_TAG])
        try:
            # V2 API syntax
            resp = raw_resp.get_result()
        except:
            # V1 API syntax
            resp = raw_resp
        if resp['status'] == 'Available':
            print(json.dumps(resp, indent=4))  # double quoted valid JSON
            return
        sleep_counter += SLEEP_INCRE
        # NOTE(review): the actual wait is a fixed 10s, which matches
        # SLEEP_INCRE = 10 today; if SLEEP_INCRE changes, the counter and the
        # real elapsed time will diverge.
        sleep(10)
    raise TrainTimeoutException('Assistant training is timeout')
def create_parser():
    """Build the command-line parser for the training script."""
    parser = ArgumentParser(
        description='Train assistant instance with intents and entities')
    parser.add_argument('-i', '--intentfile', type=str,
                        help='Intent file')
    parser.add_argument('-e', '--entityfile', type=str, help='Entity file')
    parser.add_argument('-w', '--workspace_base_json', type=str,
                        help='Workspace base JSON file')
    parser.add_argument('-n', '--workspace_name', type=str,
                        help='Workspace name')
    parser.add_argument('-d', '--workspace_description', type=str,
                        help='Workspace description')
    # The IAM API key is the only required argument.
    parser.add_argument('-a', '--iam_apikey', type=str, required=True,
                        help='Assistant service IAM api key')
    parser.add_argument('-l', '--url', type=str, default='https://gateway.watsonplatform.net/assistant/api',
                        help='URL to Watson Assistant. Ex: https://gateway-wdc.watsonplatform.net/assistant/api')
    parser.add_argument('-v', '--version', type=str, default=DEFAULT_WA_VERSION,
                        help='Watson Assistant API version in YYYY-MM-DD form')
    return parser
# Entry point: run training; convert a "workspaces limit" API failure into
# the friendlier TrainWorkspaceCountException, re-raise everything else.
if __name__ == '__main__':
    ARGS = create_parser().parse_args()
    try:
        func(ARGS)
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_tb(exc_traceback, limit=1, file=sys.stdout)
        as_string = repr(traceback.format_exception(exc_type, exc_value, exc_traceback))
        # Detect the service-side workspace quota error by its message text.
        if "workspaces limit" in as_string:
            message = "\n"
            message = message + "******************************************************************************************************\n"
            message = message + "Too many workspaces in use.\n"
            message = message + "Delete extra workspaces and/or reduce `fold_num` in your configuration file (ex: `fold_num=3`)\n"
            message = message + "******************************************************************************************************\n"
            raise TrainWorkspaceCountException(message)
        else:
            # Anything else propagates unchanged.
            raise
|
def read_file(file_name):
    """Return the entire contents of the given text file as one string."""
    with open(file_name, "r") as handle:
        return handle.read()
def banks_string_representation(banks):
    """Serialize the bank block counts into a space-separated string key."""
    return ' '.join(map(str, banks))
def is_infinite_loop(banks, previous_allocations):
    """Return True when this bank configuration has been seen before."""
    key = banks_string_representation(banks)
    return key in previous_allocations
def reallocate_banks(banks):
    """Redistribute the largest bank's blocks round-robin, in place.

    Ties are broken by the lowest index (list.index on the maximum).
    """
    size = len(banks)
    donor = banks.index(max(banks))
    blocks = banks[donor]
    banks[donor] = 0
    target = donor
    for _ in range(blocks):
        target = (target + 1) % size
        banks[target] += 1
def part1():
    """Count redistribution cycles until a bank configuration repeats.

    Reads the puzzle input (tab-separated block counts) from day6_input.txt
    and prints the cycle count.
    """
    banks = [int(x) for x in read_file("day6_input.txt").split("\t")]
    previous_allocations = set()
    reallocations = 0
    while not is_infinite_loop(banks, previous_allocations):
        previous_allocations.add(banks_string_representation(banks))
        reallocate_banks(banks)
        reallocations += 1
    print(reallocations)
def part2():
    """Find the length of the repetition loop.

    Records the step at which each configuration was first produced, then
    prints: the step of the first occurrence of the repeated configuration,
    the loop length (total steps minus that first occurrence), and the
    total steps.
    """
    banks = [int(x) for x in read_file("day6_input.txt").split("\t")]
    previous_allocations = set()
    previous_allocations_cycle = dict()
    reallocations = 0
    while not is_infinite_loop(banks, previous_allocations):
        previous_allocations.add(banks_string_representation(banks))
        reallocate_banks(banks)
        reallocations += 1
        # Remember the first step at which each configuration appeared.
        if not banks_string_representation(banks) in previous_allocations_cycle:
            previous_allocations_cycle[banks_string_representation(banks)] = reallocations
    print(previous_allocations_cycle[banks_string_representation(banks)])
    print(reallocations - previous_allocations_cycle[banks_string_representation(banks)])
    print(reallocations)
# Solve both parts when run as a script.
part1()
part2()
|
import discord
from discord.ext import commands, tasks
from itertools import cycle

# Bot responds to commands prefixed with '%'.
cl = commands.Bot(command_prefix='%')
# Presence texts to rotate through; cycle() repeats them forever.
stats = cycle(['Stat1', 'Stat2'])

@cl.event
async def on_ready():
    # Fired once the gateway connection is established.
    print('ready')

@tasks.loop(seconds = 3)
async def changeStat():
    # Rotate the bot's "Playing ..." presence every 3 seconds.
    # NOTE(review): this loop is defined but .start() is never called anywhere
    # in this file, so the presence never actually rotates — confirm and
    # start it (e.g. in on_ready).
    await cl.change_presence(activity=discord.Game(next(stats)))

# SECURITY(review): hard-coded bot token committed to source — it should be
# revoked and loaded from an environment variable or secrets store instead.
cl.run('ODI0NjQ0MzYyMzI3MTYyODgx.YFyX6Q.wYANeBbu71zuo_09-rLMiPg_bNU')
|
class Solution:
    def plus_one(self, digits: list):
        """Add one to a number represented as a list of digits.

        Mutates the list in place, except in the all-nines case where a new
        list with a leading 1 is returned.
        """
        # Walk from the least-significant digit, turning trailing 9s into 0s.
        idx = len(digits) - 1
        while digits[idx] == 9:
            digits[idx] = 0
            idx -= 1
            if idx < 0:
                # Every digit was 9: the result gains a leading 1 (e.g. 99 -> 100).
                return [1] + digits
        digits[idx] += 1
        return digits

    def other_solution(self, digits: list):
        """Join the digits into an int, add one, and split back into digits.

        O(n) time but allocates intermediate strings, so less ideal.
        """
        as_number = int("".join(str(d) for d in digits)) + 1
        return [int(ch) for ch in str(as_number)]
if __name__ == "__main__":
print(Solution().plus_one([9]))
print(Solution().other_solution([9]))
|
from jutge import read
def Square1(n):
    """Print an n-by-n square made of the digit n, one row per line."""
    row = str(n) * n
    for _ in range(n):
        print(row)
# Read square sizes until end of input; print a blank line between
# consecutive squares (the first square gets no leading blank line).
size = read(int)
if size is not None:
    Square1(size)
    size = read(int)
    while size is not None:
        print("")
        Square1(size)
        size = read(int)
|
# Server Connections Alert COG
from __future__ import annotations
import discord
from discord.ext import commands, tasks
from glob import glob
import os, json, time
import traceback
from .webhook_formatter import format_push_hook
GIT_HOOK_DUMPS_DIR = "/git-hooks"
CHANNEL_ID = int(os.environ.get("DISCORD_CHANNEL_ID"))
class GithubBaseWebhooks(commands.Cog, name="Github Wehbooks"):
    """Cog that polls a dump directory for GitHub webhook payload files and
    forwards formatted push notifications to a Discord channel."""

    def __init__(self, bot):
        self.bot = bot
        # Brief delay before starting the polling loop.
        # NOTE(review): time.sleep blocks the loading thread for 3s at cog
        # load — presumably a startup workaround; confirm it is still needed.
        time.sleep(3)
        self.git_hook_notifier_task.start()

    @commands.command(description="Check who is connected to the server")
    async def ctx_info(self, ctx):
        # Debug helper: dump the raw invocation context.
        await ctx.send(f"Ctx dict: {ctx.__dict__}")

    @commands.command()
    async def _send_msg(self, ctx):
        # Relay the rest of the invoking message into the configured channel.
        channel = self.bot.get_channel(CHANNEL_ID)
        await channel.send(ctx.message.content.removeprefix("$_send_msg "))

    @commands.command()
    async def test(self, ctx):
        # Debug helper: verify the bot can resolve the current channel
        # from its cache.
        cid = ctx.message.channel.id
        c = self.bot.get_channel(cid)
        if c is None:
            await ctx.send(ctx.message.channel.__dict__)
            return
        await ctx.send(f"good: {cid}")

    @tasks.loop(seconds=15)
    async def git_hook_notifier_task(self):
        """Every 15s: post each dumped webhook file to the channel, then delete it."""
        if not os.listdir(GIT_HOOK_DUMPS_DIR):
            return
        channel = self.bot.get_channel(CHANNEL_ID)
        if channel is None:
            # Channel cache may not be populated right after login; retry
            # on the next tick.
            print("Waiting for cache ...")
            return
        for git_hook_file_path in map(
            lambda f: os.path.join(GIT_HOOK_DUMPS_DIR, f),
            os.listdir(GIT_HOOK_DUMPS_DIR),
        ):
            # File names appear to be timestamps plus a 4-char extension
            # (sliced off below) — confirm against the producer.
            timestamp = os.path.basename(git_hook_file_path)
            timestamp = timestamp[:-4]
            print(f"UTC: {timestamp}", git_hook_file_path)
            with open(git_hook_file_path, "r") as git_hook_file:
                file_content = git_hook_file.read()
                await self.notify_channel(channel, file_content)
            # Only reached when notify_channel did not raise: the dump file
            # is consumed and removed.
            os.remove(git_hook_file_path)
        print("All done")

    async def notify_channel(self, channel, json_content):
        """Format a push-event payload and send it; non-push hooks are ignored."""
        json_obj = json.loads(json_content)
        if "head_commit" not in json_obj.keys():
            # Only push events carry head_commit.
            print("Not a push hook")
            return
        try:
            messages = format_push_hook(json_obj)
        except Exception as e:
            error_msg = traceback.format_exc()
            messages = (f"Error in parsing github hook json data :\n```{error_msg}```",)
        for msg_string in messages:
            try:
                await channel.send(msg_string)
            except:
                # Escape code fences so the traceback renders inside one block.
                error_msg = traceback.format_exc().replace("```", "\`\`\`")
                if len(error_msg) > 1800:
                    # Keep the tail, staying under Discord's 2000-char limit.
                    error_msg = "\n...\n" + error_msg[-1800:]
                msg = f"Error in sending message on discord :\n```{error_msg}```"
                await channel.send(msg)
def setup(bot):
    """discord.py extension entry point: register the cog on the bot."""
    bot.add_cog(GithubBaseWebhooks(bot))
|
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
import cv2
import numpy as np
import argparse
import time
def load_yolo():
    """Load the YOLOv3 network plus class names, per-class colors, and
    output layer names.

    Requires yolov3.weights, yolov3.cfg and coco.names in the working
    directory.
    """
    net = cv2.dnn.readNet("yolov3.weights", "yolov3.cfg")
    with open("coco.names", "r") as f:
        classes = [line.strip() for line in f.readlines()]
    layers_names = net.getLayerNames()
    # getUnconnectedOutLayers() returns 1-based indices. Older OpenCV wraps
    # each index in a 1-element array (hence the original i[0] - 1), while
    # OpenCV >= 4.5.4 returns plain scalars, which made i[0] raise.
    # np.ravel handles both shapes.
    output_layers = [layers_names[i - 1]
                     for i in np.ravel(net.getUnconnectedOutLayers())]
    colors = np.random.uniform(0, 255, size=(len(classes), 3))
    return net, classes, colors, output_layers
def load_image(img_path):
    """Load an image from disk; return (img, height, width, channels)."""
    # image loading
    img = cv2.imread(img_path)
    #img = cv2.resize(img, None, fx=0.4, fy=0.4)
    # NOTE(review): cv2.imread returns None for missing/unreadable files,
    # which would make .shape raise — confirm callers pass valid paths.
    height, width, channels = img.shape
    return img, height, width, channels
def detect_objects(img, net, outputLayers):
    """Run one forward pass of the network on img; return (blob, raw outputs).

    The blob scales pixels by ~1/255 (0.00392), resizes to 608x608, and swaps
    BGR to RGB.
    """
    blob = cv2.dnn.blobFromImage(img, scalefactor=0.00392, size=(608, 608), mean=(0, 0, 0), swapRB=True, crop=False)
    net.setInput(blob)
    outputs = net.forward(outputLayers)
    return blob, outputs
def get_box_dimensions(outputs, height, width):
    """Convert raw YOLO detections into pixel-space boxes.

    Each detection row is [cx, cy, w, h, objectness, class scores...] with
    coordinates normalized to [0, 1]. Detections whose best class score is
    not above 0.3 are discarded.

    Returns (boxes, confs, class_ids) where each box is [x, y, w, h] with
    (x, y) the top-left corner in pixels.
    """
    boxes = []
    confs = []
    class_ids = []
    for output in outputs:
        for detect in output:
            scores = detect[5:]
            # FIX: removed a leftover debug print(scores) that spammed stdout
            # for every detection of every frame.
            class_id = np.argmax(scores)
            conf = scores[class_id]
            if conf > 0.3:
                # Scale the normalized center/size to pixels, then convert
                # the center to a top-left corner.
                center_x = int(detect[0] * width)
                center_y = int(detect[1] * height)
                w = int(detect[2] * width)
                h = int(detect[3] * height)
                x = int(center_x - w / 2)
                y = int(center_y - h / 2)
                boxes.append([x, y, w, h])
                confs.append(float(conf))
                class_ids.append(class_id)
    return boxes, confs, class_ids
def draw_labels(boxes, confs, colors, class_ids, classes, img):
    """Draw NMS-filtered boxes and class labels on img, then show the image
    and a 220%-scaled copy in two windows."""
    # Non-maximum suppression: score threshold 0.4, IoU threshold 0.5.
    indexes = cv2.dnn.NMSBoxes(boxes, confs, 0.4, 0.5)
    font = cv2.FONT_HERSHEY_PLAIN
    for i in range(len(boxes)):
        if i in indexes:
            x, y, w, h = boxes[i]
            label = str(classes[class_ids[i]])
            # NOTE(review): color is indexed by box index, not class id —
            # presumably colors[class_ids[i]] was intended; confirm.
            color = colors[i]
            cv2.rectangle(img, (x,y), (x+w, y+h), color, 2)
            cv2.putText(img, label, (x, y - 5), font, 1, color, 1)
    print('Original Dimensions : ', img.shape)
    scale_percent = 220  # percent of original size
    width = int(img.shape[1] * scale_percent / 100)
    height = int(img.shape[0] * scale_percent / 100)
    dim = (width, height)
    resized = cv2.resize(img, dim, interpolation=cv2.INTER_AREA)
    cv2.imshow("Resized image", resized)
    cv2.imshow("Image", img)
def image_detect(img_path):
    """Detect objects in a single image and display it until Esc is pressed."""
    model, classes, colors, output_layers = load_yolo()
    image, height, width, channels = load_image(img_path)
    blob, outputs = detect_objects(image, model, output_layers)
    boxes, confs, class_ids = get_box_dimensions(outputs, height, width)
    draw_labels(boxes, confs, colors, class_ids, classes, image)
    # Keep the display windows open until the Esc key (27) is pressed.
    while True:
        key = cv2.waitKey(1)
        if key == 27:
            break
def webcam_detect(videosource):
    """Run live detection on a webcam/stream until Esc (key 27) is pressed.

    NOTE(review): body is identical to start_video — consider sharing one
    helper.
    """
    model, classes, colors, output_layers = load_yolo()
    cap = cv2.VideoCapture(videosource)
    while True:
        # The success flag from cap.read() is ignored, so a dead stream
        # would raise on frame.shape — confirm sources are reliable.
        _, frame = cap.read()
        height, width, channels = frame.shape
        blob, outputs = detect_objects(frame, model, output_layers)
        boxes, confs, class_ids = get_box_dimensions(outputs, height, width)
        draw_labels(boxes, confs, colors, class_ids, classes, frame)
        key = cv2.waitKey(1)
        if key == 27:
            break
    cap.release()
def start_video(video_path):
    """Run detection over a video file until Esc (key 27) is pressed.

    NOTE(review): body is identical to webcam_detect — consider sharing one
    helper. Also, end-of-file makes cap.read() return (False, None), which
    would raise on frame.shape rather than exit cleanly.
    """
    model, classes, colors, output_layers = load_yolo()
    cap = cv2.VideoCapture(video_path)
    while True:
        _, frame = cap.read()
        height, width, channels = frame.shape
        blob, outputs = detect_objects(frame, model, output_layers)
        boxes, confs, class_ids = get_box_dimensions(outputs, height, width)
        draw_labels(boxes, confs, colors, class_ids, classes, frame)
        key = cv2.waitKey(1)
        if key == 27:
            break
    cap.release()
from collections import OrderedDict
import keyword
#TODO: can we just sub-class tuple and be a tuple of tuples with attributes?
class Choices(object):
    """ An enum-type class for creating immutable choices for django CharFields.
    Usage:
        MY_CHOICES = Choices([('YES', 'Yes'), ('NO', 'No'), ('MAYBE', 'Maybe')])
        CharField(choices=MY_CHOICES.choices, default=MY_CHOICES.YES)
    """

    def __init__(self, choices):
        # Bypass our own __setattr__ (which always raises) to store the mapping.
        super(Choices, self).__setattr__('_choices', OrderedDict(choices))
        # 'choices' and 'constants' are reserved for the properties below.
        assert 'choices' not in self._choices.keys()
        assert 'constants' not in self._choices.keys()
        # Sanity check to make sure there are no conflicting constants
        # once spaces and hyphens have been removed
        seen_lookups = set()
        for constant, _ in choices:
            sanitized = constant.replace("-", "_").replace(" ", "_")
            if sanitized in seen_lookups:
                raise ValueError("Attempted to add conflicting option {}".format(constant))
            else:
                seen_lookups.add(sanitized)

    def __getattr__(self, name):
        # Attribute access returns the constant's own name (the stored key),
        # so MY_CHOICES.YES == 'YES'.
        try:
            if name.startswith("_") and name[1:] in self._choices:
                without_underscore = name[1:]
                # Check if the name is a Python keyword, builtin or an integer
                # if it is, we allow access with a leading underscore, but if not
                # we fall through and throw the normal KeyError
                special = False
                try:
                    int(without_underscore)
                    special = True
                except (TypeError, ValueError):
                    # NOTE(review): __builtins__ is a dict in __main__ but a
                    # module elsewhere; .keys() can fail depending on where
                    # this module is used — confirm.
                    special = without_underscore in keyword.kwlist or \
                        without_underscore in __builtins__.keys()
                if special:
                    return without_underscore
            if name not in self._choices:
                # Another special case, if we have spaces or hyphens, provide
                # access using underscores in their place
                for k in self._choices:
                    if k.replace("-", "_").replace(" ", "_") == name:
                        return k
            self._choices[name]  # check it exists
            return name
        except KeyError:
            raise AttributeError("Choices object has no such attribute {}".format(name))

    def __setattr__(self, name, value):
        """ Prevent values being changed. """
        raise AttributeError("You cannot modify attributes on a %s" % self.__class__.__name__)

    def __iter__(self):
        # Iterating a Choices yields its constant names.
        return iter(self.constants)

    def __repr__(self):
        # NOTE(review): `unicode` exists only on Python 2; on Python 3 this
        # raises NameError — confirm the target interpreter (or use str()).
        return unicode(self.choices)

    @property
    def constants(self):
        return self._choices.keys()  #TODO: can we make this faster by storing a constant reference?

    @property
    def choices(self):
        return self._choices.items()  #TODO: can we make this faster by storing a constant reference?
|
"""Python Libraries/Modules/Packages"""
import csv
import os
from rdflib import *
"""Imported Functions"""
from functions.admin_metadata_functions import admin_metadata_mapping
from functions.boolean_functions import class_test
from functions.formatting_functions import edit_kiegel
from functions.split_by_space import split_by_space
from functions.logical_source_functions import generate_main_logical_source
from functions.value_functions import generate_RML_for_bnode
from functions.value_functions import generate_RML_for_constant
from functions.value_functions import generate_RML_for_IRI
from functions.value_functions import generate_RML_for_literal
from functions.start_RML_map import start_RML_map
from functions.subject_map_functions import generate_main_subject_map
"""Functions"""
def get_file_list(csv_dir, entity):
    """Return the names of files in ``csv_dir`` whose name contains ``entity``.

    Order follows ``os.listdir``, exactly as before.
    """
    return [name for name in os.listdir(csv_dir) if entity in name]
def _strip_prefix(text, prefix):
    """Remove ``prefix`` from the start of ``text`` if present, else return it unchanged."""
    return text[len(prefix):] if text.startswith(prefix) else text
def get_property_kiegel_list(csv_dir, entity):
    """Get list of tuples (property number, kiegel) from CSV files for a given entity.

    Args:
        csv_dir: Directory containing the mapping CSV files.
        entity: One of "work", "expression", "manifestation" or "item".

    Returns:
        List of ``(prop_num, kiegel)`` tuples, one per usable CSV data row.
    """
    file_list = get_file_list(csv_dir, entity)
    if entity == "work":
        rda_iri = 'http://rdaregistry.info/Elements/w/'
        from functions.lists import skip_work_props as skip_prop_list
    elif entity == "expression":
        rda_iri = 'http://rdaregistry.info/Elements/e/'
        from functions.lists import skip_expression_props as skip_prop_list
    elif entity == "manifestation":
        rda_iri = 'http://rdaregistry.info/Elements/m/'
        from functions.lists import skip_manifestation_props as skip_prop_list
    elif entity == "item":
        rda_iri = 'http://rdaregistry.info/Elements/i/'
        from functions.lists import skip_item_props as skip_prop_list
    else:
        print("Entity not recognized.")
        quit()
    doi_iri = 'https://doi.org/10.6069/uwlib.55.d.4'
    property_kiegel_list = []
    for csv_file in file_list:
        with open(f"{csv_dir}/{csv_file}") as file:
            csv_reader = csv.reader(file, delimiter=',')
            line_count = 0
            for line in csv_reader:
                if line_count == 0:  # ignore header row
                    pass
                elif "rdf-syntax" in line[1]:
                    pass
                # BUG FIX: str.strip(rda_iri) treats its argument as a *set of
                # characters*, not a prefix, so it could eat leading/trailing
                # characters of the property number; use real prefix removal.
                elif _strip_prefix(line[1], rda_iri) in skip_prop_list:
                    pass
                elif "P10002" in line[1] or "P20002" in line[1] or "P30004" in line[1] or "P40001" in line[1]:
                    pass
                else:
                    # Find property number.
                    prop_IRI = line[1]
                    # BUG FIX: same character-set pitfall with lstrip(); for the
                    # DOI base the set contains digits, which could strip the
                    # start of the property id itself.
                    if "rdaregistry" in prop_IRI:
                        prop_num = _strip_prefix(prop_IRI, rda_iri)
                    else:
                        prop_num = _strip_prefix(prop_IRI, doi_iri)
                    prop_num = prop_num.strip('#')
                    # Find kiegel; remove subclass variables, if they exist.
                    kiegel = line[3]
                    if ".subclass" in kiegel:
                        # BUG FIX: the old code removed items from the list it
                        # was iterating, which skips adjacent ".subclass"
                        # entries; build a filtered list instead.
                        kept = [part for part in kiegel.split("\nor\n")
                                if ".subclass" not in part]
                        kiegel = "\nor\n".join(kept)
                    # Create tuple and add to list.
                    property_kiegel_list.append((prop_num, kiegel))
                line_count += 1
    return property_kiegel_list
def create_kiegel_dict(csv_dir, entity):
    """Create a dict mapping property number -> parsed kiegel mapping dict.

    Args:
        csv_dir: Directory containing the mapping CSV files.
        entity: One of "work", "expression", "manifestation" or "item".
    """
    kiegel_dict = {}
    # FIX: the old loop bound each pair to a variable named `tuple`, shadowing
    # the builtin, and first assigned a dead `[]` that was always overwritten.
    for property_number, kiegel in get_property_kiegel_list(csv_dir, entity):
        # edit_kiegel turns the raw kiegel text into a mapping dict.
        kiegel_dict[property_number] = edit_kiegel(kiegel)
    return kiegel_dict
def kiegel_reader(csv_dir, entity):
    """Build the complete RML mapping graph for one entity from its kiegel CSVs.

    Args:
        csv_dir: Directory holding the mapping CSV files.
        entity: One of "work", "expression", "manifestation" or "item".

    Returns:
        The rdflib graph holding the generated RML mapping.
    """
    """Start RML map"""
    RML_graph = start_RML_map()
    RML_graph = generate_main_logical_source(RML_graph, entity)
    RML_graph = generate_main_subject_map(RML_graph, entity)
    RML_graph = admin_metadata_mapping(RML_graph, entity)
    # Each entity gets its own identifiedBy mapping (these property numbers
    # are the ones skipped in get_property_kiegel_list).
    if entity == "work":
        from functions.identifiedBy_functions import P10002_mapping
        RML_graph = P10002_mapping(RML_graph)
    elif entity == "expression":
        from functions.identifiedBy_functions import P20002_mapping
        RML_graph = P20002_mapping(RML_graph)
    elif entity == "manifestation":
        from functions.identifiedBy_functions import P30004_mapping
        RML_graph = P30004_mapping(RML_graph)
    elif entity == "item":
        from functions.identifiedBy_functions import P40001_mapping
        RML_graph = P40001_mapping(RML_graph)
    else:
        print("Entity not recognized.")
        quit()
    """For each property in kiegel_dict, convert kiegel map to RML code"""
    kiegel_dict = create_kiegel_dict(csv_dir, entity)
    # State shared across properties: blank-node predicate/object maps and the
    # logical-source subjects already emitted (threaded through
    # generate_RML_for_bnode).
    bnode_po_dict = {}
    logsource_subject_list = []
    default_map_name = entity.capitalize()
    for prop_num in kiegel_dict.keys():
        prop_dict = kiegel_dict[prop_num]
        for value_type in prop_dict.keys(): # value_type == "IRI" or value_type == "literal"
            map_list = prop_dict[value_type]
            for mapping in map_list:
                # map_name may be rewritten by the blank-node branch below.
                map_name = default_map_name
                node_list = split_by_space(mapping)
                node_range = range(0, len(node_list))
                for num in node_range:
                    node = node_list[num].strip()
                    # ">", "not" and "mapped" are kiegel syntax tokens with no
                    # RML output of their own.
                    if node == ">":
                        pass
                    elif node == "not":
                        pass
                    elif node == "mapped":
                        pass
                    elif "*" in node:
                        """Property takes an IRI value"""
                        RML_graph = generate_RML_for_IRI(RML_graph, default_map_name, map_name, node, prop_num)
                    elif "=" in node:
                        """Property takes a constant value"""
                        RML_graph = generate_RML_for_constant(RML_graph, map_name, node)
                    elif node == ">>":
                        """Previous property in kiegel mapping takes a blank node as an object"""
                        generate_RML_for_bnode_tuple = generate_RML_for_bnode(RML_graph, bnode_po_dict, logsource_subject_list, entity, prop_num, value_type, mapping, node_list, num, map_name)
                        RML_graph = generate_RML_for_bnode_tuple[0]
                        bnode_po_dict = generate_RML_for_bnode_tuple[1]
                        logsource_subject_list = generate_RML_for_bnode_tuple[2]
                        map_name = generate_RML_for_bnode_tuple[3]
                    else:
                        """Property takes a literal or a blank node"""
                        if num != len(node_list)-1 and node_list[num+1] == ">>":
                            """This property takes a blank node; pass, and get it in the previous elif on the next loop"""
                            pass
                        else:
                            """Make sure it's a property, and not a class for a blank node. Otherwise, it takes a literal"""
                            its_a_class = class_test(node)
                            if its_a_class == False:
                                RML_graph = generate_RML_for_literal(RML_graph, default_map_name, map_name, prop_num, node)
    return RML_graph
|
from django.test import RequestFactory, TestCase, override_settings
from django.views.debug import CLEANSED_SUBSTITUTE
from jellyglass.models import Spoon
from jellyglass.utils import record_spoon
class SensitivePostParametersTest(TestCase):
    """Tests for cleansing of sensitive POST parameters by record_spoon.

    Per the assertions below: cleansing happens only when the
    JELLYGLASS_HIDE_SENSITIVE_POST_PARAMETERS setting is on AND a
    sensitive-field list is supplied; ``sensitive=None`` opts out entirely,
    while an empty list cleanses every field.
    """
    def setUp(self):
        factory = RequestFactory()
        self.request = factory.post('/post/', data={
            'insecure': 'insecure value',
            'secure': 'secure value',
        })
    def _record(self, sensitive, request=None):
        """Run a recorded dummy view and return the captured Spoon."""
        @record_spoon('Jelly', post_fields='__ALL__', sensitive=sensitive)
        def view(request):
            pass
        view(self.request if request is None else request)
        return Spoon.objects.first()
    def test_disabled_none(self):
        # Setting off: nothing is cleansed.
        spoon = self._record(None)
        self.assertJSONEqual(spoon.post, {
            'insecure': 'insecure value',
            'secure': 'secure value',
        })
    def test_disabled_empty(self):
        spoon = self._record([])
        self.assertJSONEqual(spoon.post, {
            'insecure': 'insecure value',
            'secure': 'secure value',
        })
    def test_disabled_secure(self):
        spoon = self._record(['secure'])
        self.assertJSONEqual(spoon.post, {
            'insecure': 'insecure value',
            'secure': 'secure value',
        })
    @override_settings(JELLYGLASS_HIDE_SENSITIVE_POST_PARAMETERS=True)
    def test_enabled_none(self):
        # sensitive=None disables cleansing even with the setting on.
        spoon = self._record(None)
        self.assertJSONEqual(spoon.post, {
            'insecure': 'insecure value',
            'secure': 'secure value',
        })
    @override_settings(JELLYGLASS_HIDE_SENSITIVE_POST_PARAMETERS=True)
    def test_enabled_empty(self):
        # An empty sensitive list cleanses every field.
        spoon = self._record([])
        self.assertJSONEqual(spoon.post, {
            'insecure': CLEANSED_SUBSTITUTE,
            'secure': CLEANSED_SUBSTITUTE,
        })
    @override_settings(JELLYGLASS_HIDE_SENSITIVE_POST_PARAMETERS=True)
    def test_enabled_secure(self):
        spoon = self._record(['secure'])
        self.assertJSONEqual(spoon.post, {
            'insecure': 'insecure value',
            'secure': CLEANSED_SUBSTITUTE,
        })
    @override_settings(JELLYGLASS_HIDE_SENSITIVE_POST_PARAMETERS=True)
    def test_enabled_no_secure(self):
        # A listed sensitive field absent from the POST is simply ignored.
        request = RequestFactory().post('/post/', data={
            'insecure': 'insecure value',
        })
        spoon = self._record(['secure'], request=request)
        self.assertJSONEqual(spoon.post, {
            'insecure': 'insecure value',
        })
|
######################################################################################################################
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/ #
# #
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
######################################################################################################################
import calendar
import unittest
from scheduling.monthday_setbuilder import MonthdaySetBuilder
class TestMonthdaySetBuilder(unittest.TestCase):
    """Unit tests for MonthdaySetBuilder, run over a leap and a normal year."""
    def test_name(self):
        """Each numeric day string builds a set holding exactly that day."""
        years = [2016, 2017]  # leap and normal year
        for year in years:
            for month in range(1, 13):
                _, days = calendar.monthrange(year, month)
                # BUG FIX: range(1, days) skipped the last day of every month;
                # use days + 1 so the full month is exercised.
                for day in range(1, days + 1):
                    self.assertEqual(MonthdaySetBuilder(year, month).build(str(day)), {day})
    def test_L_wildcard(self):
        """"L" resolves to the last day of the month."""
        years = [2016, 2017]  # leap and normal year
        for year in years:
            for month in range(1, 13):
                _, days = calendar.monthrange(year, month)
                self.assertEqual(MonthdaySetBuilder(year, month).build("L"), {days})
    def test_W_wildcard(self):
        """"<day>W" snaps weekend days to the nearest weekday in the month."""
        years = [2016, 2017]  # leap and normal year
        for year in years:
            for month in range(1, 13):
                _, days = calendar.monthrange(year, month)
                # BUG FIX: include the last day (the `day < days` branch below
                # only matters when day can actually equal days).
                for day in range(1, days + 1):
                    weekday = calendar.weekday(year, month, day)
                    result = day
                    if weekday == 5:
                        # Saturday -> previous Friday, or following Monday on the 1st
                        result = day - 1 if day > 1 else day + 2
                    elif weekday == 6:
                        # Sunday -> following Monday, or preceding Friday at month end
                        result = day + 1 if day < days else day - 2
                    self.assertEqual(MonthdaySetBuilder(year, month).build(str(day) + "W"), {result})
    def test_exceptions(self):
        """Invalid day specifications raise ValueError."""
        # FIX: the old `for h in range(13, 25)` loop repeated these identical
        # assertions 12 times without ever using `h`; once is enough.
        self.assertRaises(ValueError, MonthdaySetBuilder(2016, 1).build, "W")
        self.assertRaises(ValueError, MonthdaySetBuilder(2016, 1).build, "32W")
|
import numpy as np
import cv2
# Helper to identify HSV parameters of XRay images
def nothing(*args):
    """No-op trackbar callback.

    BUG FIX: OpenCV invokes trackbar callbacks with the new slider position,
    so the old zero-argument signature raised TypeError on every trackbar
    move.  ``*args`` accepts both zero and one argument, staying
    backward-compatible.
    """
    pass
# Interactive HSV threshold tuner: six trackbars define the lower/upper HSV
# bounds; the masked result is shown live until Esc (27) is pressed.
cv2.namedWindow("Tracking")
cv2.createTrackbar("LH", "Tracking", 0, 255, nothing)
cv2.createTrackbar("LS", "Tracking", 0, 255, nothing)
cv2.createTrackbar("LV", "Tracking", 0, 255, nothing)
cv2.createTrackbar("UH", "Tracking", 255, 255, nothing)
cv2.createTrackbar("US", "Tracking", 255, 255, nothing)
cv2.createTrackbar("UV", "Tracking", 255, 255, nothing)
# Image to detect -- load, resize and convert ONCE; the file does not change
# between loop iterations (it was previously re-read every frame).
img_path = '/home/ronald/PycharmProjects/x-ray-deep-learning/X-ray_Object_Detection/data/raw/multi/images/train/B0046_0100.png'
img = cv2.imread(img_path)
img = cv2.resize(img, dsize=(256, 256))
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
while True:
    l_h = cv2.getTrackbarPos("LH", "Tracking") # 0
    l_s = cv2.getTrackbarPos("LS", "Tracking") # 0
    l_v = cv2.getTrackbarPos("LV", "Tracking") # 0
    u_h = cv2.getTrackbarPos("UH", "Tracking") # 0
    u_s = cv2.getTrackbarPos("US", "Tracking") # 0
    u_v = cv2.getTrackbarPos("UV", "Tracking") # 38
    # BUG FIX: the lower bound's value channel previously reused l_h
    # (np.array([l_h, l_s, l_h])), so the LV trackbar had no effect.
    lb = np.array([l_h, l_s, l_v])
    ub = np.array([u_h, u_s, u_v])
    mask = cv2.inRange(hsv, lb, ub)
    res = cv2.bitwise_and(img, img, mask=mask)
    cv2.imshow('frame', img)
    cv2.imshow('mask', mask)
    cv2.imshow('res', res)
    key = cv2.waitKey(1)
    if key == 27:
        break
cv2.destroyAllWindows()
# Note.
# Highly unoptimized code which really needs factoring, but work nonetheless
from math import floor, ceil
class Node:
    """A node of a snailfish-number binary tree.

    A node is either a literal (``literal`` is True, value in ``val``) or an
    internal pair with ``left``/``right`` children.  ``parent`` and
    ``isLeftchild`` allow walking to neighbouring literals during explosion.
    """
    def __init__(self, literal=True, left=None, right=None, val=0, parent=None, isLeftchild=True) -> None:
        self.literal = literal
        self.left = left
        self.right = right
        self.val = val
        self.parent = parent
        self.isLeftchild = isLeftchild
    def __str__(self):
        if not self.literal:
            return '[' + str(self.left) + ',' + str(self.right) + ']'
        return str(self.val)
def FindLeftmost(node: Node):
    """Return the nearest literal node to the left of ``node``, or None.

    Climbs while ``node`` is a left child, steps into the left sibling, then
    descends along right children to the closest literal.
    """
    crawl = node
    while crawl.isLeftchild and crawl.parent is not None:
        crawl = crawl.parent
    if crawl.parent is None:
        return None
    crawl = crawl.parent.left
    # BUG FIX: test for None *before* touching .literal; the original
    # `not crawl.literal and crawl != None` would raise AttributeError if
    # crawl were ever None.
    while crawl is not None and not crawl.literal:
        crawl = crawl.right
    return crawl
def FindRightmost(node: Node):
    """Return the nearest literal node to the right of ``node``, or None.

    Mirror image of FindLeftmost: climbs while ``node`` is a right child,
    steps into the right sibling, then descends along left children.
    """
    crawl = node
    while not crawl.isLeftchild and crawl.parent is not None:
        crawl = crawl.parent
    if crawl.parent is None:
        return None
    crawl = crawl.parent.right
    # BUG FIX: test for None *before* touching .literal (see FindLeftmost).
    while crawl is not None and not crawl.literal:
        crawl = crawl.left
    return crawl
root = Node()  # global root of the current snailfish tree; rebound by part1/part2
def Split(node: Node):
    """Split the leftmost literal >= 10 into a [floor, ceil] pair.

    Returns True if exactly one split was performed, False otherwise.
    """
    if node.literal:
        if node.val < 10:
            return False
        half = node.val / 2
        node.left = Node(val=floor(half), isLeftchild=True)
        node.right = Node(val=ceil(half), isLeftchild=False)
        node.literal = False
        node.val = 0
        return True
    # Left subtree first, so only the leftmost eligible literal splits.
    return Split(node.left) or Split(node.right)
def Fix(node: Node):
    """Recursively repair parent and isLeftchild links below ``node``."""
    if node.literal:
        return
    for child, on_left in ((node.left, True), (node.right, False)):
        child.isLeftchild = on_left
        child.parent = node
        Fix(child)
def Exploding(node: Node, level = 0):
    """Explode the leftmost pair nested at depth >= 4, if any.

    The pair's left value is added to the nearest literal on its left and its
    right value to the nearest literal on its right; the pair itself becomes
    a literal 0.  Returns True if an explosion happened.
    """
    global root
    if node.literal: return False
    elif level >= 4:
        # print(node)
        leftmostNode = FindLeftmost(node)
        if leftmostNode != None:
            leftmostNode.val += node.left.val
        # print(f'leftmostNode = {leftmostNode}')
        rightmostNode = FindRightmost(node)
        if rightmostNode != None:
            rightmostNode.val += node.right.val
        # print(f'rightmostNode = {rightmostNode}')
        # Replace the exploded pair with a literal 0.
        node.left = None
        node.right = None
        node.val = 0
        node.literal = True
        # print(root)
        return True
    else:
        # Depth-first, left child first, so the *leftmost* deep pair explodes.
        if node.left != None and Exploding(node.left, level+1): return True
        elif node.right != None and Exploding(node.right, level+1): return True
    return False
def Parsing(s, idx = 0, parent=None, isLeftchild=True):
    """Parse a snailfish pair '[left,right]' starting at ``s[idx]``.

    Returns (node, next_idx), where next_idx points past the pair's closing
    bracket and trailing separator.
    """
    node = Node(literal=False, parent=parent, isLeftchild=isLeftchild)
    # left child
    idx += 1  # step past the opening '['
    if s[idx] == '[':
        node.left, idx = Parsing(s, idx, node, True)
    else:
        next_idx = s.find(',', idx)
        val = int(s[idx: next_idx])
        node.left = Node(literal=True,val=val,parent=node,isLeftchild=True)
        idx = next_idx+1
    # right child
    if s[idx] == '[':
        node.right, idx = Parsing(s, idx, node, False)
    else:
        next_idx = s.find(']', idx)
        val = int(s[idx: next_idx])
        node.right = Node(literal=True,val=val,parent=node,isLeftchild=False)
        idx = next_idx+1
    idx += 1  # advance past the closing delimiter
    return node, idx
def CalculateSum(node: Node):
    """Return the magnitude: 3 * left magnitude + 2 * right magnitude."""
    if not node.literal:
        return 3 * CalculateSum(node.left) + 2 * CalculateSum(node.right)
    return node.val
def part1():
    """Sum all snailfish numbers from the input file in order and print the
    magnitude of the result (Advent of Code 2021 day 18, part 1)."""
    global root
    with open('Advent of Code/2021/d18.txt') as file:
        s = file.readline().strip()
        root, _ = Parsing(s)
        # print(root)
        for line in file:
            node, _ = Parsing(line.strip())
            # print(node)
            # Addition: new root pair with the running sum on the left and the
            # freshly parsed number on the right.
            x = Node(literal=False,left=root,right=node)
            node.parent = x
            node.isLeftchild = False
            root.parent = x
            root.isLeftchild = True
            root = x
            # print(f'before\t= {root}')
            # Reduce: explode (depth >= 4) before split (value >= 10), until
            # neither rule applies.
            while True:
                Fix(root)
                if Exploding(root):
                    # print(f'explode\t: {root}')
                    continue
                elif Split(root):
                    # print(f'split\t: {root}')
                    continue
                break
            # print(f'after\t= {root}')
    print(CalculateSum(root))
part1()
def part2():
    """Print the largest magnitude obtainable by adding any two distinct
    input numbers (addition is not commutative, so both orders are tried)."""
    global root
    nodes = []
    with open('Advent of Code/2021/d18.txt') as file:
        for line in file:
            nodes.append(line.strip())
    ans = 0
    for i in range(len(nodes)):
        for j in range(len(nodes)):
            if i == j: continue
            # Re-parse each operand so earlier reductions don't leak between pairs.
            a, _ = Parsing(nodes[i])
            b, _ = Parsing(nodes[j])
            root = Node(literal=False,left=a,right=b)
            a.parent = root
            # NOTE(review): these two isLeftchild flags look swapped (a is the
            # left child but gets False); Fix(root) below rewrites them before
            # they are ever read, so the result is unaffected.
            a.isLeftchild = False
            b.parent = root
            b.isLeftchild = True
            # print(f'before\t= {root}')
            while True:
                Fix(root)
                if Exploding(root):
                    # print(f'explode\t: {root}')
                    continue
                elif Split(root):
                    # print(f'split\t: {root}')
                    continue
                break
            # print(f'after\t= {root}')
            ans = max(CalculateSum(root), ans)
    print(ans)
part2()
# -*- coding: utf-8 -*-
import scrapy
class BioonSpider(scrapy.Spider):
    """Crawls news.bioon.com mobile-medical category listings and saves each
    article's text to a per-article file under ``bioon/``."""
    name = "bioon"
    start_urls = ['http://news.bioon.com/mobileMedical/research-%d.html'%x for x in range(1,51)]
    start_urls.extend(['http://news.bioon.com/mobileMedical/company-%d.html'%x for x in range(1,39)])
    start_urls.extend(['http://news.bioon.com/mobileMedical/products-%d.html'%x for x in range(1,35)])
    start_urls.extend(['http://news.bioon.com/mobileMedical/industry-%d.html'%x for x in range(1,51)])
    def parse(self, response):
        """Yield one request per article link, carrying the list-page date."""
        for div in response.css('div.cntx'):
            href = div.css('a::attr(href)').extract_first()
            date = div.css('div.fl.huise::text').extract_first()
            yield scrapy.Request(response.urljoin(href),
                                 callback=self.herf_with_date(date))
    def herf_with_date(self, date):
        """Return a response callback that saves the article text under a
        date-and-title-derived filename ('/' and ':' sanitized out)."""
        def href(response):
            title = response.css('title::text').extract_first()
            cntx = ''.join(response.css('div.text3 p::text').extract())
            sat = date + ":" + title.replace("/",",").replace(":","-")
            filename = '%s.txt' % sat
            # BUG FIX: the file was opened with a constant path, so every
            # article overwrote the same file while `filename` went unused;
            # write to the computed per-article path (UTF-8 for CJK content).
            with open(f'bioon/{filename}', 'w', encoding='utf-8') as f:
                f.write(cntx)
            self.log('Saved file %s' % filename)
        return href
import numpy as np
import scipy.ndimage as ndi
from skimage import morphology
from numba import jit, prange
from tqdm import tqdm
import torch
@jit(nopython=True)
def _local_thick_2d(mask, med_axis, distance, search_extent, sampling):
    """Numba kernel: local thickness (as a radius) for a 2D mask.

    For every foreground pixel, finds the largest inscribed-circle radius
    among medial-axis points whose circle covers the pixel.

    Args:
        mask: 2D foreground mask.
        med_axis: 2D medial-axis mask.
        distance: 2D distance transform of ``mask``.
        search_extent: None, or per-axis half-window (in pixels) limiting the
            medial-axis candidate search around each pixel.
        sampling: per-axis pixel spacing used to scale index offsets.

    Returns:
        float32 array of best covering radii (0 where no circle covers).
    """
    out = np.zeros_like(mask, dtype=np.float32)
    nonzero_x = np.nonzero(mask)
    ii = nonzero_x[0]
    jj = nonzero_x[1]
    nonzero_med_axis = np.nonzero(med_axis)
    mm = nonzero_med_axis[0]
    nn = nonzero_med_axis[1]
    for e in range(len(ii)):
        i = ii[e]
        j = jj[e]
        best_val = 0
        if search_extent is not None:
            # Candidate window around (i, j); r0/r1/c0/c1 are only read below
            # under the same `search_extent is not None` condition.
            r0 = max(i - search_extent[0], 0)
            r1 = min(i + search_extent[0], mask.shape[0] - 1)
            c0 = max(j - search_extent[1], 0)
            c1 = min(j + search_extent[1], mask.shape[1] - 1)
        for w in range(len(mm)):
            m = mm[w]
            n = nn[w]
            if search_extent is not None:
                if m < r0 or m > r1 or n < c0 or n > c1:
                    continue
            scaled_i = (i - m) * sampling[0]
            scaled_j = (j - n) * sampling[1]
            # NOTE(review): strict `<` here, whereas the 3D kernel uses `<=`;
            # pixels exactly on the inscribed-circle boundary are excluded in
            # 2D only — confirm which comparison is intended.
            if (scaled_i ** 2 + scaled_j ** 2) < (distance[m, n] ** 2):
                if distance[m, n] > best_val:
                    best_val = distance[m, n]
        out[i, j] = best_val
    return out
@jit(nopython=True, parallel=True)
def _local_thick_3d(mask, med_axis, distance, search_extent, sampling):
    """Numba kernel: local thickness (as a radius) for a 3D mask.

    Parallel (prange) version of _local_thick_2d for volumes: for every
    foreground voxel, finds the largest inscribed-sphere radius among
    medial-axis points whose sphere covers the voxel.

    Args:
        mask: 3D foreground mask.
        med_axis: 3D medial-axis / skeleton mask.
        distance: 3D distance transform of ``mask``.
        search_extent: None, or per-axis half-window (in voxels) limiting the
            candidate search around each voxel.
        sampling: per-axis voxel spacing used to scale index offsets.

    Returns:
        float32 array of best covering radii (0 where no sphere covers).
    """
    out = np.zeros_like(mask, dtype=np.float32)
    nonzero_mask = np.nonzero(mask)
    ii = nonzero_mask[0]
    jj = nonzero_mask[1]
    kk = nonzero_mask[2]
    num_pts_mask = len(ii)
    nonzero_med_axis = np.nonzero(med_axis)
    mm = nonzero_med_axis[0]
    nn = nonzero_med_axis[1]
    oo = nonzero_med_axis[2]
    num_pts_med_axis = len(mm)
    # Per-voxel accumulator array; each prange iteration writes its own slot,
    # avoiding cross-thread races.
    best_vals = np.zeros((num_pts_mask, ))
    for e in prange(num_pts_mask):
        i = ii[e]
        j = jj[e]
        k = kk[e]
        if search_extent is not None:
            # Candidate window; bounds are only read below under the same
            # `search_extent is not None` condition.
            r0 = max(i - search_extent[0], 0)
            r1 = min(i + search_extent[0], mask.shape[0] - 1)
            c0 = max(j - search_extent[1], 0)
            c1 = min(j + search_extent[1], mask.shape[1] - 1)
            p0 = max(k - search_extent[2], 0)
            p1 = min(k + search_extent[2], mask.shape[2] - 1)
        for w in range(num_pts_med_axis):
            m = mm[w]
            n = nn[w]
            o = oo[w]
            if search_extent is not None:
                if m < r0 or m > r1 or n < c0 or n > c1 or o < p0 or o > p1:
                    continue
            # Check if distance between mask and medial axis < distance to nearest edge
            scaled_i = (i - m) * sampling[0]
            scaled_j = (j - n) * sampling[1]
            scaled_k = (k - o) * sampling[2]
            if (scaled_i ** 2 + scaled_j ** 2 + scaled_k ** 2) <= (distance[m, n, o] ** 2):
                if distance[m, n, o] > best_vals[e]:
                    best_vals[e] = distance[m, n, o]
        out[i, j, k] = best_vals[e]
    return out
def _local_thickness(mask, *, mode='med2d_dist3d_lth3d',
                     spacing_mm=None, stack_axis=None,
                     thickness_max_mm=None,
                     return_med_axis=False, return_distance=False):
    """
    Inspired by https://imagej.net/Local_Thickness .
    Args:
        mask: (D0, D1[, D2]) ndarray
        mode: One of {'straight_skel_3d', 'stacked_2d',
              'med2d_dist2d_lth3d', 'med2d_dist3d_lth3d'} or None
            Implementation mode for 3D ``mask``. Ignored for 2D.
        spacing_mm: tuple of ``mask.ndim`` elements
            Size of ``mask`` voxels in mm.
        stack_axis: None or int
            Index of axis to perform slice selection along. Ignored for 2D.
        thickness_max_mm: None or number
            Hypothesised maximum thickness in mm. Used to constrain local
            ROIs to speed up best candidate search.
        return_med_axis: bool
            Whether to return the medial axis.
        return_distance: bool
            Whether to return the distance transform.
    Returns:
        out: ndarray
            Local thickness.
        med_axis: ndarray
            Medial axis. Returned only if ``return_med_axis`` is True.
        distance: ndarray
            Distance transform. Returned only if ``return_distance`` is True.
    """
    # 1. Compute the distance transform
    # 2. Find the distance ridge (/ exclude the redundant points)
    # 3. Compute local thickness
    if spacing_mm is None:
        spacing_mm = (1,) * mask.ndim
    if thickness_max_mm is None:
        search_extent = None
    else:
        # Distance to the closest surface point is half of the thickness
        distance_max_mm = thickness_max_mm / 2.
        search_extent = np.ceil(distance_max_mm / np.asarray(spacing_mm)).astype(np.uint)
    if mask.ndim == 2:
        med_axis = morphology.medial_axis(mask)
        distance = ndi.distance_transform_edt(mask, sampling=spacing_mm)
        out = _local_thick_2d(mask=mask, med_axis=med_axis, distance=distance,
                              search_extent=search_extent, sampling=spacing_mm)
    elif mask.ndim == 3:
        if mode == 'straight_skel_3d':
            from warnings import warn
            msg = 'Straight skeleton is not suitable for local thickness'
            warn(msg)
            if thickness_max_mm is not None:
                msg = f'`thickness_max_mm` is not supported in mode {mode}'
                raise NotImplementedError(msg)
            skeleton = morphology.skeletonize_3d(mask)
            distance = ndi.distance_transform_edt(mask, sampling=spacing_mm)
            # BUG FIX: _local_thick_3d requires `search_extent` and `sampling`,
            # which were previously omitted (TypeError at call time).
            out = _local_thick_3d(mask=mask, med_axis=skeleton, distance=distance,
                                  search_extent=search_extent,
                                  sampling=np.asarray(spacing_mm))
            med_axis = skeleton
        elif mode == 'stacked_2d':
            if thickness_max_mm is not None:
                msg = f'`thickness_max_mm` is not supported in mode {mode}'
                raise NotImplementedError(msg)
            acc_med = []
            acc_dist = []
            acc_out = []
            for idx_slice in tqdm(range(mask.shape[stack_axis]), desc='Calculating thickness'):
                sel_idcs = [slice(None), ] * mask.ndim
                sel_idcs[stack_axis] = idx_slice
                sel_idcs = tuple(sel_idcs)
                # In-plane spacing: drop the stacking-axis component.
                # (spacing_mm cannot be None here -- it is defaulted above.)
                sel_spacing = (list(spacing_mm[:stack_axis]) +
                               list(spacing_mm[stack_axis+1:]))
                sel_mask = mask[sel_idcs]
                sel_res = _local_thickness(sel_mask, spacing_mm=sel_spacing,
                                           return_med_axis=True, return_distance=True)
                acc_med.append(sel_res[1])
                acc_dist.append(sel_res[2])
                # The recursive call already doubled radius -> thickness, so
                # store the radius here; the final doubling below restores it.
                acc_out.append(sel_res[0] / 2)
            med_axis = np.stack(acc_med, axis=stack_axis)
            distance = np.stack(acc_dist, axis=stack_axis)
            out = np.stack(acc_out, axis=stack_axis)
        elif mode == 'med2d_dist2d_lth3d':
            if thickness_max_mm is not None:
                msg = f'`thickness_max_mm` is not supported in mode {mode}'
                # BUG FIX: `msg` was built but not passed to the exception.
                raise NotImplementedError(msg)
            acc_med = []
            acc_dist = []
            # Slice-by-slice implementation for medial axis and distance transform
            for idx_slice in tqdm(range(mask.shape[stack_axis]), desc='Calculating medial axis and distance transform'):
                sel_idcs = [slice(None), ] * mask.ndim
                sel_idcs[stack_axis] = idx_slice
                sel_idcs = tuple(sel_idcs)
                sel_med = morphology.medial_axis(mask[sel_idcs])
                # BUG FIX: the 2D slice transform needs a 2-element sampling;
                # the full 3-element spacing was passed before, which SciPy
                # rejects (sampling length must match rank).
                sel_spacing = (list(spacing_mm[:stack_axis]) +
                               list(spacing_mm[stack_axis+1:]))
                sel_dist = ndi.distance_transform_edt(mask[sel_idcs], sampling=sel_spacing)
                acc_med.append(sel_med)
                acc_dist.append(sel_dist)
            med_axis = np.stack(acc_med, axis=stack_axis)
            distance = np.stack(acc_dist, axis=stack_axis)
            # BUG FIX: `sampling` was previously omitted (TypeError).
            out = _local_thick_3d(mask=mask, med_axis=med_axis, distance=distance,
                                  search_extent=search_extent,
                                  sampling=np.asarray(spacing_mm))
        elif mode == 'med2d_dist3d_lth3d':
            acc_med = []
            # Slice-by-slice implementation for medial axis
            for idx_slice in tqdm(range(mask.shape[stack_axis]), desc='Calculating medial axis'):
                sel_idcs = [slice(None), ] * mask.ndim
                sel_idcs[stack_axis] = idx_slice
                sel_idcs = tuple(sel_idcs)
                sel_res = morphology.medial_axis(mask[sel_idcs])
                acc_med.append(sel_res)
            # Medial axis
            med_axis = np.stack(acc_med, axis=stack_axis)
            # Distance transform
            distance = ndi.distance_transform_edt(mask, sampling=spacing_mm)
            # Local thickness; convert tuple spacing to an array for the
            # numba kernel.
            if type(spacing_mm) is tuple:
                spacing_mm = np.array(spacing_mm)
            out = _local_thick_3d(mask=mask, med_axis=med_axis, distance=distance,
                                  search_extent=search_extent, sampling=spacing_mm)
        elif mode == 'exact_3d':
            raise NotImplementedError(f'Mode {mode} is not yet supported')
        else:
            raise ValueError(f'Invalid mode: {mode}')
    else:
        msg = 'Only 2D and 3D arrays are supported'
        raise ValueError(msg)
    # Thickness is twice the distance to the closest surface point
    out = 2 * out
    if return_med_axis:
        if return_distance:
            return out, med_axis, distance
        else:
            return out, med_axis
    else:
        if return_distance:
            return out, distance
        else:
            return out
def local_thickness(input_, num_classes, stack_axis, spacing_mm=(1, 1, 1),
                    skip_classes=None, mode='med2d_dist3d_lth3d', thickness_max_mm=None):
    """
    Compute a per-class local thickness map for every sample in a batch.
    Args:
        input_: (b, d0, ..., dn) ndarray or tensor of class labels
        num_classes: int
            Total number of classes.
        stack_axis: int
            Index of axis to perform slice selection along. Ignored for 2D.
        spacing_mm: 3-tuple
            Pixel spacing in mm, one per each spatial dimension of `input_`.
        skip_classes: None or tuple of ints
            Class indices to leave zeroed in the output.
        mode, thickness_max_mm: forwarded to _local_thickness.
    Returns:
        out: (b, d0, ..., dn) ndarray
            Thickness map for each class in each batch sample.
    """
    skip = tuple() if skip_classes is None else skip_classes
    if torch.is_tensor(input_):
        shape = tuple(input_.size())
    else:
        shape = input_.shape
    num_samples, dims = shape[0], shape[1:]
    th_maps = np.zeros((num_samples, *dims))
    for sample_idx in range(num_samples):
        sample = input_[sample_idx]
        th_map = np.zeros_like(sample)
        for class_idx in range(num_classes):
            if class_idx in skip:
                continue
            # Binary mask of the current class; thickness is computed per class
            # and pasted back only inside that class's region.
            class_mask = sample == class_idx
            class_thickness = _local_thickness(
                class_mask, mode=mode,
                spacing_mm=spacing_mm, stack_axis=stack_axis,
                return_med_axis=False, return_distance=False,
                thickness_max_mm=thickness_max_mm)
            th_map[class_mask] = class_thickness[class_mask]
        th_maps[sample_idx, :] = th_map
    return th_maps
|
import weakref
import os
import utils
import unittest
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
utils.set_search_paths(TOPDIR)
from Qt.QtWidgets import QTreeView, QPushButton, QCheckBox # noqa: E402
from Qt.QtCore import QModelIndex, Qt # noqa: E402
import src # noqa: E402
import src.tool # noqa: E402
import src.io # noqa: E402
from utils import make_session # noqa: E402
from chimerax.core.models import Model # noqa: E402
class MockBundleInfo:
    """Stand-in for a ChimeraX BundleInfo object; no attributes are needed."""
    pass
class MockToolInfo:
    """Stand-in for a ChimeraX ToolInfo object; only ``name`` is used."""
    def __init__(self, name):
        self.name = name
class MockRMFNode:
    """Minimal stand-in for an RMF node exposing the getter API the code
    under test calls (get_name / get_index)."""
    def __init__(self, name, index):
        self.name = name
        self.index = index
    def get_name(self):
        return self.name
    def get_index(self):
        return self.index
def make_node(name, index, resolution=None):
    """Create an _RMFHierarchyNode wrapping a mock RMF node."""
    hierarchy_node = src.io._RMFHierarchyNode(MockRMFNode(name, index))
    hierarchy_node.resolution = resolution
    return hierarchy_node
def make_feature(name, index):
    """Create an _RMFFeature wrapping a mock RMF node."""
    return src.io._RMFFeature(MockRMFNode(name, index))
def make_provenance(name, index):
    """Create an _RMFProvenance wrapping a mock RMF node."""
    return src.io._RMFProvenance(MockRMFNode(name, index))
class Tests(unittest.TestCase):
    def test_rmf_hierarchy_model_none(self):
        """Test RMFHierarchyModel class with null hierarchy"""
        resolutions = set((1.0, 10.0))
        m = src.tool._RMFHierarchyModel(None, resolutions)
        # An empty model still reports one column, but no rows; every
        # index/parent/data query yields an invalid or empty result.
        self.assertEqual(m.columnCount(None), 1)
        self.assertEqual(m.rowCount(QModelIndex()), 0)
        self.assertFalse(m.index(0, 0, QModelIndex()).isValid())
        self.assertFalse(m.parent(QModelIndex()).isValid())
        self.assertIsNone(m.data(QModelIndex(), Qt.DisplayRole))
    def test_rmf_hierarchy_model(self):
        """Test RMFHierarchyModel class"""
        # Build: root -> (child1, child2 -> grandchild)
        root = make_node("root", 0)
        child1 = make_node("child1", 1)
        child2 = make_node("child2", 2)
        grandchild = make_node("grandchild", 3)
        child2.add_children([grandchild])
        root.add_children((child1, child2))
        resolutions = set((None,))
        m = src.tool._RMFHierarchyModel(root, resolutions)
        self.assertEqual(m.columnCount(None), 1)
        # Top level has one child (RMF root)
        self.assertEqual(m.rowCount(QModelIndex()), 1)
        # RMF root has two children
        ind = m.createIndex(0, 0, root)
        self.assertEqual(m.rowCount(ind), 2)
        # Test indices under RMF root
        self.assertEqual(m.index(0, 0, ind).internalPointer().name, 'child1')
        self.assertEqual(m.index(1, 0, ind).internalPointer().name, 'child2')
        self.assertFalse(m.index(2, 0, ind).isValid())
        # Test top level index
        self.assertEqual(m.index(0, 0, QModelIndex()).internalPointer().name,
                         'root')
        # index_for_node maps a hierarchy node back to its model index
        i = m.index_for_node(child2)
        self.assertEqual(i.row(), 1)
        self.assertEqual(i.column(), 0)
        self.assertEqual(i.internalPointer().name, 'child2')
        # Top level doesn't have a parent
        self.assertFalse(m.parent(ind).isValid())
        self.assertFalse(m.parent(QModelIndex()).isValid())
        childind = m.createIndex(0, 0, child1)
        self.assertEqual(m.parent(childind).internalPointer().name, 'root')
        grandchildind = m.createIndex(0, 0, grandchild)
        parentind = m.parent(grandchildind)
        self.assertEqual(parentind.internalPointer().name, 'child2')
        self.assertEqual(parentind.row(), 1)
        # Only the display role is populated
        self.assertEqual(m.data(childind, Qt.DisplayRole), "child1")
        self.assertIsNone(m.data(childind, Qt.SizeHintRole))
    def test_rmf_hierarchy_resolution(self):
        """Test RMFHierarchyModel filtering by resolution"""
        root = make_node("root", 0)
        child1 = make_node("child1", 1, resolution=1)
        child2 = make_node("child2", 2, resolution=10)
        child3 = make_node("child3", 3)
        root.add_children((child1, child2, child3))
        resolutions = set((None, 1))
        m = src.tool._RMFHierarchyModel(root, resolutions)
        # Only child1 and child3 (resolution=1,None) should be selected
        self.assertEqual(root._filtered_children, [child1, child3])
        self.assertIs(m.index_for_node(child3).internalPointer(), child3)
        # Filtered-out nodes have no valid index
        self.assertFalse(m.index_for_node(child2).isValid())
        # Add resolution 10, now all children selected
        m.set_resolution_filter(10, shown=True)
        self.assertEqual(root._filtered_children, [child1, child2, child3])
        # Remove resolution 1
        m.set_resolution_filter(1, shown=False)
        self.assertEqual(root._filtered_children, [child2, child3])
    def test_rmf_features_model(self):
        """Test RMFFeaturesModel class"""
        # Build: [f1, f2 -> (child1, child2 -> grandchild)]
        f1 = make_feature("f1", 1)
        f2 = make_feature("f2", 2)
        child1 = make_feature("child1", 3)
        f2.add_child(child1)
        child2 = make_feature("child2", 4)
        f2.add_child(child2)
        grandchild = make_feature("child3", 5)
        child2.add_child(grandchild)
        features = [f1, f2]
        m = src.tool._RMFFeaturesModel(features)
        top = QModelIndex()
        f2_ind = m.createIndex(1, 0, features[1])
        child1_ind = m.createIndex(0, 0, child1)
        self.assertEqual(m.columnCount(None), 1)
        self.assertEqual(m.rowCount(top), 2)
        self.assertEqual(m.rowCount(f2_ind), 2)
        self.assertEqual(m.rowCount(child1_ind), 0)
        # Test indices
        self.assertEqual(m.index(0, 0, top).internalPointer().name, 'f1')
        self.assertEqual(m.index(1, 0, top).internalPointer().name, 'f2')
        self.assertFalse(m.index(2, 0, top).isValid())
        self.assertEqual(m.index(0, 0, f2_ind).internalPointer().name,
                         'child1')
        self.assertEqual(m.index(1, 0, f2_ind).internalPointer().name,
                         'child2')
        self.assertFalse(m.index(2, 0, f2_ind).isValid())
        # No parents for top-level features
        self.assertFalse(m.parent(top).isValid())
        self.assertFalse(m.parent(f2_ind).isValid())
        self.assertEqual(m.parent(child1_ind).internalPointer().name, 'f2')
        grandchild_ind = m.createIndex(0, 0, grandchild)
        self.assertEqual(m.parent(grandchild_ind).internalPointer().name,
                         'child2')
        # Only the display role is populated
        self.assertEqual(m.data(f2_ind, Qt.DisplayRole), "f2")
        self.assertIsNone(m.data(f2_ind, Qt.SizeHintRole))
    def test_rmf_provenance_model(self):
        """Test RMFProvenanceModel class"""
        f1 = make_provenance("f1", 1)
        f2 = make_provenance("f2", 2)
        child = make_provenance("child", 3)
        # Provenance is chained via previous/next; next is a weak proxy so the
        # chain does not keep nodes alive.
        f1.previous = child
        child.next = weakref.proxy(f1)
        provs = [f1, f2]
        m = src.tool._RMFProvenanceModel(provs)
        top = QModelIndex()
        self.assertEqual(m.columnCount(None), 1)
        self.assertEqual(m.rowCount(top), 2)
        f1_ind = m.createIndex(0, 0, f1)
        child_ind = m.createIndex(0, 0, child)
        self.assertEqual(m.columnCount(f1_ind), 1)
        # Test indices
        self.assertEqual(m.index(0, 0, top).internalPointer().name, 'f1')
        self.assertEqual(m.index(1, 0, top).internalPointer().name, 'f2')
        self.assertFalse(m.index(2, 0, top).isValid())
        self.assertEqual(m.index(0, 0, f1_ind).internalPointer().name, 'child')
        # No parents for top level
        self.assertFalse(m.parent(top).isValid())
        self.assertFalse(m.parent(f1_ind).isValid())
        self.assertEqual(m.parent(child_ind).internalPointer().name, 'f1')
        # Only the display role is populated
        self.assertEqual(m.data(f1_ind, Qt.DisplayRole), "f1")
        self.assertIsNone(m.data(f1_ind, Qt.SizeHintRole))
    @unittest.skipIf(utils.no_gui, "Cannot test without GUI")
    def test_rmf_viewer(self):
        """Test creation of RMFViewer tool"""
        mock_session = make_session()
        # One model with RMF attributes, one without (the tool must cope with both).
        m1 = Model(mock_session, 'test')
        m1.rmf_hierarchy = None
        m1.rmf_features = []
        m1.rmf_provenance = []
        m1._rmf_resolutions = set((1.0, 10.0))
        m1._selected_rmf_resolutions = set((1.0, 10.0, None))
        m2 = Model(mock_session, 'test')
        mock_session.models.add((m1, m2))
        _ = src.tool.RMFViewer(mock_session, "RMF Viewer")
        # Test update on model creation
        m3 = Model(mock_session, 'test')
        m3.rmf_hierarchy = None
        m3.rmf_features = []
        m3.rmf_provenance = []
        m3._rmf_resolutions = set((1.0, 10.0))
        m3._selected_rmf_resolutions = set((1.0, 10.0, None))
        mock_session.models.add((m3,))
@unittest.skipIf(utils.no_gui, "Cannot test without GUI")
def test_bundle_api_make_tool(self):
    """Test open of tool via BundleAPI"""
    bundle_api = src.bundle_api
    mock_session = make_session()
    m1 = Model(mock_session, 'test')
    mock_session.models.add((m1,))
    bi = MockBundleInfo()
    ti = MockToolInfo("RMF Viewer")
    bundle_api.start_tool(mock_session, bi, ti)
    # An unrecognized tool name must be rejected with ValueError
    ti = MockToolInfo("Bad tool")
    self.assertRaises(ValueError, bundle_api.start_tool,
                      mock_session, bi, ti)
@unittest.skipIf(utils.no_gui, "Cannot test without GUI")
def test_button_clicks(self):
    """Test clicking on select/show/hide buttons"""
    def get_first_tree(stack):
        # Find the hierarchy tree view on the second stack page
        for w in stack.widget(1).children():
            if isinstance(w, QTreeView):
                self.assertIsInstance(
                    w.model(), src.tool._RMFHierarchyModel)
                return w
        raise ValueError("could not find tree")

    def get_buttons(stack):
        # Yield every push button on the second stack page
        for w in stack.widget(1).children():
            if isinstance(w, QPushButton):
                yield w

    class TestChimeraObj:
        # Stand-in for a ChimeraX object; only the `deleted` flag is used
        def __init__(self, deleted=False):
            self.deleted = deleted

    # Hierarchy: root -> (child1, child2 -> grandchild, child3)
    root = make_node("root", 0, resolution=10)
    child1 = make_node("child1", 1)
    child1.chimera_obj = TestChimeraObj()
    child2 = make_node("child2", 2)
    grandchild = make_node("grandchild", 3)
    child2.add_children([grandchild])
    # child3's ChimeraX object is flagged as already deleted
    delchild = make_node("child3", 3)
    delchild.chimera_obj = TestChimeraObj(deleted=True)
    root.add_children((child1, child2, delchild))
    mock_session = make_session()
    m1 = Model(mock_session, 'test')
    m1.rmf_hierarchy = root
    m1.rmf_features = [make_node("f1", 4), make_node("f2", 5)]
    m1.rmf_provenance = []
    m1._rmf_resolutions = set((1, 10))
    m1._selected_rmf_resolutions = set((1, 10, None))
    mock_session.models.add((m1,))
    r = src.tool.RMFViewer(mock_session, "RMF Viewer")
    tree1 = get_first_tree(r.model_stack.widget(0))
    buttons = list(get_buttons(r.model_stack.widget(0)))
    # Show, View, Hide, Select
    self.assertEqual(len(buttons), 5)
    # Call "clicked" methods directly
    r._select_button_clicked(tree1)
    tree1.selectAll()
    r._show_button_clicked(tree1)
    r._show_only_button_clicked(tree1)
    r._hide_button_clicked(tree1)
    r._view_button_clicked(tree1)
    # Call indirectly via clicking each button
    for b in buttons:
        b.click()
@unittest.skipIf(utils.no_gui, "Cannot test without GUI")
def test_resolution_clicked(self):
    """Test clicking on resolution checkboxes"""
    def get_first_tree(stack):
        # Find the hierarchy tree view on the second stack page
        for w in stack.widget(1).children():
            if isinstance(w, QTreeView):
                self.assertIsInstance(
                    w.model(), src.tool._RMFHierarchyModel)
                return w
        raise ValueError("could not find tree")

    def get_buttons(stack):
        # Resolution toggles are checkboxes, not push buttons
        for w in stack.widget(1).children():
            if isinstance(w, QCheckBox):
                yield w

    class TestChimeraObj:
        pass

    # Two children at different resolutions; only resolution 1 selected
    root = make_node("root", 0)
    child1 = make_node("child1", 1, resolution=1)
    child2 = make_node("child2", 2, resolution=10)
    root.add_children((child1, child2))
    mock_session = make_session()
    m1 = Model(mock_session, 'test')
    m1.rmf_hierarchy = root
    m1.rmf_features = [make_node("f1", 4), make_node("f2", 5)]
    m1.rmf_provenance = []
    m1._rmf_resolutions = set((1, 10))
    m1._selected_rmf_resolutions = set((1, None))
    mock_session.models.add((m1,))
    r = src.tool.RMFViewer(mock_session, "RMF Viewer")
    tree1 = get_first_tree(r.model_stack.widget(0))
    res1b, res10b = list(get_buttons(r.model_stack.widget(0)))
    # Call "clicked" methods directly
    # Show/hide resolution 10
    cb = QCheckBox('foo')
    cb.setChecked(True)
    r._resolution_button_clicked(cb, tree1, 10)
    cb = QCheckBox('bar')
    cb.setChecked(False)
    r._resolution_button_clicked(cb, tree1, 10)
    tree1.selectAll()
    # Call indirectly via clicking each button
    for b in res1b, res10b:
        b.click()
@unittest.skipIf(utils.no_gui, "Cannot test without GUI")
def test_feature_selected(self):
    """Test selecting features"""
    def get_first_tree(stack):
        # The features tree lives on the first stack page
        for w in stack.widget(0).children():
            if isinstance(w, QTreeView):
                self.assertIsInstance(
                    w.model(), src.tool._RMFFeaturesModel)
                return w
        raise ValueError("could not find tree")

    root = make_node("root", 0)
    mock_session = make_session()
    m1 = Model(mock_session, 'test')
    m1.rmf_hierarchy = root
    m1.rmf_features = [make_node("f1", 4), make_node("f2", 5)]
    # Only the first feature maps to a ChimeraX object
    m1.rmf_features[0].chimera_obj = 'test object'
    m1.rmf_provenance = []
    m1._rmf_resolutions = set((1.0, 10.0))
    m1._selected_rmf_resolutions = set((1.0, 10.0, None))
    mock_session.models.add((m1,))
    r = src.tool.RMFViewer(mock_session, "RMF Viewer")
    tree1 = get_first_tree(r.model_stack.widget(0))
    r._select_feature(tree1)
    tree1.selectAll()
@unittest.skipIf(utils.no_gui, "Cannot test without GUI")
def test_load_provenance(self):
    """Test loading provenance"""
    def get_first_tree(stack):
        # The provenance tree lives on the third stack page
        for w in stack.widget(2).children():
            if isinstance(w, QTreeView):
                self.assertIsInstance(
                    w.model(), src.tool._RMFProvenanceModel)
                return w
        raise ValueError("could not find tree")

    def get_buttons(stack):
        for w in stack.widget(2).children():
            if isinstance(w, QPushButton):
                yield w

    root = make_node("root", 0)
    mock_session = make_session()
    m1 = src.io._RMFModel(mock_session, 'test')
    m1.rmf_hierarchy = root
    m1.rmf_features = []
    # p1/p2 are attached to the hierarchy; p3 has no hierarchy node
    p1 = make_provenance("f1", 4)
    p2 = make_provenance("f2", 5)
    p3 = make_provenance("f3", 6)
    p1.hierarchy_node = root
    p2.hierarchy_node = root
    m1.rmf_provenance = [p1, p2, p3]
    mock_session.models.add((m1,))
    r = src.tool.RMFViewer(mock_session, "RMF Viewer")
    tree1 = get_first_tree(r.model_stack.widget(0))
    tree1.selectAll()
    # Load directly, then again via the button's clicked signal
    r._load_button_clicked(tree1, m1)
    load_button, = list(get_buttons(r.model_stack.widget(0)))
    load_button.click()
@unittest.skipIf(utils.no_gui, "Cannot test without GUI")
def test_rmf_viewer_snapshot(self):
    """Test snapshot of RMFViewer tool"""
    session = make_session()
    model = Model(session, 'test')
    model.rmf_hierarchy = None
    model.rmf_features = []
    model.rmf_provenance = []
    model._rmf_resolutions = {1.0, 10.0}
    model._selected_rmf_resolutions = {1.0, None}
    session.models.add((model,))
    viewer = src.tool.RMFViewer(session, "RMF Viewer")
    # Round-trip the tool through the session-snapshot mechanism
    snapshot = viewer.take_snapshot(session, None)
    restored = src.tool.RMFViewer.restore_snapshot(session, snapshot)
    self.assertIsInstance(restored, src.tool.RMFViewer)
# Run the test suite when executed as a script
if __name__ == '__main__':
    unittest.main()
|
# -*- coding: utf-8 -*-
"""Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2016, Caleb Bell <Caleb.Andrew.Bell@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from math import isnan
from fluids.numerics import assert_close, assert_close1d
import pytest
import pandas as pd
import numpy as np
from chemicals.refractivity import *
from chemicals.refractivity import RI_data_CRC_organic
def test_refractivity_CRC():
    """Spot-check the CRC organic refractive-index table via column sums."""
    assert_close(RI_data_CRC_organic['RI'].sum(), 6602.78821)
    assert_close(RI_data_CRC_organic['RIT'].sum(), 1304152.35)
@pytest.mark.slow
@pytest.mark.fuzz
def test_refractivity_all_answers():
    """Look up every CASRN in the CRC table and check the aggregate sums."""
    answers = [RI(CASRN) for CASRN in RI_data_CRC_organic.index.values]
    assert len(answers) == 4490
    first_RI, first_T = answers[0]
    assert type(first_RI) is float
    assert type(first_T) is float
    # Temperatures may be missing (NaN); exclude those from the sum
    total_RI = sum(ri for ri, _ in answers)
    total_T = sum(T for _, T in answers if not isnan(T))
    assert_close(total_RI, 6602.78821, rtol=1e-10)
    assert_close(total_T, 1304152.35, rtol=1e-10)
def test_refractivity_general():
    """Test the RI lookup interface for ethanol and the data table shape."""
    vals = RI(CASRN='64-17-5')
    assert type(vals) is tuple
    assert_close1d(vals, (1.3611, 293.15))
    vals = RI_methods(CASRN='64-17-5')
    assert vals == ['CRC']
    # The CRC table should have exactly one row per CASRN
    assert RI_data_CRC_organic.index.is_unique
    assert RI_data_CRC_organic.shape == (4490, 2)
    # Unknown CASRN: no lookup methods available
    assert RI_methods(CASRN='6400000-17-5') == []
    with pytest.raises(Exception):
        RI(CASRN='64-17-5', method='FAIL')
def test_polarizability_from_RI():
    """Check polarizability computed from RI and molar volume."""
    # Ethanol, with known datum RI and Vm
    alpha = polarizability_from_RI(1.3611, 5.8676E-5)
    assert_close(alpha, 5.147658123614415e-30)
    # Experimental value is 5.112 Angstrom^3 from cccbdb, http://cccbdb.nist.gov/polcalccomp2.asp?method=55&basis=9
    # Very good comparison.
def test_molar_refractivity_from_RI():
    """Check molar refractivity computed from RI and molar volume."""
    # Ethanol, with known datum RI and Vm
    Rm = molar_refractivity_from_RI(1.3611, 5.8676E-5)
    assert_close(Rm, 1.2985217089649597e-05)
    # Confirmed with a value of 12.5355 cm^3/mol in http://rasayanjournal.co.in/vol-4/issue-4/38.pdf
def test_RI_from_molar_refractivity():
    """Check RI_from_molar_refractivity against the known ethanol datum."""
    # Local named `ri` (not `RI`) so the chemicals.refractivity.RI function
    # imported at module level is not shadowed inside this test.
    ri = RI_from_molar_refractivity(1.2985e-5, 5.8676E-5)
    assert_close(ri, 1.3610932757685672)
    # Same value backwards: RI -> Rm -> RI round-trips
    assert_close(RI_from_molar_refractivity(molar_refractivity_from_RI(1.3611, 5.8676E-5), 5.8676E-5), 1.3611)
def test_RI_IAPWS():
    """Check the IAPWS water refractive-index correlation at 298.15 K."""
    assert_close(RI_IAPWS(298.15, 997.047435, 0.5893), 1.3328581926471605, rtol=1e-12)
def test_RI_to_brix():
    """Check RI -> Brix conversion at the ends and interior of the scale."""
    assert_close(RI_to_brix(1.33299), 0.0)
    assert_close(RI_to_brix(1.532), 95.)
    assert_close(RI_to_brix(1.341452), 5.8)
    # Below the scale the conversion extrapolates to a negative Brix value
    assert_close(RI_to_brix(1.3), -23.069930069929285)
def test_brix_to_RI():
    """Check Brix -> RI conversion and round-tripping with RI_to_brix."""
    assert_close(brix_to_RI(5.8), 1.341452)
    assert_close(brix_to_RI(0), 1.33299)
    assert_close(brix_to_RI(95.0), 1.532)
    # Round trip of an out-of-scale value
    assert_close(brix_to_RI(RI_to_brix(1.3)), 1.3)
|
from setuptools import setup

# Packaging metadata for pyW215, a Python interface to D-Link W215 smart
# plugs.  No third-party runtime dependencies are required.
setup(name='pyW215',
      version='0.4',
      description='Interface for d-link W215 Smart Plugs.',
      url='https://github.com/linuxchristian/pyW215',
      author='Christian Juncker Brædstrup',
      author_email='christian@fredborg-braedstrup.dk',
      license='MIT',
      packages=['pyW215'],
      install_requires=[],
      zip_safe=False)
|
from django.conf.urls import url

from . import views

# Single catch-all route: every path is served by the index view.
# NOTE(review): django.conf.urls.url() is deprecated since Django 2.0 and
# removed in 4.0; django.urls.re_path is the drop-in replacement -- confirm
# the project's Django version before migrating.
urlpatterns = [
    # Let React Router handle the url routing.
    url(r'.*', views.index, name='index'),
]
|
import re
import html
import subprocess
import tempfile
import unidecode
from django.conf import settings
from ingredient_phrase_tagger.training import utils
def parse_ingredients(input_text):
    """Tag ingredient phrases in *input_text* with the external CRF model.

    The text is exported to the format expected by ``crf_test``, run
    through the external ``crf_test`` binary using the model configured in
    Django settings, and the tagged output is parsed back into structured
    ingredient data.
    """
    with tempfile.NamedTemporaryFile(mode='w') as input_file:
        input_file.write(utils.export_data(input_text))
        # Flush so the subprocess sees the complete contents on disk
        input_file.flush()
        output = subprocess.check_output(
            ['crf_test', '--verbose=1', '--model',
             settings.INGREDIENT_PHRASE_TAGGER_MODEL,
             input_file.name]).decode('utf-8')
    return utils.import_data(output.split("\n"))
def parse_ingredient_quantity(quantity):
    """Convert an ingredient quantity to a positive float.

    Accepts plain numbers ("2", 2.5) and fraction strings ("1/2",
    "1 / 2").  Returns None for None, non-positive, or unparseable input.
    """
    try:
        f = float(quantity)
        return f if f > 0 else None
    except TypeError:  # quantity is None (or another non-numeric type)
        return None
    except ValueError:
        # Not a plain number: look for a "numerator/denominator" fraction
        # anywhere in the string.  Using search() without a greedy ".*"
        # prefix keeps every digit of the numerator, so "12/34" parses as
        # 12/34 rather than 2/34.
        m = re.search(r"(\d+)\s*/\s*(\d+)", quantity)
        if m:
            return float(m.group(1)) / float(m.group(2))
        return None
def parse_nutrient(nutrient):
    """Convert a nutrient amount to a positive float.

    Accepts plain numbers and gram strings such as "6.7 g" or " .55 g ".
    Returns None for None, non-positive, or unparseable input.
    """
    if nutrient is None:
        return None
    try:
        f = float(nutrient)
        return f if f > 0 else None
    except ValueError:
        pass
    # e.g. "6.7 g" or " .55 g " -- the amount must contain a decimal point
    s = re.search(r"(\d*\.\d*)\s*g", nutrient)
    if s:
        try:
            f = float(s.group(1))
            return f if f > 0 else None
        except ValueError:
            # The regex can match a bare "." with no digits around it
            return None
    return None
def truncate(string, length):
    """Return *string*, shortened to fit *length* with a "..." suffix."""
    if len(string) < length:
        return string
    return string[:length - 3] + "..."
def parse_html_text(text):
    """Strip HTML comments, decode entities, and trim whitespace.

    Returns "" when *text* is None.  Double-escaped entities such as
    "&amp;lt;" decode all the way down (here, to "<").
    """
    if text is None:
        return ""
    # Remove HTML comments non-greedily so text *between* two comments
    # survives; DOTALL lets a comment span multiple lines.
    text = re.sub(r"<!--.*?-->", "", text, flags=re.DOTALL)
    # Collapse a double-escaped ampersand first, then convert the
    # remaining HTML entities to Unicode chars
    text = html.unescape(text.replace("&amp;", "&"))
    return text.strip()
def parse_title(title, max_length):
    """Clean up an HTML title and fit it within *max_length* characters.

    Over-long titles are first cut at the first ":" (dropping a subtitle)
    and then truncated with an ellipsis if still too long.
    """
    cleaned = parse_html_text(title)
    if len(cleaned) < max_length:
        return truncate(cleaned, max_length)
    # Attempt to split on ":" to drop the subtitle
    main_part, _, _ = cleaned.partition(":")
    return truncate(main_part.strip(), max_length)
def parse_total_time(total_time):
    """Normalize a recipe duration to whole minutes.

    Accepts an int (already minutes) or a string such as "1 hour",
    "45 minutes" or "1 hour, 30 minutes".  Returns None for zero,
    unparseable, or unsupported input.
    """
    if isinstance(total_time, int):
        # Zero minutes means "no duration information"
        return total_time or None
    if not isinstance(total_time, str):
        return None
    # Optional hours part, optional comma/space separator, optional
    # minutes part.  Because every piece is optional the match itself
    # always succeeds, so "no duration found" is detected below by both
    # groups being empty rather than by a failed match.
    m = re.match(
        r"((?P<hours>\d+) hour(s)?)?(,)?( )?((?P<minutes>\d+) minute(s)?)?.*",
        total_time)
    hours = int(m.group('hours')) if m.group('hours') else 0
    minutes = int(m.group('minutes')) if m.group('minutes') else 0
    total = hours * 60 + minutes
    # Unparseable strings yield 0 -- report them as None, consistent
    # with the integer-0 case above
    return total or None
# Pre-compiled patterns used by the ingredient pre/post-processing below.
# Raw strings (and rf-strings) are used throughout so "\d", "\s" etc. reach
# the regex engine explicitly instead of relying on Python passing unknown
# string escapes through (a DeprecationWarning since 3.6).
pattern_nonalphanum = re.compile(r'[^\w ]')
# Any bracketed group -- (...), {...} or [...] -- including its contents
pattern_brackets = re.compile(r'\([^)(]*\)|\{[^}{]*\}|\[[^][]*\]')
# Single-codepoint vulgar fraction glyphs (1/4, 1/2, 3/4, 1/7, 1/9, ...).
# Deliberately NOT a raw string: the \uXXXX escapes must be interpreted.
pattern_unicode_fractions = re.compile(
    "\u00BC|\u00BD|\u00BE|\u2150|\u2151|\u2152|\u2153|\u2154|\u2155|\u2156|"
    "\u2157|\u2158|\u2159|\u215A|\u215B|\u215C|\u215D|\u215E|\u215F")

# Mixed fraction, qty range, and alternates handling
mixed_fraction = r"\d*\s*\d+\/\d+"  # e.g. "5 1/4" OR "51/4"
unit = r"[a-z]+"  # simple string unit
qty = rf"({mixed_fraction}|\d+)"  # mixed fraction or just a number
qty_or_range = rf"({qty}\s*-\s*{qty}|{qty})"  # also handle "5 1/4 - 7"
qty_unit = rf"{qty_or_range}\s*{unit}"  # including a string unit
alternate_or_single_amount = re.compile(  # e.g. "20-50 ml/4-10 1/8 tsp"
    rf"({qty_unit}\s*\/\s*{qty_unit}|{qty_unit})")
def preprocess_ingredient_string(ingredient):
    """Normalize a raw ingredient string before CRF tagging.

    Lower-cases, strips unicode fraction glyphs and accents, removes
    bracketed asides, drops punctuation and expands unit shorthands.
    """
    # Case-fold and remove unicode characters
    ingredient = pattern_unicode_fractions.sub('', ingredient)
    ingredient = unidecode.unidecode(ingredient)
    ingredient = ingredient.lower()
    # Remove parentheses with contents, e.g. (8-oz) steak
    ingredient = pattern_brackets.sub('', ingredient)
    # Essentially remove any possible unit, incl. ranges, mixed fractions, etc.
    # ingredient = alternate_or_single_amount.sub('', ingredient)
    # Remove non-alphanumerical characters, e.g. unclosed parentheses
    ingredient = pattern_nonalphanum.sub('', ingredient)
    # Replace common shorthands with full words
    ingredient = ingredient.replace('tsp', 'teaspoons')
    ingredient = ingredient.replace('tbsp', 'tablespoons')
    ingredient = ingredient.replace('oz', 'ounces')
    ingredient = ingredient.replace('kg', 'g')  # matches better but wrong
    # Unnecessary whitespace is dealt with in parse_ingredients
    return ingredient
# Leading-quantity patterns for postprocess_ingredient_string below.
# Raw strings so "\d" reaches the regex engine explicitly.
pattern_ml = re.compile(r'^((\d)+ ?)?ml')  # e.g. "140 ml" OR "140ml" or "ml"
# e.g. 34cups OR 34 cups OR 1 cup
pattern_cups = re.compile(r'^(\d+)? ?cup(s)?')
pattern_numqty = re.compile(r'^\d+( )?')  # numbers at start of string
def postprocess_ingredient_string(ingredient):
    """Strip leftover quantity/unit fragments from a tagged ingredient."""
    # Since string beginning is important, strip before doing anything else
    ingredient = ingredient.strip()
    if 'ounces' in ingredient:
        # e.g. "100 milliliters10fl ounces double cream"
        ingredient = ingredient.split('ounces')[1]
    # NOTE(review): the substring test also fires inside unrelated words
    # containing "lb" -- presumably only leading weight markers were
    # intended; confirm against real tagger output.
    if 'lb' in ingredient:
        # e.g. "450 grams1lb caster sugar"
        ingredient = ingredient.split('lb')[1]
    if ingredient[:2] == 'g ':
        ingredient = ingredient[2:]  # e.g. "g caster sugar"
    if ingredient[:2] == 'x ':
        ingredient = ingredient[2:]  # e.g. "x tomatoes"
    ingredient = pattern_ml.sub('', ingredient)  # e.g. 140ml milk
    ingredient = pattern_cups.sub('', ingredient)  # e.g. 34 cups flour
    ingredient = pattern_numqty.sub('', ingredient)  # e.g. 30 carrots
    ingredient = ingredient.strip()
    return ingredient
# Tags are split on commas and slashes
tag_separator = re.compile(r",|\/")
# Filler words and bare URLs to delete from tag text
tag_remove = re.compile(r"inspired|styled?|n\/a|https?[^\s]+")
def preprocess_tags(tags):
    """Split a raw tag string into a list of cleaned, non-empty tags."""
    if tags is None:
        return []
    # Case-fold and remove parentheses, e.g. "vegan (plant based)" -> "vegan "
    cleaned = pattern_brackets.sub("", tags.lower())
    # Replace "-" with spaces, e.g. "spanish-inspired" -> "spanish inspired",
    # then drop redundant words like "inspired" and the empty tag "n/a"
    cleaned = tag_remove.sub("", cleaned.replace("-", " "))
    # Split on ',' and '/', trim whitespace, keep only non-empty tags
    result = []
    for raw_tag in tag_separator.split(cleaned):
        tag = raw_tag.strip()
        if tag:
            result.append(tag)
    return result
|
#!/usr/bin/env python3
from distutils.core import setup

# Packaging metadata for the SharpPoint trading system.
# NOTE(review): distutils is deprecated and removed in Python 3.12;
# switching to setuptools.setup would be a drop-in change -- confirm the
# supported Python versions first.
setup(name='sptrader',
      version='0.10.0',
      description='SharpPoint trading system',
      author='Joseph C Wang',
      author_email='joequant@gmail.com',
      url='https://github.com/joequant/sptrader',
      packages=['sptrader'],
      install_requires=[
          'cffi',
          'flask',
          'backtrader==1.9.12.99',
          'matplotlib',
          'sseclient-py',
          'requests',
          'pytz',
          'tendo'
      ]
      )
|
# Minimal PyOD IsolationForest example: fit on synthetic 2-D data,
# score the held-out test split, and plot both splits.
from pyod.models.iforest import IForest
from pyod.utils.data import generate_data
from pyod.utils.example import visualize

# Synthetic train/test data with 2 features
X_train, X_test, y_train, y_test = generate_data(n_train=2000, n_test=1000, n_features=2, behaviour="new")

clf_name = 'IsolationForest'
clf = IForest()
clf.fit(X_train)

# Fitted labels and raw outlier scores for the training data
y_train_pred = clf.labels_
y_train_scores = clf.decision_scores_

# Predictions and scores for the unseen test split
y_test_pred = clf.predict(X_test)
y_test_scores = clf.decision_function(X_test)

visualize(clf_name, X_train, y_train, X_test, y_test, y_train_pred, y_test_pred, show_figure=True, save_figure=False)
import argparse
from hio import help
from hio.base import doing
from keri.app import directing, indirecting
from keri.app.cli.common import existing
from keri.vdr import registering
logger = help.ogler.getLogger()
# Command-line interface for creating a credential registry.
parser = argparse.ArgumentParser(description='Initialize a prefix')
parser.set_defaults(handler=lambda args: registryIncept(args),
                    transferable=True)
parser.add_argument('--name', '-n', help='Human readable reference', required=True)
parser.add_argument('--registry-name', '-r', help='Human readable name for registry, defaults to name of Habitat',
                    default=None)
# store_true so these switches work as flags; the original action="store"
# made -nb/-e consume the following token as a value.
# NOTE(review): with default=True, no_backers is always truthy, so the
# guard in registryIncept prevents --backers from ever being used until
# callers can turn this off -- confirm the intended default.
parser.add_argument("--no-backers", "-nb", help="do not allow setting up backers different from the anchoring KEL "
                                                "witnesses", default=True, action="store_true")
parser.add_argument('--backers', '-b', help='New set of backers different from the anchoring KEL witnesses. Can '
                                            'appear multiple times', metavar="<prefix>", default=[], action="append",
                    required=False)
parser.add_argument("--establishment-only", "-e", help="Only allow establishment events for the anchoring events of "
                                                       "this registry", default=False, action="store_true")
def registryIncept(args):
    """Create a credential registry for the named Habitat.

    Handler for the parsed command-line arguments; rejects the
    contradictory combination of --no-backers and explicit --backers.
    """
    name = args.name
    registryName = name if args.registry_name is None else args.registry_name
    if args.no_backers and args.backers:
        print("--no-backers and --backers can not both be provided")
        return -1
    inceptor = RegistryInceptor(name=name,
                                registryName=registryName,
                                estOnly=args.establishment_only,
                                noBackers=args.no_backers,
                                baks=args.backers)
    directing.runController(doers=[inceptor], expire=0.0)
class RegistryInceptor(doing.DoDoer):
    """DoDoer that creates a credential registry for an existing Habitat.

    Sets up the Habitat and a mailbox director, runs the registry
    inception doer, and removes the supporting doers once the registry
    has been created.
    """

    def __init__(self, name, registryName, **kwa):
        """Set up the Habitat, mailbox, and registry-inception doers.

        Parameters:
            name: human readable alias of the existing Habitat
            registryName: name for the registry being created
            kwa: passed through to the DoDoer superclass

        NOTE(review): estOnly/noBackers/baks arrive here via **kwa but are
        not forwarded to RegistryInceptDoer -- confirm they are honored.
        """
        self.name = name
        self.registryName = registryName
        self.hab, doers = existing.setupHabitat(name=self.name)
        mbx = indirecting.MailboxDirector(hab=self.hab, topics=["/receipt", "/multisig", "/replay"])
        self.icpr = registering.RegistryInceptDoer(hab=self.hab)
        doers.extend([self.icpr, mbx])
        # Supporting doers are removed once inception completes
        self.toRemove = list(doers)
        doers.extend([doing.doify(self.inceptDo)])
        super(RegistryInceptor, self).__init__(doers=doers, **kwa)

    def inceptDo(self, tymth, tock=0.0, **kwa):
        """Generator coroutine that requests registry inception, then waits
        for the inception doer's 'finished' cue before cleaning up.
        """
        yield self.tock
        msg = dict(name=self.registryName)
        self.icpr.msgs.append(msg)
        regk = None
        # Poll the inception doer's cues until it reports completion
        while not regk:
            while self.icpr.cues:
                cue = self.icpr.cues.popleft()
                if cue["kin"] == "finished":
                    regk = cue["regk"]
                    break
                yield self.tock
            yield self.tock
        # Typo fixed: "Regsitry" -> "Registry" in the user-facing message
        print("Registry: {}({}) \n\tcreated for Identifier Prefix: {}".format(self.registryName, regk, self.hab.pre))
        self.remove(self.toRemove)
|
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
import dask
import numpy as np
import numpy.typing as npt
from typing import *
# Dask multithreading is only suited for mostly non-Python code (like pandas, numpy, etc.)
#tag::threads[]
dask.config.set(scheduler='threads')
#end::threads[]

# Use the multiprocessing-based scheduler instead
#tag::process[]
dask.config.set(scheduler='processes')
#end::process[]

# Processes scheduler with the "forkserver" start method
#tag::dask_use_forkserver[]
dask.config.set({"multiprocessing.context": "forkserver", "scheduler": "processes"})
#end::dask_use_forkserver[]
# In[ ]:
#tag::make_dask_k8s_client[]
import dask
from dask.distributed import Client
from dask_kubernetes import KubeCluster, make_pod_spec

# Worker pods: 8G memory / 1 CPU; imagePullPolicy "Always" re-pulls the
# :latest image on every pod start
worker_template = make_pod_spec(image='holdenk/dask:latest',
                                memory_limit='8G', memory_request='8G',
                                cpu_limit=1, cpu_request=1, extra_container_config={ "imagePullPolicy": "Always" })
# Scheduler pod: same image with a smaller (4G) memory footprint
scheduler_template = make_pod_spec(image='holdenk/dask:latest',
                                   memory_limit='4G', memory_request='4G',
                                   cpu_limit=1, cpu_request=1, extra_container_config={ "imagePullPolicy": "Always" })
cluster = KubeCluster(pod_template = worker_template, scheduler_pod_template = scheduler_template)
cluster.adapt(minimum=1)  # or create and destroy workers dynamically based on workload
# (duplicate Client import kept from the original notebook cell)
from dask.distributed import Client
client = Client(cluster)
#end::make_dask_k8s_client[]
# In[ ]:
# Display the client summary (notebook cell)
client

# In[ ]:
# URL of the scheduler's diagnostic dashboard
client.dashboard_link

# In[ ]:
#tag::fib_task_hello_world[]
def dask_fib(x):
    """Naive recursive Fibonacci where each pair of subproblems is
    submitted as delayed Dask tasks and computed together."""
    if x < 2:
        return x
    a = dask.delayed(dask_fib(x-1))
    b = dask.delayed(dask_fib(x-2))
    c, d = dask.compute(a, b)  # Compute in parallel
    return c + d
def seq_fib(x):
    """Plain sequential recursive Fibonacci, for timing comparison."""
    return x if x < 2 else seq_fib(x - 1) + seq_fib(x - 2)
import functools


@functools.lru_cache
def fib(x):
    """Memoized recursive Fibonacci; each value is computed only once."""
    return x if x < 2 else fib(x - 1) + fib(x - 2)
import timeit

# One run of each variant: plain recursion, Dask-parallel recursion, and
# local memoization (task-scheduling overhead may dominate for small n)
seq_time = timeit.timeit(lambda: seq_fib(14), number=1)
dask_time = timeit.timeit(lambda: dask_fib(14), number=1)
local_memoized_time = timeit.timeit(lambda: fib(14), number=1)
print("In sequence {}, in parallel {}, local memoized {}".format(seq_time, dask_time, local_memoized_time))
#end::fib_task_hello_world[]
# In[ ]:
#tag::fail_to_ser[]
class ConnectionClass:
    """Holds a live TCP socket, which cannot be pickled -- used to
    demonstrate a Dask serialization failure."""
    def __init__(self, host, port):
        import socket
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.connect((host, port))


@dask.delayed
def bad_fun(x):
    # Returning the connection forces Dask to (fail to) serialize it
    return ConnectionClass("www.scalingpythonml.com", 80)


# Fails to serialize
if False:
    dask.compute(bad_fun(1))
#end::fail_to_ser[]
# In[ ]:
#tag::custom_serializer_not_own_class[]
class SerConnectionClass:
    """Pickle-able wrapper around a ConnectionClass.

    __getstate__ records only the peer host/port; __setstate__ re-opens
    the connection when unpickling.
    """

    def __init__(self, conn):
        # The wrapped connection; its live socket is never pickled.
        # (Removed an unused `import socket` from the original.)
        self.conn = conn

    def __getstate__(self):
        peer = self.conn.socket.getpeername()
        return {"host": peer[0], "port": peer[1]}

    def __setstate__(self, state):
        # Fixed from the original misspelling "__setsate__", which pickle
        # never called, so unpickled instances had no .conn at all
        self.conn = ConnectionClass(state["host"], state["port"])
#end::custom_serializer_not_own_class[]
# In[ ]:
# now we can sort of serialize the connection
@dask.delayed
def ok_fun(x):
    return SerConnectionClass(ConnectionClass("www.scalingpythonml.com", 80))


dask.compute(ok_fun(1))

# In[ ]:
# See https://github.com/dask/distributed/issues/5561
@dask.delayed
def bad_fun(x):
    return ConnectionClass("www.scalingpythonml.com", 80)


from distributed.protocol import dask_serialize, dask_deserialize


@dask_serialize.register(ConnectionClass)
def serialize(bad: ConnectionClass) -> Tuple[Dict, List[bytes]]:
    # Capture just the peer address; the socket itself is not serializable
    import cloudpickle
    header = {}
    frames = [cloudpickle.dumps({"host": bad.socket.getpeername()[0], "port": bad.socket.getpeername()[1]})]
    return header, frames


@dask_deserialize.register(ConnectionClass)
def deserialize(bad: Dict, frames: List[bytes]) -> ConnectionClass:
    # Re-establish the connection from the recorded address
    import cloudpickle
    info = cloudpickle.loads(frames[0])
    return ConnectionClass(info["host"], info["port"])


# note: this does not work because dask_serialize didn't make it to the worker :/
# dask.compute(bad_fun(1))
# In[ ]:
#tag::serialize_class_with_numpy[]
class NumpyInfo:
    # Simple container pairing a name with a numpy array
    def __init__(self, name: str, features: npt.ArrayLike):
        self.name = name
        self.features = features


i = NumpyInfo("boo", np.array(0))
numpybits = [i]
# Surprisingly this works, despite the implication that we would need to call register_generic
from distributed.protocol import register_generic
register_generic(NumpyInfo)
# NOTE(review): this computes ok_fun again, not a task carrying NumpyInfo --
# presumably a NumpyInfo-bearing task was intended here; confirm.
dask.compute(ok_fun(1))
#end::serialize_class_with_numpy[]

# In[ ]:
dask.visualize(ok_fun(1))

# In[ ]:
ok_fun(1).visualize()

# In[ ]:
ok_fun(1)
# In[ ]:
# From ch2 for visualize
@dask.delayed
def crawl(url, depth=0, maxdepth=1, maxlinks=4):
    """Recursively fetch *url* and linked pages up to *maxdepth*,
    returning a list of (url, page_text) pairs."""
    links = []
    link_futures = []
    try:
        import requests
        from bs4 import BeautifulSoup
        f = requests.get(url)
        links += [(url, f.text)]
        if (depth > maxdepth):
            return links  # base case
        soup = BeautifulSoup(f.text, 'html.parser')
        c = 0
        for link in soup.find_all('a'):
            # NOTE(review): for a bs4 Tag, `"href" in link` tests the tag's
            # *contents*, not its attributes -- link.has_attr("href") is
            # presumably what was meant, so this branch may never fire.
            if "href" in link:
                c = c + 1
                # NOTE(review): crawl() is @dask.delayed, so this appends a
                # Delayed with `+=`; confirm it shouldn't be wrapped in a list.
                link_futures += crawl(link["href"], depth=(depth+1), maxdepth=maxdepth)
            # Don't branch too much were still in local mode and the web is big
            if c > maxlinks:
                break
        for r in dask.compute(link_futures):
            links += r
        return links
    except requests.exceptions.InvalidSchema:
        return []  # Skip non-web links
import dask.bag as db

githubs = ["https://github.com/scalingpythonml/scalingpythonml", "https://github.com/dask/distributed"]
# Seed a bag with the delayed crawl of each page, then split pages into words
initial_bag = db.from_delayed(map(crawl, githubs))
words_bag = initial_bag.map(lambda url_contents: url_contents[1].split(" ")).flatten()

#tag::visualize[]
dask.visualize(words_bag.frequencies())
#end::visualize[]

# In[ ]:
# Same graph rendered to a PDF file
dask.visualize(words_bag.frequencies(), filename="wc.pdf")

# In[ ]:
dir(cluster)

# In[ ]:
import dask.array as da
#tag::make_chunked_array[]
# 10,000 elements split into chunks of 10
distributed_array = da.from_array(list(range(0, 10000)), chunks=10)
#end::make_chunked_array[]
# From ch2 so we can continue the WC example
# NOTE(review): this re-binds `crawl`, shadowing the identical definition
# above -- an artifact of the notebook export.
@dask.delayed
def crawl(url, depth=0, maxdepth=1, maxlinks=4):
    """Recursively fetch *url* and linked pages up to *maxdepth*,
    returning a list of (url, page_text) pairs."""
    links = []
    link_futures = []
    try:
        import requests
        from bs4 import BeautifulSoup
        f = requests.get(url)
        links += [(url, f.text)]
        if (depth > maxdepth):
            return links  # base case
        soup = BeautifulSoup(f.text, 'html.parser')
        c = 0
        for link in soup.find_all('a'):
            # NOTE(review): same caveats as the first definition --
            # `"href" in link` tests Tag contents, and `+=` appends a Delayed.
            if "href" in link:
                c = c + 1
                link_futures += crawl(link["href"], depth=(depth+1), maxdepth=maxdepth)
            # Don't branch too much were still in local mode and the web is big
            if c > maxlinks:
                break
        for r in dask.compute(link_futures):
            links += r
        return links
    except requests.exceptions.InvalidSchema:
        return []  # Skip non-web links
# In[ ]:
import dask.bag as db

githubs = ["https://github.com/scalingpythonml/scalingpythonml", "https://github.com/dask/distributed"]
some_bag = db.from_delayed(map(crawl, githubs))
#tag::repartition_bag[]
# NOTE(review): repartition returns a new bag, which is discarded here --
# confirm the result was meant to be reassigned.
some_bag.repartition(npartitions=10)
#end::repartition_bag[]

# In[ ]:
some_bag.npartitions

# In[ ]:
distributed_array.chunks

# In[ ]:
import dask.dataframe as dd
df = dd.from_dask_array(distributed_array)

# In[ ]:
df.index

# In[ ]:
#tag::manual_persist[]
# NOTE(review): `df.persist` is missing its call parentheses -- as written
# it only references the method and persists nothing; confirm df.persist()
# was intended.
df.persist
# You do a bunch of things on DF
# I'm done!
from distributed.client import futures_of
list(map(lambda x: x.release(), futures_of(df)))
#end::manual_persist[]

# In[ ]:

# In[ ]:
# (stray table delimiter left over from text extraction — removed; it was a syntax error)
url_map = {
'https://media.services.pbs.org/api/v1/episodes/0057421c-9655-405c-b63e-144153d46666/': 'test_fixtures/nova_episodes_0057421c-9655-405c-b63e-144153d46666.json',
'https://media.services.pbs.org/api/v1/episodes/0057421c-9655-405c-b63e-144153d46666/assets/': 'test_fixtures/nova_episodes_0057421c-9655-405c-b63e-144153d46666_assets.json',
'https://media.services.pbs.org/api/v1/episodes/006bbb99-138a-40c9-9b3b-8fa6cf149b42/': 'test_fixtures/nova_episodes_006bbb99-138a-40c9-9b3b-8fa6cf149b42.json',
'https://media.services.pbs.org/api/v1/episodes/006bbb99-138a-40c9-9b3b-8fa6cf149b42/assets/': 'test_fixtures/nova_episodes_006bbb99-138a-40c9-9b3b-8fa6cf149b42_assets.json',
'https://media.services.pbs.org/api/v1/episodes/01c0e096-30b4-4ac2-a739-dec7a84eda7f/': 'test_fixtures/nova_episodes_01c0e096-30b4-4ac2-a739-dec7a84eda7f.json',
'https://media.services.pbs.org/api/v1/episodes/01c0e096-30b4-4ac2-a739-dec7a84eda7f/assets/': 'test_fixtures/nova_episodes_01c0e096-30b4-4ac2-a739-dec7a84eda7f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/02061944-6466-437b-849f-be8824a7b371/': 'test_fixtures/nova_episodes_02061944-6466-437b-849f-be8824a7b371.json',
'https://media.services.pbs.org/api/v1/episodes/02061944-6466-437b-849f-be8824a7b371/assets/': 'test_fixtures/nova_episodes_02061944-6466-437b-849f-be8824a7b371_assets.json',
'https://media.services.pbs.org/api/v1/episodes/02a41278-45e4-4fae-aa71-ad3bdeebdf41/': 'test_fixtures/nova_episodes_02a41278-45e4-4fae-aa71-ad3bdeebdf41.json',
'https://media.services.pbs.org/api/v1/episodes/02a41278-45e4-4fae-aa71-ad3bdeebdf41/assets/': 'test_fixtures/nova_episodes_02a41278-45e4-4fae-aa71-ad3bdeebdf41_assets.json',
'https://media.services.pbs.org/api/v1/episodes/035ed301-907a-4fa2-a169-9fc211adaf3c/': 'test_fixtures/nova_episodes_035ed301-907a-4fa2-a169-9fc211adaf3c.json',
'https://media.services.pbs.org/api/v1/episodes/035ed301-907a-4fa2-a169-9fc211adaf3c/assets/': 'test_fixtures/nova_episodes_035ed301-907a-4fa2-a169-9fc211adaf3c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/036256a4-cb85-4be2-bd43-202b5872c69f/': 'test_fixtures/nova_episodes_036256a4-cb85-4be2-bd43-202b5872c69f.json',
'https://media.services.pbs.org/api/v1/episodes/036256a4-cb85-4be2-bd43-202b5872c69f/assets/': 'test_fixtures/nova_episodes_036256a4-cb85-4be2-bd43-202b5872c69f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/058d1f20-3872-4d3d-a346-db35b7197fbc/': 'test_fixtures/nova_episodes_058d1f20-3872-4d3d-a346-db35b7197fbc.json',
'https://media.services.pbs.org/api/v1/episodes/058d1f20-3872-4d3d-a346-db35b7197fbc/assets/': 'test_fixtures/nova_episodes_058d1f20-3872-4d3d-a346-db35b7197fbc_assets.json',
'https://media.services.pbs.org/api/v1/episodes/05e20eba-ad43-4053-a4eb-ffa158697411/': 'test_fixtures/nova_episodes_05e20eba-ad43-4053-a4eb-ffa158697411.json',
'https://media.services.pbs.org/api/v1/episodes/05e20eba-ad43-4053-a4eb-ffa158697411/assets/': 'test_fixtures/nova_episodes_05e20eba-ad43-4053-a4eb-ffa158697411_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0650c773-5c57-4749-8ca7-215fd507281c/': 'test_fixtures/nova_episodes_0650c773-5c57-4749-8ca7-215fd507281c.json',
'https://media.services.pbs.org/api/v1/episodes/0650c773-5c57-4749-8ca7-215fd507281c/assets/': 'test_fixtures/nova_episodes_0650c773-5c57-4749-8ca7-215fd507281c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/067d5f1b-4d0f-4d8b-b1d9-f70d7e78a02e/': 'test_fixtures/nova_episodes_067d5f1b-4d0f-4d8b-b1d9-f70d7e78a02e.json',
'https://media.services.pbs.org/api/v1/episodes/067d5f1b-4d0f-4d8b-b1d9-f70d7e78a02e/assets/': 'test_fixtures/nova_episodes_067d5f1b-4d0f-4d8b-b1d9-f70d7e78a02e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/08161789-765f-49c2-8900-ecc025dcca21/': 'test_fixtures/nova_episodes_08161789-765f-49c2-8900-ecc025dcca21.json',
'https://media.services.pbs.org/api/v1/episodes/08161789-765f-49c2-8900-ecc025dcca21/assets/': 'test_fixtures/nova_episodes_08161789-765f-49c2-8900-ecc025dcca21_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0834e64f-68c0-4fb9-b650-0fc9cf68a118/': 'test_fixtures/nova_episodes_0834e64f-68c0-4fb9-b650-0fc9cf68a118.json',
'https://media.services.pbs.org/api/v1/episodes/0834e64f-68c0-4fb9-b650-0fc9cf68a118/assets/': 'test_fixtures/nova_episodes_0834e64f-68c0-4fb9-b650-0fc9cf68a118_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0834fc35-116c-4d52-8b8d-c00c99cd9dbb/': 'test_fixtures/nova_episodes_0834fc35-116c-4d52-8b8d-c00c99cd9dbb.json',
'https://media.services.pbs.org/api/v1/episodes/0834fc35-116c-4d52-8b8d-c00c99cd9dbb/assets/': 'test_fixtures/nova_episodes_0834fc35-116c-4d52-8b8d-c00c99cd9dbb_assets.json',
'https://media.services.pbs.org/api/v1/episodes/08656dd1-3d3a-4772-bf20-dc5270307f22/': 'test_fixtures/nova_episodes_08656dd1-3d3a-4772-bf20-dc5270307f22.json',
'https://media.services.pbs.org/api/v1/episodes/08656dd1-3d3a-4772-bf20-dc5270307f22/assets/': 'test_fixtures/nova_episodes_08656dd1-3d3a-4772-bf20-dc5270307f22_assets.json',
'https://media.services.pbs.org/api/v1/episodes/09a1cf18-8476-4cd7-bd97-e68fbb7edcb4/': 'test_fixtures/nova_episodes_09a1cf18-8476-4cd7-bd97-e68fbb7edcb4.json',
'https://media.services.pbs.org/api/v1/episodes/09a1cf18-8476-4cd7-bd97-e68fbb7edcb4/assets/': 'test_fixtures/nova_episodes_09a1cf18-8476-4cd7-bd97-e68fbb7edcb4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/09f3940b-0168-4d72-883e-537146704823/': 'test_fixtures/nova_episodes_09f3940b-0168-4d72-883e-537146704823.json',
'https://media.services.pbs.org/api/v1/episodes/09f3940b-0168-4d72-883e-537146704823/assets/': 'test_fixtures/nova_episodes_09f3940b-0168-4d72-883e-537146704823_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0a98139b-61fd-43c1-a332-53c8c7d3067a/': 'test_fixtures/nova_episodes_0a98139b-61fd-43c1-a332-53c8c7d3067a.json',
'https://media.services.pbs.org/api/v1/episodes/0a98139b-61fd-43c1-a332-53c8c7d3067a/assets/': 'test_fixtures/nova_episodes_0a98139b-61fd-43c1-a332-53c8c7d3067a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0b4be5d5-82cb-432e-9ff1-e436dbaac4c3/': 'test_fixtures/nova_episodes_0b4be5d5-82cb-432e-9ff1-e436dbaac4c3.json',
'https://media.services.pbs.org/api/v1/episodes/0b4be5d5-82cb-432e-9ff1-e436dbaac4c3/assets/': 'test_fixtures/nova_episodes_0b4be5d5-82cb-432e-9ff1-e436dbaac4c3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0b967913-0b2b-4abf-afb5-26f3cdaca243/': 'test_fixtures/nova_episodes_0b967913-0b2b-4abf-afb5-26f3cdaca243.json',
'https://media.services.pbs.org/api/v1/episodes/0b967913-0b2b-4abf-afb5-26f3cdaca243/assets/': 'test_fixtures/nova_episodes_0b967913-0b2b-4abf-afb5-26f3cdaca243_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0bce1a35-fcd5-41b6-9dd6-ae6b3c4193bc/': 'test_fixtures/nova_episodes_0bce1a35-fcd5-41b6-9dd6-ae6b3c4193bc.json',
'https://media.services.pbs.org/api/v1/episodes/0bce1a35-fcd5-41b6-9dd6-ae6b3c4193bc/assets/': 'test_fixtures/nova_episodes_0bce1a35-fcd5-41b6-9dd6-ae6b3c4193bc_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0d8675f9-0cfe-4cb8-9a1b-2c44fbc62e36/': 'test_fixtures/nova_episodes_0d8675f9-0cfe-4cb8-9a1b-2c44fbc62e36.json',
'https://media.services.pbs.org/api/v1/episodes/0d8675f9-0cfe-4cb8-9a1b-2c44fbc62e36/assets/': 'test_fixtures/nova_episodes_0d8675f9-0cfe-4cb8-9a1b-2c44fbc62e36_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0dad814e-bc7f-464b-b121-ba0fede0714e/': 'test_fixtures/nova_episodes_0dad814e-bc7f-464b-b121-ba0fede0714e.json',
'https://media.services.pbs.org/api/v1/episodes/0dad814e-bc7f-464b-b121-ba0fede0714e/assets/': 'test_fixtures/nova_episodes_0dad814e-bc7f-464b-b121-ba0fede0714e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0e51adc5-9389-49d4-9995-2977207381e4/': 'test_fixtures/nova_episodes_0e51adc5-9389-49d4-9995-2977207381e4.json',
'https://media.services.pbs.org/api/v1/episodes/0e51adc5-9389-49d4-9995-2977207381e4/assets/': 'test_fixtures/nova_episodes_0e51adc5-9389-49d4-9995-2977207381e4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/0ff57ac5-e4fb-40d4-9c15-038ab05f7cfa/': 'test_fixtures/nova_episodes_0ff57ac5-e4fb-40d4-9c15-038ab05f7cfa.json',
'https://media.services.pbs.org/api/v1/episodes/0ff57ac5-e4fb-40d4-9c15-038ab05f7cfa/assets/': 'test_fixtures/nova_episodes_0ff57ac5-e4fb-40d4-9c15-038ab05f7cfa_assets.json',
'https://media.services.pbs.org/api/v1/episodes/107268fc-0437-4877-8c3b-d5fdcef32737/': 'test_fixtures/nova_episodes_107268fc-0437-4877-8c3b-d5fdcef32737.json',
'https://media.services.pbs.org/api/v1/episodes/107268fc-0437-4877-8c3b-d5fdcef32737/assets/': 'test_fixtures/nova_episodes_107268fc-0437-4877-8c3b-d5fdcef32737_assets.json',
'https://media.services.pbs.org/api/v1/episodes/1260ce14-2d07-48e5-a96a-670c583490ed/': 'test_fixtures/nova_episodes_1260ce14-2d07-48e5-a96a-670c583490ed.json',
'https://media.services.pbs.org/api/v1/episodes/1260ce14-2d07-48e5-a96a-670c583490ed/assets/': 'test_fixtures/nova_episodes_1260ce14-2d07-48e5-a96a-670c583490ed_assets.json',
'https://media.services.pbs.org/api/v1/episodes/12e812ff-df85-4c45-9d64-6adbef6c40c8/': 'test_fixtures/nova_episodes_12e812ff-df85-4c45-9d64-6adbef6c40c8.json',
'https://media.services.pbs.org/api/v1/episodes/12e812ff-df85-4c45-9d64-6adbef6c40c8/assets/': 'test_fixtures/nova_episodes_12e812ff-df85-4c45-9d64-6adbef6c40c8_assets.json',
'https://media.services.pbs.org/api/v1/episodes/149ebeae-b6e0-4edd-a26a-87a0f1eba3d5/': 'test_fixtures/nova_episodes_149ebeae-b6e0-4edd-a26a-87a0f1eba3d5.json',
'https://media.services.pbs.org/api/v1/episodes/149ebeae-b6e0-4edd-a26a-87a0f1eba3d5/assets/': 'test_fixtures/nova_episodes_149ebeae-b6e0-4edd-a26a-87a0f1eba3d5_assets.json',
'https://media.services.pbs.org/api/v1/episodes/1763da0c-72d1-4e23-9c65-03ad167a5914/': 'test_fixtures/nova_episodes_1763da0c-72d1-4e23-9c65-03ad167a5914.json',
'https://media.services.pbs.org/api/v1/episodes/1763da0c-72d1-4e23-9c65-03ad167a5914/assets/': 'test_fixtures/nova_episodes_1763da0c-72d1-4e23-9c65-03ad167a5914_assets.json',
'https://media.services.pbs.org/api/v1/episodes/177c7bf7-d861-473d-9df4-8b4aba33c653/': 'test_fixtures/nova_episodes_177c7bf7-d861-473d-9df4-8b4aba33c653.json',
'https://media.services.pbs.org/api/v1/episodes/177c7bf7-d861-473d-9df4-8b4aba33c653/assets/': 'test_fixtures/nova_episodes_177c7bf7-d861-473d-9df4-8b4aba33c653_assets.json',
'https://media.services.pbs.org/api/v1/episodes/189a5812-acab-4a10-aef5-776ea2d8a196/': 'test_fixtures/nova_episodes_189a5812-acab-4a10-aef5-776ea2d8a196.json',
'https://media.services.pbs.org/api/v1/episodes/189a5812-acab-4a10-aef5-776ea2d8a196/assets/': 'test_fixtures/nova_episodes_189a5812-acab-4a10-aef5-776ea2d8a196_assets.json',
'https://media.services.pbs.org/api/v1/episodes/18d9adce-ef76-4a1a-bef3-bc41178e42c6/': 'test_fixtures/nova_episodes_18d9adce-ef76-4a1a-bef3-bc41178e42c6.json',
'https://media.services.pbs.org/api/v1/episodes/18d9adce-ef76-4a1a-bef3-bc41178e42c6/assets/': 'test_fixtures/nova_episodes_18d9adce-ef76-4a1a-bef3-bc41178e42c6_assets.json',
'https://media.services.pbs.org/api/v1/episodes/1990138c-6a5a-406e-9652-8b1f87f8b983/': 'test_fixtures/nova_episodes_1990138c-6a5a-406e-9652-8b1f87f8b983.json',
'https://media.services.pbs.org/api/v1/episodes/1990138c-6a5a-406e-9652-8b1f87f8b983/assets/': 'test_fixtures/nova_episodes_1990138c-6a5a-406e-9652-8b1f87f8b983_assets.json',
'https://media.services.pbs.org/api/v1/episodes/1ac406df-53cf-46c9-8feb-e9709b729a7a/': 'test_fixtures/nova_episodes_1ac406df-53cf-46c9-8feb-e9709b729a7a.json',
'https://media.services.pbs.org/api/v1/episodes/1ac406df-53cf-46c9-8feb-e9709b729a7a/assets/': 'test_fixtures/nova_episodes_1ac406df-53cf-46c9-8feb-e9709b729a7a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/1b2e436f-4a8b-48bd-876f-5b5c548c8520/': 'test_fixtures/nova_episodes_1b2e436f-4a8b-48bd-876f-5b5c548c8520.json',
'https://media.services.pbs.org/api/v1/episodes/1b2e436f-4a8b-48bd-876f-5b5c548c8520/assets/': 'test_fixtures/nova_episodes_1b2e436f-4a8b-48bd-876f-5b5c548c8520_assets.json',
'https://media.services.pbs.org/api/v1/episodes/1e07147b-3019-4f83-b597-1910a7378c0e/': 'test_fixtures/nova_episodes_1e07147b-3019-4f83-b597-1910a7378c0e.json',
'https://media.services.pbs.org/api/v1/episodes/1e07147b-3019-4f83-b597-1910a7378c0e/assets/': 'test_fixtures/nova_episodes_1e07147b-3019-4f83-b597-1910a7378c0e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/1eefd86c-fc02-4caf-b237-96279889f300/': 'test_fixtures/nova_episodes_1eefd86c-fc02-4caf-b237-96279889f300.json',
'https://media.services.pbs.org/api/v1/episodes/1eefd86c-fc02-4caf-b237-96279889f300/assets/': 'test_fixtures/nova_episodes_1eefd86c-fc02-4caf-b237-96279889f300_assets.json',
'https://media.services.pbs.org/api/v1/episodes/1f82e934-838e-4376-a42b-e1a21ca508e3/': 'test_fixtures/nova_episodes_1f82e934-838e-4376-a42b-e1a21ca508e3.json',
'https://media.services.pbs.org/api/v1/episodes/1f82e934-838e-4376-a42b-e1a21ca508e3/assets/': 'test_fixtures/nova_episodes_1f82e934-838e-4376-a42b-e1a21ca508e3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/2099394d-95b9-4a77-a405-8bdba8b9642b/': 'test_fixtures/nova_episodes_2099394d-95b9-4a77-a405-8bdba8b9642b.json',
'https://media.services.pbs.org/api/v1/episodes/2099394d-95b9-4a77-a405-8bdba8b9642b/assets/': 'test_fixtures/nova_episodes_2099394d-95b9-4a77-a405-8bdba8b9642b_assets.json',
'https://media.services.pbs.org/api/v1/episodes/20ea866e-e0eb-41f7-8bfe-7eba9cd7ae9d/': 'test_fixtures/nova_episodes_20ea866e-e0eb-41f7-8bfe-7eba9cd7ae9d.json',
'https://media.services.pbs.org/api/v1/episodes/20ea866e-e0eb-41f7-8bfe-7eba9cd7ae9d/assets/': 'test_fixtures/nova_episodes_20ea866e-e0eb-41f7-8bfe-7eba9cd7ae9d_assets.json',
'https://media.services.pbs.org/api/v1/episodes/21d51c83-690c-4751-8380-fd340d604f56/': 'test_fixtures/nova_episodes_21d51c83-690c-4751-8380-fd340d604f56.json',
'https://media.services.pbs.org/api/v1/episodes/21d51c83-690c-4751-8380-fd340d604f56/assets/': 'test_fixtures/nova_episodes_21d51c83-690c-4751-8380-fd340d604f56_assets.json',
'https://media.services.pbs.org/api/v1/episodes/22200d46-32ba-4429-96a0-278d1b5091b8/': 'test_fixtures/nova_episodes_22200d46-32ba-4429-96a0-278d1b5091b8.json',
'https://media.services.pbs.org/api/v1/episodes/22200d46-32ba-4429-96a0-278d1b5091b8/assets/': 'test_fixtures/nova_episodes_22200d46-32ba-4429-96a0-278d1b5091b8_assets.json',
'https://media.services.pbs.org/api/v1/episodes/228bb7a3-7879-4b35-b50a-26113252f174/': 'test_fixtures/nova_episodes_228bb7a3-7879-4b35-b50a-26113252f174.json',
'https://media.services.pbs.org/api/v1/episodes/228bb7a3-7879-4b35-b50a-26113252f174/assets/': 'test_fixtures/nova_episodes_228bb7a3-7879-4b35-b50a-26113252f174_assets.json',
'https://media.services.pbs.org/api/v1/episodes/23ac3a50-8cd1-45e7-9722-d1330b533ee7/': 'test_fixtures/nova_episodes_23ac3a50-8cd1-45e7-9722-d1330b533ee7.json',
'https://media.services.pbs.org/api/v1/episodes/23ac3a50-8cd1-45e7-9722-d1330b533ee7/assets/': 'test_fixtures/nova_episodes_23ac3a50-8cd1-45e7-9722-d1330b533ee7_assets.json',
'https://media.services.pbs.org/api/v1/episodes/23ddd1fd-9f62-4699-8b4b-1bdfc141d3a5/': 'test_fixtures/nova_episodes_23ddd1fd-9f62-4699-8b4b-1bdfc141d3a5.json',
'https://media.services.pbs.org/api/v1/episodes/23ddd1fd-9f62-4699-8b4b-1bdfc141d3a5/assets/': 'test_fixtures/nova_episodes_23ddd1fd-9f62-4699-8b4b-1bdfc141d3a5_assets.json',
'https://media.services.pbs.org/api/v1/episodes/249fbfcc-92c6-486e-a1fb-d624f285272d/': 'test_fixtures/nova_episodes_249fbfcc-92c6-486e-a1fb-d624f285272d.json',
'https://media.services.pbs.org/api/v1/episodes/249fbfcc-92c6-486e-a1fb-d624f285272d/assets/': 'test_fixtures/nova_episodes_249fbfcc-92c6-486e-a1fb-d624f285272d_assets.json',
'https://media.services.pbs.org/api/v1/episodes/2500a183-6a3e-4d5a-b205-f5c77f46af07/': 'test_fixtures/nova_episodes_2500a183-6a3e-4d5a-b205-f5c77f46af07.json',
'https://media.services.pbs.org/api/v1/episodes/2500a183-6a3e-4d5a-b205-f5c77f46af07/assets/': 'test_fixtures/nova_episodes_2500a183-6a3e-4d5a-b205-f5c77f46af07_assets.json',
'https://media.services.pbs.org/api/v1/episodes/266a8244-0839-4fa8-bcc6-db9c9df5c164/': 'test_fixtures/nova_episodes_266a8244-0839-4fa8-bcc6-db9c9df5c164.json',
'https://media.services.pbs.org/api/v1/episodes/266a8244-0839-4fa8-bcc6-db9c9df5c164/assets/': 'test_fixtures/nova_episodes_266a8244-0839-4fa8-bcc6-db9c9df5c164_assets.json',
'https://media.services.pbs.org/api/v1/episodes/269e852c-fc9d-476a-b257-816d75299df4/': 'test_fixtures/nova_episodes_269e852c-fc9d-476a-b257-816d75299df4.json',
'https://media.services.pbs.org/api/v1/episodes/269e852c-fc9d-476a-b257-816d75299df4/assets/': 'test_fixtures/nova_episodes_269e852c-fc9d-476a-b257-816d75299df4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/26efb155-fbfb-44d0-9ba4-f2f3a3da43cf/': 'test_fixtures/nova_episodes_26efb155-fbfb-44d0-9ba4-f2f3a3da43cf.json',
'https://media.services.pbs.org/api/v1/episodes/26efb155-fbfb-44d0-9ba4-f2f3a3da43cf/assets/': 'test_fixtures/nova_episodes_26efb155-fbfb-44d0-9ba4-f2f3a3da43cf_assets.json',
'https://media.services.pbs.org/api/v1/episodes/270a7e86-8078-41b3-ba00-01ef980fd0c3/': 'test_fixtures/nova_episodes_270a7e86-8078-41b3-ba00-01ef980fd0c3.json',
'https://media.services.pbs.org/api/v1/episodes/270a7e86-8078-41b3-ba00-01ef980fd0c3/assets/': 'test_fixtures/nova_episodes_270a7e86-8078-41b3-ba00-01ef980fd0c3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/28062e59-97a2-4ba0-840f-de7818315459/': 'test_fixtures/nova_episodes_28062e59-97a2-4ba0-840f-de7818315459.json',
'https://media.services.pbs.org/api/v1/episodes/28062e59-97a2-4ba0-840f-de7818315459/assets/': 'test_fixtures/nova_episodes_28062e59-97a2-4ba0-840f-de7818315459_assets.json',
'https://media.services.pbs.org/api/v1/episodes/2cd11241-2906-478f-814a-09b02bcebfdb/': 'test_fixtures/nova_episodes_2cd11241-2906-478f-814a-09b02bcebfdb.json',
'https://media.services.pbs.org/api/v1/episodes/2cd11241-2906-478f-814a-09b02bcebfdb/assets/': 'test_fixtures/nova_episodes_2cd11241-2906-478f-814a-09b02bcebfdb_assets.json',
'https://media.services.pbs.org/api/v1/episodes/2cf17b7b-5f4c-4271-af3f-abfa2c22084f/': 'test_fixtures/nova_episodes_2cf17b7b-5f4c-4271-af3f-abfa2c22084f.json',
'https://media.services.pbs.org/api/v1/episodes/2cf17b7b-5f4c-4271-af3f-abfa2c22084f/assets/': 'test_fixtures/nova_episodes_2cf17b7b-5f4c-4271-af3f-abfa2c22084f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/2fb4ebde-e91b-45d7-bde8-2d76ae604b49/': 'test_fixtures/nova_episodes_2fb4ebde-e91b-45d7-bde8-2d76ae604b49.json',
'https://media.services.pbs.org/api/v1/episodes/2fb4ebde-e91b-45d7-bde8-2d76ae604b49/assets/': 'test_fixtures/nova_episodes_2fb4ebde-e91b-45d7-bde8-2d76ae604b49_assets.json',
'https://media.services.pbs.org/api/v1/episodes/2fd7eafc-fe50-43c7-bd5e-526caf5889c5/': 'test_fixtures/nova_episodes_2fd7eafc-fe50-43c7-bd5e-526caf5889c5.json',
'https://media.services.pbs.org/api/v1/episodes/2fd7eafc-fe50-43c7-bd5e-526caf5889c5/assets/': 'test_fixtures/nova_episodes_2fd7eafc-fe50-43c7-bd5e-526caf5889c5_assets.json',
'https://media.services.pbs.org/api/v1/episodes/31286bd0-6bbd-45a3-b4df-e663f01559e3/': 'test_fixtures/nova_episodes_31286bd0-6bbd-45a3-b4df-e663f01559e3.json',
'https://media.services.pbs.org/api/v1/episodes/31286bd0-6bbd-45a3-b4df-e663f01559e3/assets/': 'test_fixtures/nova_episodes_31286bd0-6bbd-45a3-b4df-e663f01559e3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/3128b8f8-71dc-4cc9-9042-ea932467a956/': 'test_fixtures/nova_episodes_3128b8f8-71dc-4cc9-9042-ea932467a956.json',
'https://media.services.pbs.org/api/v1/episodes/3128b8f8-71dc-4cc9-9042-ea932467a956/assets/': 'test_fixtures/nova_episodes_3128b8f8-71dc-4cc9-9042-ea932467a956_assets.json',
'https://media.services.pbs.org/api/v1/episodes/3427714a-1cc7-4ba5-8f54-fdf0d59e1af7/': 'test_fixtures/nova_episodes_3427714a-1cc7-4ba5-8f54-fdf0d59e1af7.json',
'https://media.services.pbs.org/api/v1/episodes/3427714a-1cc7-4ba5-8f54-fdf0d59e1af7/assets/': 'test_fixtures/nova_episodes_3427714a-1cc7-4ba5-8f54-fdf0d59e1af7_assets.json',
'https://media.services.pbs.org/api/v1/episodes/349490fc-d260-4b77-8c8c-013917fb6ca9/': 'test_fixtures/nova_episodes_349490fc-d260-4b77-8c8c-013917fb6ca9.json',
'https://media.services.pbs.org/api/v1/episodes/349490fc-d260-4b77-8c8c-013917fb6ca9/assets/': 'test_fixtures/nova_episodes_349490fc-d260-4b77-8c8c-013917fb6ca9_assets.json',
'https://media.services.pbs.org/api/v1/episodes/3498bdf7-e126-4037-93a4-6bdb4fa29de1/': 'test_fixtures/nova_episodes_3498bdf7-e126-4037-93a4-6bdb4fa29de1.json',
'https://media.services.pbs.org/api/v1/episodes/3498bdf7-e126-4037-93a4-6bdb4fa29de1/assets/': 'test_fixtures/nova_episodes_3498bdf7-e126-4037-93a4-6bdb4fa29de1_assets.json',
'https://media.services.pbs.org/api/v1/episodes/37f3b872-e4e0-4cbb-a5b7-fa6f8d15ca90/': 'test_fixtures/nova_episodes_37f3b872-e4e0-4cbb-a5b7-fa6f8d15ca90.json',
'https://media.services.pbs.org/api/v1/episodes/37f3b872-e4e0-4cbb-a5b7-fa6f8d15ca90/assets/': 'test_fixtures/nova_episodes_37f3b872-e4e0-4cbb-a5b7-fa6f8d15ca90_assets.json',
'https://media.services.pbs.org/api/v1/episodes/39a10624-c10a-42fc-aa4d-202aa4e2332f/': 'test_fixtures/nova_episodes_39a10624-c10a-42fc-aa4d-202aa4e2332f.json',
'https://media.services.pbs.org/api/v1/episodes/39a10624-c10a-42fc-aa4d-202aa4e2332f/assets/': 'test_fixtures/nova_episodes_39a10624-c10a-42fc-aa4d-202aa4e2332f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/3b9e350d-316a-40f4-99ae-37aba7c94255/': 'test_fixtures/nova_episodes_3b9e350d-316a-40f4-99ae-37aba7c94255.json',
'https://media.services.pbs.org/api/v1/episodes/3b9e350d-316a-40f4-99ae-37aba7c94255/assets/': 'test_fixtures/nova_episodes_3b9e350d-316a-40f4-99ae-37aba7c94255_assets.json',
'https://media.services.pbs.org/api/v1/episodes/3b9f0ba3-5a33-4257-881d-0697721327ba/': 'test_fixtures/nova_episodes_3b9f0ba3-5a33-4257-881d-0697721327ba.json',
'https://media.services.pbs.org/api/v1/episodes/3b9f0ba3-5a33-4257-881d-0697721327ba/assets/': 'test_fixtures/nova_episodes_3b9f0ba3-5a33-4257-881d-0697721327ba_assets.json',
'https://media.services.pbs.org/api/v1/episodes/3e502a5d-9424-486d-83c2-ff211f4e0525/': 'test_fixtures/nova_episodes_3e502a5d-9424-486d-83c2-ff211f4e0525.json',
'https://media.services.pbs.org/api/v1/episodes/3e502a5d-9424-486d-83c2-ff211f4e0525/assets/': 'test_fixtures/nova_episodes_3e502a5d-9424-486d-83c2-ff211f4e0525_assets.json',
'https://media.services.pbs.org/api/v1/episodes/3e6f0214-090a-4a4c-8234-256330fbe27f/': 'test_fixtures/nova_episodes_3e6f0214-090a-4a4c-8234-256330fbe27f.json',
'https://media.services.pbs.org/api/v1/episodes/3e6f0214-090a-4a4c-8234-256330fbe27f/assets/': 'test_fixtures/nova_episodes_3e6f0214-090a-4a4c-8234-256330fbe27f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4177f07b-814e-47a5-ad4b-f12a69cb1da9/': 'test_fixtures/nova_episodes_4177f07b-814e-47a5-ad4b-f12a69cb1da9.json',
'https://media.services.pbs.org/api/v1/episodes/4177f07b-814e-47a5-ad4b-f12a69cb1da9/assets/': 'test_fixtures/nova_episodes_4177f07b-814e-47a5-ad4b-f12a69cb1da9_assets.json',
'https://media.services.pbs.org/api/v1/episodes/42999204-5048-47b6-9a45-96bad90f4efe/': 'test_fixtures/nova_episodes_42999204-5048-47b6-9a45-96bad90f4efe.json',
'https://media.services.pbs.org/api/v1/episodes/42999204-5048-47b6-9a45-96bad90f4efe/assets/': 'test_fixtures/nova_episodes_42999204-5048-47b6-9a45-96bad90f4efe_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4557c4c6-9c9a-4c30-95fe-8432fe3e68a1/': 'test_fixtures/nova_episodes_4557c4c6-9c9a-4c30-95fe-8432fe3e68a1.json',
'https://media.services.pbs.org/api/v1/episodes/4557c4c6-9c9a-4c30-95fe-8432fe3e68a1/assets/': 'test_fixtures/nova_episodes_4557c4c6-9c9a-4c30-95fe-8432fe3e68a1_assets.json',
'https://media.services.pbs.org/api/v1/episodes/46774dab-eaf9-48f1-a0b2-b8b47f9fa777/': 'test_fixtures/nova_episodes_46774dab-eaf9-48f1-a0b2-b8b47f9fa777.json',
'https://media.services.pbs.org/api/v1/episodes/46774dab-eaf9-48f1-a0b2-b8b47f9fa777/assets/': 'test_fixtures/nova_episodes_46774dab-eaf9-48f1-a0b2-b8b47f9fa777_assets.json',
'https://media.services.pbs.org/api/v1/episodes/46d44280-b4ce-4968-b71c-df66a29d234e/': 'test_fixtures/nova_episodes_46d44280-b4ce-4968-b71c-df66a29d234e.json',
'https://media.services.pbs.org/api/v1/episodes/46d44280-b4ce-4968-b71c-df66a29d234e/assets/': 'test_fixtures/nova_episodes_46d44280-b4ce-4968-b71c-df66a29d234e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/48092ddf-675c-4d09-a0c1-50d61c4160b3/': 'test_fixtures/nova_episodes_48092ddf-675c-4d09-a0c1-50d61c4160b3.json',
'https://media.services.pbs.org/api/v1/episodes/48092ddf-675c-4d09-a0c1-50d61c4160b3/assets/': 'test_fixtures/nova_episodes_48092ddf-675c-4d09-a0c1-50d61c4160b3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/489d8df1-4ee1-43d4-8b4f-c3e4f62354a2/': 'test_fixtures/nova_episodes_489d8df1-4ee1-43d4-8b4f-c3e4f62354a2.json',
'https://media.services.pbs.org/api/v1/episodes/489d8df1-4ee1-43d4-8b4f-c3e4f62354a2/assets/': 'test_fixtures/nova_episodes_489d8df1-4ee1-43d4-8b4f-c3e4f62354a2_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4a0e5d5e-3e8e-43bb-b952-1f6079b4a9ec/': 'test_fixtures/nova_episodes_4a0e5d5e-3e8e-43bb-b952-1f6079b4a9ec.json',
'https://media.services.pbs.org/api/v1/episodes/4a0e5d5e-3e8e-43bb-b952-1f6079b4a9ec/assets/': 'test_fixtures/nova_episodes_4a0e5d5e-3e8e-43bb-b952-1f6079b4a9ec_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4ab1bd94-a37f-4a9f-8716-e301e3636c12/': 'test_fixtures/nova_episodes_4ab1bd94-a37f-4a9f-8716-e301e3636c12.json',
'https://media.services.pbs.org/api/v1/episodes/4ab1bd94-a37f-4a9f-8716-e301e3636c12/assets/': 'test_fixtures/nova_episodes_4ab1bd94-a37f-4a9f-8716-e301e3636c12_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4bce8f7a-86f8-45ed-adf6-0eab2163c360/': 'test_fixtures/nova_episodes_4bce8f7a-86f8-45ed-adf6-0eab2163c360.json',
'https://media.services.pbs.org/api/v1/episodes/4bce8f7a-86f8-45ed-adf6-0eab2163c360/assets/': 'test_fixtures/nova_episodes_4bce8f7a-86f8-45ed-adf6-0eab2163c360_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4c886700-7a9d-45ad-8de7-757ddf4036ae/': 'test_fixtures/nova_episodes_4c886700-7a9d-45ad-8de7-757ddf4036ae.json',
'https://media.services.pbs.org/api/v1/episodes/4c886700-7a9d-45ad-8de7-757ddf4036ae/assets/': 'test_fixtures/nova_episodes_4c886700-7a9d-45ad-8de7-757ddf4036ae_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4d331b6f-36cc-44ef-8263-4d5ed5014631/': 'test_fixtures/nova_episodes_4d331b6f-36cc-44ef-8263-4d5ed5014631.json',
'https://media.services.pbs.org/api/v1/episodes/4d331b6f-36cc-44ef-8263-4d5ed5014631/assets/': 'test_fixtures/nova_episodes_4d331b6f-36cc-44ef-8263-4d5ed5014631_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4d778171-11a1-4283-9d01-526ce550d202/': 'test_fixtures/nova_episodes_4d778171-11a1-4283-9d01-526ce550d202.json',
'https://media.services.pbs.org/api/v1/episodes/4d778171-11a1-4283-9d01-526ce550d202/assets/': 'test_fixtures/nova_episodes_4d778171-11a1-4283-9d01-526ce550d202_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4d984119-8571-43f7-bc90-a99b835f55f3/': 'test_fixtures/nova_episodes_4d984119-8571-43f7-bc90-a99b835f55f3.json',
'https://media.services.pbs.org/api/v1/episodes/4d984119-8571-43f7-bc90-a99b835f55f3/assets/': 'test_fixtures/nova_episodes_4d984119-8571-43f7-bc90-a99b835f55f3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4eb15f70-e78d-4014-9a5c-aed618c96a56/': 'test_fixtures/nova_episodes_4eb15f70-e78d-4014-9a5c-aed618c96a56.json',
'https://media.services.pbs.org/api/v1/episodes/4eb15f70-e78d-4014-9a5c-aed618c96a56/assets/': 'test_fixtures/nova_episodes_4eb15f70-e78d-4014-9a5c-aed618c96a56_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4eb47c7d-37f9-4bad-8fa6-4fc4bb84795e/': 'test_fixtures/nova_episodes_4eb47c7d-37f9-4bad-8fa6-4fc4bb84795e.json',
'https://media.services.pbs.org/api/v1/episodes/4eb47c7d-37f9-4bad-8fa6-4fc4bb84795e/assets/': 'test_fixtures/nova_episodes_4eb47c7d-37f9-4bad-8fa6-4fc4bb84795e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/4ff09235-d5c5-4f74-a44d-16201d815341/': 'test_fixtures/nova_episodes_4ff09235-d5c5-4f74-a44d-16201d815341.json',
'https://media.services.pbs.org/api/v1/episodes/4ff09235-d5c5-4f74-a44d-16201d815341/assets/': 'test_fixtures/nova_episodes_4ff09235-d5c5-4f74-a44d-16201d815341_assets.json',
'https://media.services.pbs.org/api/v1/episodes/509a0269-1bd1-4be5-a881-dd902dc41ea9/': 'test_fixtures/nova_episodes_509a0269-1bd1-4be5-a881-dd902dc41ea9.json',
'https://media.services.pbs.org/api/v1/episodes/509a0269-1bd1-4be5-a881-dd902dc41ea9/assets/': 'test_fixtures/nova_episodes_509a0269-1bd1-4be5-a881-dd902dc41ea9_assets.json',
'https://media.services.pbs.org/api/v1/episodes/51ea46f3-e393-40e6-baad-21ab3c6e2250/': 'test_fixtures/nova_episodes_51ea46f3-e393-40e6-baad-21ab3c6e2250.json',
'https://media.services.pbs.org/api/v1/episodes/51ea46f3-e393-40e6-baad-21ab3c6e2250/assets/': 'test_fixtures/nova_episodes_51ea46f3-e393-40e6-baad-21ab3c6e2250_assets.json',
'https://media.services.pbs.org/api/v1/episodes/5200277b-6cad-4ba0-a0a2-4b04ae18e4cb/': 'test_fixtures/nova_episodes_5200277b-6cad-4ba0-a0a2-4b04ae18e4cb.json',
'https://media.services.pbs.org/api/v1/episodes/5200277b-6cad-4ba0-a0a2-4b04ae18e4cb/assets/': 'test_fixtures/nova_episodes_5200277b-6cad-4ba0-a0a2-4b04ae18e4cb_assets.json',
'https://media.services.pbs.org/api/v1/episodes/5337e8c0-5f9d-4ba7-b8fe-b4b4162b7222/': 'test_fixtures/nova_episodes_5337e8c0-5f9d-4ba7-b8fe-b4b4162b7222.json',
'https://media.services.pbs.org/api/v1/episodes/5337e8c0-5f9d-4ba7-b8fe-b4b4162b7222/assets/': 'test_fixtures/nova_episodes_5337e8c0-5f9d-4ba7-b8fe-b4b4162b7222_assets.json',
'https://media.services.pbs.org/api/v1/episodes/553cea28-19f0-49f7-9db3-4396c6c64634/': 'test_fixtures/nova_episodes_553cea28-19f0-49f7-9db3-4396c6c64634.json',
'https://media.services.pbs.org/api/v1/episodes/553cea28-19f0-49f7-9db3-4396c6c64634/assets/': 'test_fixtures/nova_episodes_553cea28-19f0-49f7-9db3-4396c6c64634_assets.json',
'https://media.services.pbs.org/api/v1/episodes/57852c61-d2bb-47ee-8618-4074cdb6752a/': 'test_fixtures/nova_episodes_57852c61-d2bb-47ee-8618-4074cdb6752a.json',
'https://media.services.pbs.org/api/v1/episodes/57852c61-d2bb-47ee-8618-4074cdb6752a/assets/': 'test_fixtures/nova_episodes_57852c61-d2bb-47ee-8618-4074cdb6752a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/58f8a8b5-68db-4c4f-a254-7e5b76efdb43/': 'test_fixtures/nova_episodes_58f8a8b5-68db-4c4f-a254-7e5b76efdb43.json',
'https://media.services.pbs.org/api/v1/episodes/58f8a8b5-68db-4c4f-a254-7e5b76efdb43/assets/': 'test_fixtures/nova_episodes_58f8a8b5-68db-4c4f-a254-7e5b76efdb43_assets.json',
'https://media.services.pbs.org/api/v1/episodes/593ee4c0-4d1d-4924-a5d4-e4b5d757ba4a/': 'test_fixtures/nova_episodes_593ee4c0-4d1d-4924-a5d4-e4b5d757ba4a.json',
'https://media.services.pbs.org/api/v1/episodes/593ee4c0-4d1d-4924-a5d4-e4b5d757ba4a/assets/': 'test_fixtures/nova_episodes_593ee4c0-4d1d-4924-a5d4-e4b5d757ba4a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/59ae5bba-8d9a-41a0-8e36-e1c38defcf75/': 'test_fixtures/nova_episodes_59ae5bba-8d9a-41a0-8e36-e1c38defcf75.json',
'https://media.services.pbs.org/api/v1/episodes/59ae5bba-8d9a-41a0-8e36-e1c38defcf75/assets/': 'test_fixtures/nova_episodes_59ae5bba-8d9a-41a0-8e36-e1c38defcf75_assets.json',
'https://media.services.pbs.org/api/v1/episodes/5a9b1c4a-df5e-4f88-bfdd-2da819c7ddec/': 'test_fixtures/nova_episodes_5a9b1c4a-df5e-4f88-bfdd-2da819c7ddec.json',
'https://media.services.pbs.org/api/v1/episodes/5a9b1c4a-df5e-4f88-bfdd-2da819c7ddec/assets/': 'test_fixtures/nova_episodes_5a9b1c4a-df5e-4f88-bfdd-2da819c7ddec_assets.json',
'https://media.services.pbs.org/api/v1/episodes/5dcdde1c-c8c0-40c7-aa52-4a5f2d4892b5/': 'test_fixtures/nova_episodes_5dcdde1c-c8c0-40c7-aa52-4a5f2d4892b5.json',
'https://media.services.pbs.org/api/v1/episodes/5dcdde1c-c8c0-40c7-aa52-4a5f2d4892b5/assets/': 'test_fixtures/nova_episodes_5dcdde1c-c8c0-40c7-aa52-4a5f2d4892b5_assets.json',
'https://media.services.pbs.org/api/v1/episodes/5e2cce2d-9465-4779-a0ef-aec4e5d1e073/': 'test_fixtures/nova_episodes_5e2cce2d-9465-4779-a0ef-aec4e5d1e073.json',
'https://media.services.pbs.org/api/v1/episodes/5e2cce2d-9465-4779-a0ef-aec4e5d1e073/assets/': 'test_fixtures/nova_episodes_5e2cce2d-9465-4779-a0ef-aec4e5d1e073_assets.json',
'https://media.services.pbs.org/api/v1/episodes/600a9b69-bfa0-4149-9947-b34f7b8a054c/': 'test_fixtures/nova_episodes_600a9b69-bfa0-4149-9947-b34f7b8a054c.json',
'https://media.services.pbs.org/api/v1/episodes/600a9b69-bfa0-4149-9947-b34f7b8a054c/assets/': 'test_fixtures/nova_episodes_600a9b69-bfa0-4149-9947-b34f7b8a054c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/6269d572-1051-49ac-b05b-c2db00424555/': 'test_fixtures/nova_episodes_6269d572-1051-49ac-b05b-c2db00424555.json',
'https://media.services.pbs.org/api/v1/episodes/6269d572-1051-49ac-b05b-c2db00424555/assets/': 'test_fixtures/nova_episodes_6269d572-1051-49ac-b05b-c2db00424555_assets.json',
'https://media.services.pbs.org/api/v1/episodes/63f1285d-e71f-41c5-a70b-85b2c25e18ea/': 'test_fixtures/nova_episodes_63f1285d-e71f-41c5-a70b-85b2c25e18ea.json',
'https://media.services.pbs.org/api/v1/episodes/63f1285d-e71f-41c5-a70b-85b2c25e18ea/assets/': 'test_fixtures/nova_episodes_63f1285d-e71f-41c5-a70b-85b2c25e18ea_assets.json',
'https://media.services.pbs.org/api/v1/episodes/65678bab-1b8d-493d-83bb-81caa27a579f/': 'test_fixtures/nova_episodes_65678bab-1b8d-493d-83bb-81caa27a579f.json',
'https://media.services.pbs.org/api/v1/episodes/65678bab-1b8d-493d-83bb-81caa27a579f/assets/': 'test_fixtures/nova_episodes_65678bab-1b8d-493d-83bb-81caa27a579f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/65f0063c-a3ca-4d57-a141-e76d2ab96eff/': 'test_fixtures/nova_episodes_65f0063c-a3ca-4d57-a141-e76d2ab96eff.json',
'https://media.services.pbs.org/api/v1/episodes/65f0063c-a3ca-4d57-a141-e76d2ab96eff/assets/': 'test_fixtures/nova_episodes_65f0063c-a3ca-4d57-a141-e76d2ab96eff_assets.json',
'https://media.services.pbs.org/api/v1/episodes/678e8a43-b23a-401d-be83-99da9d3c0379/': 'test_fixtures/nova_episodes_678e8a43-b23a-401d-be83-99da9d3c0379.json',
'https://media.services.pbs.org/api/v1/episodes/678e8a43-b23a-401d-be83-99da9d3c0379/assets/': 'test_fixtures/nova_episodes_678e8a43-b23a-401d-be83-99da9d3c0379_assets.json',
'https://media.services.pbs.org/api/v1/episodes/67d0ba4b-fb56-41ee-ae61-fe0ff1c9948e/': 'test_fixtures/nova_episodes_67d0ba4b-fb56-41ee-ae61-fe0ff1c9948e.json',
'https://media.services.pbs.org/api/v1/episodes/67d0ba4b-fb56-41ee-ae61-fe0ff1c9948e/assets/': 'test_fixtures/nova_episodes_67d0ba4b-fb56-41ee-ae61-fe0ff1c9948e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/69b6176d-7386-4b0f-9d9b-64da674a262b/': 'test_fixtures/nova_episodes_69b6176d-7386-4b0f-9d9b-64da674a262b.json',
'https://media.services.pbs.org/api/v1/episodes/69b6176d-7386-4b0f-9d9b-64da674a262b/assets/': 'test_fixtures/nova_episodes_69b6176d-7386-4b0f-9d9b-64da674a262b_assets.json',
'https://media.services.pbs.org/api/v1/episodes/6a578dfb-cf3d-46b8-a1c3-6d507aef5def/': 'test_fixtures/nova_episodes_6a578dfb-cf3d-46b8-a1c3-6d507aef5def.json',
'https://media.services.pbs.org/api/v1/episodes/6a578dfb-cf3d-46b8-a1c3-6d507aef5def/assets/': 'test_fixtures/nova_episodes_6a578dfb-cf3d-46b8-a1c3-6d507aef5def_assets.json',
'https://media.services.pbs.org/api/v1/episodes/6c534332-8f8e-4947-8018-19c99f74ad8f/': 'test_fixtures/nova_episodes_6c534332-8f8e-4947-8018-19c99f74ad8f.json',
'https://media.services.pbs.org/api/v1/episodes/6c534332-8f8e-4947-8018-19c99f74ad8f/assets/': 'test_fixtures/nova_episodes_6c534332-8f8e-4947-8018-19c99f74ad8f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/6cd44bab-4a8a-4620-823e-c1a867f31316/': 'test_fixtures/nova_episodes_6cd44bab-4a8a-4620-823e-c1a867f31316.json',
'https://media.services.pbs.org/api/v1/episodes/6cd44bab-4a8a-4620-823e-c1a867f31316/assets/': 'test_fixtures/nova_episodes_6cd44bab-4a8a-4620-823e-c1a867f31316_assets.json',
'https://media.services.pbs.org/api/v1/episodes/6fb4f6e3-fd53-4f30-ae5f-7d6357add455/': 'test_fixtures/nova_episodes_6fb4f6e3-fd53-4f30-ae5f-7d6357add455.json',
'https://media.services.pbs.org/api/v1/episodes/6fb4f6e3-fd53-4f30-ae5f-7d6357add455/assets/': 'test_fixtures/nova_episodes_6fb4f6e3-fd53-4f30-ae5f-7d6357add455_assets.json',
'https://media.services.pbs.org/api/v1/episodes/6fb5e17b-8b1d-43d2-b34a-c37ee2837b54/': 'test_fixtures/nova_episodes_6fb5e17b-8b1d-43d2-b34a-c37ee2837b54.json',
'https://media.services.pbs.org/api/v1/episodes/6fb5e17b-8b1d-43d2-b34a-c37ee2837b54/assets/': 'test_fixtures/nova_episodes_6fb5e17b-8b1d-43d2-b34a-c37ee2837b54_assets.json',
'https://media.services.pbs.org/api/v1/episodes/705eb7eb-deb6-4ba9-b671-4b7e96c6c57c/': 'test_fixtures/nova_episodes_705eb7eb-deb6-4ba9-b671-4b7e96c6c57c.json',
'https://media.services.pbs.org/api/v1/episodes/705eb7eb-deb6-4ba9-b671-4b7e96c6c57c/assets/': 'test_fixtures/nova_episodes_705eb7eb-deb6-4ba9-b671-4b7e96c6c57c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/71747bf2-d49e-4969-a815-102666d542ac/': 'test_fixtures/nova_episodes_71747bf2-d49e-4969-a815-102666d542ac.json',
'https://media.services.pbs.org/api/v1/episodes/71747bf2-d49e-4969-a815-102666d542ac/assets/': 'test_fixtures/nova_episodes_71747bf2-d49e-4969-a815-102666d542ac_assets.json',
'https://media.services.pbs.org/api/v1/episodes/731ddf75-34a6-4057-91e0-70a73e806c62/': 'test_fixtures/nova_episodes_731ddf75-34a6-4057-91e0-70a73e806c62.json',
'https://media.services.pbs.org/api/v1/episodes/731ddf75-34a6-4057-91e0-70a73e806c62/assets/': 'test_fixtures/nova_episodes_731ddf75-34a6-4057-91e0-70a73e806c62_assets.json',
'https://media.services.pbs.org/api/v1/episodes/74b5a3cd-0376-4401-85f8-987d1f970bc6/': 'test_fixtures/nova_episodes_74b5a3cd-0376-4401-85f8-987d1f970bc6.json',
'https://media.services.pbs.org/api/v1/episodes/74b5a3cd-0376-4401-85f8-987d1f970bc6/assets/': 'test_fixtures/nova_episodes_74b5a3cd-0376-4401-85f8-987d1f970bc6_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7551c86c-c89d-4128-ac7c-3e34cd09d7d6/': 'test_fixtures/nova_episodes_7551c86c-c89d-4128-ac7c-3e34cd09d7d6.json',
'https://media.services.pbs.org/api/v1/episodes/7551c86c-c89d-4128-ac7c-3e34cd09d7d6/assets/': 'test_fixtures/nova_episodes_7551c86c-c89d-4128-ac7c-3e34cd09d7d6_assets.json',
'https://media.services.pbs.org/api/v1/episodes/768b179e-6b22-43d5-90c9-febe3c3c6e72/': 'test_fixtures/nova_episodes_768b179e-6b22-43d5-90c9-febe3c3c6e72.json',
'https://media.services.pbs.org/api/v1/episodes/768b179e-6b22-43d5-90c9-febe3c3c6e72/assets/': 'test_fixtures/nova_episodes_768b179e-6b22-43d5-90c9-febe3c3c6e72_assets.json',
'https://media.services.pbs.org/api/v1/episodes/77f7fd49-bfef-487c-a110-e915fecd61a4/': 'test_fixtures/nova_episodes_77f7fd49-bfef-487c-a110-e915fecd61a4.json',
'https://media.services.pbs.org/api/v1/episodes/77f7fd49-bfef-487c-a110-e915fecd61a4/assets/': 'test_fixtures/nova_episodes_77f7fd49-bfef-487c-a110-e915fecd61a4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/78a2763b-f7f8-4439-99cc-27b4b4374923/': 'test_fixtures/nova_episodes_78a2763b-f7f8-4439-99cc-27b4b4374923.json',
'https://media.services.pbs.org/api/v1/episodes/78a2763b-f7f8-4439-99cc-27b4b4374923/assets/': 'test_fixtures/nova_episodes_78a2763b-f7f8-4439-99cc-27b4b4374923_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7a0663d2-00e0-4cb2-9ad2-72a8f01c14f5/': 'test_fixtures/nova_episodes_7a0663d2-00e0-4cb2-9ad2-72a8f01c14f5.json',
'https://media.services.pbs.org/api/v1/episodes/7a0663d2-00e0-4cb2-9ad2-72a8f01c14f5/assets/': 'test_fixtures/nova_episodes_7a0663d2-00e0-4cb2-9ad2-72a8f01c14f5_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7a1465ee-f176-4b19-93aa-5faddff23a98/': 'test_fixtures/nova_episodes_7a1465ee-f176-4b19-93aa-5faddff23a98.json',
'https://media.services.pbs.org/api/v1/episodes/7a1465ee-f176-4b19-93aa-5faddff23a98/assets/': 'test_fixtures/nova_episodes_7a1465ee-f176-4b19-93aa-5faddff23a98_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7a162d09-8703-4cb3-838f-9c5a0235fad9/': 'test_fixtures/nova_episodes_7a162d09-8703-4cb3-838f-9c5a0235fad9.json',
'https://media.services.pbs.org/api/v1/episodes/7a162d09-8703-4cb3-838f-9c5a0235fad9/assets/': 'test_fixtures/nova_episodes_7a162d09-8703-4cb3-838f-9c5a0235fad9_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7a9b9199-8ad3-44f1-a8af-380a405b9d55/': 'test_fixtures/nova_episodes_7a9b9199-8ad3-44f1-a8af-380a405b9d55.json',
'https://media.services.pbs.org/api/v1/episodes/7a9b9199-8ad3-44f1-a8af-380a405b9d55/assets/': 'test_fixtures/nova_episodes_7a9b9199-8ad3-44f1-a8af-380a405b9d55_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7b4eecb6-8a6b-4d97-a01f-e70fdb8359c2/': 'test_fixtures/nova_episodes_7b4eecb6-8a6b-4d97-a01f-e70fdb8359c2.json',
'https://media.services.pbs.org/api/v1/episodes/7b4eecb6-8a6b-4d97-a01f-e70fdb8359c2/assets/': 'test_fixtures/nova_episodes_7b4eecb6-8a6b-4d97-a01f-e70fdb8359c2_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7b8a127f-f2a2-48ae-b038-8c9d0d3f0cbd/': 'test_fixtures/nova_episodes_7b8a127f-f2a2-48ae-b038-8c9d0d3f0cbd.json',
'https://media.services.pbs.org/api/v1/episodes/7b8a127f-f2a2-48ae-b038-8c9d0d3f0cbd/assets/': 'test_fixtures/nova_episodes_7b8a127f-f2a2-48ae-b038-8c9d0d3f0cbd_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7d0c4c1a-4dd9-4e33-8048-3331fb0ffa2b/': 'test_fixtures/nova_episodes_7d0c4c1a-4dd9-4e33-8048-3331fb0ffa2b.json',
'https://media.services.pbs.org/api/v1/episodes/7d0c4c1a-4dd9-4e33-8048-3331fb0ffa2b/assets/': 'test_fixtures/nova_episodes_7d0c4c1a-4dd9-4e33-8048-3331fb0ffa2b_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7d5ab599-f98c-40c3-9764-6309b8de1654/': 'test_fixtures/nova_episodes_7d5ab599-f98c-40c3-9764-6309b8de1654.json',
'https://media.services.pbs.org/api/v1/episodes/7d5ab599-f98c-40c3-9764-6309b8de1654/assets/': 'test_fixtures/nova_episodes_7d5ab599-f98c-40c3-9764-6309b8de1654_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7ee75342-e7cc-4883-a59b-95054b33381f/': 'test_fixtures/nova_episodes_7ee75342-e7cc-4883-a59b-95054b33381f.json',
'https://media.services.pbs.org/api/v1/episodes/7ee75342-e7cc-4883-a59b-95054b33381f/assets/': 'test_fixtures/nova_episodes_7ee75342-e7cc-4883-a59b-95054b33381f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7fdd0c2a-f77c-4338-9a4a-7ea2906aa488/': 'test_fixtures/nova_episodes_7fdd0c2a-f77c-4338-9a4a-7ea2906aa488.json',
'https://media.services.pbs.org/api/v1/episodes/7fdd0c2a-f77c-4338-9a4a-7ea2906aa488/assets/': 'test_fixtures/nova_episodes_7fdd0c2a-f77c-4338-9a4a-7ea2906aa488_assets.json',
'https://media.services.pbs.org/api/v1/episodes/7ff22d49-8d31-4218-848e-e9fee79efb51/': 'test_fixtures/nova_episodes_7ff22d49-8d31-4218-848e-e9fee79efb51.json',
'https://media.services.pbs.org/api/v1/episodes/7ff22d49-8d31-4218-848e-e9fee79efb51/assets/': 'test_fixtures/nova_episodes_7ff22d49-8d31-4218-848e-e9fee79efb51_assets.json',
'https://media.services.pbs.org/api/v1/episodes/82b66a41-c194-41e3-af28-7c88728275a8/': 'test_fixtures/nova_episodes_82b66a41-c194-41e3-af28-7c88728275a8.json',
'https://media.services.pbs.org/api/v1/episodes/82b66a41-c194-41e3-af28-7c88728275a8/assets/': 'test_fixtures/nova_episodes_82b66a41-c194-41e3-af28-7c88728275a8_assets.json',
'https://media.services.pbs.org/api/v1/episodes/83c3b9d5-368a-4354-86e0-f18d02189e9a/': 'test_fixtures/nova_episodes_83c3b9d5-368a-4354-86e0-f18d02189e9a.json',
'https://media.services.pbs.org/api/v1/episodes/83c3b9d5-368a-4354-86e0-f18d02189e9a/assets/': 'test_fixtures/nova_episodes_83c3b9d5-368a-4354-86e0-f18d02189e9a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/8544d406-9ff7-4c3e-bca8-f464713f0ae4/': 'test_fixtures/nova_episodes_8544d406-9ff7-4c3e-bca8-f464713f0ae4.json',
'https://media.services.pbs.org/api/v1/episodes/8544d406-9ff7-4c3e-bca8-f464713f0ae4/assets/': 'test_fixtures/nova_episodes_8544d406-9ff7-4c3e-bca8-f464713f0ae4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/86f819f3-c7d3-41b4-bfdb-008910296fc0/': 'test_fixtures/nova_episodes_86f819f3-c7d3-41b4-bfdb-008910296fc0.json',
'https://media.services.pbs.org/api/v1/episodes/86f819f3-c7d3-41b4-bfdb-008910296fc0/assets/': 'test_fixtures/nova_episodes_86f819f3-c7d3-41b4-bfdb-008910296fc0_assets.json',
'https://media.services.pbs.org/api/v1/episodes/876305d8-00f1-4155-9894-0cbe4cd1d399/': 'test_fixtures/nova_episodes_876305d8-00f1-4155-9894-0cbe4cd1d399.json',
'https://media.services.pbs.org/api/v1/episodes/876305d8-00f1-4155-9894-0cbe4cd1d399/assets/': 'test_fixtures/nova_episodes_876305d8-00f1-4155-9894-0cbe4cd1d399_assets.json',
'https://media.services.pbs.org/api/v1/episodes/8780f2a2-e84f-4eea-a679-477a09de6fee/': 'test_fixtures/nova_episodes_8780f2a2-e84f-4eea-a679-477a09de6fee.json',
'https://media.services.pbs.org/api/v1/episodes/8780f2a2-e84f-4eea-a679-477a09de6fee/assets/': 'test_fixtures/nova_episodes_8780f2a2-e84f-4eea-a679-477a09de6fee_assets.json',
'https://media.services.pbs.org/api/v1/episodes/89c71079-8450-495d-941a-727629584d1c/': 'test_fixtures/nova_episodes_89c71079-8450-495d-941a-727629584d1c.json',
'https://media.services.pbs.org/api/v1/episodes/89c71079-8450-495d-941a-727629584d1c/assets/': 'test_fixtures/nova_episodes_89c71079-8450-495d-941a-727629584d1c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/8aaffb96-3459-4d82-bcf7-f842dfd5af92/': 'test_fixtures/nova_episodes_8aaffb96-3459-4d82-bcf7-f842dfd5af92.json',
'https://media.services.pbs.org/api/v1/episodes/8aaffb96-3459-4d82-bcf7-f842dfd5af92/assets/': 'test_fixtures/nova_episodes_8aaffb96-3459-4d82-bcf7-f842dfd5af92_assets.json',
'https://media.services.pbs.org/api/v1/episodes/8ac857e8-5063-4f64-a736-4375930867e9/': 'test_fixtures/nova_episodes_8ac857e8-5063-4f64-a736-4375930867e9.json',
'https://media.services.pbs.org/api/v1/episodes/8ac857e8-5063-4f64-a736-4375930867e9/assets/': 'test_fixtures/nova_episodes_8ac857e8-5063-4f64-a736-4375930867e9_assets.json',
'https://media.services.pbs.org/api/v1/episodes/8c75fcfb-a6cf-4372-a6c8-538d2b793709/': 'test_fixtures/nova_episodes_8c75fcfb-a6cf-4372-a6c8-538d2b793709.json',
'https://media.services.pbs.org/api/v1/episodes/8c75fcfb-a6cf-4372-a6c8-538d2b793709/assets/': 'test_fixtures/nova_episodes_8c75fcfb-a6cf-4372-a6c8-538d2b793709_assets.json',
'https://media.services.pbs.org/api/v1/episodes/900d0b90-ddec-4dd4-966e-ee5787af7632/': 'test_fixtures/nova_episodes_900d0b90-ddec-4dd4-966e-ee5787af7632.json',
'https://media.services.pbs.org/api/v1/episodes/900d0b90-ddec-4dd4-966e-ee5787af7632/assets/': 'test_fixtures/nova_episodes_900d0b90-ddec-4dd4-966e-ee5787af7632_assets.json',
'https://media.services.pbs.org/api/v1/episodes/904a7ea3-25ed-4b0c-b031-948cec3c13e4/': 'test_fixtures/nova_episodes_904a7ea3-25ed-4b0c-b031-948cec3c13e4.json',
'https://media.services.pbs.org/api/v1/episodes/904a7ea3-25ed-4b0c-b031-948cec3c13e4/assets/': 'test_fixtures/nova_episodes_904a7ea3-25ed-4b0c-b031-948cec3c13e4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/91d04810-65ad-47f4-81bf-b9ef49ae436d/': 'test_fixtures/nova_episodes_91d04810-65ad-47f4-81bf-b9ef49ae436d.json',
'https://media.services.pbs.org/api/v1/episodes/91d04810-65ad-47f4-81bf-b9ef49ae436d/assets/': 'test_fixtures/nova_episodes_91d04810-65ad-47f4-81bf-b9ef49ae436d_assets.json',
'https://media.services.pbs.org/api/v1/episodes/91ff43f1-4653-4be0-9ba8-6fc95e977bc1/': 'test_fixtures/nova_episodes_91ff43f1-4653-4be0-9ba8-6fc95e977bc1.json',
'https://media.services.pbs.org/api/v1/episodes/91ff43f1-4653-4be0-9ba8-6fc95e977bc1/assets/': 'test_fixtures/nova_episodes_91ff43f1-4653-4be0-9ba8-6fc95e977bc1_assets.json',
'https://media.services.pbs.org/api/v1/episodes/92bb2eb0-ae62-429b-825d-50920b598cf3/': 'test_fixtures/nova_episodes_92bb2eb0-ae62-429b-825d-50920b598cf3.json',
'https://media.services.pbs.org/api/v1/episodes/92bb2eb0-ae62-429b-825d-50920b598cf3/assets/': 'test_fixtures/nova_episodes_92bb2eb0-ae62-429b-825d-50920b598cf3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/932d6f08-2277-4fbf-88cb-c99734433bfd/': 'test_fixtures/nova_episodes_932d6f08-2277-4fbf-88cb-c99734433bfd.json',
'https://media.services.pbs.org/api/v1/episodes/932d6f08-2277-4fbf-88cb-c99734433bfd/assets/': 'test_fixtures/nova_episodes_932d6f08-2277-4fbf-88cb-c99734433bfd_assets.json',
'https://media.services.pbs.org/api/v1/episodes/946f13b4-2580-4376-84a9-0df4effe7fa6/': 'test_fixtures/nova_episodes_946f13b4-2580-4376-84a9-0df4effe7fa6.json',
'https://media.services.pbs.org/api/v1/episodes/946f13b4-2580-4376-84a9-0df4effe7fa6/assets/': 'test_fixtures/nova_episodes_946f13b4-2580-4376-84a9-0df4effe7fa6_assets.json',
'https://media.services.pbs.org/api/v1/episodes/96f3f8a1-1eee-4308-bb1c-5ceaf04d2960/': 'test_fixtures/nova_episodes_96f3f8a1-1eee-4308-bb1c-5ceaf04d2960.json',
'https://media.services.pbs.org/api/v1/episodes/96f3f8a1-1eee-4308-bb1c-5ceaf04d2960/assets/': 'test_fixtures/nova_episodes_96f3f8a1-1eee-4308-bb1c-5ceaf04d2960_assets.json',
'https://media.services.pbs.org/api/v1/episodes/97da0584-edb0-4f5a-a6fd-f18ee9a3db5a/': 'test_fixtures/nova_episodes_97da0584-edb0-4f5a-a6fd-f18ee9a3db5a.json',
'https://media.services.pbs.org/api/v1/episodes/97da0584-edb0-4f5a-a6fd-f18ee9a3db5a/assets/': 'test_fixtures/nova_episodes_97da0584-edb0-4f5a-a6fd-f18ee9a3db5a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/981fb8ae-da6c-4871-affb-ca57b4cba66e/': 'test_fixtures/nova_episodes_981fb8ae-da6c-4871-affb-ca57b4cba66e.json',
'https://media.services.pbs.org/api/v1/episodes/981fb8ae-da6c-4871-affb-ca57b4cba66e/assets/': 'test_fixtures/nova_episodes_981fb8ae-da6c-4871-affb-ca57b4cba66e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/9b1dbf5c-3ae2-4b6f-b394-fb274fa9c63f/': 'test_fixtures/nova_episodes_9b1dbf5c-3ae2-4b6f-b394-fb274fa9c63f.json',
'https://media.services.pbs.org/api/v1/episodes/9b1dbf5c-3ae2-4b6f-b394-fb274fa9c63f/assets/': 'test_fixtures/nova_episodes_9b1dbf5c-3ae2-4b6f-b394-fb274fa9c63f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/9b351cbc-5561-4ce1-b4e9-22133570215e/': 'test_fixtures/nova_episodes_9b351cbc-5561-4ce1-b4e9-22133570215e.json',
'https://media.services.pbs.org/api/v1/episodes/9b351cbc-5561-4ce1-b4e9-22133570215e/assets/': 'test_fixtures/nova_episodes_9b351cbc-5561-4ce1-b4e9-22133570215e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/9b74d76c-7589-4dfb-8a8e-da72fa24baef/': 'test_fixtures/nova_episodes_9b74d76c-7589-4dfb-8a8e-da72fa24baef.json',
'https://media.services.pbs.org/api/v1/episodes/9b74d76c-7589-4dfb-8a8e-da72fa24baef/assets/': 'test_fixtures/nova_episodes_9b74d76c-7589-4dfb-8a8e-da72fa24baef_assets.json',
'https://media.services.pbs.org/api/v1/episodes/9bca9dcc-e28b-4f99-a9f2-b9f0a5c0967e/': 'test_fixtures/nova_episodes_9bca9dcc-e28b-4f99-a9f2-b9f0a5c0967e.json',
'https://media.services.pbs.org/api/v1/episodes/9bca9dcc-e28b-4f99-a9f2-b9f0a5c0967e/assets/': 'test_fixtures/nova_episodes_9bca9dcc-e28b-4f99-a9f2-b9f0a5c0967e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/9ce511ea-9f2b-4e95-8870-23da257909bf/': 'test_fixtures/nova_episodes_9ce511ea-9f2b-4e95-8870-23da257909bf.json',
'https://media.services.pbs.org/api/v1/episodes/9ce511ea-9f2b-4e95-8870-23da257909bf/assets/': 'test_fixtures/nova_episodes_9ce511ea-9f2b-4e95-8870-23da257909bf_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a0fc1ff3-d53a-494b-97f3-0ddac9dc69d4/': 'test_fixtures/nova_episodes_a0fc1ff3-d53a-494b-97f3-0ddac9dc69d4.json',
'https://media.services.pbs.org/api/v1/episodes/a0fc1ff3-d53a-494b-97f3-0ddac9dc69d4/assets/': 'test_fixtures/nova_episodes_a0fc1ff3-d53a-494b-97f3-0ddac9dc69d4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a1d5e956-3b7f-4b27-a539-90d840a6575d/': 'test_fixtures/nova_episodes_a1d5e956-3b7f-4b27-a539-90d840a6575d.json',
'https://media.services.pbs.org/api/v1/episodes/a1d5e956-3b7f-4b27-a539-90d840a6575d/assets/': 'test_fixtures/nova_episodes_a1d5e956-3b7f-4b27-a539-90d840a6575d_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a2cb3af9-faac-4192-9ff6-a6bdaeabc2a6/': 'test_fixtures/nova_episodes_a2cb3af9-faac-4192-9ff6-a6bdaeabc2a6.json',
'https://media.services.pbs.org/api/v1/episodes/a2cb3af9-faac-4192-9ff6-a6bdaeabc2a6/assets/': 'test_fixtures/nova_episodes_a2cb3af9-faac-4192-9ff6-a6bdaeabc2a6_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a2ee7194-6871-4692-814b-58a815009ec5/': 'test_fixtures/nova_episodes_a2ee7194-6871-4692-814b-58a815009ec5.json',
'https://media.services.pbs.org/api/v1/episodes/a2ee7194-6871-4692-814b-58a815009ec5/assets/': 'test_fixtures/nova_episodes_a2ee7194-6871-4692-814b-58a815009ec5_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a3af0857-2245-421c-8ed7-8cda6ee06a98/': 'test_fixtures/nova_episodes_a3af0857-2245-421c-8ed7-8cda6ee06a98.json',
'https://media.services.pbs.org/api/v1/episodes/a3af0857-2245-421c-8ed7-8cda6ee06a98/assets/': 'test_fixtures/nova_episodes_a3af0857-2245-421c-8ed7-8cda6ee06a98_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a3fdd793-89b2-4386-83b8-e57a6d027b73/': 'test_fixtures/nova_episodes_a3fdd793-89b2-4386-83b8-e57a6d027b73.json',
'https://media.services.pbs.org/api/v1/episodes/a3fdd793-89b2-4386-83b8-e57a6d027b73/assets/': 'test_fixtures/nova_episodes_a3fdd793-89b2-4386-83b8-e57a6d027b73_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a5f783a7-bb5d-440e-951c-08ccb2cbf9f7/': 'test_fixtures/nova_episodes_a5f783a7-bb5d-440e-951c-08ccb2cbf9f7.json',
'https://media.services.pbs.org/api/v1/episodes/a5f783a7-bb5d-440e-951c-08ccb2cbf9f7/assets/': 'test_fixtures/nova_episodes_a5f783a7-bb5d-440e-951c-08ccb2cbf9f7_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a6cb640a-fb60-4401-86f7-83cea10c3970/': 'test_fixtures/nova_episodes_a6cb640a-fb60-4401-86f7-83cea10c3970.json',
'https://media.services.pbs.org/api/v1/episodes/a6cb640a-fb60-4401-86f7-83cea10c3970/assets/': 'test_fixtures/nova_episodes_a6cb640a-fb60-4401-86f7-83cea10c3970_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a752fbc3-5c7a-4b27-9624-368049ce8226/': 'test_fixtures/nova_episodes_a752fbc3-5c7a-4b27-9624-368049ce8226.json',
'https://media.services.pbs.org/api/v1/episodes/a752fbc3-5c7a-4b27-9624-368049ce8226/assets/': 'test_fixtures/nova_episodes_a752fbc3-5c7a-4b27-9624-368049ce8226_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a8b88a62-875d-4c08-8ed8-e162194b3ae6/': 'test_fixtures/nova_episodes_a8b88a62-875d-4c08-8ed8-e162194b3ae6.json',
'https://media.services.pbs.org/api/v1/episodes/a8b88a62-875d-4c08-8ed8-e162194b3ae6/assets/': 'test_fixtures/nova_episodes_a8b88a62-875d-4c08-8ed8-e162194b3ae6_assets.json',
'https://media.services.pbs.org/api/v1/episodes/a8d69eb5-49b1-4a27-b3da-7901b9c457a9/': 'test_fixtures/nova_episodes_a8d69eb5-49b1-4a27-b3da-7901b9c457a9.json',
'https://media.services.pbs.org/api/v1/episodes/a8d69eb5-49b1-4a27-b3da-7901b9c457a9/assets/': 'test_fixtures/nova_episodes_a8d69eb5-49b1-4a27-b3da-7901b9c457a9_assets.json',
'https://media.services.pbs.org/api/v1/episodes/abe1b292-bf89-4bc8-924d-90176ae201c9/': 'test_fixtures/nova_episodes_abe1b292-bf89-4bc8-924d-90176ae201c9.json',
'https://media.services.pbs.org/api/v1/episodes/abe1b292-bf89-4bc8-924d-90176ae201c9/assets/': 'test_fixtures/nova_episodes_abe1b292-bf89-4bc8-924d-90176ae201c9_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ac21bf4b-4930-4c0d-99af-a92fa2730274/': 'test_fixtures/nova_episodes_ac21bf4b-4930-4c0d-99af-a92fa2730274.json',
'https://media.services.pbs.org/api/v1/episodes/ac21bf4b-4930-4c0d-99af-a92fa2730274/assets/': 'test_fixtures/nova_episodes_ac21bf4b-4930-4c0d-99af-a92fa2730274_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ac50483e-bd96-48cd-948c-f2d17fcd63a4/': 'test_fixtures/nova_episodes_ac50483e-bd96-48cd-948c-f2d17fcd63a4.json',
'https://media.services.pbs.org/api/v1/episodes/ac50483e-bd96-48cd-948c-f2d17fcd63a4/assets/': 'test_fixtures/nova_episodes_ac50483e-bd96-48cd-948c-f2d17fcd63a4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ac911185-b4ca-43f8-a963-33270af649cc/': 'test_fixtures/nova_episodes_ac911185-b4ca-43f8-a963-33270af649cc.json',
'https://media.services.pbs.org/api/v1/episodes/ac911185-b4ca-43f8-a963-33270af649cc/assets/': 'test_fixtures/nova_episodes_ac911185-b4ca-43f8-a963-33270af649cc_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ae55b227-48a1-4ae0-94fd-ac8f0c5eaf87/': 'test_fixtures/nova_episodes_ae55b227-48a1-4ae0-94fd-ac8f0c5eaf87.json',
'https://media.services.pbs.org/api/v1/episodes/ae55b227-48a1-4ae0-94fd-ac8f0c5eaf87/assets/': 'test_fixtures/nova_episodes_ae55b227-48a1-4ae0-94fd-ac8f0c5eaf87_assets.json',
'https://media.services.pbs.org/api/v1/episodes/af4ae2a9-e45e-4716-ba33-2e8c84d973f3/': 'test_fixtures/nova_episodes_af4ae2a9-e45e-4716-ba33-2e8c84d973f3.json',
'https://media.services.pbs.org/api/v1/episodes/af4ae2a9-e45e-4716-ba33-2e8c84d973f3/assets/': 'test_fixtures/nova_episodes_af4ae2a9-e45e-4716-ba33-2e8c84d973f3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/af582f0e-3b02-4efa-a725-01cb053dc02c/': 'test_fixtures/nova_episodes_af582f0e-3b02-4efa-a725-01cb053dc02c.json',
'https://media.services.pbs.org/api/v1/episodes/af582f0e-3b02-4efa-a725-01cb053dc02c/assets/': 'test_fixtures/nova_episodes_af582f0e-3b02-4efa-a725-01cb053dc02c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/af9adf9f-67f3-4c99-b57e-4dcb9cdd3481/': 'test_fixtures/nova_episodes_af9adf9f-67f3-4c99-b57e-4dcb9cdd3481.json',
'https://media.services.pbs.org/api/v1/episodes/af9adf9f-67f3-4c99-b57e-4dcb9cdd3481/assets/': 'test_fixtures/nova_episodes_af9adf9f-67f3-4c99-b57e-4dcb9cdd3481_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b05855fa-81db-40b8-a96f-2914145f2ae2/': 'test_fixtures/nova_episodes_b05855fa-81db-40b8-a96f-2914145f2ae2.json',
'https://media.services.pbs.org/api/v1/episodes/b05855fa-81db-40b8-a96f-2914145f2ae2/assets/': 'test_fixtures/nova_episodes_b05855fa-81db-40b8-a96f-2914145f2ae2_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b2489197-cad5-4f4f-9f94-36081db09fd7/': 'test_fixtures/nova_episodes_b2489197-cad5-4f4f-9f94-36081db09fd7.json',
'https://media.services.pbs.org/api/v1/episodes/b2489197-cad5-4f4f-9f94-36081db09fd7/assets/': 'test_fixtures/nova_episodes_b2489197-cad5-4f4f-9f94-36081db09fd7_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b2e95931-3232-4b21-a30e-3df0c1312012/': 'test_fixtures/nova_episodes_b2e95931-3232-4b21-a30e-3df0c1312012.json',
'https://media.services.pbs.org/api/v1/episodes/b2e95931-3232-4b21-a30e-3df0c1312012/assets/': 'test_fixtures/nova_episodes_b2e95931-3232-4b21-a30e-3df0c1312012_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b2f12474-e599-4e56-a523-01aca4750537/': 'test_fixtures/nova_episodes_b2f12474-e599-4e56-a523-01aca4750537.json',
'https://media.services.pbs.org/api/v1/episodes/b2f12474-e599-4e56-a523-01aca4750537/assets/': 'test_fixtures/nova_episodes_b2f12474-e599-4e56-a523-01aca4750537_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b3e79d4f-0979-4453-9fca-1fdf640585f1/': 'test_fixtures/nova_episodes_b3e79d4f-0979-4453-9fca-1fdf640585f1.json',
'https://media.services.pbs.org/api/v1/episodes/b3e79d4f-0979-4453-9fca-1fdf640585f1/assets/': 'test_fixtures/nova_episodes_b3e79d4f-0979-4453-9fca-1fdf640585f1_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b401015b-b2ca-44f6-99e5-01b362dce2bd/': 'test_fixtures/nova_episodes_b401015b-b2ca-44f6-99e5-01b362dce2bd.json',
'https://media.services.pbs.org/api/v1/episodes/b401015b-b2ca-44f6-99e5-01b362dce2bd/assets/': 'test_fixtures/nova_episodes_b401015b-b2ca-44f6-99e5-01b362dce2bd_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b55ba9e8-c6ec-43b7-abce-8f1012cebc88/': 'test_fixtures/nova_episodes_b55ba9e8-c6ec-43b7-abce-8f1012cebc88.json',
'https://media.services.pbs.org/api/v1/episodes/b55ba9e8-c6ec-43b7-abce-8f1012cebc88/assets/': 'test_fixtures/nova_episodes_b55ba9e8-c6ec-43b7-abce-8f1012cebc88_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b5e15083-d0f9-49c9-af1f-8a21b1fb6a05/': 'test_fixtures/nova_episodes_b5e15083-d0f9-49c9-af1f-8a21b1fb6a05.json',
'https://media.services.pbs.org/api/v1/episodes/b5e15083-d0f9-49c9-af1f-8a21b1fb6a05/assets/': 'test_fixtures/nova_episodes_b5e15083-d0f9-49c9-af1f-8a21b1fb6a05_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b66ef222-884d-4caa-9ec4-3404dca575df/': 'test_fixtures/nova_episodes_b66ef222-884d-4caa-9ec4-3404dca575df.json',
'https://media.services.pbs.org/api/v1/episodes/b66ef222-884d-4caa-9ec4-3404dca575df/assets/': 'test_fixtures/nova_episodes_b66ef222-884d-4caa-9ec4-3404dca575df_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b7c55355-baba-4763-8a3c-a98fa4363cd0/': 'test_fixtures/nova_episodes_b7c55355-baba-4763-8a3c-a98fa4363cd0.json',
'https://media.services.pbs.org/api/v1/episodes/b7c55355-baba-4763-8a3c-a98fa4363cd0/assets/': 'test_fixtures/nova_episodes_b7c55355-baba-4763-8a3c-a98fa4363cd0_assets.json',
'https://media.services.pbs.org/api/v1/episodes/b91fd0dc-e361-4f74-b74a-4c4751d5df7b/': 'test_fixtures/nova_episodes_b91fd0dc-e361-4f74-b74a-4c4751d5df7b.json',
'https://media.services.pbs.org/api/v1/episodes/b91fd0dc-e361-4f74-b74a-4c4751d5df7b/assets/': 'test_fixtures/nova_episodes_b91fd0dc-e361-4f74-b74a-4c4751d5df7b_assets.json',
'https://media.services.pbs.org/api/v1/episodes/bb269b11-0336-467f-b15d-ff1a6435e903/': 'test_fixtures/nova_episodes_bb269b11-0336-467f-b15d-ff1a6435e903.json',
'https://media.services.pbs.org/api/v1/episodes/bb269b11-0336-467f-b15d-ff1a6435e903/assets/': 'test_fixtures/nova_episodes_bb269b11-0336-467f-b15d-ff1a6435e903_assets.json',
'https://media.services.pbs.org/api/v1/episodes/bf20ef35-fb7d-4290-93f4-1174c348aea4/': 'test_fixtures/nova_episodes_bf20ef35-fb7d-4290-93f4-1174c348aea4.json',
'https://media.services.pbs.org/api/v1/episodes/bf20ef35-fb7d-4290-93f4-1174c348aea4/assets/': 'test_fixtures/nova_episodes_bf20ef35-fb7d-4290-93f4-1174c348aea4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c004a7af-5cea-4b95-902d-1a384f34ed91/': 'test_fixtures/nova_episodes_c004a7af-5cea-4b95-902d-1a384f34ed91.json',
'https://media.services.pbs.org/api/v1/episodes/c004a7af-5cea-4b95-902d-1a384f34ed91/assets/': 'test_fixtures/nova_episodes_c004a7af-5cea-4b95-902d-1a384f34ed91_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c015dcde-bd65-4bbc-b5ca-93cb9693a4af/': 'test_fixtures/nova_episodes_c015dcde-bd65-4bbc-b5ca-93cb9693a4af.json',
'https://media.services.pbs.org/api/v1/episodes/c015dcde-bd65-4bbc-b5ca-93cb9693a4af/assets/': 'test_fixtures/nova_episodes_c015dcde-bd65-4bbc-b5ca-93cb9693a4af_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c148047c-27aa-4e8e-9d50-146ed713c40c/': 'test_fixtures/nova_episodes_c148047c-27aa-4e8e-9d50-146ed713c40c.json',
'https://media.services.pbs.org/api/v1/episodes/c148047c-27aa-4e8e-9d50-146ed713c40c/assets/': 'test_fixtures/nova_episodes_c148047c-27aa-4e8e-9d50-146ed713c40c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c18c2891-4a02-4556-ab0b-ba36b8d228a1/': 'test_fixtures/nova_episodes_c18c2891-4a02-4556-ab0b-ba36b8d228a1.json',
'https://media.services.pbs.org/api/v1/episodes/c18c2891-4a02-4556-ab0b-ba36b8d228a1/assets/': 'test_fixtures/nova_episodes_c18c2891-4a02-4556-ab0b-ba36b8d228a1_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c1e0874a-9a7b-44b4-92ce-114111fa8bd4/': 'test_fixtures/nova_episodes_c1e0874a-9a7b-44b4-92ce-114111fa8bd4.json',
'https://media.services.pbs.org/api/v1/episodes/c1e0874a-9a7b-44b4-92ce-114111fa8bd4/assets/': 'test_fixtures/nova_episodes_c1e0874a-9a7b-44b4-92ce-114111fa8bd4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c2bf0508-c22d-4de2-a105-5e86e2742f85/': 'test_fixtures/nova_episodes_c2bf0508-c22d-4de2-a105-5e86e2742f85.json',
'https://media.services.pbs.org/api/v1/episodes/c2bf0508-c22d-4de2-a105-5e86e2742f85/assets/': 'test_fixtures/nova_episodes_c2bf0508-c22d-4de2-a105-5e86e2742f85_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c3e390a8-6e0e-4960-b42a-66f0c46d64b0/': 'test_fixtures/nova_episodes_c3e390a8-6e0e-4960-b42a-66f0c46d64b0.json',
'https://media.services.pbs.org/api/v1/episodes/c3e390a8-6e0e-4960-b42a-66f0c46d64b0/assets/': 'test_fixtures/nova_episodes_c3e390a8-6e0e-4960-b42a-66f0c46d64b0_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c3f8eb3c-939d-4735-b2f7-fa251b9ee869/': 'test_fixtures/nova_episodes_c3f8eb3c-939d-4735-b2f7-fa251b9ee869.json',
'https://media.services.pbs.org/api/v1/episodes/c3f8eb3c-939d-4735-b2f7-fa251b9ee869/assets/': 'test_fixtures/nova_episodes_c3f8eb3c-939d-4735-b2f7-fa251b9ee869_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c481af49-7586-4f86-8f81-987dace51c43/': 'test_fixtures/nova_episodes_c481af49-7586-4f86-8f81-987dace51c43.json',
'https://media.services.pbs.org/api/v1/episodes/c481af49-7586-4f86-8f81-987dace51c43/assets/': 'test_fixtures/nova_episodes_c481af49-7586-4f86-8f81-987dace51c43_assets.json',
'https://media.services.pbs.org/api/v1/episodes/c67c1f60-abe0-4d46-829a-f232730a160c/': 'test_fixtures/nova_episodes_c67c1f60-abe0-4d46-829a-f232730a160c.json',
'https://media.services.pbs.org/api/v1/episodes/c67c1f60-abe0-4d46-829a-f232730a160c/assets/': 'test_fixtures/nova_episodes_c67c1f60-abe0-4d46-829a-f232730a160c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ca712bbc-97f7-42a0-b665-ed153f4e78c0/': 'test_fixtures/nova_episodes_ca712bbc-97f7-42a0-b665-ed153f4e78c0.json',
'https://media.services.pbs.org/api/v1/episodes/ca712bbc-97f7-42a0-b665-ed153f4e78c0/assets/': 'test_fixtures/nova_episodes_ca712bbc-97f7-42a0-b665-ed153f4e78c0_assets.json',
'https://media.services.pbs.org/api/v1/episodes/cc06941e-b90c-4202-8866-b3db2f00da00/': 'test_fixtures/nova_episodes_cc06941e-b90c-4202-8866-b3db2f00da00.json',
'https://media.services.pbs.org/api/v1/episodes/cc06941e-b90c-4202-8866-b3db2f00da00/assets/': 'test_fixtures/nova_episodes_cc06941e-b90c-4202-8866-b3db2f00da00_assets.json',
'https://media.services.pbs.org/api/v1/episodes/cd145964-7d47-4820-9af3-529a24c705f0/': 'test_fixtures/nova_episodes_cd145964-7d47-4820-9af3-529a24c705f0.json',
'https://media.services.pbs.org/api/v1/episodes/cd145964-7d47-4820-9af3-529a24c705f0/assets/': 'test_fixtures/nova_episodes_cd145964-7d47-4820-9af3-529a24c705f0_assets.json',
'https://media.services.pbs.org/api/v1/episodes/cde61b59-3814-4228-8f50-a6fbc3422978/': 'test_fixtures/nova_episodes_cde61b59-3814-4228-8f50-a6fbc3422978.json',
'https://media.services.pbs.org/api/v1/episodes/cde61b59-3814-4228-8f50-a6fbc3422978/assets/': 'test_fixtures/nova_episodes_cde61b59-3814-4228-8f50-a6fbc3422978_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ce481349-876d-4f29-9246-4cb235fbe184/': 'test_fixtures/nova_episodes_ce481349-876d-4f29-9246-4cb235fbe184.json',
'https://media.services.pbs.org/api/v1/episodes/ce481349-876d-4f29-9246-4cb235fbe184/assets/': 'test_fixtures/nova_episodes_ce481349-876d-4f29-9246-4cb235fbe184_assets.json',
'https://media.services.pbs.org/api/v1/episodes/cea8cc54-0a16-4cc1-88e6-cdd65728aab2/': 'test_fixtures/nova_episodes_cea8cc54-0a16-4cc1-88e6-cdd65728aab2.json',
'https://media.services.pbs.org/api/v1/episodes/cea8cc54-0a16-4cc1-88e6-cdd65728aab2/assets/': 'test_fixtures/nova_episodes_cea8cc54-0a16-4cc1-88e6-cdd65728aab2_assets.json',
'https://media.services.pbs.org/api/v1/episodes/cf4de6a0-527a-4077-8ce6-b6dc63c0b51f/': 'test_fixtures/nova_episodes_cf4de6a0-527a-4077-8ce6-b6dc63c0b51f.json',
'https://media.services.pbs.org/api/v1/episodes/cf4de6a0-527a-4077-8ce6-b6dc63c0b51f/assets/': 'test_fixtures/nova_episodes_cf4de6a0-527a-4077-8ce6-b6dc63c0b51f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/cfbdf55d-2c19-4378-b323-593efb7fb402/': 'test_fixtures/nova_episodes_cfbdf55d-2c19-4378-b323-593efb7fb402.json',
'https://media.services.pbs.org/api/v1/episodes/cfbdf55d-2c19-4378-b323-593efb7fb402/assets/': 'test_fixtures/nova_episodes_cfbdf55d-2c19-4378-b323-593efb7fb402_assets.json',
'https://media.services.pbs.org/api/v1/episodes/cfe39dce-1de5-436c-aad2-f8ed61f13cba/': 'test_fixtures/nova_episodes_cfe39dce-1de5-436c-aad2-f8ed61f13cba.json',
'https://media.services.pbs.org/api/v1/episodes/cfe39dce-1de5-436c-aad2-f8ed61f13cba/assets/': 'test_fixtures/nova_episodes_cfe39dce-1de5-436c-aad2-f8ed61f13cba_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d1c0b463-9155-4a02-b28a-942ec8a3a736/': 'test_fixtures/nova_episodes_d1c0b463-9155-4a02-b28a-942ec8a3a736.json',
'https://media.services.pbs.org/api/v1/episodes/d1c0b463-9155-4a02-b28a-942ec8a3a736/assets/': 'test_fixtures/nova_episodes_d1c0b463-9155-4a02-b28a-942ec8a3a736_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d2a46c97-d996-45f1-b7de-4a28e0fe1500/': 'test_fixtures/nova_episodes_d2a46c97-d996-45f1-b7de-4a28e0fe1500.json',
'https://media.services.pbs.org/api/v1/episodes/d2a46c97-d996-45f1-b7de-4a28e0fe1500/assets/': 'test_fixtures/nova_episodes_d2a46c97-d996-45f1-b7de-4a28e0fe1500_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d3573540-904e-4c8e-b33e-53762bcb28c0/': 'test_fixtures/nova_episodes_d3573540-904e-4c8e-b33e-53762bcb28c0.json',
'https://media.services.pbs.org/api/v1/episodes/d3573540-904e-4c8e-b33e-53762bcb28c0/assets/': 'test_fixtures/nova_episodes_d3573540-904e-4c8e-b33e-53762bcb28c0_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d4d901ab-b595-41be-9176-094dbd8b97be/': 'test_fixtures/nova_episodes_d4d901ab-b595-41be-9176-094dbd8b97be.json',
'https://media.services.pbs.org/api/v1/episodes/d4d901ab-b595-41be-9176-094dbd8b97be/assets/': 'test_fixtures/nova_episodes_d4d901ab-b595-41be-9176-094dbd8b97be_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d525995c-7f03-4fbe-89c1-418be210e69d/': 'test_fixtures/nova_episodes_d525995c-7f03-4fbe-89c1-418be210e69d.json',
'https://media.services.pbs.org/api/v1/episodes/d525995c-7f03-4fbe-89c1-418be210e69d/assets/': 'test_fixtures/nova_episodes_d525995c-7f03-4fbe-89c1-418be210e69d_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d5ae925b-3a15-44d3-aea8-09b8995d2b5c/': 'test_fixtures/nova_episodes_d5ae925b-3a15-44d3-aea8-09b8995d2b5c.json',
'https://media.services.pbs.org/api/v1/episodes/d5ae925b-3a15-44d3-aea8-09b8995d2b5c/assets/': 'test_fixtures/nova_episodes_d5ae925b-3a15-44d3-aea8-09b8995d2b5c_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d5c35385-af83-4ee8-afff-2435b0c56157/': 'test_fixtures/nova_episodes_d5c35385-af83-4ee8-afff-2435b0c56157.json',
'https://media.services.pbs.org/api/v1/episodes/d5c35385-af83-4ee8-afff-2435b0c56157/assets/': 'test_fixtures/nova_episodes_d5c35385-af83-4ee8-afff-2435b0c56157_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d62ea9b8-f379-483b-afd2-e018a1b57b10/': 'test_fixtures/nova_episodes_d62ea9b8-f379-483b-afd2-e018a1b57b10.json',
'https://media.services.pbs.org/api/v1/episodes/d62ea9b8-f379-483b-afd2-e018a1b57b10/assets/': 'test_fixtures/nova_episodes_d62ea9b8-f379-483b-afd2-e018a1b57b10_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d64da4a8-5258-4e44-bb08-1bb1d1e104f7/': 'test_fixtures/nova_episodes_d64da4a8-5258-4e44-bb08-1bb1d1e104f7.json',
'https://media.services.pbs.org/api/v1/episodes/d64da4a8-5258-4e44-bb08-1bb1d1e104f7/assets/': 'test_fixtures/nova_episodes_d64da4a8-5258-4e44-bb08-1bb1d1e104f7_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d681d5a9-6017-4500-aad4-5923ab1777ab/': 'test_fixtures/nova_episodes_d681d5a9-6017-4500-aad4-5923ab1777ab.json',
'https://media.services.pbs.org/api/v1/episodes/d681d5a9-6017-4500-aad4-5923ab1777ab/assets/': 'test_fixtures/nova_episodes_d681d5a9-6017-4500-aad4-5923ab1777ab_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d70a8cf0-d98c-4a73-93f0-b718ad34162a/': 'test_fixtures/nova_episodes_d70a8cf0-d98c-4a73-93f0-b718ad34162a.json',
'https://media.services.pbs.org/api/v1/episodes/d70a8cf0-d98c-4a73-93f0-b718ad34162a/assets/': 'test_fixtures/nova_episodes_d70a8cf0-d98c-4a73-93f0-b718ad34162a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/d721e043-a51a-4c46-99fa-a847b8509594/': 'test_fixtures/nova_episodes_d721e043-a51a-4c46-99fa-a847b8509594.json',
'https://media.services.pbs.org/api/v1/episodes/d721e043-a51a-4c46-99fa-a847b8509594/assets/': 'test_fixtures/nova_episodes_d721e043-a51a-4c46-99fa-a847b8509594_assets.json',
'https://media.services.pbs.org/api/v1/episodes/dc3e7597-d8d2-4c67-8ef8-09c52ef83153/': 'test_fixtures/nova_episodes_dc3e7597-d8d2-4c67-8ef8-09c52ef83153.json',
'https://media.services.pbs.org/api/v1/episodes/dc3e7597-d8d2-4c67-8ef8-09c52ef83153/assets/': 'test_fixtures/nova_episodes_dc3e7597-d8d2-4c67-8ef8-09c52ef83153_assets.json',
'https://media.services.pbs.org/api/v1/episodes/dc531fe8-fafb-4c81-8e52-234ec81fa998/': 'test_fixtures/nova_episodes_dc531fe8-fafb-4c81-8e52-234ec81fa998.json',
'https://media.services.pbs.org/api/v1/episodes/dc531fe8-fafb-4c81-8e52-234ec81fa998/assets/': 'test_fixtures/nova_episodes_dc531fe8-fafb-4c81-8e52-234ec81fa998_assets.json',
'https://media.services.pbs.org/api/v1/episodes/dca9275a-7521-482a-89c3-f27fb15dca3a/': 'test_fixtures/nova_episodes_dca9275a-7521-482a-89c3-f27fb15dca3a.json',
'https://media.services.pbs.org/api/v1/episodes/dca9275a-7521-482a-89c3-f27fb15dca3a/assets/': 'test_fixtures/nova_episodes_dca9275a-7521-482a-89c3-f27fb15dca3a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/dd2a8da6-b020-4d74-be0d-92fe80af6dfc/': 'test_fixtures/nova_episodes_dd2a8da6-b020-4d74-be0d-92fe80af6dfc.json',
'https://media.services.pbs.org/api/v1/episodes/dd2a8da6-b020-4d74-be0d-92fe80af6dfc/assets/': 'test_fixtures/nova_episodes_dd2a8da6-b020-4d74-be0d-92fe80af6dfc_assets.json',
'https://media.services.pbs.org/api/v1/episodes/de1b1bbf-1284-4861-96f4-9bf023ccb9e3/': 'test_fixtures/nova_episodes_de1b1bbf-1284-4861-96f4-9bf023ccb9e3.json',
'https://media.services.pbs.org/api/v1/episodes/de1b1bbf-1284-4861-96f4-9bf023ccb9e3/assets/': 'test_fixtures/nova_episodes_de1b1bbf-1284-4861-96f4-9bf023ccb9e3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/de2ff1e5-95a5-40b5-a8bb-8d822cce9688/': 'test_fixtures/nova_episodes_de2ff1e5-95a5-40b5-a8bb-8d822cce9688.json',
'https://media.services.pbs.org/api/v1/episodes/de2ff1e5-95a5-40b5-a8bb-8d822cce9688/assets/': 'test_fixtures/nova_episodes_de2ff1e5-95a5-40b5-a8bb-8d822cce9688_assets.json',
'https://media.services.pbs.org/api/v1/episodes/deac4540-f099-4a98-ba28-4a3734f165f4/': 'test_fixtures/nova_episodes_deac4540-f099-4a98-ba28-4a3734f165f4.json',
'https://media.services.pbs.org/api/v1/episodes/deac4540-f099-4a98-ba28-4a3734f165f4/assets/': 'test_fixtures/nova_episodes_deac4540-f099-4a98-ba28-4a3734f165f4_assets.json',
'https://media.services.pbs.org/api/v1/episodes/df26f52d-dc04-408f-b6c6-65216a6c380e/': 'test_fixtures/nova_episodes_df26f52d-dc04-408f-b6c6-65216a6c380e.json',
'https://media.services.pbs.org/api/v1/episodes/df26f52d-dc04-408f-b6c6-65216a6c380e/assets/': 'test_fixtures/nova_episodes_df26f52d-dc04-408f-b6c6-65216a6c380e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/dfe4840a-1530-415b-84ae-d203622fc98e/': 'test_fixtures/nova_episodes_dfe4840a-1530-415b-84ae-d203622fc98e.json',
'https://media.services.pbs.org/api/v1/episodes/dfe4840a-1530-415b-84ae-d203622fc98e/assets/': 'test_fixtures/nova_episodes_dfe4840a-1530-415b-84ae-d203622fc98e_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e0e279aa-50ec-4887-b6a7-ca68bdf610e6/': 'test_fixtures/nova_episodes_e0e279aa-50ec-4887-b6a7-ca68bdf610e6.json',
'https://media.services.pbs.org/api/v1/episodes/e0e279aa-50ec-4887-b6a7-ca68bdf610e6/assets/': 'test_fixtures/nova_episodes_e0e279aa-50ec-4887-b6a7-ca68bdf610e6_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e0fd0c88-1c9e-4456-bfa0-1f416aa248e6/': 'test_fixtures/nova_episodes_e0fd0c88-1c9e-4456-bfa0-1f416aa248e6.json',
'https://media.services.pbs.org/api/v1/episodes/e0fd0c88-1c9e-4456-bfa0-1f416aa248e6/assets/': 'test_fixtures/nova_episodes_e0fd0c88-1c9e-4456-bfa0-1f416aa248e6_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e3a7bd8a-4b11-4b8e-a6fa-1d5cff1d820a/': 'test_fixtures/nova_episodes_e3a7bd8a-4b11-4b8e-a6fa-1d5cff1d820a.json',
'https://media.services.pbs.org/api/v1/episodes/e3a7bd8a-4b11-4b8e-a6fa-1d5cff1d820a/assets/': 'test_fixtures/nova_episodes_e3a7bd8a-4b11-4b8e-a6fa-1d5cff1d820a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e4d77392-c74f-4c78-9f60-a19ea1224846/': 'test_fixtures/nova_episodes_e4d77392-c74f-4c78-9f60-a19ea1224846.json',
'https://media.services.pbs.org/api/v1/episodes/e4d77392-c74f-4c78-9f60-a19ea1224846/assets/': 'test_fixtures/nova_episodes_e4d77392-c74f-4c78-9f60-a19ea1224846_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e4e4831a-be39-416e-9625-c0a3815e0021/': 'test_fixtures/nova_episodes_e4e4831a-be39-416e-9625-c0a3815e0021.json',
'https://media.services.pbs.org/api/v1/episodes/e4e4831a-be39-416e-9625-c0a3815e0021/assets/': 'test_fixtures/nova_episodes_e4e4831a-be39-416e-9625-c0a3815e0021_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e5413d5d-8a93-4da3-9773-82e256ce3778/': 'test_fixtures/nova_episodes_e5413d5d-8a93-4da3-9773-82e256ce3778.json',
'https://media.services.pbs.org/api/v1/episodes/e5413d5d-8a93-4da3-9773-82e256ce3778/assets/': 'test_fixtures/nova_episodes_e5413d5d-8a93-4da3-9773-82e256ce3778_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e5e03277-6798-4ba7-a137-e5b4dbe3a223/': 'test_fixtures/nova_episodes_e5e03277-6798-4ba7-a137-e5b4dbe3a223.json',
'https://media.services.pbs.org/api/v1/episodes/e5e03277-6798-4ba7-a137-e5b4dbe3a223/assets/': 'test_fixtures/nova_episodes_e5e03277-6798-4ba7-a137-e5b4dbe3a223_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e866d6a3-a997-4cde-946f-722cab7bed35/': 'test_fixtures/nova_episodes_e866d6a3-a997-4cde-946f-722cab7bed35.json',
'https://media.services.pbs.org/api/v1/episodes/e866d6a3-a997-4cde-946f-722cab7bed35/assets/': 'test_fixtures/nova_episodes_e866d6a3-a997-4cde-946f-722cab7bed35_assets.json',
'https://media.services.pbs.org/api/v1/episodes/e88e8082-6ef5-4af6-88a1-750696f14073/': 'test_fixtures/nova_episodes_e88e8082-6ef5-4af6-88a1-750696f14073.json',
'https://media.services.pbs.org/api/v1/episodes/e88e8082-6ef5-4af6-88a1-750696f14073/assets/': 'test_fixtures/nova_episodes_e88e8082-6ef5-4af6-88a1-750696f14073_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ea983362-e04d-4137-b53f-cf1b4048df38/': 'test_fixtures/nova_episodes_ea983362-e04d-4137-b53f-cf1b4048df38.json',
'https://media.services.pbs.org/api/v1/episodes/ea983362-e04d-4137-b53f-cf1b4048df38/assets/': 'test_fixtures/nova_episodes_ea983362-e04d-4137-b53f-cf1b4048df38_assets.json',
'https://media.services.pbs.org/api/v1/episodes/eaedcac0-4f4c-4541-8400-89b7f1de7392/': 'test_fixtures/nova_episodes_eaedcac0-4f4c-4541-8400-89b7f1de7392.json',
'https://media.services.pbs.org/api/v1/episodes/eaedcac0-4f4c-4541-8400-89b7f1de7392/assets/': 'test_fixtures/nova_episodes_eaedcac0-4f4c-4541-8400-89b7f1de7392_assets.json',
'https://media.services.pbs.org/api/v1/episodes/eb0ca0ae-d4ab-4d88-a1fc-231166fe1b54/': 'test_fixtures/nova_episodes_eb0ca0ae-d4ab-4d88-a1fc-231166fe1b54.json',
'https://media.services.pbs.org/api/v1/episodes/eb0ca0ae-d4ab-4d88-a1fc-231166fe1b54/assets/': 'test_fixtures/nova_episodes_eb0ca0ae-d4ab-4d88-a1fc-231166fe1b54_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ed2a38b9-5d54-4fcf-b4f1-277cbc7876e1/': 'test_fixtures/nova_episodes_ed2a38b9-5d54-4fcf-b4f1-277cbc7876e1.json',
'https://media.services.pbs.org/api/v1/episodes/ed2a38b9-5d54-4fcf-b4f1-277cbc7876e1/assets/': 'test_fixtures/nova_episodes_ed2a38b9-5d54-4fcf-b4f1-277cbc7876e1_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f12e28dd-6056-4a31-ab2a-7ce867bb2ad3/': 'test_fixtures/nova_episodes_f12e28dd-6056-4a31-ab2a-7ce867bb2ad3.json',
'https://media.services.pbs.org/api/v1/episodes/f12e28dd-6056-4a31-ab2a-7ce867bb2ad3/assets/': 'test_fixtures/nova_episodes_f12e28dd-6056-4a31-ab2a-7ce867bb2ad3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f2e9990b-b645-4395-8eae-a01746dcb6a0/': 'test_fixtures/nova_episodes_f2e9990b-b645-4395-8eae-a01746dcb6a0.json',
'https://media.services.pbs.org/api/v1/episodes/f2e9990b-b645-4395-8eae-a01746dcb6a0/assets/': 'test_fixtures/nova_episodes_f2e9990b-b645-4395-8eae-a01746dcb6a0_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f3aac3cf-0567-4508-bc11-148a1aa79d33/': 'test_fixtures/nova_episodes_f3aac3cf-0567-4508-bc11-148a1aa79d33.json',
'https://media.services.pbs.org/api/v1/episodes/f3aac3cf-0567-4508-bc11-148a1aa79d33/assets/': 'test_fixtures/nova_episodes_f3aac3cf-0567-4508-bc11-148a1aa79d33_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f3c87756-40a7-497a-b732-010dc1a5c577/': 'test_fixtures/nova_episodes_f3c87756-40a7-497a-b732-010dc1a5c577.json',
'https://media.services.pbs.org/api/v1/episodes/f3c87756-40a7-497a-b732-010dc1a5c577/assets/': 'test_fixtures/nova_episodes_f3c87756-40a7-497a-b732-010dc1a5c577_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f4e2b125-a497-486c-a015-3211485f8ef9/': 'test_fixtures/nova_episodes_f4e2b125-a497-486c-a015-3211485f8ef9.json',
'https://media.services.pbs.org/api/v1/episodes/f4e2b125-a497-486c-a015-3211485f8ef9/assets/': 'test_fixtures/nova_episodes_f4e2b125-a497-486c-a015-3211485f8ef9_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f6a4560c-f64f-4dc9-a97c-67e7dbddad36/': 'test_fixtures/nova_episodes_f6a4560c-f64f-4dc9-a97c-67e7dbddad36.json',
'https://media.services.pbs.org/api/v1/episodes/f6a4560c-f64f-4dc9-a97c-67e7dbddad36/assets/': 'test_fixtures/nova_episodes_f6a4560c-f64f-4dc9-a97c-67e7dbddad36_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f83dd05c-5ba1-414c-9c50-a1a652ffdfd2/': 'test_fixtures/nova_episodes_f83dd05c-5ba1-414c-9c50-a1a652ffdfd2.json',
'https://media.services.pbs.org/api/v1/episodes/f83dd05c-5ba1-414c-9c50-a1a652ffdfd2/assets/': 'test_fixtures/nova_episodes_f83dd05c-5ba1-414c-9c50-a1a652ffdfd2_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f985868c-c3d6-4b39-b8c6-5c93ff8de4d3/': 'test_fixtures/nova_episodes_f985868c-c3d6-4b39-b8c6-5c93ff8de4d3.json',
'https://media.services.pbs.org/api/v1/episodes/f985868c-c3d6-4b39-b8c6-5c93ff8de4d3/assets/': 'test_fixtures/nova_episodes_f985868c-c3d6-4b39-b8c6-5c93ff8de4d3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f9b0ce7a-37ee-4d44-bb2b-92cc11ff90f3/': 'test_fixtures/nova_episodes_f9b0ce7a-37ee-4d44-bb2b-92cc11ff90f3.json',
'https://media.services.pbs.org/api/v1/episodes/f9b0ce7a-37ee-4d44-bb2b-92cc11ff90f3/assets/': 'test_fixtures/nova_episodes_f9b0ce7a-37ee-4d44-bb2b-92cc11ff90f3_assets.json',
'https://media.services.pbs.org/api/v1/episodes/f9d363e6-8941-45c6-9079-0d3fbe5b44f8/': 'test_fixtures/nova_episodes_f9d363e6-8941-45c6-9079-0d3fbe5b44f8.json',
'https://media.services.pbs.org/api/v1/episodes/f9d363e6-8941-45c6-9079-0d3fbe5b44f8/assets/': 'test_fixtures/nova_episodes_f9d363e6-8941-45c6-9079-0d3fbe5b44f8_assets.json',
'https://media.services.pbs.org/api/v1/episodes/fa344068-eea6-47e4-a6df-d39618b8ce6d/': 'test_fixtures/nova_episodes_fa344068-eea6-47e4-a6df-d39618b8ce6d.json',
'https://media.services.pbs.org/api/v1/episodes/fa344068-eea6-47e4-a6df-d39618b8ce6d/assets/': 'test_fixtures/nova_episodes_fa344068-eea6-47e4-a6df-d39618b8ce6d_assets.json',
'https://media.services.pbs.org/api/v1/episodes/fa9c42db-fdb4-40a9-abc0-fb27a47d923f/': 'test_fixtures/nova_episodes_fa9c42db-fdb4-40a9-abc0-fb27a47d923f.json',
'https://media.services.pbs.org/api/v1/episodes/fa9c42db-fdb4-40a9-abc0-fb27a47d923f/assets/': 'test_fixtures/nova_episodes_fa9c42db-fdb4-40a9-abc0-fb27a47d923f_assets.json',
'https://media.services.pbs.org/api/v1/episodes/fb4e73e2-ccd5-45e5-96d8-6e0ceff1f38a/': 'test_fixtures/nova_episodes_fb4e73e2-ccd5-45e5-96d8-6e0ceff1f38a.json',
'https://media.services.pbs.org/api/v1/episodes/fb4e73e2-ccd5-45e5-96d8-6e0ceff1f38a/assets/': 'test_fixtures/nova_episodes_fb4e73e2-ccd5-45e5-96d8-6e0ceff1f38a_assets.json',
'https://media.services.pbs.org/api/v1/episodes/ffdc13cc-d9f9-4584-8e26-4f6f7eb93c53/': 'test_fixtures/nova_episodes_ffdc13cc-d9f9-4584-8e26-4f6f7eb93c53.json',
'https://media.services.pbs.org/api/v1/episodes/ffdc13cc-d9f9-4584-8e26-4f6f7eb93c53/assets/': 'test_fixtures/nova_episodes_ffdc13cc-d9f9-4584-8e26-4f6f7eb93c53_assets.json',
'https://media.services.pbs.org/api/v1/seasons/08cd0667-88ae-4c3d-b726-c0833301f55b/': 'test_fixtures/nova_seasons_08cd0667-88ae-4c3d-b726-c0833301f55b.json',
'https://media.services.pbs.org/api/v1/seasons/08cd0667-88ae-4c3d-b726-c0833301f55b/assets/': 'test_fixtures/nova_seasons_08cd0667-88ae-4c3d-b726-c0833301f55b_assets.json',
'https://media.services.pbs.org/api/v1/seasons/08cd0667-88ae-4c3d-b726-c0833301f55b/episodes/': 'test_fixtures/nova_seasons_08cd0667-88ae-4c3d-b726-c0833301f55b_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/128ac8f0-d074-47d2-b48f-c26a64149acb/': 'test_fixtures/nova_seasons_128ac8f0-d074-47d2-b48f-c26a64149acb.json',
'https://media.services.pbs.org/api/v1/seasons/128ac8f0-d074-47d2-b48f-c26a64149acb/assets/': 'test_fixtures/nova_seasons_128ac8f0-d074-47d2-b48f-c26a64149acb_assets.json',
'https://media.services.pbs.org/api/v1/seasons/128ac8f0-d074-47d2-b48f-c26a64149acb/episodes/': 'test_fixtures/nova_seasons_128ac8f0-d074-47d2-b48f-c26a64149acb_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/15ce94d8-0937-4778-aa12-88cccdfd8ba6/': 'test_fixtures/nova_seasons_15ce94d8-0937-4778-aa12-88cccdfd8ba6.json',
'https://media.services.pbs.org/api/v1/seasons/15ce94d8-0937-4778-aa12-88cccdfd8ba6/assets/': 'test_fixtures/nova_seasons_15ce94d8-0937-4778-aa12-88cccdfd8ba6_assets.json',
'https://media.services.pbs.org/api/v1/seasons/15ce94d8-0937-4778-aa12-88cccdfd8ba6/episodes/': 'test_fixtures/nova_seasons_15ce94d8-0937-4778-aa12-88cccdfd8ba6_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/18c3d01f-d067-4889-97cd-fed79a8ef702/': 'test_fixtures/nova_seasons_18c3d01f-d067-4889-97cd-fed79a8ef702.json',
'https://media.services.pbs.org/api/v1/seasons/18c3d01f-d067-4889-97cd-fed79a8ef702/assets/': 'test_fixtures/nova_seasons_18c3d01f-d067-4889-97cd-fed79a8ef702_assets.json',
'https://media.services.pbs.org/api/v1/seasons/18c3d01f-d067-4889-97cd-fed79a8ef702/episodes/': 'test_fixtures/nova_seasons_18c3d01f-d067-4889-97cd-fed79a8ef702_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/3f2fd2dc-e10d-4c9d-8a3b-9c84a0251b05/': 'test_fixtures/nova_seasons_3f2fd2dc-e10d-4c9d-8a3b-9c84a0251b05.json',
'https://media.services.pbs.org/api/v1/seasons/3f2fd2dc-e10d-4c9d-8a3b-9c84a0251b05/assets/': 'test_fixtures/nova_seasons_3f2fd2dc-e10d-4c9d-8a3b-9c84a0251b05_assets.json',
'https://media.services.pbs.org/api/v1/seasons/3f2fd2dc-e10d-4c9d-8a3b-9c84a0251b05/episodes/': 'test_fixtures/nova_seasons_3f2fd2dc-e10d-4c9d-8a3b-9c84a0251b05_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/64e5aaf9-7230-4040-8976-1add3f82af73/': 'test_fixtures/nova_seasons_64e5aaf9-7230-4040-8976-1add3f82af73.json',
'https://media.services.pbs.org/api/v1/seasons/64e5aaf9-7230-4040-8976-1add3f82af73/assets/': 'test_fixtures/nova_seasons_64e5aaf9-7230-4040-8976-1add3f82af73_assets.json',
'https://media.services.pbs.org/api/v1/seasons/64e5aaf9-7230-4040-8976-1add3f82af73/episodes/': 'test_fixtures/nova_seasons_64e5aaf9-7230-4040-8976-1add3f82af73_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/67fa8085-a86c-494a-9394-5eb09070c8c9/': 'test_fixtures/nova_seasons_67fa8085-a86c-494a-9394-5eb09070c8c9.json',
'https://media.services.pbs.org/api/v1/seasons/67fa8085-a86c-494a-9394-5eb09070c8c9/assets/': 'test_fixtures/nova_seasons_67fa8085-a86c-494a-9394-5eb09070c8c9_assets.json',
'https://media.services.pbs.org/api/v1/seasons/67fa8085-a86c-494a-9394-5eb09070c8c9/episodes/': 'test_fixtures/nova_seasons_67fa8085-a86c-494a-9394-5eb09070c8c9_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/705b6718-8291-4a80-a291-bb5f972d7c0d/': 'test_fixtures/nova_seasons_705b6718-8291-4a80-a291-bb5f972d7c0d.json',
'https://media.services.pbs.org/api/v1/seasons/705b6718-8291-4a80-a291-bb5f972d7c0d/assets/': 'test_fixtures/nova_seasons_705b6718-8291-4a80-a291-bb5f972d7c0d_assets.json',
'https://media.services.pbs.org/api/v1/seasons/705b6718-8291-4a80-a291-bb5f972d7c0d/episodes/': 'test_fixtures/nova_seasons_705b6718-8291-4a80-a291-bb5f972d7c0d_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/745da89e-a2f6-4b0b-9027-219ff64408e9/': 'test_fixtures/nova_seasons_745da89e-a2f6-4b0b-9027-219ff64408e9.json',
'https://media.services.pbs.org/api/v1/seasons/745da89e-a2f6-4b0b-9027-219ff64408e9/assets/': 'test_fixtures/nova_seasons_745da89e-a2f6-4b0b-9027-219ff64408e9_assets.json',
'https://media.services.pbs.org/api/v1/seasons/745da89e-a2f6-4b0b-9027-219ff64408e9/episodes/': 'test_fixtures/nova_seasons_745da89e-a2f6-4b0b-9027-219ff64408e9_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/7bf1be39-29a9-4d1e-8fbd-d0b284bd54ef/': 'test_fixtures/nova_seasons_7bf1be39-29a9-4d1e-8fbd-d0b284bd54ef.json',
'https://media.services.pbs.org/api/v1/seasons/7bf1be39-29a9-4d1e-8fbd-d0b284bd54ef/assets/': 'test_fixtures/nova_seasons_7bf1be39-29a9-4d1e-8fbd-d0b284bd54ef_assets.json',
'https://media.services.pbs.org/api/v1/seasons/7bf1be39-29a9-4d1e-8fbd-d0b284bd54ef/assets/?page=2': 'test_fixtures/nova_seasons_7bf1be39-29a9-4d1e-8fbd-d0b284bd54ef_assets_page_2.json',
'https://media.services.pbs.org/api/v1/seasons/7bf1be39-29a9-4d1e-8fbd-d0b284bd54ef/episodes/': 'test_fixtures/nova_seasons_7bf1be39-29a9-4d1e-8fbd-d0b284bd54ef_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/7f613b59-588b-4ec5-bcb1-a3d595b2579c/': 'test_fixtures/nova_seasons_7f613b59-588b-4ec5-bcb1-a3d595b2579c.json',
'https://media.services.pbs.org/api/v1/seasons/7f613b59-588b-4ec5-bcb1-a3d595b2579c/assets/': 'test_fixtures/nova_seasons_7f613b59-588b-4ec5-bcb1-a3d595b2579c_assets.json',
'https://media.services.pbs.org/api/v1/seasons/7f613b59-588b-4ec5-bcb1-a3d595b2579c/assets/?page=2': 'test_fixtures/nova_seasons_7f613b59-588b-4ec5-bcb1-a3d595b2579c_assets_page_2.json',
'https://media.services.pbs.org/api/v1/seasons/7f613b59-588b-4ec5-bcb1-a3d595b2579c/episodes/': 'test_fixtures/nova_seasons_7f613b59-588b-4ec5-bcb1-a3d595b2579c_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/8c818a60-c341-4ce1-aee7-b3541ff9c43e/': 'test_fixtures/nova_seasons_8c818a60-c341-4ce1-aee7-b3541ff9c43e.json',
'https://media.services.pbs.org/api/v1/seasons/8c818a60-c341-4ce1-aee7-b3541ff9c43e/assets/': 'test_fixtures/nova_seasons_8c818a60-c341-4ce1-aee7-b3541ff9c43e_assets.json',
'https://media.services.pbs.org/api/v1/seasons/8c818a60-c341-4ce1-aee7-b3541ff9c43e/episodes/': 'test_fixtures/nova_seasons_8c818a60-c341-4ce1-aee7-b3541ff9c43e_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/95801358-78b6-4d2a-9b36-e274e5716e4e/': 'test_fixtures/nova_seasons_95801358-78b6-4d2a-9b36-e274e5716e4e.json',
'https://media.services.pbs.org/api/v1/seasons/95801358-78b6-4d2a-9b36-e274e5716e4e/assets/': 'test_fixtures/nova_seasons_95801358-78b6-4d2a-9b36-e274e5716e4e_assets.json',
'https://media.services.pbs.org/api/v1/seasons/95801358-78b6-4d2a-9b36-e274e5716e4e/episodes/': 'test_fixtures/nova_seasons_95801358-78b6-4d2a-9b36-e274e5716e4e_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/9c84c743-99d3-46e4-a9b5-86e62bf58e44/': 'test_fixtures/nova_seasons_9c84c743-99d3-46e4-a9b5-86e62bf58e44.json',
'https://media.services.pbs.org/api/v1/seasons/9c84c743-99d3-46e4-a9b5-86e62bf58e44/assets/': 'test_fixtures/nova_seasons_9c84c743-99d3-46e4-a9b5-86e62bf58e44_assets.json',
'https://media.services.pbs.org/api/v1/seasons/9c84c743-99d3-46e4-a9b5-86e62bf58e44/episodes/': 'test_fixtures/nova_seasons_9c84c743-99d3-46e4-a9b5-86e62bf58e44_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/ae794533-81a1-472b-8331-6fd4076f5f7e/': 'test_fixtures/nova_seasons_ae794533-81a1-472b-8331-6fd4076f5f7e.json',
'https://media.services.pbs.org/api/v1/seasons/ae794533-81a1-472b-8331-6fd4076f5f7e/assets/': 'test_fixtures/nova_seasons_ae794533-81a1-472b-8331-6fd4076f5f7e_assets.json',
'https://media.services.pbs.org/api/v1/seasons/ae794533-81a1-472b-8331-6fd4076f5f7e/episodes/': 'test_fixtures/nova_seasons_ae794533-81a1-472b-8331-6fd4076f5f7e_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/c5cf1f7c-c636-47cb-bec4-ca6dee8bf43a/': 'test_fixtures/nova_seasons_c5cf1f7c-c636-47cb-bec4-ca6dee8bf43a.json',
'https://media.services.pbs.org/api/v1/seasons/c5cf1f7c-c636-47cb-bec4-ca6dee8bf43a/assets/': 'test_fixtures/nova_seasons_c5cf1f7c-c636-47cb-bec4-ca6dee8bf43a_assets.json',
'https://media.services.pbs.org/api/v1/seasons/c5cf1f7c-c636-47cb-bec4-ca6dee8bf43a/episodes/': 'test_fixtures/nova_seasons_c5cf1f7c-c636-47cb-bec4-ca6dee8bf43a_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/d62aef4f-7703-41c0-beca-61a520e79313/': 'test_fixtures/nova_seasons_d62aef4f-7703-41c0-beca-61a520e79313.json',
'https://media.services.pbs.org/api/v1/seasons/d62aef4f-7703-41c0-beca-61a520e79313/assets/': 'test_fixtures/nova_seasons_d62aef4f-7703-41c0-beca-61a520e79313_assets.json',
'https://media.services.pbs.org/api/v1/seasons/d62aef4f-7703-41c0-beca-61a520e79313/episodes/': 'test_fixtures/nova_seasons_d62aef4f-7703-41c0-beca-61a520e79313_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/ec14b79c-4bf6-409d-af3c-14783de69cdc/': 'test_fixtures/nova_seasons_ec14b79c-4bf6-409d-af3c-14783de69cdc.json',
'https://media.services.pbs.org/api/v1/seasons/ec14b79c-4bf6-409d-af3c-14783de69cdc/assets/': 'test_fixtures/nova_seasons_ec14b79c-4bf6-409d-af3c-14783de69cdc_assets.json',
'https://media.services.pbs.org/api/v1/seasons/ec14b79c-4bf6-409d-af3c-14783de69cdc/episodes/': 'test_fixtures/nova_seasons_ec14b79c-4bf6-409d-af3c-14783de69cdc_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/ee7a9a7f-daf5-48dc-b4a9-c84f79896102/': 'test_fixtures/nova_seasons_ee7a9a7f-daf5-48dc-b4a9-c84f79896102.json',
'https://media.services.pbs.org/api/v1/seasons/ee7a9a7f-daf5-48dc-b4a9-c84f79896102/assets/': 'test_fixtures/nova_seasons_ee7a9a7f-daf5-48dc-b4a9-c84f79896102_assets.json',
'https://media.services.pbs.org/api/v1/seasons/ee7a9a7f-daf5-48dc-b4a9-c84f79896102/episodes/': 'test_fixtures/nova_seasons_ee7a9a7f-daf5-48dc-b4a9-c84f79896102_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/f65834c3-7236-450b-9d4e-0f7a354bbca6/': 'test_fixtures/nova_seasons_f65834c3-7236-450b-9d4e-0f7a354bbca6.json',
'https://media.services.pbs.org/api/v1/seasons/f65834c3-7236-450b-9d4e-0f7a354bbca6/assets/': 'test_fixtures/nova_seasons_f65834c3-7236-450b-9d4e-0f7a354bbca6_assets.json',
'https://media.services.pbs.org/api/v1/seasons/f65834c3-7236-450b-9d4e-0f7a354bbca6/episodes/': 'test_fixtures/nova_seasons_f65834c3-7236-450b-9d4e-0f7a354bbca6_episodes.json',
'https://media.services.pbs.org/api/v1/seasons/fe413398-5934-4d22-836b-45ee81162a83/': 'test_fixtures/nova_seasons_fe413398-5934-4d22-836b-45ee81162a83.json',
'https://media.services.pbs.org/api/v1/seasons/fe413398-5934-4d22-836b-45ee81162a83/assets/': 'test_fixtures/nova_seasons_fe413398-5934-4d22-836b-45ee81162a83_assets.json',
'https://media.services.pbs.org/api/v1/seasons/fe413398-5934-4d22-836b-45ee81162a83/episodes/': 'test_fixtures/nova_seasons_fe413398-5934-4d22-836b-45ee81162a83_episodes.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/assets/': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_assets.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/assets/?page=2': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_assets_page_2.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/assets/?page=3': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_assets_page_3.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/seasons/': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_seasons.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/specials/': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_specials.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/specials/?page=2': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_specials_page_2.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/specials/?page=3': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_specials_page_3.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/specials/?page=4': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_specials_page_4.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/specials/?page=5': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_specials_page_5.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/specials/?page=6': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_specials_page_6.json',
'https://media.services.pbs.org/api/v1/shows/adfb2f9d-f61e-4613-ac58-ab3bde582afb/specials/?page=7': 'test_fixtures/nova_shows_adfb2f9d-f61e-4613-ac58-ab3bde582afb_specials_page_7.json',
'https://media.services.pbs.org/api/v1/shows/nova/': 'test_fixtures/nova_shows_nova.json',
'https://media.services.pbs.org/api/v1/specials/0154d740-9f2a-467e-9e0c-b47340b24b2b/assets/': 'test_fixtures/nova_specials_0154d740-9f2a-467e-9e0c-b47340b24b2b_assets.json',
'https://media.services.pbs.org/api/v1/specials/02191b55-7ba2-459a-b28b-789d1e3958ee/assets/': 'test_fixtures/nova_specials_02191b55-7ba2-459a-b28b-789d1e3958ee_assets.json',
'https://media.services.pbs.org/api/v1/specials/05013326-5c13-4369-bf7c-d54b95e1ae10/assets/': 'test_fixtures/nova_specials_05013326-5c13-4369-bf7c-d54b95e1ae10_assets.json',
'https://media.services.pbs.org/api/v1/specials/05218116-66ef-444e-a02b-d4a9e70c8a7d/assets/': 'test_fixtures/nova_specials_05218116-66ef-444e-a02b-d4a9e70c8a7d_assets.json',
'https://media.services.pbs.org/api/v1/specials/06102148-d2e0-4fff-8229-92311cdb764c/assets/': 'test_fixtures/nova_specials_06102148-d2e0-4fff-8229-92311cdb764c_assets.json',
'https://media.services.pbs.org/api/v1/specials/062c8bbc-9678-4bca-bed4-baba879ed250/assets/': 'test_fixtures/nova_specials_062c8bbc-9678-4bca-bed4-baba879ed250_assets.json',
'https://media.services.pbs.org/api/v1/specials/0791185c-e109-4749-822f-b4b08893daa1/assets/': 'test_fixtures/nova_specials_0791185c-e109-4749-822f-b4b08893daa1_assets.json',
'https://media.services.pbs.org/api/v1/specials/07b60a5c-b918-462a-a52a-77b55e4fa502/assets/': 'test_fixtures/nova_specials_07b60a5c-b918-462a-a52a-77b55e4fa502_assets.json',
'https://media.services.pbs.org/api/v1/specials/07c0a48c-31f5-4958-a07e-38b792495024/assets/': 'test_fixtures/nova_specials_07c0a48c-31f5-4958-a07e-38b792495024_assets.json',
'https://media.services.pbs.org/api/v1/specials/09a11b2d-d8ba-4e04-96c7-a1e5e13184a2/assets/': 'test_fixtures/nova_specials_09a11b2d-d8ba-4e04-96c7-a1e5e13184a2_assets.json',
'https://media.services.pbs.org/api/v1/specials/0a94a0de-c290-45c8-ad9e-cf2e30d9a84d/assets/': 'test_fixtures/nova_specials_0a94a0de-c290-45c8-ad9e-cf2e30d9a84d_assets.json',
'https://media.services.pbs.org/api/v1/specials/0b0d37b3-54f6-430e-b26a-bdd22556e536/assets/': 'test_fixtures/nova_specials_0b0d37b3-54f6-430e-b26a-bdd22556e536_assets.json',
'https://media.services.pbs.org/api/v1/specials/0c26b465-97e1-4291-8108-e9a0c7ca75d1/assets/': 'test_fixtures/nova_specials_0c26b465-97e1-4291-8108-e9a0c7ca75d1_assets.json',
'https://media.services.pbs.org/api/v1/specials/0d823a5d-3877-4cea-9640-a1265ae50811/assets/': 'test_fixtures/nova_specials_0d823a5d-3877-4cea-9640-a1265ae50811_assets.json',
'https://media.services.pbs.org/api/v1/specials/0d9aee2d-5d94-4420-bebb-f21cdaeef497/assets/': 'test_fixtures/nova_specials_0d9aee2d-5d94-4420-bebb-f21cdaeef497_assets.json',
'https://media.services.pbs.org/api/v1/specials/12deb199-291b-4fd3-bd1d-9628e0dff55c/assets/': 'test_fixtures/nova_specials_12deb199-291b-4fd3-bd1d-9628e0dff55c_assets.json',
'https://media.services.pbs.org/api/v1/specials/131e940c-7e92-48a4-978a-e3980dfbe9f5/assets/': 'test_fixtures/nova_specials_131e940c-7e92-48a4-978a-e3980dfbe9f5_assets.json',
'https://media.services.pbs.org/api/v1/specials/13d9ced2-c44e-48f5-9cd5-8a4081ecc8cf/assets/': 'test_fixtures/nova_specials_13d9ced2-c44e-48f5-9cd5-8a4081ecc8cf_assets.json',
'https://media.services.pbs.org/api/v1/specials/15b5a9c7-2ade-4822-8fe9-af7440456faa/assets/': 'test_fixtures/nova_specials_15b5a9c7-2ade-4822-8fe9-af7440456faa_assets.json',
'https://media.services.pbs.org/api/v1/specials/17b9c9b9-583c-4142-897f-a85f330b2b6d/assets/': 'test_fixtures/nova_specials_17b9c9b9-583c-4142-897f-a85f330b2b6d_assets.json',
'https://media.services.pbs.org/api/v1/specials/18194f4b-d240-4901-aa13-fd134c1ecc4d/assets/': 'test_fixtures/nova_specials_18194f4b-d240-4901-aa13-fd134c1ecc4d_assets.json',
'https://media.services.pbs.org/api/v1/specials/1ce6de9b-c2ca-4d45-8989-0ee4a8e1ca56/assets/': 'test_fixtures/nova_specials_1ce6de9b-c2ca-4d45-8989-0ee4a8e1ca56_assets.json',
'https://media.services.pbs.org/api/v1/specials/1d4aaef4-999e-44a7-8d5a-20c2acbe2ac7/assets/': 'test_fixtures/nova_specials_1d4aaef4-999e-44a7-8d5a-20c2acbe2ac7_assets.json',
'https://media.services.pbs.org/api/v1/specials/1f839811-9782-4040-9803-e3d3370a0466/assets/': 'test_fixtures/nova_specials_1f839811-9782-4040-9803-e3d3370a0466_assets.json',
'https://media.services.pbs.org/api/v1/specials/22adb0cd-5e0d-42ab-a0f4-c7cdd5a3796e/assets/': 'test_fixtures/nova_specials_22adb0cd-5e0d-42ab-a0f4-c7cdd5a3796e_assets.json',
'https://media.services.pbs.org/api/v1/specials/23ca6c24-d359-4cd7-8c68-1912570b98d3/assets/': 'test_fixtures/nova_specials_23ca6c24-d359-4cd7-8c68-1912570b98d3_assets.json',
'https://media.services.pbs.org/api/v1/specials/24286248-5ded-4fbd-893d-d3ce8ea3c539/assets/': 'test_fixtures/nova_specials_24286248-5ded-4fbd-893d-d3ce8ea3c539_assets.json',
'https://media.services.pbs.org/api/v1/specials/243b2ab7-967b-4255-87a7-e0e9c8a994f8/assets/': 'test_fixtures/nova_specials_243b2ab7-967b-4255-87a7-e0e9c8a994f8_assets.json',
'https://media.services.pbs.org/api/v1/specials/24ca245b-56ae-42d5-941d-beb10e1cdcb7/assets/': 'test_fixtures/nova_specials_24ca245b-56ae-42d5-941d-beb10e1cdcb7_assets.json',
'https://media.services.pbs.org/api/v1/specials/251e17eb-85b9-4c35-b7df-149ca33c6167/assets/': 'test_fixtures/nova_specials_251e17eb-85b9-4c35-b7df-149ca33c6167_assets.json',
'https://media.services.pbs.org/api/v1/specials/276ca2ed-4032-4942-90a4-cea1ab0d7435/assets/': 'test_fixtures/nova_specials_276ca2ed-4032-4942-90a4-cea1ab0d7435_assets.json',
'https://media.services.pbs.org/api/v1/specials/27739df6-1511-47c1-b62f-8fbb106c6a2f/assets/': 'test_fixtures/nova_specials_27739df6-1511-47c1-b62f-8fbb106c6a2f_assets.json',
'https://media.services.pbs.org/api/v1/specials/2de0eda2-e871-4a2c-8681-c8683b685ec7/assets/': 'test_fixtures/nova_specials_2de0eda2-e871-4a2c-8681-c8683b685ec7_assets.json',
'https://media.services.pbs.org/api/v1/specials/2eb690f2-ebc4-41f6-9558-6962d8e43c48/assets/': 'test_fixtures/nova_specials_2eb690f2-ebc4-41f6-9558-6962d8e43c48_assets.json',
'https://media.services.pbs.org/api/v1/specials/2f74a17c-9495-4db6-b254-6b8fb5111cd3/assets/': 'test_fixtures/nova_specials_2f74a17c-9495-4db6-b254-6b8fb5111cd3_assets.json',
'https://media.services.pbs.org/api/v1/specials/34e05c7c-a17c-44c0-bbff-2deb99bf25c1/assets/': 'test_fixtures/nova_specials_34e05c7c-a17c-44c0-bbff-2deb99bf25c1_assets.json',
'https://media.services.pbs.org/api/v1/specials/35b592f3-6c16-464a-b79f-70f95f9b6d11/assets/': 'test_fixtures/nova_specials_35b592f3-6c16-464a-b79f-70f95f9b6d11_assets.json',
'https://media.services.pbs.org/api/v1/specials/367b772c-7d0d-4a93-84c3-a86805978cb5/assets/': 'test_fixtures/nova_specials_367b772c-7d0d-4a93-84c3-a86805978cb5_assets.json',
'https://media.services.pbs.org/api/v1/specials/4174207d-e27e-47c6-b69d-00e2f6621551/assets/': 'test_fixtures/nova_specials_4174207d-e27e-47c6-b69d-00e2f6621551_assets.json',
'https://media.services.pbs.org/api/v1/specials/431e0413-458a-489e-a237-388cbe4ba818/assets/': 'test_fixtures/nova_specials_431e0413-458a-489e-a237-388cbe4ba818_assets.json',
'https://media.services.pbs.org/api/v1/specials/446fb0ee-473f-4f4f-a0e7-fd989f8771fe/assets/': 'test_fixtures/nova_specials_446fb0ee-473f-4f4f-a0e7-fd989f8771fe_assets.json',
'https://media.services.pbs.org/api/v1/specials/4586f099-1a0b-4185-9739-57747a0d8a26/assets/': 'test_fixtures/nova_specials_4586f099-1a0b-4185-9739-57747a0d8a26_assets.json',
'https://media.services.pbs.org/api/v1/specials/46630e9c-f3b5-49cc-9abb-e12d4ee4f58c/assets/': 'test_fixtures/nova_specials_46630e9c-f3b5-49cc-9abb-e12d4ee4f58c_assets.json',
'https://media.services.pbs.org/api/v1/specials/47fa69eb-9bbd-457b-b7de-172f04081d72/assets/': 'test_fixtures/nova_specials_47fa69eb-9bbd-457b-b7de-172f04081d72_assets.json',
'https://media.services.pbs.org/api/v1/specials/49385b03-fe56-4e66-86bb-20254797114b/assets/': 'test_fixtures/nova_specials_49385b03-fe56-4e66-86bb-20254797114b_assets.json',
'https://media.services.pbs.org/api/v1/specials/4eb15462-2fb3-48d4-9575-461dcb5935d1/assets/': 'test_fixtures/nova_specials_4eb15462-2fb3-48d4-9575-461dcb5935d1_assets.json',
'https://media.services.pbs.org/api/v1/specials/4f4ab67f-6973-4f63-b656-209e246101d5/assets/': 'test_fixtures/nova_specials_4f4ab67f-6973-4f63-b656-209e246101d5_assets.json',
'https://media.services.pbs.org/api/v1/specials/529f2920-e3ed-40e7-b4b4-9e02d9ec273a/assets/': 'test_fixtures/nova_specials_529f2920-e3ed-40e7-b4b4-9e02d9ec273a_assets.json',
'https://media.services.pbs.org/api/v1/specials/5392561b-9e90-4110-ada0-e0b449ce82f4/assets/': 'test_fixtures/nova_specials_5392561b-9e90-4110-ada0-e0b449ce82f4_assets.json',
'https://media.services.pbs.org/api/v1/specials/54377db9-8db8-41db-9b63-c737aad3f37c/assets/': 'test_fixtures/nova_specials_54377db9-8db8-41db-9b63-c737aad3f37c_assets.json',
'https://media.services.pbs.org/api/v1/specials/54befe20-2f21-446f-a853-31fc8085ca70/assets/': 'test_fixtures/nova_specials_54befe20-2f21-446f-a853-31fc8085ca70_assets.json',
'https://media.services.pbs.org/api/v1/specials/55ca77ee-2e41-439a-b978-4a5074da7828/assets/': 'test_fixtures/nova_specials_55ca77ee-2e41-439a-b978-4a5074da7828_assets.json',
'https://media.services.pbs.org/api/v1/specials/55d9e3cd-9ecf-4f56-ac14-17ddf5ba9c4e/assets/': 'test_fixtures/nova_specials_55d9e3cd-9ecf-4f56-ac14-17ddf5ba9c4e_assets.json',
'https://media.services.pbs.org/api/v1/specials/57a463e8-aad5-4a42-9e08-61a20b2a879c/assets/': 'test_fixtures/nova_specials_57a463e8-aad5-4a42-9e08-61a20b2a879c_assets.json',
'https://media.services.pbs.org/api/v1/specials/58b44e96-e11c-4403-8dc7-bfe04025a1e0/assets/': 'test_fixtures/nova_specials_58b44e96-e11c-4403-8dc7-bfe04025a1e0_assets.json',
'https://media.services.pbs.org/api/v1/specials/5b2899f1-5e30-4a22-a2bf-b00008dd3b13/assets/': 'test_fixtures/nova_specials_5b2899f1-5e30-4a22-a2bf-b00008dd3b13_assets.json',
'https://media.services.pbs.org/api/v1/specials/5eaa804a-c3fa-4d7f-914f-d9b8d9038f8e/assets/': 'test_fixtures/nova_specials_5eaa804a-c3fa-4d7f-914f-d9b8d9038f8e_assets.json',
'https://media.services.pbs.org/api/v1/specials/5ec06047-0739-42fb-a740-72a84645f479/assets/': 'test_fixtures/nova_specials_5ec06047-0739-42fb-a740-72a84645f479_assets.json',
'https://media.services.pbs.org/api/v1/specials/5fe0309c-5e67-450e-b8e3-146993c36db5/assets/': 'test_fixtures/nova_specials_5fe0309c-5e67-450e-b8e3-146993c36db5_assets.json',
'https://media.services.pbs.org/api/v1/specials/6221fa71-7205-4cb0-b9f3-fe3f4f06b629/assets/': 'test_fixtures/nova_specials_6221fa71-7205-4cb0-b9f3-fe3f4f06b629_assets.json',
'https://media.services.pbs.org/api/v1/specials/6361c7f9-f601-4ab1-bdd7-1c5f4d2096df/assets/': 'test_fixtures/nova_specials_6361c7f9-f601-4ab1-bdd7-1c5f4d2096df_assets.json',
'https://media.services.pbs.org/api/v1/specials/65885aaf-af5d-46d1-bc7d-0a24901aa69a/assets/': 'test_fixtures/nova_specials_65885aaf-af5d-46d1-bc7d-0a24901aa69a_assets.json',
'https://media.services.pbs.org/api/v1/specials/65b7f798-91f4-4615-950e-44d19e50e2a9/assets/': 'test_fixtures/nova_specials_65b7f798-91f4-4615-950e-44d19e50e2a9_assets.json',
'https://media.services.pbs.org/api/v1/specials/663b20da-2bd6-4d6f-8637-c16ee5300533/assets/': 'test_fixtures/nova_specials_663b20da-2bd6-4d6f-8637-c16ee5300533_assets.json',
'https://media.services.pbs.org/api/v1/specials/6779b9eb-8ea6-49c2-a7d0-46079718f0c9/assets/': 'test_fixtures/nova_specials_6779b9eb-8ea6-49c2-a7d0-46079718f0c9_assets.json',
'https://media.services.pbs.org/api/v1/specials/67b00fb1-d8b2-45c1-bda8-2d769d2e89b7/assets/': 'test_fixtures/nova_specials_67b00fb1-d8b2-45c1-bda8-2d769d2e89b7_assets.json',
'https://media.services.pbs.org/api/v1/specials/6b5b0e93-8984-42c6-bd74-ea0c6dec88fe/assets/': 'test_fixtures/nova_specials_6b5b0e93-8984-42c6-bd74-ea0c6dec88fe_assets.json',
'https://media.services.pbs.org/api/v1/specials/6cb8eed7-c0de-42f5-a558-f8a2b7697fe9/assets/': 'test_fixtures/nova_specials_6cb8eed7-c0de-42f5-a558-f8a2b7697fe9_assets.json',
'https://media.services.pbs.org/api/v1/specials/6ed9f9b2-b30f-4fd8-9680-af97840bfd03/assets/': 'test_fixtures/nova_specials_6ed9f9b2-b30f-4fd8-9680-af97840bfd03_assets.json',
'https://media.services.pbs.org/api/v1/specials/70ceb5bb-6130-4351-b262-ee60e4c9c8df/assets/': 'test_fixtures/nova_specials_70ceb5bb-6130-4351-b262-ee60e4c9c8df_assets.json',
'https://media.services.pbs.org/api/v1/specials/710e37f2-bff8-44a6-9a7c-ed178be6b182/assets/': 'test_fixtures/nova_specials_710e37f2-bff8-44a6-9a7c-ed178be6b182_assets.json',
'https://media.services.pbs.org/api/v1/specials/729c184b-aee9-4aac-ba0f-95b28fe0c58f/assets/': 'test_fixtures/nova_specials_729c184b-aee9-4aac-ba0f-95b28fe0c58f_assets.json',
'https://media.services.pbs.org/api/v1/specials/734ffa78-9cb9-4e4d-b97a-878f7e0d1ad1/assets/': 'test_fixtures/nova_specials_734ffa78-9cb9-4e4d-b97a-878f7e0d1ad1_assets.json',
'https://media.services.pbs.org/api/v1/specials/76e6ff43-52bf-4ddb-b5e5-1b7663715a5b/assets/': 'test_fixtures/nova_specials_76e6ff43-52bf-4ddb-b5e5-1b7663715a5b_assets.json',
'https://media.services.pbs.org/api/v1/specials/778747d9-b537-45db-bb1b-0a1122380a99/assets/': 'test_fixtures/nova_specials_778747d9-b537-45db-bb1b-0a1122380a99_assets.json',
'https://media.services.pbs.org/api/v1/specials/78f3796a-b23d-44c5-bd79-b6dca930ce9e/assets/': 'test_fixtures/nova_specials_78f3796a-b23d-44c5-bd79-b6dca930ce9e_assets.json',
'https://media.services.pbs.org/api/v1/specials/7968656b-dc5d-4645-9897-90afa3017d5c/assets/': 'test_fixtures/nova_specials_7968656b-dc5d-4645-9897-90afa3017d5c_assets.json',
'https://media.services.pbs.org/api/v1/specials/79ad0ebf-bbb9-4b43-a1f5-35f3bf6f6996/assets/': 'test_fixtures/nova_specials_79ad0ebf-bbb9-4b43-a1f5-35f3bf6f6996_assets.json',
'https://media.services.pbs.org/api/v1/specials/7da5ad94-60e8-4cd6-af56-ee56b7e4681e/assets/': 'test_fixtures/nova_specials_7da5ad94-60e8-4cd6-af56-ee56b7e4681e_assets.json',
'https://media.services.pbs.org/api/v1/specials/812c6e9c-7bc8-42d0-8ab4-a18d52e3726a/assets/': 'test_fixtures/nova_specials_812c6e9c-7bc8-42d0-8ab4-a18d52e3726a_assets.json',
'https://media.services.pbs.org/api/v1/specials/8562124f-7ece-43c9-81a6-a387634ccfce/assets/': 'test_fixtures/nova_specials_8562124f-7ece-43c9-81a6-a387634ccfce_assets.json',
'https://media.services.pbs.org/api/v1/specials/882c1063-6e1f-4621-a589-55c16515293d/assets/': 'test_fixtures/nova_specials_882c1063-6e1f-4621-a589-55c16515293d_assets.json',
'https://media.services.pbs.org/api/v1/specials/8c3c20ed-6e47-4abf-b327-ebfed61873ee/assets/': 'test_fixtures/nova_specials_8c3c20ed-6e47-4abf-b327-ebfed61873ee_assets.json',
'https://media.services.pbs.org/api/v1/specials/8c70f25e-e1dc-4519-ba1a-e11d3daabba5/assets/': 'test_fixtures/nova_specials_8c70f25e-e1dc-4519-ba1a-e11d3daabba5_assets.json',
'https://media.services.pbs.org/api/v1/specials/8d38e870-fd6a-496e-bdcb-eb05f3a0053e/assets/': 'test_fixtures/nova_specials_8d38e870-fd6a-496e-bdcb-eb05f3a0053e_assets.json',
'https://media.services.pbs.org/api/v1/specials/92b89c9f-92fb-47eb-b6d8-d19d098d4757/assets/': 'test_fixtures/nova_specials_92b89c9f-92fb-47eb-b6d8-d19d098d4757_assets.json',
'https://media.services.pbs.org/api/v1/specials/961aba67-8b4d-422d-9627-e20d6af68890/assets/': 'test_fixtures/nova_specials_961aba67-8b4d-422d-9627-e20d6af68890_assets.json',
'https://media.services.pbs.org/api/v1/specials/97b7c569-9990-4ffd-b94e-a9fa647fb99f/assets/': 'test_fixtures/nova_specials_97b7c569-9990-4ffd-b94e-a9fa647fb99f_assets.json',
'https://media.services.pbs.org/api/v1/specials/9a8b3261-37a2-4cac-9e68-818ea349ef3d/assets/': 'test_fixtures/nova_specials_9a8b3261-37a2-4cac-9e68-818ea349ef3d_assets.json',
'https://media.services.pbs.org/api/v1/specials/9b3efb9e-e217-460d-aab2-ddef01e11699/assets/': 'test_fixtures/nova_specials_9b3efb9e-e217-460d-aab2-ddef01e11699_assets.json',
'https://media.services.pbs.org/api/v1/specials/9c40366f-052f-4b25-a1e4-080d10d10d68/assets/': 'test_fixtures/nova_specials_9c40366f-052f-4b25-a1e4-080d10d10d68_assets.json',
'https://media.services.pbs.org/api/v1/specials/a327f369-66b4-4d02-b95f-53dea35dc137/assets/': 'test_fixtures/nova_specials_a327f369-66b4-4d02-b95f-53dea35dc137_assets.json',
'https://media.services.pbs.org/api/v1/specials/a3410528-7f72-47e8-b28d-6861693b9309/assets/': 'test_fixtures/nova_specials_a3410528-7f72-47e8-b28d-6861693b9309_assets.json',
'https://media.services.pbs.org/api/v1/specials/a7ca8d44-fe1b-4e69-8e33-3d96a82e1742/assets/': 'test_fixtures/nova_specials_a7ca8d44-fe1b-4e69-8e33-3d96a82e1742_assets.json',
'https://media.services.pbs.org/api/v1/specials/a9aca7c4-4fd8-45d6-846e-d96242cba1e1/assets/': 'test_fixtures/nova_specials_a9aca7c4-4fd8-45d6-846e-d96242cba1e1_assets.json',
'https://media.services.pbs.org/api/v1/specials/ac3cfbb8-339d-4397-b7d1-3545a2ff58f0/assets/': 'test_fixtures/nova_specials_ac3cfbb8-339d-4397-b7d1-3545a2ff58f0_assets.json',
'https://media.services.pbs.org/api/v1/specials/ad1a3c8f-83b9-4433-a021-3605ae5faad9/assets/': 'test_fixtures/nova_specials_ad1a3c8f-83b9-4433-a021-3605ae5faad9_assets.json',
'https://media.services.pbs.org/api/v1/specials/ad1cc220-46f3-4fe2-86b6-96d9946c377a/assets/': 'test_fixtures/nova_specials_ad1cc220-46f3-4fe2-86b6-96d9946c377a_assets.json',
'https://media.services.pbs.org/api/v1/specials/ae3a79eb-b3d5-419f-a963-fc5eb82123b6/assets/': 'test_fixtures/nova_specials_ae3a79eb-b3d5-419f-a963-fc5eb82123b6_assets.json',
'https://media.services.pbs.org/api/v1/specials/af628aae-822c-4645-b604-8a1d79b05467/assets/': 'test_fixtures/nova_specials_af628aae-822c-4645-b604-8a1d79b05467_assets.json',
'https://media.services.pbs.org/api/v1/specials/af71253c-c17b-4bc7-acfc-1a125301945d/assets/': 'test_fixtures/nova_specials_af71253c-c17b-4bc7-acfc-1a125301945d_assets.json',
'https://media.services.pbs.org/api/v1/specials/b2b05c41-14a9-40a5-9379-3a37b52f0fc9/assets/': 'test_fixtures/nova_specials_b2b05c41-14a9-40a5-9379-3a37b52f0fc9_assets.json',
'https://media.services.pbs.org/api/v1/specials/b31c959d-d4ec-4ee9-a534-24236251ea46/assets/': 'test_fixtures/nova_specials_b31c959d-d4ec-4ee9-a534-24236251ea46_assets.json',
'https://media.services.pbs.org/api/v1/specials/b560095b-e63d-4cf0-a37f-6639af0b0999/assets/': 'test_fixtures/nova_specials_b560095b-e63d-4cf0-a37f-6639af0b0999_assets.json',
'https://media.services.pbs.org/api/v1/specials/b828eb4b-1ce6-4c4d-9198-41dfcb4e00d8/assets/': 'test_fixtures/nova_specials_b828eb4b-1ce6-4c4d-9198-41dfcb4e00d8_assets.json',
'https://media.services.pbs.org/api/v1/specials/ba41fd3b-e184-441b-bfa4-b839867f6479/assets/': 'test_fixtures/nova_specials_ba41fd3b-e184-441b-bfa4-b839867f6479_assets.json',
'https://media.services.pbs.org/api/v1/specials/bd784b6e-18db-4027-995a-cb566463af38/assets/': 'test_fixtures/nova_specials_bd784b6e-18db-4027-995a-cb566463af38_assets.json',
'https://media.services.pbs.org/api/v1/specials/c025d3ce-0a2e-482f-9aa9-c5b00e8152cf/assets/': 'test_fixtures/nova_specials_c025d3ce-0a2e-482f-9aa9-c5b00e8152cf_assets.json',
'https://media.services.pbs.org/api/v1/specials/c29b1dfb-6337-469f-9b58-45fbca73a234/assets/': 'test_fixtures/nova_specials_c29b1dfb-6337-469f-9b58-45fbca73a234_assets.json',
'https://media.services.pbs.org/api/v1/specials/c3ec85bd-4993-4df9-a5cc-e42f76d0a973/assets/': 'test_fixtures/nova_specials_c3ec85bd-4993-4df9-a5cc-e42f76d0a973_assets.json',
'https://media.services.pbs.org/api/v1/specials/c4e5df49-fd41-4865-a0ad-4bcf29bebec4/assets/': 'test_fixtures/nova_specials_c4e5df49-fd41-4865-a0ad-4bcf29bebec4_assets.json',
'https://media.services.pbs.org/api/v1/specials/c7bee059-bd23-4696-9e71-1440ef44713b/assets/': 'test_fixtures/nova_specials_c7bee059-bd23-4696-9e71-1440ef44713b_assets.json',
'https://media.services.pbs.org/api/v1/specials/cc4767ea-1281-40ca-9433-ff3bcc920118/assets/': 'test_fixtures/nova_specials_cc4767ea-1281-40ca-9433-ff3bcc920118_assets.json',
'https://media.services.pbs.org/api/v1/specials/cce6415b-7652-4698-98f6-578c000e18ab/assets/': 'test_fixtures/nova_specials_cce6415b-7652-4698-98f6-578c000e18ab_assets.json',
'https://media.services.pbs.org/api/v1/specials/ce5b492a-0bb3-4b8d-a7b1-5cad87159594/assets/': 'test_fixtures/nova_specials_ce5b492a-0bb3-4b8d-a7b1-5cad87159594_assets.json',
'https://media.services.pbs.org/api/v1/specials/cff70bcf-f9bb-4d8b-8c34-27c8255111ac/assets/': 'test_fixtures/nova_specials_cff70bcf-f9bb-4d8b-8c34-27c8255111ac_assets.json',
'https://media.services.pbs.org/api/v1/specials/d102b8e4-644a-4ee4-b541-bf8c588eb834/assets/': 'test_fixtures/nova_specials_d102b8e4-644a-4ee4-b541-bf8c588eb834_assets.json',
'https://media.services.pbs.org/api/v1/specials/d1188b16-c7e6-4873-a33b-634dad6108f1/assets/': 'test_fixtures/nova_specials_d1188b16-c7e6-4873-a33b-634dad6108f1_assets.json',
'https://media.services.pbs.org/api/v1/specials/d24ee258-1f72-43c1-a0a1-a999c164c336/assets/': 'test_fixtures/nova_specials_d24ee258-1f72-43c1-a0a1-a999c164c336_assets.json',
'https://media.services.pbs.org/api/v1/specials/d330996e-b4a9-44b0-ae97-eadc4ed6894d/assets/': 'test_fixtures/nova_specials_d330996e-b4a9-44b0-ae97-eadc4ed6894d_assets.json',
'https://media.services.pbs.org/api/v1/specials/d50d20ae-d285-47f0-84ce-991f35b7eb1d/assets/': 'test_fixtures/nova_specials_d50d20ae-d285-47f0-84ce-991f35b7eb1d_assets.json',
'https://media.services.pbs.org/api/v1/specials/d51996a7-af60-4f5e-900d-9a6aa78cf833/assets/': 'test_fixtures/nova_specials_d51996a7-af60-4f5e-900d-9a6aa78cf833_assets.json',
'https://media.services.pbs.org/api/v1/specials/d5e7cd6f-c410-4c00-9ca1-89c6bad5959b/assets/': 'test_fixtures/nova_specials_d5e7cd6f-c410-4c00-9ca1-89c6bad5959b_assets.json',
'https://media.services.pbs.org/api/v1/specials/d6ce215e-2e67-4e7d-ab62-b7a70da24feb/assets/': 'test_fixtures/nova_specials_d6ce215e-2e67-4e7d-ab62-b7a70da24feb_assets.json',
'https://media.services.pbs.org/api/v1/specials/d7c5a990-6ade-438d-b1da-297f5479800d/assets/': 'test_fixtures/nova_specials_d7c5a990-6ade-438d-b1da-297f5479800d_assets.json',
'https://media.services.pbs.org/api/v1/specials/d7de0b32-94fe-4b50-a438-03c08b612770/assets/': 'test_fixtures/nova_specials_d7de0b32-94fe-4b50-a438-03c08b612770_assets.json',
'https://media.services.pbs.org/api/v1/specials/d8191e16-92b2-4547-afb9-b9d2bf28b92a/assets/': 'test_fixtures/nova_specials_d8191e16-92b2-4547-afb9-b9d2bf28b92a_assets.json',
'https://media.services.pbs.org/api/v1/specials/d847057f-c3cf-46aa-9915-0714b3de367c/assets/': 'test_fixtures/nova_specials_d847057f-c3cf-46aa-9915-0714b3de367c_assets.json',
'https://media.services.pbs.org/api/v1/specials/d84dc74d-a83f-40be-8aa4-112462d72b2f/assets/': 'test_fixtures/nova_specials_d84dc74d-a83f-40be-8aa4-112462d72b2f_assets.json',
'https://media.services.pbs.org/api/v1/specials/d9b4c2fe-e90f-4773-bf9e-fd7859e7e8a6/assets/': 'test_fixtures/nova_specials_d9b4c2fe-e90f-4773-bf9e-fd7859e7e8a6_assets.json',
'https://media.services.pbs.org/api/v1/specials/d9db949c-4612-4294-905e-3588d13945b0/assets/': 'test_fixtures/nova_specials_d9db949c-4612-4294-905e-3588d13945b0_assets.json',
'https://media.services.pbs.org/api/v1/specials/db3894ce-7866-4366-9ae9-9a624d828e86/assets/': 'test_fixtures/nova_specials_db3894ce-7866-4366-9ae9-9a624d828e86_assets.json',
'https://media.services.pbs.org/api/v1/specials/db903aac-3b93-4c22-a8e2-def836def294/assets/': 'test_fixtures/nova_specials_db903aac-3b93-4c22-a8e2-def836def294_assets.json',
'https://media.services.pbs.org/api/v1/specials/ddfd618c-c439-496a-a54a-ddef95897ad4/assets/': 'test_fixtures/nova_specials_ddfd618c-c439-496a-a54a-ddef95897ad4_assets.json',
'https://media.services.pbs.org/api/v1/specials/de44ebfc-33e0-4e84-85ff-fa3ef42801dc/assets/': 'test_fixtures/nova_specials_de44ebfc-33e0-4e84-85ff-fa3ef42801dc_assets.json',
'https://media.services.pbs.org/api/v1/specials/dfbb1e57-9565-4961-aac8-892672acde2e/assets/': 'test_fixtures/nova_specials_dfbb1e57-9565-4961-aac8-892672acde2e_assets.json',
'https://media.services.pbs.org/api/v1/specials/e0962275-5d67-445b-8a82-f3ab67166b04/assets/': 'test_fixtures/nova_specials_e0962275-5d67-445b-8a82-f3ab67166b04_assets.json',
'https://media.services.pbs.org/api/v1/specials/e36e28e3-f002-41c7-843e-bb320a805b44/assets/': 'test_fixtures/nova_specials_e36e28e3-f002-41c7-843e-bb320a805b44_assets.json',
'https://media.services.pbs.org/api/v1/specials/e7e3648b-0d4e-4f38-8b35-d850b7be1e94/assets/': 'test_fixtures/nova_specials_e7e3648b-0d4e-4f38-8b35-d850b7be1e94_assets.json',
'https://media.services.pbs.org/api/v1/specials/e90efe5e-c37f-406f-9df1-966b496a1871/assets/': 'test_fixtures/nova_specials_e90efe5e-c37f-406f-9df1-966b496a1871_assets.json',
'https://media.services.pbs.org/api/v1/specials/eaea5cfb-959d-42c2-b76c-84c1d4c8524f/assets/': 'test_fixtures/nova_specials_eaea5cfb-959d-42c2-b76c-84c1d4c8524f_assets.json',
'https://media.services.pbs.org/api/v1/specials/ebe4ffd5-8a6e-4767-b2bd-99ec474c9c0f/assets/': 'test_fixtures/nova_specials_ebe4ffd5-8a6e-4767-b2bd-99ec474c9c0f_assets.json',
'https://media.services.pbs.org/api/v1/specials/ecc8ad99-99db-4467-8517-4358ce8864a7/assets/': 'test_fixtures/nova_specials_ecc8ad99-99db-4467-8517-4358ce8864a7_assets.json',
'https://media.services.pbs.org/api/v1/specials/ece036e1-b0b9-4304-9d11-9540e7fc7308/assets/': 'test_fixtures/nova_specials_ece036e1-b0b9-4304-9d11-9540e7fc7308_assets.json',
'https://media.services.pbs.org/api/v1/specials/ed8a6763-f4d9-4804-a2d5-7b58a87da5a3/assets/': 'test_fixtures/nova_specials_ed8a6763-f4d9-4804-a2d5-7b58a87da5a3_assets.json',
'https://media.services.pbs.org/api/v1/specials/ef69d35d-897a-4714-b07a-1e59907a4f60/assets/': 'test_fixtures/nova_specials_ef69d35d-897a-4714-b07a-1e59907a4f60_assets.json',
'https://media.services.pbs.org/api/v1/specials/f0434359-3fd4-45ac-afbc-88a455866238/assets/': 'test_fixtures/nova_specials_f0434359-3fd4-45ac-afbc-88a455866238_assets.json',
'https://media.services.pbs.org/api/v1/specials/f177a875-b98e-4fa7-990d-9bb38ec3cd7f/assets/': 'test_fixtures/nova_specials_f177a875-b98e-4fa7-990d-9bb38ec3cd7f_assets.json',
'https://media.services.pbs.org/api/v1/specials/f2e734dd-c9d3-47a4-8c6d-7cdf1acf9608/assets/': 'test_fixtures/nova_specials_f2e734dd-c9d3-47a4-8c6d-7cdf1acf9608_assets.json',
'https://media.services.pbs.org/api/v1/specials/f3b3b215-3564-4bf8-bce2-1e4766475605/assets/': 'test_fixtures/nova_specials_f3b3b215-3564-4bf8-bce2-1e4766475605_assets.json',
'https://media.services.pbs.org/api/v1/specials/f59936d1-0d82-476e-869b-f71addd36031/assets/': 'test_fixtures/nova_specials_f59936d1-0d82-476e-869b-f71addd36031_assets.json',
'https://media.services.pbs.org/api/v1/specials/f788e1f2-ab31-4518-8fe2-ce47b4b4bc79/assets/': 'test_fixtures/nova_specials_f788e1f2-ab31-4518-8fe2-ce47b4b4bc79_assets.json',
'https://media.services.pbs.org/api/v1/specials/f8f6b6a1-a86f-4a4a-8300-e4e1cc70d589/assets/': 'test_fixtures/nova_specials_f8f6b6a1-a86f-4a4a-8300-e4e1cc70d589_assets.json',
'https://media.services.pbs.org/api/v1/specials/f9966769-080d-4614-b8dc-68b590d76508/assets/': 'test_fixtures/nova_specials_f9966769-080d-4614-b8dc-68b590d76508_assets.json',
'https://media.services.pbs.org/api/v1/specials/fad2d21b-57d9-4c41-ab78-4882f0299ec7/assets/': 'test_fixtures/nova_specials_fad2d21b-57d9-4c41-ab78-4882f0299ec7_assets.json',
'https://media.services.pbs.org/api/v1/specials/fb29114a-f79b-421f-989c-117167be9fd7/assets/': 'test_fixtures/nova_specials_fb29114a-f79b-421f-989c-117167be9fd7_assets.json',
'https://media.services.pbs.org/api/v1/specials/fb58e2bf-dda6-4795-bd6a-c4621f30260b/assets/': 'test_fixtures/nova_specials_fb58e2bf-dda6-4795-bd6a-c4621f30260b_assets.json',
'https://media.services.pbs.org/api/v1/specials/fc98467e-69ca-4020-8b44-509bfb2e63bd/assets/': 'test_fixtures/nova_specials_fc98467e-69ca-4020-8b44-509bfb2e63bd_assets.json',
'https://media.services.pbs.org/api/v1/specials/fcca6ddb-154c-44ca-ab5d-01f67277085b/assets/': 'test_fixtures/nova_specials_fcca6ddb-154c-44ca-ab5d-01f67277085b_assets.json',
'https://media.services.pbs.org/api/v1/specials/febf92a9-6e34-4381-92a5-3c594a38d9d0/assets/': 'test_fixtures/nova_specials_febf92a9-6e34-4381-92a5-3c594a38d9d0_assets.json',
'https://media.services.pbs.org/api/v1/specials/front-row-seat-saturn/': 'test_fixtures/nova_specials_front-row-seat-saturn.json',
'https://media.services.pbs.org/api/v1/specials/nova-ancient-life/': 'test_fixtures/nova_specials_nova-ancient-life.json',
'https://media.services.pbs.org/api/v1/specials/nova-animal-mummies-pro/': 'test_fixtures/nova_specials_nova-animal-mummies-pro.json',
'https://media.services.pbs.org/api/v1/specials/nova-animal-robot-1/': 'test_fixtures/nova_specials_nova-animal-robot-1.json',
'https://media.services.pbs.org/api/v1/specials/nova-animal-robot-2/': 'test_fixtures/nova_specials_nova-animal-robot-2.json',
'https://media.services.pbs.org/api/v1/specials/nova-arctic-dinosaurs/': 'test_fixtures/nova_specials_nova-arctic-dinosaurs.json',
'https://media.services.pbs.org/api/v1/specials/nova-battle-jacket/': 'test_fixtures/nova_specials_nova-battle-jacket.json',
'https://media.services.pbs.org/api/v1/specials/nova-bee-toxin/': 'test_fixtures/nova_specials_nova-bee-toxin.json',
'https://media.services.pbs.org/api/v1/specials/nova-bird-wings/': 'test_fixtures/nova_specials_nova-bird-wings.json',
'https://media.services.pbs.org/api/v1/specials/nova-chemotherapy-goes-small/': 'test_fixtures/nova_specials_nova-chemotherapy-goes-small.json',
'https://media.services.pbs.org/api/v1/specials/nova-chicken-feathers/': 'test_fixtures/nova_specials_nova-chicken-feathers.json',
'https://media.services.pbs.org/api/v1/specials/nova-chnops-ingredients-life/': 'test_fixtures/nova_specials_nova-chnops-ingredients-life.json',
'https://media.services.pbs.org/api/v1/specials/nova-climate-wild-card/': 'test_fixtures/nova_specials_nova-climate-wild-card.json',
'https://media.services.pbs.org/api/v1/specials/nova-cloudlab_megastorm_01/': 'test_fixtures/nova_specials_nova-cloudlab_megastorm_01.json',
'https://media.services.pbs.org/api/v1/specials/nova-cloudlab_megastorm_02/': 'test_fixtures/nova_specials_nova-cloudlab_megastorm_02.json',
'https://media.services.pbs.org/api/v1/specials/nova-cloudlab_megastorm_03/': 'test_fixtures/nova_specials_nova-cloudlab_megastorm_03.json',
'https://media.services.pbs.org/api/v1/specials/nova-cloudlab_megastorm_04/': 'test_fixtures/nova_specials_nova-cloudlab_megastorm_04.json',
'https://media.services.pbs.org/api/v1/specials/nova-cloudlab_megastorm_05/': 'test_fixtures/nova_specials_nova-cloudlab_megastorm_05.json',
'https://media.services.pbs.org/api/v1/specials/nova-cloudlab_megastorm_06/': 'test_fixtures/nova_specials_nova-cloudlab_megastorm_06.json',
'https://media.services.pbs.org/api/v1/specials/nova-cloudlab_megastorm_07/': 'test_fixtures/nova_specials_nova-cloudlab_megastorm_07.json',
'https://media.services.pbs.org/api/v1/specials/nova-clouds-and-weather/': 'test_fixtures/nova_specials_nova-clouds-and-weather.json',
'https://media.services.pbs.org/api/v1/specials/nova-coriolis-effect/': 'test_fixtures/nova_specials_nova-coriolis-effect.json',
'https://media.services.pbs.org/api/v1/specials/nova-corkscrew-robot/': 'test_fixtures/nova_specials_nova-corkscrew-robot.json',
'https://media.services.pbs.org/api/v1/specials/nova-crow-tool-excerpt/': 'test_fixtures/nova_specials_nova-crow-tool-excerpt.json',
'https://media.services.pbs.org/api/v1/specials/nova-david-drives/': 'test_fixtures/nova_specials_nova-david-drives.json',
'https://media.services.pbs.org/api/v1/specials/nova-davids-race/': 'test_fixtures/nova_specials_nova-davids-race.json',
'https://media.services.pbs.org/api/v1/specials/nova-dinosaur-fossils/': 'test_fixtures/nova_specials_nova-dinosaur-fossils.json',
'https://media.services.pbs.org/api/v1/specials/nova-dna-patch/': 'test_fixtures/nova_specials_nova-dna-patch.json',
'https://media.services.pbs.org/api/v1/specials/nova-dna-vaccine/': 'test_fixtures/nova_specials_nova-dna-vaccine.json',
'https://media.services.pbs.org/api/v1/specials/nova-dogs-sense-time/': 'test_fixtures/nova_specials_nova-dogs-sense-time.json',
'https://media.services.pbs.org/api/v1/specials/nova-dolphin-mirror/': 'test_fixtures/nova_specials_nova-dolphin-mirror.json',
'https://media.services.pbs.org/api/v1/specials/nova-driving-dinosaur-diversity/': 'test_fixtures/nova_specials_nova-driving-dinosaur-diversity.json',
'https://media.services.pbs.org/api/v1/specials/nova-earthquake-test/': 'test_fixtures/nova_specials_nova-earthquake-test.json',
'https://media.services.pbs.org/api/v1/specials/nova-electromagnetic-spectrum/': 'test_fixtures/nova_specials_nova-electromagnetic-spectrum.json',
'https://media.services.pbs.org/api/v1/specials/nova-elephant-arm/': 'test_fixtures/nova_specials_nova-elephant-arm.json',
'https://media.services.pbs.org/api/v1/specials/nova-exuma-cays-4/': 'test_fixtures/nova_specials_nova-exuma-cays-4.json',
'https://media.services.pbs.org/api/v1/specials/nova-exuma-cays-bahamas/': 'test_fixtures/nova_specials_nova-exuma-cays-bahamas.json',
'https://media.services.pbs.org/api/v1/specials/nova-eye-robot/': 'test_fixtures/nova_specials_nova-eye-robot.json',
'https://media.services.pbs.org/api/v1/specials/nova-fertilizing-amazon/': 'test_fixtures/nova_specials_nova-fertilizing-amazon.json',
'https://media.services.pbs.org/api/v1/specials/nova-fingripper/': 'test_fixtures/nova_specials_nova-fingripper.json',
'https://media.services.pbs.org/api/v1/specials/nova-five-steps-volcanic-rock/': 'test_fixtures/nova_specials_nova-five-steps-volcanic-rock.json',
'https://media.services.pbs.org/api/v1/specials/nova-flatfish-larvae/': 'test_fixtures/nova_specials_nova-flatfish-larvae.json',
'https://media.services.pbs.org/api/v1/specials/nova-flu-probabilities/': 'test_fixtures/nova_specials_nova-flu-probabilities.json',
'https://media.services.pbs.org/api/v1/specials/nova-forgotten-genius-1/': 'test_fixtures/nova_specials_nova-forgotten-genius-1.json',
'https://media.services.pbs.org/api/v1/specials/nova-formation-typhoon/': 'test_fixtures/nova_specials_nova-formation-typhoon.json',
'https://media.services.pbs.org/api/v1/specials/nova-fossil-filled-limestone/': 'test_fixtures/nova_specials_nova-fossil-filled-limestone.json',
'https://media.services.pbs.org/api/v1/specials/nova-fossilized-sand-dunes/': 'test_fixtures/nova_specials_nova-fossilized-sand-dunes.json',
'https://media.services.pbs.org/api/v1/specials/nova-gm-plants/': 'test_fixtures/nova_specials_nova-gm-plants.json',
'https://media.services.pbs.org/api/v1/specials/nova-gooseberry-falls-1/': 'test_fixtures/nova_specials_nova-gooseberry-falls-1.json',
'https://media.services.pbs.org/api/v1/specials/nova-gooseberry-falls-2/': 'test_fixtures/nova_specials_nova-gooseberry-falls-2.json',
'https://media.services.pbs.org/api/v1/specials/nova-gooseberry-falls-3/': 'test_fixtures/nova_specials_nova-gooseberry-falls-3.json',
'https://media.services.pbs.org/api/v1/specials/nova-gooseberry-falls-4/': 'test_fixtures/nova_specials_nova-gooseberry-falls-4.json',
'https://media.services.pbs.org/api/v1/specials/nova-grand-canyon-1/': 'test_fixtures/nova_specials_nova-grand-canyon-1.json',
'https://media.services.pbs.org/api/v1/specials/nova-grand-canyon-2/': 'test_fixtures/nova_specials_nova-grand-canyon-2.json',
'https://media.services.pbs.org/api/v1/specials/nova-grand-canyon-3/': 'test_fixtures/nova_specials_nova-grand-canyon-3.json',
'https://media.services.pbs.org/api/v1/specials/nova-grand-canyon-4/': 'test_fixtures/nova_specials_nova-grand-canyon-4.json',
'https://media.services.pbs.org/api/v1/specials/nova-grand-canyons-oldest-rocks/': 'test_fixtures/nova_specials_nova-grand-canyons-oldest-rocks.json',
'https://media.services.pbs.org/api/v1/specials/nova-graphene/': 'test_fixtures/nova_specials_nova-graphene.json',
'https://media.services.pbs.org/api/v1/specials/nova-hagfish-defense/': 'test_fixtures/nova_specials_nova-hagfish-defense.json',
'https://media.services.pbs.org/api/v1/specials/nova-hagfish/': 'test_fixtures/nova_specials_nova-hagfish.json',
'https://media.services.pbs.org/api/v1/specials/nova-hell-creek-formation/': 'test_fixtures/nova_specials_nova-hell-creek-formation.json',
'https://media.services.pbs.org/api/v1/specials/nova-how-1918-flu-infected-people/': 'test_fixtures/nova_specials_nova-how-1918-flu-infected-people.json',
'https://media.services.pbs.org/api/v1/specials/nova-how-batteries-work/': 'test_fixtures/nova_specials_nova-how-batteries-work.json',
'https://media.services.pbs.org/api/v1/specials/nova-how-solar-cells-work/': 'test_fixtures/nova_specials_nova-how-solar-cells-work.json',
'https://media.services.pbs.org/api/v1/specials/nova-hydrofoil/': 'test_fixtures/nova_specials_nova-hydrofoil.json',
'https://media.services.pbs.org/api/v1/specials/nova-invisibility-cloak/': 'test_fixtures/nova_specials_nova-invisibility-cloak.json',
'https://media.services.pbs.org/api/v1/specials/nova-invisible-universe-revealed/': 'test_fixtures/nova_specials_nova-invisible-universe-revealed.json',
'https://media.services.pbs.org/api/v1/specials/nova-jellyfish/': 'test_fixtures/nova_specials_nova-jellyfish.json',
'https://media.services.pbs.org/api/v1/specials/nova-killacycle/': 'test_fixtures/nova_specials_nova-killacycle.json',
'https://media.services.pbs.org/api/v1/specials/nova-king-gore/': 'test_fixtures/nova_specials_nova-king-gore.json',
'https://media.services.pbs.org/api/v1/specials/nova-labyrinth-lava-map/': 'test_fixtures/nova_specials_nova-labyrinth-lava-map.json',
'https://media.services.pbs.org/api/v1/specials/nova-lava-land/': 'test_fixtures/nova_specials_nova-lava-land.json',
'https://media.services.pbs.org/api/v1/specials/nova-lethal-seas-excerpt-1/': 'test_fixtures/nova_specials_nova-lethal-seas-excerpt-1.json',
'https://media.services.pbs.org/api/v1/specials/nova-lethal-seas-excerpt-2/': 'test_fixtures/nova_specials_nova-lethal-seas-excerpt-2.json',
'https://media.services.pbs.org/api/v1/specials/nova-lethal-seas-excerpt-3/': 'test_fixtures/nova_specials_nova-lethal-seas-excerpt-3.json',
'https://media.services.pbs.org/api/v1/specials/nova-lethal-seas-excerpt-4/': 'test_fixtures/nova_specials_nova-lethal-seas-excerpt-4.json',
'https://media.services.pbs.org/api/v1/specials/nova-levitation/': 'test_fixtures/nova_specials_nova-levitation.json',
'https://media.services.pbs.org/api/v1/specials/nova-life-europa/': 'test_fixtures/nova_specials_nova-life-europa.json',
'https://media.services.pbs.org/api/v1/specials/nova-life-saving-non-newtonian-fluids/': 'test_fixtures/nova_specials_nova-life-saving-non-newtonian-fluids.json',
'https://media.services.pbs.org/api/v1/specials/nova-lithuanian-escape-tunnel-revealed/': 'test_fixtures/nova_specials_nova-lithuanian-escape-tunnel-revealed.json',
'https://media.services.pbs.org/api/v1/specials/nova-magnetic-oobleck/': 'test_fixtures/nova_specials_nova-magnetic-oobleck.json',
'https://media.services.pbs.org/api/v1/specials/nova-making-cloud/': 'test_fixtures/nova_specials_nova-making-cloud.json',
'https://media.services.pbs.org/api/v1/specials/nova-making-granite-volcanic-rock/': 'test_fixtures/nova_specials_nova-making-granite-volcanic-rock.json',
'https://media.services.pbs.org/api/v1/specials/nova-making-north-america-human/': 'test_fixtures/nova_specials_nova-making-north-america-human.json',
'https://media.services.pbs.org/api/v1/specials/nova-mammal-survivors/': 'test_fixtures/nova_specials_nova-mammal-survivors.json',
'https://media.services.pbs.org/api/v1/specials/nova-manhattan-schist/': 'test_fixtures/nova_specials_nova-manhattan-schist.json',
'https://media.services.pbs.org/api/v1/specials/nova-manhattan-skyline/': 'test_fixtures/nova_specials_nova-manhattan-skyline.json',
'https://media.services.pbs.org/api/v1/specials/nova-massive-volcanic-eruption/': 'test_fixtures/nova_specials_nova-massive-volcanic-eruption.json',
'https://media.services.pbs.org/api/v1/specials/nova-microneedle/': 'test_fixtures/nova_specials_nova-microneedle.json',
'https://media.services.pbs.org/api/v1/specials/nova-mild-traumatic-brain-injury/': 'test_fixtures/nova_specials_nova-mild-traumatic-brain-injury.json',
'https://media.services.pbs.org/api/v1/specials/nova-millions-years-captured-chalk/': 'test_fixtures/nova_specials_nova-millions-years-captured-chalk.json',
'https://media.services.pbs.org/api/v1/specials/nova-modis-sensor/': 'test_fixtures/nova_specials_nova-modis-sensor.json',
'https://media.services.pbs.org/api/v1/specials/nova-monument-rocks-1/': 'test_fixtures/nova_specials_nova-monument-rocks-1.json',
'https://media.services.pbs.org/api/v1/specials/nova-monument-rocks-2/': 'test_fixtures/nova_specials_nova-monument-rocks-2.json',
'https://media.services.pbs.org/api/v1/specials/nova-mother/': 'test_fixtures/nova_specials_nova-mother.json',
'https://media.services.pbs.org/api/v1/specials/nova-mount-kilauea/': 'test_fixtures/nova_specials_nova-mount-kilauea.json',
'https://media.services.pbs.org/api/v1/specials/nova-mud-rock/': 'test_fixtures/nova_specials_nova-mud-rock.json',
'https://media.services.pbs.org/api/v1/specials/nova-muon-detection/': 'test_fixtures/nova_specials_nova-muon-detection.json',
'https://media.services.pbs.org/api/v1/specials/nova-muons/': 'test_fixtures/nova_specials_nova-muons.json',
'https://media.services.pbs.org/api/v1/specials/nova-new-yorks-mountains/': 'test_fixtures/nova_specials_nova-new-yorks-mountains.json',
'https://media.services.pbs.org/api/v1/specials/nova-newtons-dark-secrets-1/': 'test_fixtures/nova_specials_nova-newtons-dark-secrets-1.json',
'https://media.services.pbs.org/api/v1/specials/nova-non-newtonian-gel/': 'test_fixtures/nova_specials_nova-non-newtonian-gel.json',
'https://media.services.pbs.org/api/v1/specials/nova-nova-2017-sizzle-reel/': 'test_fixtures/nova_specials_nova-nova-2017-sizzle-reel.json',
'https://media.services.pbs.org/api/v1/specials/nova-nova-full-episode-test/': 'test_fixtures/nova_specials_nova-nova-full-episode-test.json',
'https://media.services.pbs.org/api/v1/specials/nova-nuclear-option/': 'test_fixtures/nova_specials_nova-nuclear-option.json',
'https://media.services.pbs.org/api/v1/specials/nova-oobleck/': 'test_fixtures/nova_specials_nova-oobleck.json',
'https://media.services.pbs.org/api/v1/specials/nova-pangea/': 'test_fixtures/nova_specials_nova-pangea.json',
'https://media.services.pbs.org/api/v1/specials/nova-pitcher-plant/': 'test_fixtures/nova_specials_nova-pitcher-plant.json',
'https://media.services.pbs.org/api/v1/specials/nova-pizza-transistor/': 'test_fixtures/nova_specials_nova-pizza-transistor.json',
'https://media.services.pbs.org/api/v1/specials/nova-plants-white/': 'test_fixtures/nova_specials_nova-plants-white.json',
'https://media.services.pbs.org/api/v1/specials/nova-promise-solar-power/': 'test_fixtures/nova_specials_nova-promise-solar-power.json',
'https://media.services.pbs.org/api/v1/specials/nova-putting-energy-use/': 'test_fixtures/nova_specials_nova-putting-energy-use.json',
'https://media.services.pbs.org/api/v1/specials/nova-rare-earth-elements/': 'test_fixtures/nova_specials_nova-rare-earth-elements.json',
'https://media.services.pbs.org/api/v1/specials/nova-rnai-explained/': 'test_fixtures/nova_specials_nova-rnai-explained.json',
'https://media.services.pbs.org/api/v1/specials/nova-robot-arm/': 'test_fixtures/nova_specials_nova-robot-arm.json',
'https://media.services.pbs.org/api/v1/specials/nova-rock-time-capsules/': 'test_fixtures/nova_specials_nova-rock-time-capsules.json',
'https://media.services.pbs.org/api/v1/specials/nova-rockies-20/': 'test_fixtures/nova_specials_nova-rockies-20.json',
'https://media.services.pbs.org/api/v1/specials/nova-sail-wing/': 'test_fixtures/nova_specials_nova-sail-wing.json',
'https://media.services.pbs.org/api/v1/specials/nova-sandstone-top-canyon/': 'test_fixtures/nova_specials_nova-sandstone-top-canyon.json',
'https://media.services.pbs.org/api/v1/specials/nova-sandy-raw-1-2/': 'test_fixtures/nova_specials_nova-sandy-raw-1-2.json',
'https://media.services.pbs.org/api/v1/specials/nova-sandy-raw-5-1/': 'test_fixtures/nova_specials_nova-sandy-raw-5-1.json',
'https://media.services.pbs.org/api/v1/specials/nova-secrets-sky-tombs/': 'test_fixtures/nova_specials_nova-secrets-sky-tombs.json',
'https://media.services.pbs.org/api/v1/specials/nova-secrets-sun/': 'test_fixtures/nova_specials_nova-secrets-sun.json',
'https://media.services.pbs.org/api/v1/specials/nova-semi-precious-pebbles/': 'test_fixtures/nova_specials_nova-semi-precious-pebbles.json',
'https://media.services.pbs.org/api/v1/specials/nova-severe-storms/': 'test_fixtures/nova_specials_nova-severe-storms.json',
'https://media.services.pbs.org/api/v1/specials/nova-shape-memory-alloys/': 'test_fixtures/nova_specials_nova-shape-memory-alloys.json',
'https://media.services.pbs.org/api/v1/specials/nova-shelley/': 'test_fixtures/nova_specials_nova-shelley.json',
'https://media.services.pbs.org/api/v1/specials/nova-shrinking-chips/': 'test_fixtures/nova_specials_nova-shrinking-chips.json',
'https://media.services.pbs.org/api/v1/specials/nova-silicon-transistors/': 'test_fixtures/nova_specials_nova-silicon-transistors.json',
'https://media.services.pbs.org/api/v1/specials/nova-slime-clothes/': 'test_fixtures/nova_specials_nova-slime-clothes.json',
'https://media.services.pbs.org/api/v1/specials/nova-slips/': 'test_fixtures/nova_specials_nova-slips.json',
'https://media.services.pbs.org/api/v1/specials/nova-slow-goldfish/': 'test_fixtures/nova_specials_nova-slow-goldfish.json',
'https://media.services.pbs.org/api/v1/specials/nova-sneak-peek-making-stuff-faster/': 'test_fixtures/nova_specials_nova-sneak-peek-making-stuff-faster.json',
'https://media.services.pbs.org/api/v1/specials/nova-sneak-peek-making-stuff-wilder/': 'test_fixtures/nova_specials_nova-sneak-peek-making-stuff-wilder.json',
'https://media.services.pbs.org/api/v1/specials/nova-soft-story-buildings/': 'test_fixtures/nova_specials_nova-soft-story-buildings.json',
'https://media.services.pbs.org/api/v1/specials/nova-solar-leaves/': 'test_fixtures/nova_specials_nova-solar-leaves.json',
'https://media.services.pbs.org/api/v1/specials/nova-solving-storage-problem/': 'test_fixtures/nova_specials_nova-solving-storage-problem.json',
'https://media.services.pbs.org/api/v1/specials/nova-steffens-plan/': 'test_fixtures/nova_specials_nova-steffens-plan.json',
'https://media.services.pbs.org/api/v1/specials/nova-stromatolies-were-kings/': 'test_fixtures/nova_specials_nova-stromatolies-were-kings.json',
'https://media.services.pbs.org/api/v1/specials/nova-superconductor/': 'test_fixtures/nova_specials_nova-superconductor.json',
'https://media.services.pbs.org/api/v1/specials/nova-swarm-algorithm/': 'test_fixtures/nova_specials_nova-swarm-algorithm.json',
'https://media.services.pbs.org/api/v1/specials/nova-swarming-robots/': 'test_fixtures/nova_specials_nova-swarming-robots.json',
'https://media.services.pbs.org/api/v1/specials/nova-tearing-north-america-apart/': 'test_fixtures/nova_specials_nova-tearing-north-america-apart.json',
'https://media.services.pbs.org/api/v1/specials/nova-the-fabric-of-the-cosmos-what-is-space/': 'test_fixtures/nova_specials_nova-the-fabric-of-the-cosmos-what-is-space.json',
'https://media.services.pbs.org/api/v1/specials/nova-therapeutic-hypothermia-2/': 'test_fixtures/nova_specials_nova-therapeutic-hypothermia-2.json',
'https://media.services.pbs.org/api/v1/specials/nova-therapeutic-hypothermia-3/': 'test_fixtures/nova_specials_nova-therapeutic-hypothermia-3.json',
'https://media.services.pbs.org/api/v1/specials/nova-therapeutic-hypothermia-4/': 'test_fixtures/nova_specials_nova-therapeutic-hypothermia-4.json',
'https://media.services.pbs.org/api/v1/specials/nova-therapeutic-hypothermia/': 'test_fixtures/nova_specials_nova-therapeutic-hypothermia.json',
'https://media.services.pbs.org/api/v1/specials/nova-traveling-salesman/': 'test_fixtures/nova_specials_nova-traveling-salesman.json',
'https://media.services.pbs.org/api/v1/specials/nova-treasures-earth-gems/': 'test_fixtures/nova_specials_nova-treasures-earth-gems.json',
'https://media.services.pbs.org/api/v1/specials/nova-treasures-earth-metals/': 'test_fixtures/nova_specials_nova-treasures-earth-metals.json',
'https://media.services.pbs.org/api/v1/specials/nova-treasures-earth-power-pro/': 'test_fixtures/nova_specials_nova-treasures-earth-power-pro.json',
'https://media.services.pbs.org/api/v1/specials/nova-treasures-metals-eiffel-tower/': 'test_fixtures/nova_specials_nova-treasures-metals-eiffel-tower.json',
'https://media.services.pbs.org/api/v1/specials/nova-verizon-megastorm/': 'test_fixtures/nova_specials_nova-verizon-megastorm.json',
'https://media.services.pbs.org/api/v1/specials/nova-virus-battery/': 'test_fixtures/nova_specials_nova-virus-battery.json',
'https://media.services.pbs.org/api/v1/specials/nova-volcanoes-sand/': 'test_fixtures/nova_specials_nova-volcanoes-sand.json',
'https://media.services.pbs.org/api/v1/specials/nova-what-made-1918-flu-so-deadly/': 'test_fixtures/nova_specials_nova-what-made-1918-flu-so-deadly.json',
'https://media.services.pbs.org/api/v1/specials/nova-wheat-straw-plastic/': 'test_fixtures/nova_specials_nova-wheat-straw-plastic.json',
'https://media.services.pbs.org/api/v1/specials/nova-why-so-many-cloud-types/': 'test_fixtures/nova_specials_nova-why-so-many-cloud-types.json',
'https://media.services.pbs.org/api/v1/specials/school-future-spanish-version-tu3i4f/': 'test_fixtures/nova_specials_school-future-spanish-version-tu3i4f.json'
}
|
from enum import Enum
import numpy as np
class KEY(Enum):
    """Keyboard/action identifiers for the grid-world environment.

    Values are plain ints (plus ``'q'`` for QUIT).  NOTE: the original
    definition had a trailing comma after each member, which silently made
    every ``.value`` a one-element tuple such as ``(0,)``; the commas are
    removed so ``KEY.UP.value == 0``.  ``USE_1`` shares the value 5 with
    ``TRANSFORM`` and is therefore an alias for it.
    """
    UP = 0
    DOWN = 1
    LEFT = 2
    RIGHT = 3
    PICKUP = 4
    TRANSFORM = 5
    USE_1 = 5  # alias of TRANSFORM (same value, Enum aliasing)
    USE_2 = 6
    USE_3 = 7
    USE_4 = 8
    USE_5 = 9
    QUIT = 'q'
# RGB color tuples (R, G, B) used for rendering.
WHITE = (255, 255, 255)
LIGHT = (196, 196, 196)
GREEN = (80, 160, 0)
DARK = (128, 128, 128)
DARK_RED = (139, 0, 0)
BLACK = (0, 0, 0)
# The four directional movement actions.
MOVE_ACTS = {KEY.UP, KEY.DOWN, KEY.LEFT, KEY.RIGHT}
# Grid-cell type codes.
EMPTY = -1
AGENT = 0
BLOCK = 1
WATER = 2
OBJ_BIAS = 3  # NOTE(review): presumably the id offset where object types start — confirm against the environment code
# Object interaction kinds.
TYPE_PICKUP = 0
TYPE_TRANSFORM = 1
def get_id_from_ind_multihot(indexed_tensor, mapping, max_dim):
    """Scatter-add the entries of *indexed_tensor* into a multi-hot vector.

    Each entry of *indexed_tensor* is accumulated into the output position
    given by *mapping* (duplicate targets are summed via ``np.add.at``).

    Args:
        indexed_tensor: 1-D (or 2-D batched) numpy array of values/counts.
        mapping: dict or numpy array mapping source indices to target ids.
        max_dim: size of the output id dimension.

    Returns:
        ``np.byte`` array of shape ``(max_dim,)``, or ``(nbatch, max_dim)``
        for 2-D input.
    """
    if isinstance(mapping, dict):
        # Densify the dict into an index array.  BUG FIX: ``np.long`` was
        # removed from modern NumPy; ``np.int64`` is the equivalent.
        mapping_ = np.zeros(max(mapping.keys()) + 1, dtype=np.int64)
        for src, dst in mapping.items():
            mapping_[src] = dst
        mapping = mapping_
    if indexed_tensor.ndim == 2:
        nbatch = indexed_tensor.shape[0]
        # BUG FIX: ``np.zeros(nbatch, max_dim)`` passed ``max_dim`` as the
        # dtype argument; the shape must be a single tuple.
        out = np.zeros((nbatch, max_dim)).astype(np.byte)
        # NOTE(review): ``add.at`` with a 1-D index on a 2-D target indexes
        # whole rows — confirm callers rely on this batched layout.
        np.add.at(out, mapping.ravel(), indexed_tensor.ravel())
    else:
        out = np.zeros(max_dim).astype(np.byte)
        np.add.at(out, mapping.ravel(), indexed_tensor.ravel())
    return out
|
#! /usr/bin/env python
from distutils.core import setup, Extension
import os
import sys
# Build configuration for the if3_kernel extension module.
# Layout: C++ sources under <top>/python, headers under <top>/include,
# and the pre-built kernel library under $LIB_DIR.
pwd = os.getcwd()
project_top_dir = os.getenv('PROJECT_TOP_DIR')
if project_top_dir is None:
    # BUG FIX: os.path.join(pwd, '/../') returned '/../' because joining
    # with an absolute component discards everything before it; the intent
    # is the parent of the current working directory.
    project_top_dir = os.path.abspath(os.path.join(pwd, os.pardir))
include_dir = os.path.join(project_top_dir, 'include')
src_python_dir = os.path.join(project_top_dir, 'python')
lib_dir = os.getenv('LIB_DIR')
if lib_dir is None:
    lib_dir = os.path.join(project_top_dir, 'tmp-target/build/lib')
ccflags = os.getenv('CCFLAGS')  # NOTE(review): currently unused — confirm whether it should feed extra_compile_args
if ccflags is None:
    ccflags = ''
module_sources = ['if3_kernel.cpp']
if3kernel = Extension(
    'if3_kernel',
    include_dirs=[include_dir],
    library_dirs=[lib_dir],
    libraries=['if3kernel'],
    sources=[os.path.join(src_python_dir, src) for src in module_sources],
)
setup(name='if3_kernel',
      version='3.0',
      description='Internet Filter Kernel by Turner and Sons Productions, Inc.',
      ext_modules=[if3kernel])
|
from setuptools import find_packages, setup
def requirements(filename):
    """Return the requirement lines read from *filename*, one per entry."""
    with open(filename) as req_file:
        return req_file.read().splitlines()
# Distribution metadata for the Contrail vCenter Manager service.
setup(
    name="contrail-vcenter-manager",
    version="0.1dev",
    packages=find_packages(),
    # Ship template/config assets found alongside any package.
    package_data={'': ['*.html', '*.css', '*.xml', '*.yml']},
    zip_safe=False,
    long_description="Contrail vCenter Manager",
    install_requires=["six", "future"],
    entry_points = {
        'console_scripts' : [
            # Installs the `contrail-vcenter-manager` command, which runs
            # cvm.__main__:server_main.
            'contrail-vcenter-manager = cvm.__main__:server_main',
        ],
    },
)
|
import json
def loadFile(filename, base_dir='./sophyAssistant/src/'):
    """Load and return the JSON document at ``base_dir + filename``.

    Args:
        filename: name of the JSON file to read.
        base_dir: prefix prepended to *filename*; defaults to the
            assistant's source directory for backward compatibility.

    Returns:
        The deserialized JSON object.
    """
    with open(base_dir + filename) as json_file:
        data = json.load(json_file)
        return data
'''
re模板:2.2.1
requests模板:2.18.4
bs4模板:4.6.0
json模板:2.0.9
爬取百度新闻所有的新闻的前1页 标题和URL地址
'''
import requests
import json
from bs4 import BeautifulSoup
import re
# Fetch one result page and print the title and URL of every news item.
def getPageInfo(url, page):
    """Fetch page *page* of the AJAX endpoint *url* and print each item."""
    page_url = url + str(page)
    response = requests.get(page_url)
    payload = json.loads(response.text)
    for item in payload['data']['list']:
        print("\t新闻标题=" + item['title'] + "\t" + "新闻地址=" + item['url'])
# Print the news items belonging to one category.
def getInfo(classInfo):
    """Print the first page(s) of news for the category *classInfo*."""
    print("种类是:" + classInfo)
    # The "推荐" (recommended) category uses a different URL scheme than the
    # other categories, so it is handled separately.
    if classInfo == '推荐':
        url = 'http://jian.news.baidu.com/ajax/list?type=chosen/推荐&pn='
        getPageInfo(url, 1)
    else:
        url = 'http://jian.news.baidu.com/ajax/list?type=info/{}&pn='.format(classInfo)
        # Pagination: only the first page is fetched; widen this range to
        # retrieve more pages.
        for page in range(1, 2):
            getPageInfo(url, page)
# Discover the available news categories.
def getClassInfo():
    """Scrape the Baidu news front page and return the category names.

    The menu is embedded in an inline ``<script>`` of the form::

        var menulist = []; menulist.push({...}); ... window.menulist = menulist;

    The JavaScript scaffolding is stripped until only ';'-separated JSON
    objects remain, then each object's ``topic`` field is collected.

    Returns:
        list of category name strings.
    """
    topics = []
    res = requests.get('http://jian.news.baidu.com/')
    res.encoding = 'utf-8'
    soup = BeautifulSoup(res.text, 'html.parser')
    data = soup.select('script')[2].text
    # Remove all whitespace so the remaining patterns are unambiguous.
    sea = re.sub(r'\s', "", data)
    # BUG FIX: str.rstrip(chars) strips a *character set*, not a suffix
    # (it only worked by accident because the preceding ')' is not in the
    # set); remove the exact trailing statement instead.
    suffix = "window.menulist=menulist;"
    if sea.endswith(suffix):
        sea = sea[:-len(suffix)]
    ss = re.sub(r'varmenulist=\[\];', "", sea)
    ss = re.sub(r'menulist\.push\(', "", ss)
    ss = re.sub(r'\);', ";", ss)
    ss = re.sub(r'\)', "", ss)
    ss = re.sub(r'\'', "\"", ss)
    for one in ss.split(';'):
        # A trailing ';' produces an empty segment — json.loads('') raises.
        if not one:
            continue
        jd = json.loads(one)
        topics.append(jd['topic'])
    return topics
# Driver: fetch all category names, then print the first page of news
# for each category.
listt = getClassInfo()
it = iter(listt)
for one in it:
    getInfo(one)
|
import abjad
from .underfull_duration import underfull_duration
def selection_is_full(selection: abjad.Selection) -> bool:
    r"""Returns a :obj:`bool` representing whether the last measure of an input
    |abjad.Selection| is fully filled in or not.

    Basic usage:
        Returns ``True`` if the last measure of a selection is full, otherwise
        returns ``False``. If no time signature is encountered at the
        beginning, it uses LilyPond's convention and considers the container
        as in 4/4.

        >>> container1 = abjad.Container(r"c'4 d'4 e'4 f'4")
        >>> container2 = abjad.Container(r"c'4 d'4 e'4")
        >>> container3 = abjad.Container(r"c'4 d'4 e'4 f'4 | c'4")
        >>> container4 = abjad.Container(r"c'4 d'4 e'4 f'4 | c'4 d'4 e'4 f'4")
        >>> auxjad.get.selection_is_full(container1[:])
        True
        >>> auxjad.get.selection_is_full(container2[:])
        False
        >>> auxjad.get.selection_is_full(container3[:])
        False
        >>> auxjad.get.selection_is_full(container4[:])
        True

    .. note::

        Auxjad automatically adds this function as an extension function to
        |abjad.get|. It can thus be used from either |auxjad.get|_ or
        |abjad.get| namespaces. Therefore, the two lines below are equivalent:

        >>> container = abjad.Container(r"c'4 d'4 e'4 f'4")
        >>> auxjad.get.selection_is_full(container[:])
        True
        >>> abjad.get.selection_is_full(container[:])
        True

    Time signature changes:
        Handles any time signatures as well as changes of time signature.

        >>> container1 = abjad.Container(r"\time 4/4 c'4 d'4 e'4 f'4")
        >>> container2 = abjad.Container(r"\time 3/4 a2. \time 2/4 r2")
        >>> container3 = abjad.Container(r"\time 5/4 g1 ~ g4 \time 4/4 af'2")
        >>> container4 = abjad.Container(r"\time 6/8 c'2 ~ c'8")
        >>> auxjad.get.selection_is_full(container1[:])
        True
        >>> auxjad.get.selection_is_full(container2[:])
        True
        >>> auxjad.get.selection_is_full(container3[:])
        False
        >>> auxjad.get.selection_is_full(container4[:])
        False

    Partial time signatures:
        Correctly handles partial time signatures.

        >>> container = abjad.Container(r"c'4 d'4 e'4 f'4")
        >>> time_signature = abjad.TimeSignature((3, 4), partial=(1, 4))
        >>> abjad.attach(time_signature, container[0])
        >>> auxjad.get.selection_is_full(container[:])
        True

    Multi-measure rests:
        It also handles multi-measure rests.

        >>> container1 = abjad.Container(r"R1")
        >>> container2 = abjad.Container(r"\time 3/4 R1 * 3/4 \time 2/4 r2")
        >>> container3 = abjad.Container(r"\time 5/4 R1 * 5/4 \time 4/4 g''4")
        >>> container4 = abjad.Container(r"\time 6/8 R1 * 1/2")
        >>> auxjad.get.selection_is_full(container1[:])
        True
        >>> auxjad.get.selection_is_full(container2[:])
        True
        >>> auxjad.get.selection_is_full(container3[:])
        False
        >>> auxjad.get.selection_is_full(container4[:])
        False

    .. error::

        If a selection is malformed, i.e. it has an underfilled measure before
        a time signature change, the function raises a :exc:`ValueError`
        exception. This is also the case when a selection starts in the middle
        of a measure.

        >>> container = abjad.Container(r"\time 5/4 g''1 \time 4/4 f'1")
        >>> auxjad.get.selection_is_full(container[:])
        ValueError: 'selection' is malformed, with an underfull measure
        preceding a time signature change

    .. warning::

        The input selection must be a contiguous logical voice. When dealing
        with a container with multiple subcontainers (e.g. a score containing
        multiple staves), the best approach is to cycle through these
        subcontainers, applying this function to them individually.
    """
    # Only whole abjad.Selection objects are meaningful here; reject
    # anything else early with a clear error.
    if not isinstance(selection, abjad.Selection):
        raise TypeError("argument must be 'abjad.Selection'")
    # The measure arithmetic below assumes one uninterrupted logical voice.
    if not selection.leaves().are_contiguous_logical_voice():
        raise ValueError("argument must be contiguous logical voice")
    # The last measure is full exactly when no duration is missing from it.
    return underfull_duration(selection) == abjad.Duration(0)
|
import string
import secrets
# Alphabet: ASCII letters and digits (62 symbols).  The original built a
# set union, whose iteration order changes between runs due to string hash
# randomization; the deterministic string constants are equivalent.
population = string.ascii_letters + string.digits
len_password = 20
print(f"Population size: {len(population)}. Output size: {len_password}.")
# secrets.choice draws each character independently from the OS CSPRNG;
# sampling is with replacement, matching the original random.choices call.
print("".join(secrets.choice(population) for _ in range(len_password)))
|
import os
import sys
import traceback
from PyQt5.QtWidgets import QMainWindow, QDesktopWidget, QApplication, \
QFileDialog, QMessageBox
from PyQt5 import uic
from PyQt5.QtCore import QUrl, QSettings, Qt
from PyQt5.QtGui import QDesktopServices
from copasi_petab_importer import convert_petab
class PETabGui(QMainWindow):
def __init__(self):
# super().__init__()
QMainWindow.__init__(self)
self.dir = None
self.model_dir = None
self.model = None
self.out_dir = None
self.show_progress = True
self.show_result = True
self.show_result_per_experiment = False
self.show_result_per_dependent = False
self.write_report = False
self.ui = uic.loadUi(os.path.join(os.path.dirname(__file__), 'petab.ui'), self)
self.center()
self.load_settings()
self.load_model_dirs()
self.show()
def closeEvent(self, event):
self.save_settings()
@staticmethod
def _get_user_dir():
home = os.getenv("HOME")
if home is not None:
return home
from pathlib import Path
return Path.home()
def load_settings(self):
settings = QSettings(os.path.join(PETabGui._get_user_dir(), ".petab.ini"), QSettings.IniFormat)
benchmark_dir = '../benchmarks/hackathon_contributions_new_data_format'
self.dir = settings.value("dir", benchmark_dir)
self.model_dir = settings.value("model_dir", r'Becker_Science2010')
self.model = settings.value("model", 'Becker_Science2010__BaF3_Exp')
self.out_dir = settings.value("out_dir", './out')
self.show_progress = settings.value("show_progress", True, type=bool)
self.show_result = settings.value("show_result", True, type=bool)
self.show_result_per_experiment = settings.value("show_result_per_experiment", False, type=bool)
self.show_result_per_dependent = settings.value("show_result_per_dependent", False, type=bool)
self.write_report = settings.value("write_report", False, type=bool)
self.ui.txtDir.setText(self.dir)
self.ui.txtOutDir.setText(self.out_dir)
self.ui.chkPlotProgressOfFit.setChecked(self.show_progress)
self.ui.chkPlotResult.setChecked(self.show_result)
self.ui.chkPlotResultPerExperiment.setChecked(self.show_result_per_experiment)
self.ui.chkPlotResultPerDependent.setChecked(self.show_result_per_dependent)
self.ui.chkWriteReport.setChecked(self.write_report)
def save_settings(self):
settings = QSettings(os.path.join(PETabGui._get_user_dir(), ".petab.ini"), QSettings.IniFormat)
settings.setValue("dir", self.dir)
settings.setValue("model_dir", self.model_dir)
settings.setValue("model", self.model)
settings.setValue("out_dir", self.out_dir)
settings.setValue("show_progress", self.show_progress)
settings.setValue("show_result", self.show_result)
settings.setValue("show_result_per_experiment", self.show_result_per_experiment)
settings.setValue("show_result_per_dependent", self.show_result_per_dependent)
settings.setValue("write_report", self.write_report)
def slotOpenModelDir(self):
url = QUrl.fromLocalFile(os.path.join(self.dir, self.model_dir))
QDesktopServices.openUrl(url)
def slotOpenInCOPASI(self):
QDesktopServices.openUrl(
QUrl.fromLocalFile(
os.path.join(self.out_dir, os.path.splitext(self.model)[0] + '.cps')))
def slotSetBenchmarkDir(self, dir):
if not os.path.exists(dir):
return
self.dir = dir
self.ui.txtDir.setText(dir)
self.load_model_dirs()
def slotSetModelDir(self, model_dir):
self.model_dir = model_dir
items = self.ui.lstModelDirs.findItems(self.model_dir,
Qt.MatchFixedString)
if len(items) > 0:
self.ui.lstModelDirs.setCurrentItem(items[0])
self.load_models()
def slotSetModel(self, model):
self.model = model
self.setWindowFilePath(model)
items = self.ui.lstModels.findItems(self.model, Qt.MatchFixedString)
if len(items) > 0:
self.ui.lstModels.setCurrentItem(items[0])
def slotSetOutputDir(self, out_dir):
self.out_dir = out_dir
self.ui.txtOutDir.setText(out_dir)
def slotBrowseBenchmarkDir(self):
result = QFileDialog.getExistingDirectory(self,
'Select Benchmark dir',
self.dir)
if result is None:
return
self.slotSetBenchmarkDir(result)
def slotModelDirSelected(self):
selected = self.ui.lstModelDirs.currentItem()
if selected is None:
self.model_dir = None
else:
self.model_dir = selected.text()
self.slotSetModelDir(self.model_dir)
def load_model_dirs(self):
self.ui.lstModelDirs.clear()
if self.dir is None or not os.path.exists(self.dir):
return
for (dirpath, dirnames, filenames) in os.walk(self.dir):
self.ui.lstModelDirs.addItems(sorted(dirnames))
break # only from top level
if self.model_dir is not None:
self.slotSetModelDir(self.model_dir)
def load_models(self):
self.ui.lstModels.clear()
self.model = None
full_dir = os.path.join(self.dir, self.model_dir)
if self.model_dir is None or not os.path.exists(full_dir):
self.ui.wdgDetail.setEnabled(False)
return
self.ui.wdgDetail.setEnabled(True)
yaml = None
for (dirpath, dirnames, filenames) in os.walk(full_dir):
for file in sorted(filenames):
if file.startswith('model_'):
file = file[6:]
if file.endswith('.xml'):
file = file[:-4]
self.ui.lstModels.addItem(file)
self.model = file
if file.endswith('.yaml') and not '_solution' in file:
self.ui.lstModels.addItem(file)
yaml = file
break # skip other dirs
if yaml is not None:
self.model = yaml
self.slotSetModel(yaml)
elif self.model is not None:
self.slotSetModel(self.model)
def slotModelSelected(self):
selected = self.ui.lstModels.currentItem()
if selected is None:
return
self.slotSetModel(selected.text())
def slotBrowseOutputDir(self):
result = QFileDialog.getExistingDirectory(self,
'Select Output Folder',
self.out_dir)
if result is None:
return
self.slotSetOutputDir(result)
    def slotConvert(self):
        """Convert the selected PEtab model and display the produced data.

        Reads the plotting/report options from the UI, runs the PEtab
        converter into the output directory and loads the converter's
        experimental data file into the text view.  Any error is shown in
        a message box instead of crashing the GUI.
        """
        # Disable the buttons while the (potentially long) conversion runs.
        self.ui.cmdConvert.setEnabled(False)
        self.ui.cmdOpenInCOPASI.setEnabled(False)
        QApplication.processEvents()
        try:
            self.out_dir = self.ui.txtOutDir.text()
            self.show_progress = self.ui.chkPlotProgressOfFit.isChecked()
            self.show_result = self.ui.chkPlotResult.isChecked()
            self.show_result_per_experiment = self.ui.chkPlotResultPerExperiment.isChecked()
            self.show_result_per_dependent = self.ui.chkPlotResultPerDependent.isChecked()
            self.write_report = self.ui.chkWriteReport.isChecked()
            if not os.path.exists(self.out_dir):
                os.makedirs(self.out_dir, exist_ok=True)
            full_dir = os.path.join(self.dir, self.model_dir)
            converter = convert_petab.PEtabConverter(full_dir, self.model,
                                                     self.out_dir, os.path.splitext(self.model)[0])
            converter.transform_data = self.ui.chkTransformData.isChecked()
            converter.show_progress_of_fit = self.show_progress
            converter.show_result = self.show_result
            converter.show_result_per_experiment = self.show_result_per_experiment
            converter.show_result_per_dependent = self.show_result_per_dependent
            converter.save_report = self.write_report
            converter.convert()
            if converter.experimental_data_file is not None:
                with open(converter.experimental_data_file, 'r') as data:
                    text = data.read()
                self.ui.txtData.document().setPlainText(text)
        except BaseException:
            # Catch everything (incl. KeyboardInterrupt/SystemExit) so the
            # GUI stays alive; show the traceback to the user instead.
            msg = traceback.format_exc()
            QMessageBox.critical(self, 'Error converting', msg)
        # Re-enable the buttons regardless of success or failure.
        self.ui.cmdConvert.setEnabled(True)
        self.ui.cmdOpenInCOPASI.setEnabled(True)
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def petab_gui():
    """Entry point: create the PEtab GUI and run the Qt event loop."""
    app = QApplication(sys.argv)
    # Keep a reference to the widget so it is not garbage collected.
    widget = PETabGui()
    sys.exit(app.exec_())
if __name__ == '__main__':
    petab_gui()
|
from flask import Flask, render_template, jsonify
from flask_cors import CORS
import os
app = Flask(__name__)
# Port is configurable via the PORT environment variable (default 9099).
port = int(os.getenv("PORT", 9099))
CORS(app)
@app.route('/')
def index():
    """Serve the landing page."""
    return render_template('index.html'), 200
@app.route('/section.html')
def section():
    """Serve the section page."""
    return render_template('section.html'), 200
if __name__ == '__main__':
    # Bind on all interfaces so the app is reachable from outside localhost.
    app.run(host='0.0.0.0', port=port)
|
# Shades of grey demo: draw 16 horizontal bands (levels 0..15), each
# 6 px tall, across a 96-px-wide greyscale display.
display.fill(0)
for r in range(0, 16):
    # fill_rect(x, y, width, height, colour) — colour == band index
    display.framebuf.fill_rect(0, r*6, 96, 6, r)
display.show()
|
import pytest
from flask import g, session
def test_login(client, auth):
    """Login page renders and a successful login populates the session."""
    # test that viewing the page renders without template errors
    response = client.get('/auth/login')
    assert response.status_code == 200
    assert b"Login" in response.data
    assert b"Username" in response.data
    assert b"Password" in response.data
    # test that successful login redirects to the index page
    response = auth.login()
    assert response.headers['Location'] == 'http://localhost/'
    # login request set the user_id in the session
    # check that the user is loaded from the session
    # session/g are only accessible inside a request context, hence `with client:`
    with client:
        client.get('/')
        assert session['user_id'] == 2
        assert g.user['name'] == 'test1'
@pytest.mark.parametrize(('username', 'password', 'message'), (
    ('wrong_user', 'test', b'Incorrect username.'),
    ('test1', 'wrong_password', b'Incorrect password.'),
    ('', 'test', b'Incorrect username.'),
    ('test1', '', b'Incorrect password.'),
))
def test_login_validate_input(auth, username, password, message):
    """Invalid credentials are rejected with the expected flash message."""
    response = auth.login(username, password)
    assert message in response.data
def test_logout(client, auth):
    """Logging out removes user_id from the session."""
    auth.login()
    with client:
        auth.logout()
        assert 'user_id' not in session
def test_passwd_update_nologin(client):
    """Password update without login redirects to the login page."""
    response = client.post(
        '/auth/passwd/update',
        follow_redirects=True,
        data={'password1': '123456', 'password2': '123456'}
    )
    assert response.status_code == 200
    # The login form markers confirm we landed on the login page.
    assert b'Login' in response.data
    assert b'Username' in response.data
    assert b'Password' in response.data
def test_passwd_update_userlogin(client, auth):
    """A logged-in user can change the password and log in with the new one."""
    auth.login()
    response = client.post(
        '/auth/passwd/update',
        follow_redirects=True,
        data={'password1': '123456', 'password2': '123456'}
    )
    assert response.status_code == 200
    assert b'Home' in response.data
    # Logging in again with the updated password must succeed.
    response = client.post(
        '/auth/login',
        follow_redirects=True,
        data={'username': 'test1', 'password': '123456'}
    )
    assert response.status_code == 200
    assert b'Home' in response.data
@pytest.mark.parametrize(('password1', 'password2', 'message'), (
    ('', '', b'Passwords are not equal.'),
    ('123456', '', b'Passwords are not equal.'),
    ('', '123456', b'Passwords are not equal.'),
    ('123', '456', b'Passwords are not equal.'),
))
# NOTE(review): "udpate" in the function name is a typo; kept to avoid
# renaming a collected test.
def test_passwd_udpate_validate_input(client, auth, password1, password2, message):
    """Mismatching or empty password pairs are rejected with a message."""
    auth.login()
    response = client.post(
        '/auth/passwd/update',
        follow_redirects=True,
        data={'password1': password1, 'password2': password2}
    )
    assert message in response.data
|
from notipy_me import Notipy
from repairing_genomic_gaps import cae_500, build_multivariate_dataset_cae, train_model
if __name__ == "__main__":
    # Notipy sends a notification e-mail when the enclosed block finishes.
    with Notipy():
        model = cae_500()
        # Window size 500 matches the cae_500 architecture.
        train, test = build_multivariate_dataset_cae(500)
        model = train_model(model, train, test, path="multivariate_gaps")
|
import re
class gtf_data:
    """Exon/intron structure parsed from a GTF annotation file.

    Attributes:
        has_introns: set of transcript ids with more than one exon.
        intron_list: transcript id -> list of [start, end] intron coordinates.
        txpt_ranges: transcript id -> [first exon start, last exon end].
        exons_by_txpt: transcript id -> list of exon dicts (start/end/strand).
    All stored coordinates are 0-based (shifted from the 1-based GTF input).
    """
    def __init__(self, gtf_filename='lib/Saccharomyces_cerevisiae.EF4.70.gtf'):
        self.has_introns = set()
        self.intron_list = {}
        self.txpt_ranges = {}
        self.read_file(gtf_filename)
    def read_file(self, gtf_filename):
        """Read a GTF file and populate the per-transcript exon/intron maps."""
        # GTF files are 1-based. HTSeq genomic arrays are 0-based.
        # We move all coordinates back by one so they will
        # be treated as 0-based.
        with open(gtf_filename, 'r') as f:
            self.exons_by_txpt = {}
            for li in f:
                s = li.rstrip('\n').split('\t')
                m = re.search("transcript_id ([^;]+);", li)
                if m is None:
                    # print() as a function so the module also runs under
                    # Python 3 (the original print statement did not).
                    print("Error on line %s" % li)
                    continue
                txpt_id = m.group(1).replace('"', '')
                if s[2] == "exon":
                    this_exon = {
                        'start': int(s[3]) - 1,
                        'end': int(s[4]) - 1,
                        'strand': s[6]}
                    self.exons_by_txpt.setdefault(txpt_id, [])
                    self.exons_by_txpt[txpt_id].append(this_exon)
        for txpt in self.exons_by_txpt:
            if len(self.exons_by_txpt[txpt]) > 1:
                self.has_introns.add(txpt)
                self.intron_list[txpt] = []
                # Sentinel is None (not 0) so a first exon that ends at
                # coordinate 0 still yields an intron before the next exon.
                last_end = None
                for exon in sorted(self.exons_by_txpt[txpt],
                                   key=lambda x: x['start']):
                    if last_end is not None:
                        an_intron = [last_end, exon['start']]
                        self.intron_list[txpt].append(an_intron)
                    last_end = exon['end']
            first_exon_start = min(
                [x['start'] for x in self.exons_by_txpt[txpt]])
            last_exon_end = max(
                [x['end'] for x in self.exons_by_txpt[txpt]])
            self.txpt_ranges[txpt] = [first_exon_start, last_exon_end]
|
# Acorn tree reactor | edelstein
REQUEST_FROM_A_DOCTOR = 23003  # presumably the quest id — confirm against quest data
WHOLE_ACORN = 4034738  # presumably the item id — confirm against item data
# On the third hit: drop an acorn at the reactor's position when the quest
# is active and the player holds fewer than 2 acorns, then remove the reactor.
reactor.incHitCount()
if reactor.getHitCount() >= 3:
    if sm.hasQuest(REQUEST_FROM_A_DOCTOR) and not sm.hasItem(WHOLE_ACORN, 2):
        sm.dropItem(WHOLE_ACORN, sm.getPosition(objectID).getX(), sm.getPosition(objectID).getY())
    sm.removeReactor()
|
# Window that opens when you pick one of the removal options
from PyQt5.QtCore import QProcess
from PyQt5.QtWidgets import QDialog, QVBoxLayout, QLabel, QHBoxLayout, QPushButton, QPlainTextEdit
class RemoverWindow(QDialog):
    """Progress dialog that removes the selected kernels via ``pkexec vkpurge``."""
    def __init__(self, selected_kernels=None):
        # super(QDialog, self) skipped QDialog.__init__ in the MRO; plain
        # super() initialises the dialog class itself.
        super().__init__()
        # No mutable default argument: a shared list default would persist
        # across instances.
        if selected_kernels is None:
            selected_kernels = []
        self.setWindowTitle("Progress Window")
        self.setFixedSize(400, 200)
        self.main_layout = QVBoxLayout()
        self.title_label = QLabel("<h2>Removing Kernel(s)</h2>")
        self.status_label = QLabel("Please wait while the action is being performed...")
        bottom_layout = QHBoxLayout()
        self.close_button = QPushButton("Close")
        self.close_button.setEnabled(False)
        self.close_button.clicked.connect(self.close)
        bottom_layout.addStretch()
        bottom_layout.addWidget(self.close_button)
        self.output_text_edit = QPlainTextEdit()
        self.output_text_edit.setReadOnly(True)
        self.main_layout.addWidget(self.title_label)
        self.main_layout.addWidget(self.status_label)
        self.main_layout.addWidget(self.output_text_edit)
        self.main_layout.addLayout(bottom_layout)
        self.setLayout(self.main_layout)
        self.show()
        # Merge stderr into stdout so all vkpurge output lands in the log view.
        self.process = QProcess(None)
        self.process.setProcessChannelMode(QProcess.MergedChannels)
        self.process.readyReadStandardOutput.connect(self.read_process_output)
        self.process.finished.connect(self.on_process_finished)
        self.perform_kernel_removal(selected_kernels)
    def on_process_finished(self, exit_code, exit_status):
        """Update the status label once the removal process terminates."""
        if exit_code == 0:
            self.status_label.setText("Removal successfully completed!")
            self.output_text_edit.appendPlainText("Removal successful!")
        elif exit_code == 127:
            # pkexec exits with 127 when authorization is dismissed or fails.
            self.status_label.setText("Authorization failed. Please try again.")
        else:
            # Fixed "occured" typo in the user-facing message.
            self.status_label.setText("An error has occurred.")
        self.close_button.setEnabled(True)
    def perform_kernel_removal(self, selected_kernels=None):
        """Start ``pkexec vkpurge rm <kernels>`` ('all' when none are given)."""
        if selected_kernels is None:
            selected_kernels = []
        self.process.setProgram("pkexec")
        kernels_str = " ".join(selected_kernels)
        self.process.setArguments(
            ["vkpurge", "rm", kernels_str if selected_kernels else "all"])
        self.process.start()
    def read_process_output(self):
        """Append any pending process output to the log view."""
        output = bytearray(self.process.readAllStandardOutput())
        self.output_text_edit.appendPlainText(output.decode("UTF-8").strip())
|
import pytest
from mikefm_skill.model import ModelResult
from mikefm_skill.observation import PointObservation
from mikefm_skill.metrics import root_mean_squared_error, mean_absolute_error
@pytest.fixture
def klagshamn():
    """Point observation at Klagshamn (projected x/y coordinates)."""
    fn = "tests/testdata/smhi_2095_klagshamn.dfs0"
    return PointObservation(fn, item=0, x=366844, y=6154291, name="Klagshamn")
@pytest.fixture
def drogden():
    """Point observation at Drogden Fyr (no explicit name)."""
    fn = "tests/testdata/dmi_30357_Drogden_Fyr.dfs0"
    return PointObservation(fn, item=0, x=355568.0, y=6156863.0)
@pytest.fixture
def modelresult_oresund_2d():
    """2D Oresund model result shared by the tests in this module."""
    return ModelResult("tests/testdata/Oresund2D.dfsu")
def test_compound_skill(modelresult_oresund_2d, klagshamn, drogden):
    """Compound skill over two observations is positive."""
    mr = modelresult_oresund_2d
    mr.add_observation(klagshamn, item=0)
    mr.add_observation(drogden, item=0)
    collection = mr.extract()
    assert collection.compound_skill(metric=root_mean_squared_error) > 0.0
    # NOTE(review): the report is computed but never checked — presumably
    # only exercising that skill_report() runs without error; confirm intent.
    report = collection.skill_report(
        metrics=[root_mean_squared_error, mean_absolute_error]
    )
def test_compound_weighted_skill(modelresult_oresund_2d, klagshamn, drogden):
    """Zero-weighting one observation must change the compound skill."""
    mr = modelresult_oresund_2d
    mr.add_observation(klagshamn, item=0)
    mr.add_observation(drogden, item=0)
    c = mr.extract()
    unweighted_skill = c.compound_skill()
    # Use a FRESH ModelResult: the original `mrw = modelresult_oresund_2d`
    # aliased the very same fixture object, so the weighted observations
    # were piled on top of the unweighted ones in a single model result.
    mrw = ModelResult("tests/testdata/Oresund2D.dfsu")
    mrw.add_observation(klagshamn, item=0, weight=1.0)
    mrw.add_observation(drogden, item=0, weight=0.0)
    cw = mrw.extract()
    weighted_skill = cw.compound_skill()
    assert unweighted_skill != weighted_skill
|
#!/usr/bin/env python
"""Run a worker for the job queue.
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from rq import Worker
from byceps.application import create_app
from byceps.util.jobqueue import connection, get_queue
if __name__ == '__main__':
    app = create_app()
    # The queue connection and worker run inside the app context so that
    # jobs can use the application configuration.
    with app.app_context():
        with connection():
            queues = [get_queue(app)]
            worker = Worker(queues)
            # with_scheduler also processes scheduled (delayed) jobs.
            worker.work(with_scheduler=True)
|
#!/usr/bin/env python3
#
# Copyright (c) 2020 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
from pathlib import Path
import sys
import subprocess
import logging
CHIP_ROOT_DIR = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../..'))
class ZAPGenerateTarget:
    """One invocation of scripts/tools/zap/generate.py for a .zap config.

    Args:
        zap_config: path (str or os.PathLike) to the .zap configuration.
        template: optional template json passed via ``-t``.
        output_dir: optional output directory passed via ``-o``; created
            on demand before generating.
    """
    def __init__(self, zap_config, template=None, output_dir=None):
        self.script = './scripts/tools/zap/generate.py'
        # Normalise any os.PathLike object to str for subprocess.
        self.zap_config = str(zap_config)
        self.template = template
        if output_dir:
            # make sure we convert any os.PathLike object to string
            self.output_dir = str(output_dir)
        else:
            self.output_dir = None
    def generate(self):
        """Runs a ZAP generate command on the configured zap/template/outputs.
        """
        cmd = [self.script, self.zap_config]
        if self.template:
            cmd.append('-t')
            cmd.append(self.template)
        if self.output_dir:
            # exist_ok avoids a crash when two targets share an output
            # directory (or it appears between the check and the mkdir).
            os.makedirs(self.output_dir, exist_ok=True)
            cmd.append('-o')
            cmd.append(self.output_dir)
        logging.info("Generating target: %s" % " ".join(cmd))
        subprocess.check_call(cmd)
def checkPythonVersion():
    """Abort with an error message when running under Python 2."""
    major = sys.version_info[0]
    if major < 3:
        print('Must use Python 3. Current version is ' + str(major))
        exit(1)
def setupArgumentsParser():
    """Build the command-line parser and return the parsed arguments."""
    parser = argparse.ArgumentParser(
        description='Generate content from ZAP files')
    parser.add_argument(
        '--type', default='all', choices=['all', 'tests'],
        help='Choose which content type to generate (default: all)')
    parser.add_argument(
        '--tests', default='all',
        choices=['all', 'chip-tool', 'darwin', 'app1', 'app2'],
        help='When generating tests only target, Choose which tests to generate (default: all)')
    return parser.parse_args()
def getGlobalTemplatesTargets():
    """Collect generation targets for every example .zap plus controller-clusters.

    Example names are derived from the path segment after 'examples/';
    placeholder apps use the segment after 'apps/' instead and also get a
    second target with the dedicated placeholder template set.
    """
    targets = []
    for filepath in Path('./examples').rglob('*.zap'):
        example_name = filepath.as_posix()
        example_name = example_name[example_name.index('examples/') + 9:]
        example_name = example_name[:example_name.index('/')]
        # Place holder has apps within each build
        if example_name == "placeholder":
            example_name = filepath.as_posix()
            example_name = example_name[example_name.index(
                'apps/') + len('apps/'):]
            example_name = example_name[:example_name.index('/')]
            logging.info("Found example %s (via %s)" %
                         (example_name, str(filepath)))
            # The name zap-generated is to make includes clear by using
            # a name like <zap-generated/foo.h>
            output_dir = os.path.join(
                'zzz_generated', 'placeholder', example_name, 'zap-generated')
            template = 'examples/placeholder/templates/templates.json'
            targets.append(ZAPGenerateTarget(filepath, output_dir=output_dir))
            targets.append(
                ZAPGenerateTarget(filepath, output_dir=output_dir, template=template))
            continue
        logging.info("Found example %s (via %s)" %
                     (example_name, str(filepath)))
        # The name zap-generated is to make includes clear by using
        # a name like <zap-generated/foo.h>
        output_dir = os.path.join(
            'zzz_generated', example_name, 'zap-generated')
        targets.append(ZAPGenerateTarget(filepath, output_dir=output_dir))
    # The controller-clusters zap is always generated.
    targets.append(ZAPGenerateTarget(
        './src/controller/data_model/controller-clusters.zap',
        output_dir=os.path.join('zzz_generated/controller-clusters/zap-generated')))
    return targets
def getTestsTemplatesTargets(test_target):
    """Collect test-generation targets.

    Args:
        test_target: 'all' or a specific key (chip-tool, darwin, app1, app2).
    Returns:
        list of ZAPGenerateTarget for the selected test template sets.
    """
    templates = {
        'chip-tool': {
            'zap': 'src/controller/data_model/controller-clusters.zap',
            'template': 'examples/chip-tool/templates/tests/templates.json',
            'output_dir': 'zzz_generated/chip-tool/zap-generated'
        },
        'darwin': {
            'zap': 'src/controller/data_model/controller-clusters.zap',
            'template': 'src/darwin/Framework/CHIP/templates/tests/templates.json',
            'output_dir': None
        }
    }
    # Place holder has apps within each build
    for filepath in Path('./examples/placeholder').rglob('*.zap'):
        example_name = filepath.as_posix()
        example_name = example_name[example_name.index(
            'apps/') + len('apps/'):]
        example_name = example_name[:example_name.index('/')]
        templates[example_name] = {
            'zap': filepath,
            'template': 'examples/placeholder/templates/templates.json',
            'output_dir': os.path.join('zzz_generated', 'placeholder', example_name, 'zap-generated')
        }
    targets = []
    for key, target in templates.items():
        if test_target == 'all' or test_target == key:
            logging.info("Found test target %s (via %s)" %
                         (key, target['template']))
            targets.append(ZAPGenerateTarget(
                target['zap'], template=target['template'], output_dir=target['output_dir']))
    return targets
def getSpecificTemplatesTargets():
    """Targets for the controller-clusters zap with each specific template set."""
    zap_filepath = 'src/controller/data_model/controller-clusters.zap'
    # Mapping of required template -> output directory (None = template default).
    templates = {
        'src/app/common/templates/templates.json': 'zzz_generated/app-common/app-common/zap-generated',
        'examples/chip-tool/templates/templates.json': 'zzz_generated/chip-tool/zap-generated',
        'src/controller/python/templates/templates.json': None,
        'src/darwin/Framework/CHIP/templates/templates.json': None,
        'src/controller/java/templates/templates.json': None,
        'src/app/tests/suites/templates/templates.json': 'zzz_generated/controller-clusters/zap-generated',
    }
    return [
        ZAPGenerateTarget(zap_filepath, template=template, output_dir=output_dir)
        for template, output_dir in templates.items()
    ]
def getTargets(type, test_target):
    """Collect the generation targets for the requested content type."""
    targets = []
    if type == 'all':
        targets += getGlobalTemplatesTargets()
        targets += getTestsTemplatesTargets('all')
        targets += getSpecificTemplatesTargets()
    elif type == 'tests':
        targets += getTestsTemplatesTargets(test_target)
    return targets
def main():
    """Configure logging, cd to the repo root and generate every target."""
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(name)s %(levelname)-7s %(message)s'
    )
    checkPythonVersion()
    # All target paths are relative to the repository root.
    os.chdir(CHIP_ROOT_DIR)
    args = setupArgumentsParser()
    for target in getTargets(args.type, args.tests):
        target.generate()
if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
from dataclasses import dataclass
from typing import Optional
@dataclass
class TranslationData:
    """Translated presentation fields; any field may be missing."""
    homepage: Optional[str]
    overview: Optional[str]
    title: Optional[str]
    def __str__(self) -> str:
        # __str__ must return a str; the original returned self.title
        # directly and raised TypeError whenever title was None.
        return self.title if self.title is not None else ""
@dataclass
class Translation:
    """A single translation entry with its language/country codes."""
    data: Optional[TranslationData]
    english_name: Optional[str]
    iso_3166_1: Optional[str]
    iso_639_1: Optional[str]
    name: Optional[str]
    def __str__(self) -> str:
        # __str__ must return a str; the original raised TypeError when
        # name was None.
        return self.name if self.name is not None else ""
|
# -*- coding: utf-8 -*-
# @Author: freefly801213
# @Date: 2022-01-03 00:02
from flask_restful import Resource, reqparse, marshal_with
from service.GeneralService import GeneralService
from common import Commons, HttpCommons
class GeneralApi(Resource):
    """OCR endpoint for generic images / PDF files."""
    def __init__(self):
        """Initialise the resource.

        Defines the request-argument validation rules for this endpoint:
        'file' is the base64 payload, 'filetype' selects img or pdf handling.
        """
        self.reqparse = reqparse.RequestParser()
        self.reqparse.add_argument('file', type=str, required=True, location='json', trim=True)
        self.reqparse.add_argument('filetype', type=str, choices=['img', 'pdf'], required=True, location='json', trim=True)
        self.generalService = GeneralService()
        super(GeneralApi, self).__init__()
    @marshal_with(HttpCommons.HTTP_RESPONSE_STR_LIST_FIELDS)
    def post(self):
        """Handle POST requests (the only method this resource accepts)."""
        # Validate and collect all request arguments first.
        args = self.reqparse.parse_args()
        if not Commons.verify_filedata(args['file'], args['filetype']):
            return "base64文件数据错误"
        results = ""
        # Dispatch to the service layer to obtain the OCR result.
        if args['filetype'] == 'img':
            results = self.generalService.processImg(args['file'])
        if args['filetype'] == 'pdf':
            results = self.generalService.processPdf(args['file'])
        # Wrap and return the result.
        return HttpCommons.success(data=results)
|
from .screen_point import *
from .locate_points import InteractivePositionLocator
|
from functools import partial
import pytest
from .vm_test_helpers import run_test
# Runner pre-bound to the IO-and-flow-operations VM fixture directory.
run_control_flow_ops_vm_test = partial(
    run_test,
    "tests/fixtures/LegacyTests/Constantinople/VMTests/vmIOandFlowOperations",
)
@pytest.mark.parametrize(
    "test_file, check_gas_left",
    [
        ("jump0_jumpdest0.json", True),
        ("jump0_jumpdest2.json", True),
        ("jumpAfterStop.json", True),
        ("jumpdestBigList.json", True),
        ("jumpTo1InstructionafterJump.json", True),
        ("jumpDynamicJumpSameDest.json", True),
        ("indirect_jump1.json", True),
        ("indirect_jump2.json", True),
        ("indirect_jump3.json", True),
        ("DynamicJump_value1.json", True),
        ("DynamicJump_value2.json", True),
        ("DynamicJump_value3.json", True),
        ("stackjump1.json", True),
        ("indirect_jump4.json", True),
        ("JDfromStorageDynamicJump0_jumpdest0.json", False),
        ("JDfromStorageDynamicJump0_jumpdest2.json", False),
        ("DynamicJump0_jumpdest0.json", True),
        ("DynamicJump0_jumpdest2.json", True),
        ("DynamicJumpAfterStop.json", True),
        ("DynamicJumpJD_DependsOnJumps1.json", True),
        ("DynamicJumpPathologicalTest0.json", True),
        ("DynamicJumpStartWithJumpDest.json", True),
        ("BlockNumberDynamicJump0_jumpdest0.json", True),
        ("BlockNumberDynamicJump0_jumpdest2.json", True),
        ("bad_indirect_jump1.json", True),
        ("bad_indirect_jump2.json", True),
        ("jump0_AfterJumpdest.json", True),
        ("jump0_AfterJumpdest3.json", True),
        ("jump0_outOfBoundary.json", True),
        ("jump0_withoutJumpdest.json", True),
        ("jump1.json", True),
        ("jumpHigh.json", True),
        ("jumpInsidePushWithJumpDest.json", True),
        ("jumpInsidePushWithoutJumpDest.json", True),
        ("jumpTo1InstructionafterJump_jumpdestFirstInstruction.json", True),
        ("jumpTo1InstructionafterJump_noJumpDest.json", True),
        ("jumpToUint64maxPlus1.json", True),
        ("jumpToUintmaxPlus1.json", True),
        ("JDfromStorageDynamicJump0_AfterJumpdest.json", True),
        ("JDfromStorageDynamicJump0_AfterJumpdest3.json", True),
        ("JDfromStorageDynamicJump0_withoutJumpdest.json", True),
        ("JDfromStorageDynamicJump1.json", True),
        ("JDfromStorageDynamicJumpInsidePushWithJumpDest.json", True),
        ("JDfromStorageDynamicJumpInsidePushWithoutJumpDest.json", True),
        ("DyanmicJump0_outOfBoundary.json", True),
        ("DynamicJump0_AfterJumpdest.json", True),
        ("DynamicJump0_AfterJumpdest3.json", True),
        ("DynamicJump0_withoutJumpdest.json", True),
        ("DynamicJump1.json", True),
        ("DynamicJumpInsidePushWithJumpDest.json", True),
        ("DynamicJumpInsidePushWithoutJumpDest.json", True),
        ("DynamicJumpJD_DependsOnJumps0.json", True),
        ("DynamicJumpPathologicalTest1.json", True),
        ("DynamicJumpPathologicalTest2.json", True),
        ("DynamicJumpPathologicalTest3.json", True),
        ("BlockNumberDynamicJump0_AfterJumpdest.json", True),
        ("BlockNumberDynamicJump0_AfterJumpdest3.json", True),
        ("BlockNumberDynamicJump0_withoutJumpdest.json", True),
        ("BlockNumberDynamicJump1.json", True),
        ("BlockNumberDynamicJumpInsidePushWithJumpDest.json", True),
        ("BlockNumberDynamicJumpInsidePushWithoutJumpDest.json", True),
        ("jump0_foreverOutOfGas.json", True),
        ("JDfromStorageDynamicJump0_foreverOutOfGas.json", True),
        ("DynamicJump0_foreverOutOfGas.json", True),
        ("BlockNumberDynamicJump0_foreverOutOfGas.json", True),
        ("jumpOntoJump.json", True),
        ("DynamicJump_valueUnderflow.json", True),
        ("stack_loop.json", True),
    ],
)
def test_jump(test_file: str, check_gas_left: bool) -> None:
    """Run the JUMP opcode fixtures; check_gas_left toggles gas verification."""
    run_control_flow_ops_vm_test(test_file, check_gas_left=check_gas_left)
@pytest.mark.parametrize(
    "test_file, check_gas_left",
    [
        ("jumpi1.json", True),
        ("jumpiAfterStop.json", True),
        ("jumpi_at_the_end.json", True),
        ("JDfromStorageDynamicJumpi1.json", False),
        ("JDfromStorageDynamicJumpiAfterStop.json", False),
        ("DynamicJumpi1.json", True),
        ("DynamicJumpiAfterStop.json", True),
        ("BlockNumberDynamicJumpi1.json", True),
        ("BlockNumberDynamicJumpiAfterStop.json", True),
        ("jumpi0.json", True),
        ("jumpi1_jumpdest.json", True),
        ("jumpifInsidePushWithJumpDest.json", True),
        ("jumpifInsidePushWithoutJumpDest.json", True),
        ("jumpiOutsideBoundary.json", True),
        ("jumpiToUint64maxPlus1.json", True),
        ("jumpiToUintmaxPlus1.json", True),
        ("JDfromStorageDynamicJumpi0.json", True),
        ("JDfromStorageDynamicJumpi1_jumpdest.json", True),
        ("JDfromStorageDynamicJumpifInsidePushWithJumpDest.json", True),
        ("JDfromStorageDynamicJumpifInsidePushWithoutJumpDest.json", True),
        ("JDfromStorageDynamicJumpiOutsideBoundary.json", True),
        ("DynamicJumpi0.json", True),
        ("DynamicJumpi1_jumpdest.json", True),
        ("DynamicJumpifInsidePushWithJumpDest.json", True),
        ("DynamicJumpifInsidePushWithoutJumpDest.json", True),
        ("DynamicJumpiOutsideBoundary.json", True),
        ("BlockNumberDynamicJumpi0.json", True),
        ("BlockNumberDynamicJumpi1_jumpdest.json", True),
        ("BlockNumberDynamicJumpifInsidePushWithJumpDest.json", True),
        ("BlockNumberDynamicJumpifInsidePushWithoutJumpDest.json", True),
        ("BlockNumberDynamicJumpiOutsideBoundary.json", True),
    ],
)
def test_jumpi(test_file: str, check_gas_left: bool) -> None:
    """Run the JUMPI opcode fixtures; check_gas_left toggles gas verification."""
    run_control_flow_ops_vm_test(test_file, check_gas_left=check_gas_left)
@pytest.mark.parametrize(
    "test_file",
    [
        "pc0.json",
        "pc1.json",
    ],
)
def test_pc(test_file: str) -> None:
    """Run the PC opcode fixtures."""
    run_control_flow_ops_vm_test(test_file)
@pytest.mark.parametrize(
    "test_file",
    ["gas0.json", "gas1.json", "gasOverFlow.json"],
)
def test_gas(test_file: str) -> None:
    """Run the GAS opcode fixtures."""
    run_control_flow_ops_vm_test(test_file)
@pytest.mark.parametrize(
    "test_file",
    [
        "for_loop1.json",
        "for_loop2.json",
        "loop_stacklimit_1020.json",
        "loop_stacklimit_1021.json",
    ],
)
def test_loop(test_file: str) -> None:
    """Run loop / stack-limit fixtures."""
    run_control_flow_ops_vm_test(test_file)
def test_when() -> None:
    """Run the conditional-execution fixture."""
    run_control_flow_ops_vm_test("when.json")
@pytest.mark.parametrize(
    "test_file",
    [
        "byte1.json",
        "calldatacopyMemExp.json",
        "codecopyMemExp.json",
        "deadCode_1.json",
        "dupAt51becameMload.json",
        "swapAt52becameMstore.json",
        "log1MemExp.json",
    ],
)
def test_miscellaneous(test_file: str) -> None:
    """Run assorted IO/flow fixtures not covered by the other groups."""
    run_control_flow_ops_vm_test(test_file)
|
import os
os.chdir("D:\\Programmes\\Boxes\\KaliLinux\\FCSC2020\\forensics\\CryptoLocker")
# Load the ciphertext and the (repeating) XOR key.
with open("flag.txt.enc", "rb") as enc_file:
    encoded = enc_file.read()
with open("key.txt", "rb") as key_file:
    key = key_file.read()
# Try every rotation of the key until the decrypted text contains "FCSC".
flag = ""
for offset in range(len(key)):
    flag = "".join(
        chr((encoded[pos] ^ key[(pos + offset) % len(key)]) % 256)
        for pos in range(len(encoded))
    )
    if "FCSC" in flag:
        print(flag)
        break
from .interface import res
|
"""File storage support."""
from collections import defaultdict
import datetime
import os
import re
import shutil
import constants as cn
from database import model
from PIL import Image, ExifTags
PHOTO_TYPE = ["jpg"]
def _from_exif_real(value):
"""Gets value from EXIF REAL type = tuple(numerator, denominator)"""
return value[0]/value[1]
def _from_GPS(tuple_tag, string_tag, pattern, default):
"""Gets information from GPS tuples and formats it according to pattern given
Args:
tuple_tag: tag in GPS_TAGS containing tuple of tuples to format, or None.
string_tag: tag in GPS_TAGS containing string information.
pattern: str, pattern in %-format to format incoming tuples.
default: returned if tuples is None,
Return:
str: formatted string or default if tuple is None
"""
if tuple_tag:
return (pattern % (tuple(_from_exif_real(l) for l in tuple_tag) +
(string_tag,)))
return default
class FileStorage(object):
    """Gets/Stores initial information from photo files.
    Scans starting from root path recursively for all files defined as PHOTO_TYPE
    Args:
        path: str, root path to scan photo files
    """
    def __init__(self, path):
        if not os.path.isdir(path):
            raise ValueError("{path} is not directory".format(path=path))
        self.path = path
        self._photos = []
        # Kept only for the legacy __next__ protocol below.
        self._photos_index = 0
    def __getitem__(self, idx):
        """Gets photo item."""
        return self._photos[idx]
    def __iter__(self):
        """Iterate over the photo list.

        Returns a fresh iterator so the storage can be iterated more than
        once; the previous implementation returned ``self`` with a
        never-reset index, making every second iteration empty.
        """
        return iter(self._photos)
    def __next__(self):
        # Legacy self-iterator behavior, kept for callers using next(storage).
        try:
            result = self._photos[self._photos_index]
        except IndexError:
            raise StopIteration
        self._photos_index += 1
        return result
    def __len__(self):
        return len(self._photos)
    def scan(self):
        """Scans file information to Photo instances."""
        filters = [re.compile(flt) for flt in cn.FILE_PATTERN]
        for (dirpath, dirnames, filenames) in os.walk(self.path):
            for f in filenames:
                if any(flt.search(f) for flt in filters):
                    self._photos.append(self._scan_file(os.path.join(dirpath, f)))
    def store(self, photo_file, new_path):
        """Stores photo file into path into storage.
        Args:
            photo_file: str, path to photo file
            new_path: str, path in storage to copy photo_file
        """
        # The os module has no copy(); shutil.copy is the correct call
        # (the original raised AttributeError at runtime).
        shutil.copy(photo_file, os.path.join(self.path, new_path))
    def _scan_file(self, filename):
        """Make a photo object from a file.
        Args:
            filename: str, file name
        Returns:
            Photo object
        """
        img = Image.open(filename)
        exif_real = defaultdict(str)
        # Get EXIF tags
        for k, v in img._getexif().items():
            exif_real[ExifTags.TAGS.get(k, '')] = v
        exif_GPS = gps_lat = gps_long = gps_altitude = gps_datetime = ''
        # Get GPS tags if available
        # NOTE(review): the direct exif_GPS[...] lookups below raise
        # KeyError for files with partial GPS data — confirm whether
        # such files can occur here.
        if 'GPSInfo' in exif_real:
            exif_GPS = {ExifTags.GPSTAGS[k]: v
                        for k, v in exif_real['GPSInfo'].items()}
            gps_lat = _from_GPS(exif_GPS[cn.EXIF_GPS_LATITUDE],
                                exif_GPS[cn.EXIF_GPS_LATITUDE_REF], "%d %d' %5.2f''%s" , "0 0' 0.0'N")
            gps_long = _from_GPS(exif_GPS[cn.EXIF_GPS_LONGITUDE],
                                 exif_GPS[cn.EXIF_GPS_LONGITUDE_REF], "%d %d' %5.2f''%s" , "0 0' 0.0'W")
            gps_altitude = _from_exif_real(exif_GPS[cn.EXIF_GPS_ALTITUDE])
            gps_datetime = datetime.datetime.strptime('%s %d:%d:%d' % ((
                exif_GPS[cn.EXIF_GPS_DATE],) + tuple(
                _from_exif_real(l) for l in exif_GPS[cn.EXIF_GPS_TIME])),
                cn.EXIF_DATE_FORMAT)
        # Creating a photo model
        return model.Photo(
            name=filename,
            width=img.width,
            height=img.height,
            date_original=datetime.datetime.strptime(
                exif_real[cn.EXIF_DATE_ORIGINAL], cn.EXIF_DATE_FORMAT),
            aperture=_from_exif_real(exif_real[cn.EXIF_APERTURE]),
            shutter=_from_exif_real(exif_real[cn.EXIF_SHUTTER]),
            iso=exif_real[cn.EXIF_ISO],
            metering_mode=cn.MeteringMode(exif_real[cn.EXIF_METERING_MODE]).name,
            exposure_mode=cn.ExposureMode(exif_real[cn.EXIF_EXPOSURE_MODE]).name,
            white_balance=cn.WhiteBalance(exif_real[cn.EXIF_WHITE_BALANCE]).name,
            camera=' '.join([exif_real[cn.EXIF_CAMERA_MAKE],
                             exif_real[cn.EXIF_CAMERA_MODEL]]),
            camera_id=exif_real[cn.EXIF_CAMERA_ID],
            lens_type=exif_real[cn.EXIF_LENS_MODEL],
            lens_serial=exif_real[cn.EXIF_LENS_SERIAL_NUMBER],
            gps_lat=gps_lat,
            gps_long=gps_long,
            gps_alt=gps_altitude,
            gps_datetime=gps_datetime,
            comments='',
            focal_length=_from_exif_real(exif_real[cn.EXIF_FOCAL_LENGTH]),
        )
|
import logging
import random
import time
log = logging.getLogger(__name__)
class LoadTest(object):
    """Example gun scenario class with two measured cases and a default."""
    def __init__(self, gun):
        # gun provides the measure() context manager used to time shots.
        self.gun = gun
    def case1(self, task):
        """Single measured shot."""
        with self.gun.measure(task):
            log.info("Shoot case 1: %s", task.data)
            time.sleep(random.random())
    def case2(self, task):
        """Two measured phases; the second one always raises."""
        with self.gun.measure(task) as m:
            m.action = "PREPARE"
            log.info("Prepare case 2: %s", task.data)
            time.sleep(random.random())
        with self.gun.measure(task) as m:
            m.action = "SHOOT"
            log.info("Shoot case 2: %s", task.data)
            time.sleep(random.random())
            # Deliberate failure to demonstrate error measurement.
            raise RuntimeError()
    def default(self, task):
        """Fallback scenario for tasks without a dedicated case."""
        with self.gun.measure(task):
            log.info("Shoot default case: %s", task.data)
            time.sleep(random.random())
    def setup(self, param):
        """Called once before shooting starts."""
        log.info("Setting up LoadTest: %s", param)
    def teardown(self):
        """Called once after shooting finishes."""
        log.info("Tearing down LoadTest")
|
from collections import defaultdict
import six
from chainercv.chainer_experimental.datasets.sliceable.sliceable_dataset \
import _as_tuple
from chainercv.chainer_experimental.datasets.sliceable import SliceableDataset
class TupleDataset(SliceableDataset):
    """A sliceable version of :class:`chainer.datasets.TupleDataset`.
    Here is an example.
    >>> # omit keys
    >>> dataset = TupleDataset([0, 1, 2], [0, 1, 4])
    >>> dataset.keys  # (None, None)
    >>> dataset.slice[:, 0][:]  # [0, 1, 2]
    >>>
    >>> dataset_more = TupleDataset(dataset, [0, 1, 8])
    >>> dataset_more.keys  # (None, None, None)
    >>> dataset_more.slice[:, [1, 2]][:]  # [(0, 0), (1, 1), (4, 8)]
    >>>
    >>> # specify the name of a key
    >>> named_dataset = TupleDataset(('feat0', [0, 1, 2]), [0, 1, 4])
    >>> named_dataset.keys  # ('feat0', None)
    >>> # slice takes both key and index (or their mixture)
    >>> named_dataset.slice[:, ['feat0', 1]][:]  # [(0, 0), (1, 1), (2, 4)]
    Args:
        datasets: The underlying datasets.
            The following datasets are acceptable.
            * An inheritance of \
            :class:`~chainer.datasets.sliceable.SliceableDataset`.
            * A tuple of a name and a data array. \
            The data array should be list or :class:`numpy.ndarray`.
            * A data array. In this case, the name of key is :obj:`None`.
    """
    def __init__(self, *datasets):
        if len(datasets) == 0:
            raise ValueError('At least one dataset is required')
        self._len = None
        # Each entry of _keys is (key name, dataset index, key index within
        # that dataset); key index is None for plain data arrays.
        self._keys = []
        self._datasets = []
        for dataset in datasets:
            if isinstance(dataset, SliceableDataset):
                self._datasets.append(dataset)
                for key_index, key in enumerate(_as_tuple(dataset.keys)):
                    self._keys.append(
                        (key, len(self._datasets) - 1, key_index))
            else:
                if isinstance(dataset, tuple):
                    key, dataset = dataset
                else:
                    key = None
                self._datasets.append(dataset)
                self._keys.append((key, len(self._datasets) - 1, None))
            # The first dataset fixes the expected length.
            if self._len is None:
                self._len = len(dataset)
            if not len(dataset) == self._len:
                raise ValueError(
                    'All datasets should have the same length')
    def __len__(self):
        return self._len
    @property
    def keys(self):
        # Expose only the key names, in declaration order.
        return tuple(key for key, _, _ in self._keys)
    def get_example_by_keys(self, index, key_indices):
        # Group the requested key indices by underlying dataset so each
        # dataset is queried at most once.
        datasets_key_indices = defaultdict(set)
        for key_index in key_indices:
            _, dataset_index, key_index = self._keys[key_index]
            if key_index is None:
                # Plain data array: the whole item is the value.
                datasets_key_indices[dataset_index] = None
            else:
                datasets_key_indices[dataset_index].add(key_index)
        values = {}
        for dataset_index, dataset_key_indices in \
                six.iteritems(datasets_key_indices):
            dataset = self._datasets[dataset_index]
            if dataset_key_indices is None:
                values[(dataset_index, None)] = dataset[index]
            else:
                dataset_key_indices = tuple(dataset_key_indices)
                values.update(six.moves.zip(
                    ((dataset_index, key_index)
                     for key_index in dataset_key_indices),
                    dataset.get_example_by_keys(index, dataset_key_indices)))
        # Re-assemble the values in the originally requested key order.
        return tuple(
            values[self._keys[key_index][1:]] for key_index in key_indices)
|
#Author: Nicolas Schapeler
#Github: https://github.com/nschapeler
#Problem Statement: https://www.hackerrank.com/challenges/matrix-script/problem
import re

# Matrix dimensions: n rows, m columns.
n, m = (int(tok) for tok in input().rstrip().split())

# Read the n rows of the matrix.
matrix = [input() for _ in range(n)]

# Decode by reading the matrix column by column.  Joining once at the end
# replaces the original repeated string concatenation, which is quadratic.
word = "".join(matrix[j][i] for i in range(m) for j in range(n))

# Collapse any run of non-word characters *between* two word characters into a
# single space; leading/trailing symbols are left untouched.
print(re.sub(r"(?<=\w)([^\w]+)(?=\w)", " ", word))
|
#!/usr/bin/env python
# MIT License
# (c) baltasar 2016
from google.appengine.ext import ndb
class Photo(ndb.Model):
    """Datastore entity for an uploaded photo (App Engine NDB)."""
    # Creation date, set automatically on first put().
    added = ndb.DateProperty(auto_now_add=True)
    # Human-readable title; must be supplied.
    title = ndb.StringProperty(required=True)
    # Raw image bytes.  NOTE(review): presumably small images — confirm they
    # stay under the datastore entity size limit.
    image = ndb.BlobProperty()
    # Zero or more free-form tag strings.
    tags = ndb.StringProperty(repeated=True)
|
#!/usr/bin/env python3
import argparse
import genanki
import glob
import mistune
import os
import re
import sys
from HighlightRenderer import HighlightRenderer
# Markdown renderer with syntax highlighting for fenced code blocks.
markdown = mistune.create_markdown(renderer=HighlightRenderer())
# A "---" line separates cards; '%' separates a card's front from its back.
cardSeparator = "---"
frontBackSeparator = "%"
md_file_ext = (".md", ".markdown")
media_file_ext = (".png", ".jpg", ".jpeg", ".mp3")
parser = argparse.ArgumentParser()
parser.add_argument("input", help="either *.md file or a folder containing *.md files.")
parser.add_argument("output", help="name of the *.apkg file that will be generated.")
parser.add_argument("--deckname", default=None, help="name of the generated deck. If not specified, the name of the input will be used as the deckname.")
args = parser.parse_args()
# NOTE(review): `input` and `output` shadow builtins; renaming would touch
# code elsewhere, so only flagged here.
input = args.input
output = args.output
deckname = args.deckname if args.deckname else input
model = genanki.Model(
1234321,
"My Model",
fields = [{"name": "Question"}, {"name": "Answer"}],
templates = [
{
"name": "Card 1",
"qfmt": "{{Question}}",
"afmt": "{{FrontSide}}<hr id=\"answer\">{{Answer}}",
}
],
css=".card { font-family: Arial, \"Helvetica Neue\", Helvetica, sans-serif; font-size: 16px; color: black; background-color: white; } .highlight { font-size: 0.9em; font-family: Monaco, Consolas, \"Courier New\", monospace; } .highlight .hll { background-color: #ffffcc } .highlight { background: #ffffff; } .highlight .c { color: #999988; font-style: italic } /* Comment */ .highlight .err { color: #a61717; background-color: #e3d2d2 } /* Error */ .highlight .k { color: #000000; font-weight: bold } /* Keyword */ .highlight .o { color: #000000; font-weight: bold } /* Operator */ .highlight .ch { color: #999988; font-style: italic } /* Comment.Hashbang */ .highlight .cm { color: #999988; font-style: italic } /* Comment.Multiline */ .highlight .cp { color: #999999; font-weight: bold; font-style: italic } /* Comment.Preproc */ .highlight .cpf { color: #999988; font-style: italic } /* Comment.PreprocFile */ .highlight .c1 { color: #999988; font-style: italic } /* Comment.Single */ .highlight .cs { color: #999999; font-weight: bold; font-style: italic } /* Comment.Special */ .highlight .gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */ .highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */ .highlight .gr { color: #aa0000 } /* Generic.Error */ .highlight .gh { color: #999999 } /* Generic.Heading */ .highlight .gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ .highlight .go { color: #888888 } /* Generic.Output */ .highlight .gp { color: #555555 } /* Generic.Prompt */ .highlight .gs { font-weight: bold } /* Generic.Strong */ .highlight .gu { color: #aaaaaa } /* Generic.Subheading */ .highlight .gt { color: #aa0000 } /* Generic.Traceback */ .highlight .kc { color: #000000; font-weight: bold } /* Keyword.Constant */ .highlight .kd { color: #000000; font-weight: bold } /* Keyword.Declaration */ .highlight .kn { color: #000000; font-weight: bold } /* Keyword.Namespace */ .highlight .kp { color: #000000; font-weight: 
bold } /* Keyword.Pseudo */ .highlight .kr { color: #000000; font-weight: bold } /* Keyword.Reserved */ .highlight .kt { color: #445588; font-weight: bold } /* Keyword.Type */ .highlight .m { color: #009999 } /* Literal.Number */ .highlight .s { color: #dd1144 } /* Literal.String */ .highlight .na { color: #008080 } /* Name.Attribute */ .highlight .nb { color: #0086B3 } /* Name.Builtin */ .highlight .nc { color: #445588; font-weight: bold } /* Name.Class */ .highlight .no { color: #008080 } /* Name.Constant */ .highlight .nd { color: #3c5d5d; font-weight: bold } /* Name.Decorator */ .highlight .ni { color: #800080 } /* Name.Entity */ .highlight .ne { color: #990000; font-weight: bold } /* Name.Exception */ .highlight .nf { color: #990000; font-weight: bold } /* Name.Function */ .highlight .nl { color: #990000; font-weight: bold } /* Name.Label */ .highlight .nn { color: #555555 } /* Name.Namespace */ .highlight .nt { color: #000080 } /* Name.Tag */ .highlight .nv { color: #008080 } /* Name.Variable */ .highlight .ow { color: #000000; font-weight: bold } /* Operator.Word */ .highlight .w { color: #bbbbbb } /* Text.Whitespace */ .highlight .mb { color: #009999 } /* Literal.Number.Bin */ .highlight .mf { color: #009999 } /* Literal.Number.Float */ .highlight .mh { color: #009999 } /* Literal.Number.Hex */ .highlight .mi { color: #009999 } /* Literal.Number.Integer */ .highlight .mo { color: #009999 } /* Literal.Number.Oct */ .highlight .sb { color: #dd1144 } /* Literal.String.Backtick */ .highlight .sc { color: #dd1144 } /* Literal.String.Char */ .highlight .sd { color: #dd1144 } /* Literal.String.Doc */ .highlight .s2 { color: #dd1144 } /* Literal.String.Double */ .highlight .se { color: #dd1144 } /* Literal.String.Escape */ .highlight .sh { color: #dd1144 } /* Literal.String.Heredoc */ .highlight .si { color: #dd1144 } /* Literal.String.Interpol */ .highlight .sx { color: #dd1144 } /* Literal.String.Other */ .highlight .sr { color: #009926 } /* Literal.String.Regex 
*/ .highlight .s1 { color: #dd1144 } /* Literal.String.Single */ .highlight .ss { color: #990073 } /* Literal.String.Symbol */ .highlight .bp { color: #999999 } /* Name.Builtin.Pseudo */ .highlight .vc { color: #008080 } /* Name.Variable.Class */ .highlight .vg { color: #008080 } /* Name.Variable.Global */ .highlight .vi { color: #008080 } /* Name.Variable.Instance */ .highlight .il { color: #009999 } /* Literal.Number.Integer.Long */)"
)
# A fixed deck ID lets re-generated decks update in place inside Anki.
deck = genanki.Deck(123454321, deckname)
# Media file paths collected while scanning; attached to the package at the end.
media_files = []
def processMarkdownFile(file):
    """Parse one markdown file and add a note per card to the global deck.

    Cards are separated by ``cardSeparator`` ("---"); within a card the front
    and back are separated by ``frontBackSeparator`` ("%").
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original left the handle to the garbage collector).
    with open(file) as f:
        fileContent = f.read()
    cardStrings = re.split(cardSeparator, fileContent)
    for cardString in cardStrings:
        # Split only on the first separator so a literal '%' inside the
        # answer no longer crashes the 2-tuple unpacking.
        front, back = cardString.split(frontBackSeparator, 1)
        # NOTE(review): the text is rendered twice (custom renderer, then
        # mistune.html over the produced HTML).  Kept for output
        # compatibility, but it looks unintended — confirm.
        question = mistune.html(markdown(front.strip()))
        answer = mistune.html(markdown(back.strip()))
        note = genanki.Note(model=model, fields=[question, answer])
        deck.add_note(note)
def processMediaFile(file):
    # Media files are only recorded here; genanki copies them into the
    # package when it is written out.
    media_files.append(file)
def processFile(file):
    """Route a path to the markdown or media handler based on its suffix."""
    if file.endswith(md_file_ext):
        processMarkdownFile(file)
        return
    if file.lower().endswith(media_file_ext):
        processMediaFile(file)
# A directory input processes every file directly inside it (non-recursive);
# a single-file input is processed on its own.
if os.path.isdir(input):
    for file in os.listdir(input):
        processFile(os.path.join(input,file))
else:
    processFile(input)
package = genanki.Package(deck)
package.media_files = media_files
# Write the final .apkg (deck plus referenced media) to the output path.
package.write_to_file(output)
|
from neighapp.models import Business, Neighbourhood, Post, Profile
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
    """Sign-up form: the stock UserCreationForm plus a required email field."""
    email=forms.EmailField()
    class Meta:
        model = User
        fields = ['username', 'email','password1', 'password2']
    def save(self, commit=True):
        # Copy the validated email onto the user before the (optional) save.
        user=super().save(commit=False)
        user.email=self.cleaned_data['email']
        if commit:
            user.save()
        return user
class profileForm(forms.ModelForm):
    """Edit a Profile; user and neighbourhood are set by the view."""
    class Meta:
        model = Profile
        exclude = [ 'user', 'neighbourhood']
class userForm(forms.ModelForm):
    """Edit the basic account fields of a User."""
    # Declared explicitly so email renders as a validated EmailField.
    email = forms.EmailField()
    class Meta:
        model= User
        fields= ['username', 'email']
class NeighbourHoodForm(forms.ModelForm):
    """Create/edit a Neighbourhood; counters and admin are view-managed."""
    class Meta:
        model = Neighbourhood
        exclude = ('occupants_count', 'admin')
class PostForm(forms.ModelForm):
    """Create/edit a Post; author and neighbourhood are set by the view."""
    class Meta:
        model = Post
        exclude = ('user', 'neighbourhood')
class BusinessForm(forms.ModelForm):
    """Create/edit a Business; owner and neighbourhood are set by the view."""

    class Meta:
        model = Business
        exclude = ('user', 'neighbourhood')
#system libraries
import pickle
def writepickle(data, filename, mode):
    """Persist ``data`` as a pickle, either to S3 ('lambda' mode) or locally.

    In 'lambda' mode the pickled bytes are uploaded to the fixed bucket under
    ``filename`` as the object key; any other mode writes to a local file.
    """
    if mode == 'lambda':
        import boto3
        client = boto3.client('s3')
        payload = pickle.dumps(data)
        client.put_object(Bucket='weather-lambda-myapp', Key=filename, Body=payload)
        return
    # Local path: highest protocol keeps files compact and fast to load.
    with open(filename, 'wb') as handle:
        pickle.dump(data, handle, pickle.HIGHEST_PROTOCOL)
def readpickle(filename, mode):
    """Load a pickled object from S3 ('lambda' mode) or from local disk.

    Returns the unpickled object, or None when the object/file is missing or
    cannot be deserialised.
    """
    if mode == 'lambda':
        import boto3
        try:
            s3 = boto3.client('s3')
            # Renamed from `object`, which shadowed the builtin.
            response = s3.get_object(Bucket='weather-lambda-myapp', Key=filename)
            serializedObject = response['Body'].read()
            return pickle.loads(serializedObject)
        except Exception:
            # botocore raises service-specific error classes we cannot name
            # here without importing botocore; any failure means "no data".
            return None
    else:
        try:
            with open(filename, 'rb') as f:
                # The protocol version is detected automatically.
                return pickle.load(f)
        except (OSError, EOFError, pickle.UnpicklingError):
            # Fix: the original bare `except:` swallowed *everything*,
            # including KeyboardInterrupt.  Missing, truncated or corrupt
            # files report "no data"; anything else propagates.
            return None
|
import streamlit as st
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import dates
from datetime import datetime,date
import pyowm
from pyowm import OWM
from matplotlib import rcParams
from pytz import timezone
from pyowm.utils import timestamps
# NOTE(review): API key hard-coded in source — move it to an environment
# variable / secrets store before publishing this file.
weather_api='0833f103dc7c2924da06db624f74565c'
owm=OWM(weather_api)
def _max_width_():
    """Widen Streamlit's main block container by injecting a CSS override."""
    # The f-prefix on this constant was unnecessary; value unchanged.
    max_width_str = "max-width: 2000px;"
    style_block = f"""
    <style>
    .reportview-container .main .block-container{{
        {max_width_str}
    }}
    </style>
    """
    st.markdown(
        style_block,
        unsafe_allow_html=True,
    )
_max_width_()
# Static page banner (raw HTML because Streamlit has no native banner widget).
html_temp = """
<div style="background-color:black ;padding:10px">
<h1 style="color:green;text-align:center;">Weather Forecaster ❄️🌧️🌦️⛅🌞🌕 </h1>
</div>
"""
st.markdown(html_temp, unsafe_allow_html=True)
st.markdown("<h2 style='text-align: center; color: black;'>Made by Rohan Kumar</h2>",
            unsafe_allow_html=True)
#html_temp = """
#<div style="background-color:black ;padding:10px">
# <h2 style="color:green;text-align:center;">MADE BY ROHAN KUMAR </h2>
#</div>
#"""
#st.markdown(html_temp, unsafe_allow_html=True)
# User inputs: city name, temperature unit and chart style; the ENTER button
# triggers the forecast run at the bottom of the script.
st.write("### Follow the steps :")
place=st.text_input("ENTER NAME OF THE CITY :", "")
unit=st.selectbox("SELECT THE TEMPERATURE UNIT",("Celsius (°C)","Fahrenheit(°F)"))
g_type=st.selectbox("SELECT THE TYPE OF GRAPH",("Line Graph","Bar Graph"))
b=st.button("ENTER")
def plot_line(days, min_t, max_t):
    """Render the min/max temperature forecast as a line chart in Streamlit.

    Args:
        days: list of datetime.date objects, one per forecast day.
        min_t: per-day minimum temperatures (parallel to ``days``).
        max_t: per-day maximum temperatures (parallel to ``days``).
    """
    days = dates.date2num(days)
    rcParams['figure.figsize'] = 7, 4
    plt.plot(days, max_t, color='green', linestyle='dashdot', linewidth=2, marker='o', markerfacecolor='red',
             markersize=6)
    plt.plot(days, min_t, color='red', linestyle='dashdot', linewidth=2, marker='o', markerfacecolor='green',
             markersize=6)
    plt.ylim(min(min_t) - 4, max(max_t) + 4)
    plt.xticks(days)
    x_y_axis = plt.gca()
    xaxis_format = dates.DateFormatter('%d/%m')
    x_y_axis.xaxis.set_major_formatter(xaxis_format)
    plt.grid(True, color='white')
    plt.legend(["Maximum Temperaure", "Minimum Temperature"], loc=1)
    plt.xlabel('Dates(dd/mm)')
    plt.ylabel('Temperature')
    plt.title('5-Day Forecast')
    # Fix: annotate every forecast day instead of a hard-coded 5 — the OWM
    # 3-hourly forecast can span 5 *or* 6 calendar dates depending on the
    # time of day, and the original range(5) crashed or under-labelled.
    for i in range(len(days)):
        plt.text(days[i], min_t[i] - 1.5, min_t[i],
                 horizontalalignment='center',
                 verticalalignment='bottom',
                 color='green')
        plt.text(days[i], max_t[i] + 0.5, max_t[i],
                 horizontalalignment='center',
                 verticalalignment='bottom',
                 color='red')
    st.pyplot()
    plt.clf()
def plot_bars(days, min_t, max_t):
    """Render the min/max temperature forecast as paired bars in Streamlit."""
    rcParams['figure.figsize'] = 6, 4
    day_nums = dates.date2num(days)
    # Offset the two series so each day shows its bars side by side.
    series = [
        plt.bar(day_nums - 0.2, min_t, width=0.3, color='yellow'),
        plt.bar(day_nums + 0.2, max_t, width=0.3, color='green'),
    ]
    plt.xticks(day_nums)
    axes = plt.gca()
    axes.xaxis.set_major_formatter(dates.DateFormatter('%d/%m'))
    plt.xlabel('Dates(dd/mm)')
    plt.ylabel('Temperature')
    plt.title('5-Day Forecast')
    # Label every bar with its integer height just above the bar top.
    for bar_chart in series:
        for bar in bar_chart:
            x_mid = bar.get_x() + bar.get_width() / 2.0
            top = bar.get_height()
            plt.text(x_mid, top, str(int(top)),
                     horizontalalignment='center',
                     verticalalignment='bottom',
                     color='red')
    st.pyplot()
    plt.clf()
def find_min_max(place,unit,g_type):
    """Fetch the 5-day forecast for ``place``, plot it and show current weather.

    Args:
        place: city name typed by the user.
        unit: temperature-unit label from the selectbox
            ("Celsius (°C)" or "Fahrenheit(°F)").
        g_type: "Line Graph" or "Bar Graph".
    """
    mgr=owm.weather_manager()
    days=[]
    dates_2=[]
    min_t=[]
    max_t=[]
    forecaster = mgr.forecast_at_place(place, '3h')
    forecast = forecaster.forecast
    # Fix: the selectbox value is "Celsius (°C)", so the original equality
    # test against 'Celsius' never matched and every request silently fell
    # back to fahrenheit.  Match on the prefix instead.
    if unit.startswith('Celsius'):
        unit_c='celsius'
    else:
        unit_c='fahrenheit'
    # Reduce the 3-hourly entries to one (min, max) pair per calendar date.
    for weather in forecast:
        day = datetime.utcfromtimestamp(weather.reference_time())
        date = day.date()
        if date not in dates_2:
            dates_2.append(date)
            min_t.append(None)
            max_t.append(None)
            days.append(date)
        temperature = weather.temperature(unit_c)['temp']
        # Fix: compare against None explicitly — a temperature of exactly 0
        # degrees is falsy and was previously treated as "not set yet".
        if min_t[-1] is None or temperature < min_t[-1]:
            min_t[-1]=temperature
        if max_t[-1] is None or temperature > max_t[-1]:
            max_t[-1]=temperature
    if g_type=="Line Graph":
        plot_line(days,min_t,max_t)
    elif g_type=="Bar Graph":
        plot_bars(days,min_t,max_t)
    # Per-day textual summary under the chart.
    i=0
    st.write(f"# Date : Max - Min ({unit})")
    for obj in days:
        d=(obj.strftime("%d/%m"))
        st.write(f"### \v {d} :\t ({max_t[i]} - {min_t[i]})")
        i+=1
    # Current conditions for the same place.
    obs=mgr.weather_at_place(place)
    weather=obs.weather
    st.title(f"Weather details in {place} currently:")
    st.write(f"### Sky 🌈 : {weather.detailed_status}")
    st.write(f"### Wind Speed 🌬️💨 : {weather.wind()['speed']} mph")
    st.write(f"### Sunrise Time ☀️ : {weather.sunrise_time(timeformat='iso')} GMT")
    st.write(f"### Sunset Time ⛅🌥️ : {weather.sunset_time(timeformat='iso')} GMT")
    # Qualitative alerts over the whole forecast window.
    st.title("Expected Weather Changes:")
    if forecaster.will_have_fog():
        st.write("### FOG EXPECTED 🌫️")
    if forecaster.will_have_rain():
        st.write("### RAIN EXPECTED 🌦️")
    if forecaster.will_have_storm():
        st.write("### STORM MAYBE ⚡")
    if forecaster.will_have_snow():
        st.write("### SNOW EXPECTED ❄️☃️")
    if forecaster.will_have_tornado():
        st.write("### TORNADO ALERT 🌪️")
    if forecaster.will_have_hurricane():
        st.write("### HURRICANE ALERT ")
    if forecaster.will_have_clouds():
        st.write("### CLOUDY SKIES EXPECTED 🌤️⛅")
    if forecaster.will_have_clear():
        st.write("### YAY!! CLEAR WEATHER! 🌞")
# Run only when the user pressed ENTER with a non-empty city name.
if b:
    if not place=="":
        find_min_max(place,unit,g_type)
|
# RailFence Cipher using Python
# Author : wh0am1
# GitHub : https://github.com/wh0th3h3llam1
import math
import os
import platform
import random
import string
def encrypt(strng, rails):
    """Encrypt by writing the text column-wise into a ``rails``-row grid and
    reading it out row-wise; empty tail cells are padded with random
    lowercase letters.

    Prints the result (no return value), matching the original interface.
    """
    strng = strng.replace(" ", "")
    len_enc = math.ceil(len(strng) / rails)
    w, h = len_enc, rails
    encrypted = [[0 for x in range(w)] for y in range(h)]
    count = 0
    for col in range(len_enc):
        for row in range(rails):
            if count < len(strng):
                encrypted[row][col] = strng[count]
                count += 1
            else:
                # Fix: the original padding loop reused the exhausted loop
                # variable (always the last row) and so overwrote a real
                # ciphertext character in every column.  Pad only the
                # genuinely empty cells.
                encrypted[row][col] = random.choice(string.ascii_lowercase)
    print("Encrypted Message : ", end="")
    for row in range(rails):
        for col in range(w):
            print(encrypted[row][col], end="")
def decrypt(strng, rails):
    """Reverse the transposition: fill the grid row-wise, read it column-wise.

    Prints the recovered text (no return value).
    """
    strng = strng.replace(" ", "")
    cols = int(len(strng) / rails)
    # Rebuild the grid row by row from the ciphertext stream.
    grid = [[0 for _ in range(cols)] for _ in range(rails)]
    pos = 0
    for row in range(rails):
        for col in range(cols):
            if pos < len(strng):
                grid[row][col] = strng[pos]
                pos += 1
    print("Decrypted Message : ", end="")
    # Reading column-wise restores the original character order.
    for col in range(cols):
        for row in range(rails):
            print(grid[row][col], end="")
def rail_fence():
    """Interactive menu loop: clear the screen, show options, dispatch."""
    while True:
        # Clear the console in a platform-appropriate way.
        if platform.system() == 'Windows':
            os.system('cls')
        else:
            os.system('clear')
        print("****************************************************")
        print("\t\tRAILFENCE CIPHER")
        print("****************************************************")
        print("1. Encrypt")
        print("2. Decrypt")
        print("0. Exit")
        print("Enter Your Choice : ", end="")
        choice = int(input())
        print("----------------------------------------------------")
        if choice == 1:
            text = input("Enter String to Encrypt : ")
            rails = int(input("Enter No. of Rails : "))
            text = text.lower()
            print("----------------------------------------------------")
            encrypt(text, rails)
        elif choice == 2:
            text = input("Enter String to Decrypt : ")
            rails = int(input("Enter No. of Rails : "))
            print("----------------------------------------------------")
            decrypt(text, rails)
        elif choice == 0:
            print("BYE...")
            print("----------------------------------------------------")
            exit(0)
        # Any other choice just redraws the menu; pause first so the user
        # can read the output before the screen is cleared again.
        x = input()
# Script entry point: launch the interactive menu.
if __name__ == '__main__':
    rail_fence()
|
from __future__ import annotations
from .core import ECSBase
# We start with a simple manager like entity, i.e. it does not encapsulate creation/ factory
# TODO: finalize
# TODO: adapt to asset management like graphics, and audio
# TODO: make this a singleton
# https://python-3-patterns-idioms-test.readthedocs.io/en/latest/Singleton.html?highlight=singleton
class ECSManager(ECSBase):
    """A manager is an entity that is responsible for resource loading, including assets like graphics or sound effects.
    The manager follows an operation model whereby assets are:
    - first registered, this makes them known to the managing entity
    - loaded or acquired, i.e. this loads the asset into the memory space managed by the entity manager
    - requested, i.e. delivered to the consumer
    """
    def __init__(self, **kwargs):
        super(ECSManager, self).__init__(**kwargs)
        # Registered/loaded objects keyed by id ("object map").
        self._o_map = {}
        # Optional callbacks fired on acquire/release (set via the
        # properties below).
        self._on_asset_acquired = None
        self._on_asset_released = None

    def register(self, asset_descriptor) -> ECSManager:
        # TODO: stub — currently returns None despite the fluent annotation.
        pass

    def request(self, resource_id: str):
        # NOTE(review): placeholder; raising NotImplementedError would be
        # the conventional choice here (same for acquire/release below),
        # but callers may already catch ValueError.
        raise ValueError("Not implemented")

    def acquire(self, asset_descriptor: list = None):
        raise ValueError("Not implemented")

    def release(self, resource_ids: list = None):
        raise ValueError("Not implemented")

    @property
    def on_asset_acquired(self):
        return self._on_asset_acquired

    @on_asset_acquired.setter
    def on_asset_acquired(self, v) -> ECSManager:
        # NOTE(review): property setters' return values are discarded by the
        # descriptor protocol, so the fluent `return self` has no effect.
        self._on_asset_acquired = v
        return self

    @property
    def on_asset_released(self):
        return self._on_asset_released

    @on_asset_released.setter
    def on_asset_released(self, v) -> ECSManager:
        self._on_asset_released = v
        return self
|
# RUN: test-lexer.sh %s
# NOTE: "import" isn't a proper keyword yet.
import module1 # LEXER-LABEL: >0 import module1
import module2 # LEXER-NEXT: >0 import module2
global_y = 1 # LEXER-NEXT: >0 global_y = 1n
def my_func(x): # LEXER-NEXT: >0 >def my_func ( x ) :
if x == 1: # LEXER-NEXT: >4 >if x == 1n :
return global_y + x # LEXER-NEXT: >8 >return global_y + x
else: # LEXER-NEXT: >4 >else :
return 2 # LEXER-NEXT: >8 >return 2n
if __name__ == '__main__': # LEXER-NEXT: >0 >if __name__ == '__main__' :
print(global_y) # LEXER-NEXT: >4 print ( global_y )
# LEXER-NEXT: >EOF
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
def get_extractor(url):
    """Pick the site extractor module for ``url`` and return its ``site``.

    URLs containing "xingyan.panda" use the xingyan extractor; everything
    else falls back to the generic panda extractor.
    """
    # Fix: re.search treated the '.' as a regex wildcard (matching any
    # character), which was not the intent; a plain substring test expresses
    # the literal match exactly.
    if 'xingyan.panda' in url:
        from . import xingyan as s
    else:
        from . import panda as s
    return s.site
|
# NOTE(review): '$NAME' placeholders — presumably substituted by a packaging
# step before this script runs; confirm against the build pipeline.
file_ = '$DATA_FILE'
log_file = '$LOG_FILE'
import sys
import os
from PyQt5.QtWidgets import (QLineEdit, QApplication, QInputDialog, QMessageBox)
import base64
from cryptography.fernet import Fernet, InvalidToken
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
import logging
# Root logger; a file handler is attached in configure_logger().
logger = logging.getLogger()
# Failed password attempts across retries (incremented in main()).
pwd_attempts = 0
def configure_logger():
    """Attach a DEBUG-level file handler for ``log_file`` to the root logger."""
    # Fix: the original conditional-expression mkdir was unreadable, failed
    # when the log directory's parent was missing, and crashed when the path
    # had no directory component; makedirs(exist_ok=True) handles all cases.
    log_dir = os.path.dirname(log_file)
    if log_dir:
        os.makedirs(log_dir, exist_ok=True)
    logger.setLevel(logging.DEBUG)
    handler = logging.FileHandler(log_file)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.info("SESSION INITIALISED FOR: {}".format(file_))
def resource_path(relative_path):
    """ Get absolute path to resource, works for dev and for PyInstaller """
    # PyInstaller one-file builds unpack into sys._MEIPASS; otherwise resolve
    # relative to this script's own directory.
    bundle_dir = getattr(sys, '_MEIPASS', os.path.dirname(os.path.abspath(__file__)))
    return os.path.join(bundle_dir, relative_path)
def getPassword():
    """Prompt for the win-lock password via a Qt dialog.

    Exits the process (code 1) if the user cancels the dialog.
    """
    pwd, ok = QInputDialog.getText(None, "Win-Lock", "Enter your win-lock password",
                                   QLineEdit.Password)
    if not ok:
        logger.info("User Canceled password input")
        sys.exit(1)
    return pwd
def init():
    """Create the Qt application, bootstrap config/logging, then run main()."""
    app = QApplication(sys.argv)
    # Only 'password' is actually consumed downstream (by CryptManager).
    config = {
        'user': 'test_user',
        'password': None,
        'log_level': 'debug',
        #'database': r'D:\cambo\Docs\Projects\win-lock\prototype\tests\test_data\test_database.db'
    }
    configure_logger()
    main(app, config)
def main(app, config):
    """Prompt for the password, decrypt ``file_`` and write the plaintext out.

    Re-prompts (recursively) while the password is wrong; any other failure
    is logged and swallowed at this top-level boundary.
    """
    config['password'] = getPassword()
    try:
        manager = CryptManager(config, b'salty_boy')
        with open(resource_path(file_), 'rb') as f:
            encrypted_bytes = f.read()
        # Fix: this message used the root logger (logging.info) while every
        # other message goes through the configured module logger.
        logger.info("Manager initialised")
        try:
            decrypted_data = manager.decrypt_bytes(encrypted_bytes)
        except InvalidToken:
            global pwd_attempts
            pwd_attempts += 1
            logger.info("User entered Incorrect Password... {} Attempts".format(pwd_attempts))
            QMessageBox.question(None,'Win-Lock',"Incorrect Password",QMessageBox.Ok)
            # NOTE(review): recursion grows the stack one frame per wrong
            # password; fine in practice, but a loop would be safer.
            return main(app, config)
        logger.info("Decrypted {}".format(file_))
        out_file = file_.replace('.wl','')
        with open(os.path.join(os.getcwd(),out_file), 'wb') as f:
            f.write(decrypted_data)
        logger.info("Wrote {}".format(os.path.abspath(out_file)))
        logger.info("Done")
    except Exception as e:
        logger.exception("FAILED -- {}".format(e))
class CryptManager(object):
    """Fernet encrypt/decrypt helper keyed from a user password via PBKDF2."""
    def __init__(self, config, salt=b'Default Salt'):
        self.salt = salt
        self.kdf = self._get_kdf(self.salt)
        # NOTE(review): pops 'password' out of the caller's config dict —
        # presumably so the plaintext password is not kept around, but it
        # mutates the shared dict; confirm callers expect that.
        self.key = self._get_key(config.pop('password'))
    def encrypt_file(self, file_path):
        # Encrypts file_path and writes the result next to it as "<name>.wl".
        with open(file_path, 'rb') as f:
            file_bytes = f.read()
        encrypted_bytes = self.encrypt_bytes(file_bytes)
        with open(file_path + '.wl', 'wb') as f:
            f.write(encrypted_bytes)
    def encrypt_bytes(self, data):
        return Fernet(self.key).encrypt(data)
    def decrypt_bytes(self, data):
        # Raises cryptography.fernet.InvalidToken on a wrong key/password.
        return Fernet(self.key).decrypt(data)
    def _get_kdf(self, salt):
        # NOTE(review): 1000 iterations is far below current PBKDF2
        # recommendations, but raising it would make previously encrypted
        # files undecryptable — needs a migration plan, not a quiet change.
        return PBKDF2HMAC(
            algorithm=hashes.SHA3_256(),
            length=32,
            salt=salt,
            iterations=1000,
            backend=default_backend()
        )
    def _get_key(self, password):
        # Fernet requires a 32-byte urlsafe-base64-encoded key.
        password = password.encode() if not isinstance(password, bytes) else password
        return base64.urlsafe_b64encode(self.kdf.derive(password))
# Script entry point.
if __name__ == '__main__':
    init()
|
from __future__ import absolute_import, division, print_function
from six.moves import range
def exercise_specific():
    """Spot-check stable() and strongly_connected_components() on fixed graphs."""
    from libtbx.topological_sort import stable
    from libtbx.topological_sort import strongly_connected_components as scc
    # Simple DAG: dependencies come out before dependents, "a" last.
    connections = [
        ("a", ["b", "d"]),
        ("b", []),
        ("c", []),
        ("d", ["c"])]
    node_list = stable(connections=connections)
    assert node_list == ["b", "c", "d", "a"]
    # A DAG has no non-trivial strongly connected components.
    assert scc(dict(connections)) == []
    assert scc(dict(connections), omit_single_node_components=False) \
        == [("b",), ("c",), ("d",), ("a",)]
    # Same graph with successor order swapped: the sort must stay stable.
    connections = [
        ("a", ["d", "b"]),
        ("b", []),
        ("c", []),
        ("d", ["c"])]
    node_list = stable(connections=connections)
    assert node_list == ["b", "c", "d", "a"]
    assert scc(dict(connections)) == []
    # Graph with a genuine cycle 1 <-> 2 (3 only self-loops).
    connections = [
        (0, [1]),
        (1, [2]),
        (2, [1,3]),
        (3, [3])]
    node_list = stable(connections=connections)
    assert node_list == [3,2,1,0]
    assert scc(dict(connections)) == [(1, 2)]
    # Node "e" appears only as a successor, never as a key.
    connections = [
        ("a", ["d", "b", "e"]),
        ("b", []),
        ("c", []),
        ("d", ["c"])]
    node_list = stable(connections=connections)
    assert node_list == ["b", "c", "d", "e", "a"]
    assert scc(dict(connections)) == []
    #
    # Successor-only nodes count as components of their own: 2 keys + 3
    # distinct successors = 5 single-node components.
    assert len(scc(
        successors_by_node={
            "node1": ["successor1", "successor2"],
            "node2": ["successor1", "successor3"]},
        omit_single_node_components=False)) == 5
def exercise_random(rng, n_nodes):
    """Run the sort on a random graph and cross-check SCCs with find_path."""
    # meant to discover endless loops or similar bugs
    connections = []
    for i_node in range(n_nodes):
        # ~20% of nodes are dropped entirely (appear only as successors).
        if (rng.randrange(10) > 7):
            continue
        # Delete a majority of candidate successors to keep the graph sparse.
        n_del = max(int(n_nodes*0.6), rng.randrange(n_nodes))
        deps = list(range(n_nodes))
        for i_del in range(n_del):
            i = rng.randrange(len(deps))
            del deps[i]
        connections.append((i_node, deps))
    from libtbx.topological_sort import stable
    stable(connections=connections)
    #
    from libtbx.topological_sort import strongly_connected_components as scc
    from libtbx.topological_sort import find_path
    sbn = dict(connections)
    components = scc(successors_by_node=sbn)
    # Every pair of nodes in one SCC must be mutually reachable.
    for component in components:
        for a in component:
            for b in component:
                path = find_path(successors_by_node=sbn, from_node=a, to_node=b)
                assert path is not None
def run(args):
    """Exercise the topological sort: fixed cases plus 10 random graphs."""
    assert len(args) == 0
    exercise_specific()
    import random
    # Fixed seed keeps the "random" exercise reproducible.
    random.seed(0)
    for i_trial in range(10):
        exercise_random(rng=random, n_nodes=10)
    print("OK")
# Script entry point.
if (__name__ == "__main__"):
    import sys
    run(args=sys.argv[1:])
|
from dotenv import load_dotenv
from datetime import datetime
import os
from pathlib import Path
from tkinter import Tk, Canvas, Entry, Text, Button, PhotoImage
# Import namespaces
import azure.cognitiveservices.speech as speech_sdk
# NOTE(review): `global` at module scope is a no-op — targetLanguage is
# already a module global; the statement could be dropped.
global targetLanguage
targetLanguage = ''
def gui():
    """Build and run the Tkinter language-picker window.

    Each language button calls its make*() handler directly (which performs
    one translate-and-speak round trip); the quit button terminates the
    process.  Blocks in mainloop() and returns None when the window closes.
    Layout generated by a GUI builder — coordinates are pixel-exact.
    """
    # from tkinter import *
    # Explicit imports to satisfy Flake8
    OUTPUT_PATH = Path(__file__).parent
    ASSETS_PATH = OUTPUT_PATH / Path("./assets")
    def relative_to_assets(path: str) -> Path:
        # Resolve an asset filename relative to the bundled ./assets folder.
        return ASSETS_PATH / Path(path)
    window = Tk()
    window.geometry("843x491")
    window.configure(bg="#FE724D")
    canvas = Canvas(
        window,
        bg="#FE724D",
        height=491,
        width=843,
        bd=0,
        highlightthickness=0,
        relief="ridge"
    )
    canvas.place(x=0, y=0)
    # Four language buttons: French, Spanish, Hindi, German.
    button_image_1 = PhotoImage(
        file=relative_to_assets("button_1.png"))
    button_1 = Button(
        image=button_image_1,
        borderwidth=0,
        highlightthickness=0,
        command=lambda: makeFR(targetLanguage),
        relief="flat"
    )
    button_1.place(
        x=591.6849365234375,
        y=53.99999999999994,
        width=214.3150634765625,
        height=186.14793395996094
    )
    button_image_2 = PhotoImage(
        file=relative_to_assets("button_2.png"))
    button_2 = Button(
        image=button_image_2,
        borderwidth=0,
        highlightthickness=0,
        command=lambda: makeES(targetLanguage),
        relief="flat"
    )
    button_2.place(
        x=359.0,
        y=53.99999999999994,
        width=214.3150634765625,
        height=186.14793395996094
    )
    button_image_3 = PhotoImage(
        file=relative_to_assets("button_3.png"))
    button_3 = Button(
        image=button_image_3,
        borderwidth=0,
        highlightthickness=0,
        command=lambda: makeHI(targetLanguage),
        relief="flat"
    )
    button_3.place(
        x=359.0,
        y=257.2931518554687,
        width=214.3150634765625,
        height=186.14793395996094
    )
    button_image_4=PhotoImage(
        file=relative_to_assets("button_4.png"))
    button_4=Button(
        image=button_image_4,
        borderwidth=0,
        highlightthickness=0,
        command=lambda: makeDE(targetLanguage),
        relief="flat"
    )
    button_4.place(
        x=591.6849365234375,
        y=257.2931518554687,
        width=214.3150634765625,
        height=186.14793395996094
    )
    # White side panel holding the title, welcome text and instructions.
    canvas.create_rectangle(
        37.0,
        40.99999999999994,
        306.0,
        442.99999999999994,
        fill="#FFFFFF",
        outline="")
    # Quit button at the bottom of the side panel.
    button_image_5=PhotoImage(
        file=relative_to_assets("button_5.png"))
    button_5=Button(
        image=button_image_5,
        borderwidth=0,
        highlightthickness=0,
        command=lambda: makeqt(targetLanguage),
        relief="flat"
    )
    button_5.place(
        x=57.0,
        y=383.99999999999994,
        width=230.0,
        height=39.0
    )
    canvas.create_text(
        54.0,
        70.99999999999994,
        anchor="nw",
        text="Captraduire",
        fill="#8A00BB",
        font=("Comfortaa Regular", 34 * -1)
    )
    canvas.create_text(
        60.0,
        360.99999999999994,
        anchor="nw",
        text=" to End the program, click below\n",
        fill="#000000",
        font=("Comfortaa Regular", 13 * -1)
    )
    canvas.create_text(
        58.0,
        207.99999999999994,
        anchor="nw",
        text="Instructions :",
        fill="#6D6D6D",
        font=("FontAwesome5Brands Regular", 15 * -1)
    )
    canvas.create_text(
        61.0,
        217.99999999999994,
        anchor="nw",
        text="",
        fill="#6D6D6D",
        font=("FontAwesome5Brands Regular", 15 * -1)
    )
    canvas.create_text(
        75.0,
        269.99999999999994,
        anchor="nw",
        text="∘ Speak in English",
        fill="#6D6D6D",
        font=("FontAwesome5Brands Regular", 15 * -1)
    )
    canvas.create_text(
        75.0,
        324.99999999999994,
        anchor="nw",
        text="∘ You're good to go",
        fill="#6D6D6D",
        font=("FontAwesome5Brands Regular", 15 * -1)
    )
    canvas.create_text(
        75.0,
        235.99999999999994,
        anchor="nw",
        text="∘ Click on the desired",
        fill="#6D6D6D",
        font=("FontAwesome5Brands Regular", 15 * -1)
    )
    canvas.create_text(
        75.0,
        249.99999999999994,
        anchor="nw",
        text="   language button",
        fill="#6D6D6D",
        font=("FontAwesome5Brands Regular", 15 * -1)
    )
    canvas.create_text(
        75.0,
        290.99999999999994,
        anchor="nw",
        text="∘ Wait for the program to",
        fill="#6D6D6D",
        font=("FontAwesome5Brands Regular", 15 * -1)
    )
    canvas.create_text(
        75.0,
        305.99999999999994,
        anchor="nw",
        text="   translate",
        fill="#6D6D6D",
        font=("FontAwesome5Brands Regular", 15 * -1)
    )
    canvas.create_text(
        60.0,
        125.99999999999994,
        anchor="nw",
        text="Welcome to Captraduire,",
        fill="#FE724D",
        font=("Comfortaa Regular", 18 * -1)
    )
    canvas.create_text(
        60.0,
        146.99999999999994,
        anchor="nw",
        text="your on the go",
        fill="#FE724D",
        font=("Comfortaa Regular", 18 * -1)
    )
    canvas.create_text(
        60.0,
        167.99999999999994,
        anchor="nw",
        text="translator.",
        fill="#FE724D",
        font=("Comfortaa Regular", 18 * -1)
    )
    window.resizable(False, False)
    window.mainloop()
def main(targetLanguage):
    """Configure the Azure speech/translation clients, then run the GUI.

    The GUI's buttons invoke the make*() handlers directly; this function
    only owns client setup and the window lifecycle.
    """
    try:
        global speech_config
        global translation_config
        # Get Configuration Settings
        load_dotenv()
        cog_key=os.getenv('COG_SERVICE_KEY')
        cog_region=os.getenv('COG_SERVICE_REGION')
        # Configure translation: English speech in, four target languages out.
        translation_config=speech_sdk.translation.SpeechTranslationConfig(
            cog_key, cog_region)
        translation_config.speech_recognition_language='en-US'
        translation_config.add_target_language('fr')
        translation_config.add_target_language('es')
        translation_config.add_target_language('hi')
        translation_config.add_target_language('de')
        print('ready to translate from',
              translation_config.speech_recognition_language)
        # Configure speech synthesis for speaking the translation back.
        speech_config=speech_sdk.SpeechConfig(cog_key, cog_region)
        # Fix: gui() blocks in mainloop() and returns None once the window is
        # closed, so the original `while x != 'quit'` loop re-opened the
        # window forever.  Treat a None (or explicit 'quit') result as the
        # signal to stop relaunching.
        keep_running=True
        while keep_running:
            result=gui()
            keep_running=result is not None and result != 'quit'
    except Exception as ex:
        print(ex)
def makeFR(targetLanguage):
    """French button handler: translate one utterance and return 'fr'."""
    selected = "fr"
    Translate(selected)
    return selected
def makeES(targetLanguage):
    """Spanish button handler: translate one utterance and return 'es'."""
    selected = "es"
    Translate(selected)
    return selected
def makeHI(targetLanguage):
    """Hindi button handler: translate one utterance and return 'hi'."""
    selected = "hi"
    Translate(selected)
    return selected
def makeDE(targetLanguage):
    """German button handler: translate one utterance and return 'de'."""
    selected = "de"
    Translate(selected)
    return selected
def makeqt(targetLanguage):
    """Quit-button handler: terminate the program.

    Raises SystemExit directly instead of relying on the site-injected
    exit() helper, which is absent when Python runs without the site module.
    """
    raise SystemExit(0)
def Translate(targetLanguage):
    """Recognize one English utterance, translate it and speak the result.

    Args:
        targetLanguage: one of 'fr', 'es', 'hi', 'de' (keys of `voices`).
    Uses the module globals translation_config / speech_config set by main().
    """
    translation=''
    # Capture a single utterance from the default microphone and translate.
    audio_config=speech_sdk.AudioConfig(use_default_microphone=True)
    translator=speech_sdk.translation.TranslationRecognizer(
        translation_config, audio_config)
    print("Speak now...")
    result=translator.recognize_once_async().get()
    print(f'Translating {result.text}')
    translation=result.translations[targetLanguage]
    print(translation)
    # Speak the translation with a neural voice matching the target language.
    voices={
        "fr": "fr-FR-DeniseNeural",
        "es": "es-ES-AlvaroNeural",
        "hi": "hi-IN-MadhurNeural",
        "de": "de-DE-KatjaNeural"
    }
    speech_config.speech_synthesis_voice_name=voices.get(targetLanguage)
    speech_synthesizer=speech_sdk.SpeechSynthesizer(speech_config)
    speak=speech_synthesizer.speak_text_async(translation).get()
    # Surface synthesis failures (e.g. bad voice name) on the console.
    if speak.reason != speech_sdk.ResultReason.SynthesizingAudioCompleted:
        print(speak.reason)
# Script entry point.
if __name__ == "__main__":
    main(targetLanguage)
|
class Transaction:
    """Context manager that nests a cursor context inside a DB context.

    Entering yields the cursor; exiting closes the cursor first, then the
    connection context, forwarding the exception info to both.
    """

    def __init__(self, db):
        self.db = db

    def __enter__(self):
        # NOTE(review): the value returned by db.__enter__() is discarded;
        # assumes entering the db context returns the db itself — confirm.
        self.db.__enter__()
        self.cursor = self.db.cursor()
        self.cursor.__enter__()
        return self.cursor

    def __exit__(self, type, value, traceback):
        # Fix: always exit the db context, even if closing the cursor raises;
        # the original skipped db.__exit__ in that case and leaked the
        # connection context.
        try:
            self.cursor.__exit__(type, value, traceback)
        finally:
            self.db.__exit__(type, value, traceback)
|
# Generated by sila2.code_generator; sila2.__version__: 0.8.0
from __future__ import annotations
from typing import Any, NamedTuple
class MoveToHomePosition_Responses(NamedTuple):
    """Empty SiLA response: MoveToHomePosition returns no values."""
    pass
class StopMoving_Responses(NamedTuple):
    """Empty SiLA response: StopMoving returns no values."""
    pass
class MoveToPosition_Responses(NamedTuple):
    """Empty SiLA response: MoveToPosition returns no values."""
    pass
Position = Any
|
"""Gaussian process utlilities for gpytorch."""
import torch
import hessian
import gpytorch
import numpy as np
from torch.nn import ModuleList
from gpytorch.distributions import MultivariateNormal
from safe_exploration.state_space_models import StateSpaceModel
from .utilities import compute_jacobian
__all__ = ['BatchMean', 'BatchKernel', 'LinearMean', 'MultiOutputGP', 'GPyTorchSSM']
class BatchMean(gpytorch.means.Mean):
    """Combine different mean functions across batches.

    Parameters
    ----------
    base_means : list
        List of mean functions used for each batch.
    """
    def __init__(self, base_means):
        super(BatchMean, self).__init__()
        # ModuleList so the sub-means register their parameters properly.
        self.base_means = ModuleList(base_means)
    @property
    def batch_size(self):
        """Return the batch_size of the underlying model.

        Bug fix: previously returned ``len(self.base_kernels)`` (a
        copy-paste from BatchKernel); this class stores ``base_means``,
        so accessing the property raised AttributeError.
        """
        return len(self.base_means)
    def __getitem__(self, item):
        """Retrieve the ith mean."""
        return self.base_means[item]
    def __iter__(self):
        """Iterate over the means."""
        yield from self.base_means
    def forward(self, input):
        """Evaluate the mean functions and combine to a `b x len(input[0])` matrix."""
        return torch.stack([mean(x) for x, mean in zip(input, self.base_means)])
class BatchKernel(gpytorch.kernels.Kernel):
    """Stack independent covariance functions along a batch dimension.

    Parameters
    ----------
    base_kernels : list
        One base kernel per batch entry.
    """
    def __init__(self, base_kernels):
        super(BatchKernel, self).__init__(batch_size=len(base_kernels))
        # ModuleList so each sub-kernel's hyperparameters are registered.
        self.base_kernels = ModuleList(base_kernels)

    def __getitem__(self, item):
        """Return the kernel for batch index ``item``."""
        return self.base_kernels[item]

    def __iter__(self):
        """Iterate over the per-batch kernels."""
        return iter(self.base_kernels)

    def forward(self, x1, x2, diag=False, batch_dims=None, **params):
        """Evaluate each kernel on its own batch slice and stack the results."""
        evaluated = []
        for idx, base in enumerate(self.base_kernels):
            evaluated.append(base.forward(x1[idx], x2[idx], **params).squeeze(0))
        if diag:
            evaluated = [cov.diag() for cov in evaluated]
        return torch.stack(evaluated)

    def size(self, x1, x2):
        """Return the size of the stacked covariance: (batch, n1, n2)."""
        return torch.Size((x1.size(0), x1.size(-2), x2.size(-2)))
class LinearMean(gpytorch.means.Mean):
    """Mean function given by a (possibly learnable) linear map ``A @ x``.

    When the matrix has more than one row, the mean acts in batch mode.

    Parameters
    ----------
    matrix : torch.tensor
        A 2d matrix. For each feature vector x in (d, 1) the output is `A @ x`.
    trainable : bool, optional
        Whether the mean matrix should be trainable as a parameter.
    prior : optional
        The gpytorch prior for the parameter. Ignored if trainable is False.
    """
    def __init__(self, matrix, trainable=False, prior=None):
        super().__init__()
        if not trainable:
            # Plain attribute: fixed, not exposed to the optimizer.
            self.matrix = matrix
        else:
            self.register_parameter(name='matrix',
                                    parameter=torch.nn.Parameter(matrix))
            if prior is not None:
                self.register_prior('matrix_prior', prior, 'matrix')

    @property
    def batch_size(self):
        """Number of batched linear maps (rows of the stacked matrix)."""
        return self.matrix.size(0)

    def forward(self, x):
        """Apply the linear map across the batch via einsum."""
        return torch.einsum('ij,ilj->il', self.matrix, x)
class WrappedNormal(object):
    """Proxy around a gpytorch normal distribution that restores a batch axis.

    Attribute access is forwarded to the wrapped distribution; for a
    non-batched distribution, 'mean', 'variance' and 'covariance_matrix'
    are unsqueezed so callers always see a leading batch dimension.
    """
    def __init__(self, normal):
        super().__init__()
        self.normal = normal

    def __getattr__(self, key):
        """Forward to the wrapped distribution, adding a batch axis where needed."""
        value = getattr(self.normal, key)
        needs_batch_axis = key in ('mean', 'variance', 'covariance_matrix')
        if needs_batch_axis and not self.normal.batch_shape:
            value = value.unsqueeze(0)
        return value
class MultiOutputGP(gpytorch.models.ExactGP):
    """A GP model that uses the gpytorch batch mode for multi-output predictions.
    The main difference to simple batch mode, is that the model assumes that all GPs
    use the same input data. Moreover, even for single-input data it outputs predictions
    together with a singular dimension for the batchsize.
    Parameters
    ----------
    train_x : torch.tensor
        A (n x d) tensor with n data points of d dimensions each.
    train_y : torch.tensor
        A (n x o) tensor with n data points across o output dimensions.
    kernel : gpytorch.kernels.Kernel
        A kernel with appropriate batchsize. See `BatchKernel`.
    likelihood : gpytorch.likelihoods.Likelihood
        A GP likelihood with appropriate batchsize.
    mean : gpytorch.means.Mean, optional
        The mean function with appropriate batchsize. See `BatchMean`. Defaults to
        `gpytorch.means.ZeroMean()`.
    """
    def __init__(self, train_x, train_y, kernel, likelihood, mean=None):
        if mean is None:
            mean = gpytorch.means.ZeroMean()
        if train_y.dim() > 1:
            # Try to remove the first data row if it's empty
            train_y = train_y.squeeze(0)
        if train_y.dim() > 1:
            # Still multi-output after the squeeze: replicate the inputs once
            # per output so ExactGP sees one dataset per batch entry.
            train_x = train_x.expand(len(train_y), *train_x.shape)
        super(MultiOutputGP, self).__init__(train_x, train_y, likelihood)
        self.mean = mean
        self.kernel = kernel
    @property
    def batch_size(self):
        """Return the batch size of the model (delegated to the kernel)."""
        return self.kernel.batch_size
    def set_train_data(self, inputs=None, targets=None, strict=True):
        """Set the GP training data.

        Not supported yet: raises NotImplementedError unconditionally.
        """
        raise NotImplementedError('TODO')
    def loss(self, mml):
        """Return the negative log-likelihood of the model.
        Parameters
        ----------
        mml : marginal log likelihood
        """
        output = super().__call__(*self.train_inputs)
        return -mml(output, self.train_targets).sum()
    def __call__(self, *args, **kwargs):
        """Evaluate the underlying batch_mode model."""
        if self.batch_size > 1:
            # Promote 1-d inputs to column vectors before broadcasting.
            args = [arg.unsqueeze(-1) if arg.ndimension() == 1 else arg for arg in args]
            # Expand input arguments across batches
            args = list(map(lambda x: x.expand(self.batch_size, *x.shape), args))
        normal = super().__call__(*args, **kwargs)
        if self.batch_size > 1:
            return normal
        else:
            # Single-output model: wrap so callers still see a leading batch
            # axis on mean/variance/covariance_matrix (see WrappedNormal).
            return WrappedNormal(normal)
    def forward(self, x):
        """Compute the resulting batch-distribution."""
        return MultivariateNormal(self.mean(x), self.kernel(x))
class GPyTorchSSM(StateSpaceModel):
    """ A Gaussian process state space model based on GPytorch.
    We approximate the function x_{t+1} = f(x_t, u_t) with x in (1 x n) and u in (1 x m)
    based on noisy observations of f.
    """
    def __init__(self, num_states, num_actions, train_x, train_y, kernel, likelihood, mean=None):
        """Build the underlying MultiOutputGP and initialize the base SSM.

        Parameters
        ----------
        num_states : int
            State dimensionality n.
        num_actions : int
            Action dimensionality m.
        train_x : torch.tensor
            Training inputs, shape (N, n + m).
        train_y : torch.tensor
            Training targets; the assert below requires shape[0] == num_states.
        kernel, likelihood, mean
            Forwarded to `MultiOutputGP`.
        """
        # check compatability of the parameters required for super classes
        assert np.shape(train_x)[1] == num_states + num_actions, "Input needs to have dimensions N x(n + m)"
        # NOTE(review): this checks train_y.shape[0] == num_states (an n x N
        # layout), while the message claims "N x n" — confirm intended layout.
        assert np.shape(train_y)[0] == num_states, "Input needs to have dimensions N x n"
        self.pytorch_gp = MultiOutputGP(train_x, train_y, kernel, likelihood, mean)
        # Evaluation (posterior-prediction) mode.
        self.pytorch_gp.eval()
        super(GPyTorchSSM, self).__init__(num_states, num_actions, True, True)
    def _compute_hessian_mean(self, states, actions):
        """ Generate the hessian of the mean prediction
        Parameters
        ----------
        states : np.ndarray
            A (1 x n) array of states.
        actions : np.ndarray
            A (1 x m) array of actions.
        Returns
        -------
        hess_mean : np.ndarray
            A (n, n + m, n + m) array: one Hessian per output dimension.
        """
        inp = torch.cat((torch.from_numpy(np.array(states, dtype=np.float32)), torch.from_numpy(np.array(actions, dtype=np.float32))), dim=1)
        inp.requires_grad = True
        n_in = self.num_states + self.num_actions
        hess_mean = torch.empty(self.num_states, n_in, n_in)
        for i in range(self.num_states): # unfortunately hessian only works for scalar outputs
            hess_mean[i, :, :] = hessian.hessian(self.pytorch_gp(inp).mean[i, 0], inp)
        return hess_mean.numpy()
    def _predict(self, states, actions, jacobians=False, full_cov=False):
        """Predict the next states and uncertainty.
        Parameters
        ----------
        states : torch.tensor
            A (N x n) tensor of states.
        actions : torch.tensor
            A (N x m) tensor of actions.
        jacobians : bool, optional
            If true, return two additional outputs corresponding to the jacobians.
        full_cov : bool, optional
            Whether to return the full covariance.
        Returns
        -------
        mean : torch.tensor
            A (N x n) mean prediction for the next states.
        variance : torch.tensor
            A (N x n) variance prediction for the next states. If full_cov is True,
            then instead returns the (n x N x N) covariance matrix for each independent
            output of the GP model.
        jacobian_mean : torch.tensor
            A (N x n x n + m) tensor with the jacobians for each datapoint on the axes.
        jacobian_variance : torch.tensor
            Only supported without the full_cov flag.
        """
        if full_cov:
            raise NotImplementedError("Not implemented right now.")
        inp = torch.cat((states, actions), dim=1)
        inp.requires_grad = True
        # Keep the graph input so get_reverse()/get_linearize_reverse() can
        # read gradients from it after a later backward pass.
        self.inp = inp
        pred = self.pytorch_gp(inp)
        pred_mean = pred.mean
        pred_var = pred.variance
        if jacobians:
            jac_mean = compute_jacobian(pred_mean, inp).squeeze()
            jac_var = compute_jacobian(pred_var, inp).squeeze()
            return pred_mean, pred_var, jac_mean, jac_var
        else:
            # Cache concatenated (mean, var) — the forward output that
            # get_reverse() back-propagates through.
            self._forward_cache = torch.cat((pred_mean, pred_var))
            return pred_mean, pred_var
    def predict(self, states, actions, jacobians=False, full_cov=False):
        """Predict the next states and uncertainty.
        Parameters
        ----------
        states : np.ndarray
            A (N x n) array of states.
        actions : np.ndarray
            A (N x m) array of actions.
        jacobians : bool, optional
            If true, return two additional outputs corresponding to the jacobians.
        full_cov : bool, optional
            Whether to return the full covariance.
        Returns
        -------
        mean : np.ndarray
            A (N x n) mean prediction for the next states.
        variance : np.ndarray
            A (N x n) variance prediction for the next states. If full_cov is True,
            then instead returns the (n x N x N) covariance matrix for each independent
            output of the GP model.
        jacobian_mean : np.ndarray
            A (N x n x n + m) array with the jacobians for each datapoint on the axes.
        jacobian_variance : np.ndarray
            Only supported without the full_cov flag.
        """
        # Numpy boundary: convert to float32 tensors, then detach back.
        out = self._predict(torch.from_numpy(np.array(states, dtype=np.float32)),
                            torch.from_numpy(np.array(actions, dtype=np.float32)),
                            jacobians,
                            full_cov)
        return tuple([var.detach().numpy() for var in out])
    def linearize_predict(self, states, actions, jacobians=False, full_cov=False):
        """Predict the next states and uncertainty.
        Parameters
        ----------
        states : np.ndarray
            A (N x n) array of states.
        actions : np.ndarray
            A (N x m) array of actions.
        jacobians : bool, optional
            If true, return two additional outputs corresponding to the jacobians of the predictive
            mean, the linearized predictive mean and variance.
        full_cov : bool, optional
            Whether to return the full covariance.
        Returns
        -------
        mean : np.ndarray
            A (N x n) mean prediction for the next states.
        variance : np.ndarray
            A (N x n) variance prediction for the next states. If full_cov is True,
            then instead returns the (n x N x N) covariance matrix for each independent
            output of the GP model.
        jacobian_mean : np.ndarray
            A (N x n x (n + m) array with the jacobians for each datapoint on the axes.
        jacobian_variance : np.ndarray
            Only supported without the full_cov flag.
        hessian_mean: np.ndarray
            A (N x n*(n+m) x (n+m)) Array with the derivatives of each entry in the jacobian for each input
        """
        N, n = np.shape(states)
        if jacobians and N > 1:
            raise NotImplementedError("""'linearize_predict' currently only allows for single
                                      inputs, i.e. (1 x n) arrays, when computing jacobians.""")
        # Always request jacobians internally; they feed the cache below.
        out = self._predict(torch.from_numpy(np.array(states, dtype=np.float32)),
                            torch.from_numpy(np.array(actions, dtype=np.float32)),
                            True,
                            full_cov)
        jac_mean = out[2]
        # Cache (mean, var, flattened jacobian) for get_linearize_reverse().
        self._linearize_forward_cache = torch.cat((out[0], out[1], jac_mean.view(-1, 1)))
        out = [var.detach().numpy() for var in out]
        if jacobians:
            hess_mean = self._compute_hessian_mean(states, actions)
            return out[0], out[1], out[2], out[3], hess_mean
        else:
            return out[0], out[1], out[2]
    def get_reverse(self, seed):
        """Vector-Jacobian product of the cached `_predict` outputs.

        ``seed`` weights the concatenated (mean, var) outputs; the result is
        split into the gradient w.r.t. states and w.r.t. actions.
        """
        # NOTE(review): backward() accumulates into self.inp.grad — repeated
        # calls without an intervening _predict() sum gradients; confirm
        # callers rely on single-shot use.
        self._forward_cache.backward(torch.from_numpy(seed), retain_graph=True)
        inp_grad = self.inp.grad
        grad_state = inp_grad[0, :self.num_states]
        grad_action = inp_grad[0, self.num_states:]
        return grad_state.detach().numpy(), grad_action.detach().numpy()
    def get_linearize_reverse(self, seed):
        """Vector-Jacobian product of the cached `linearize_predict` outputs.

        ``seed`` weights the concatenated (mean, var, jacobian) cache; the
        result is split into column-vector gradients w.r.t. states/actions.
        """
        # NOTE(review): same grad-accumulation caveat as get_reverse().
        self._linearize_forward_cache.backward(torch.from_numpy(seed), retain_graph=True)
        inp_grad = self.inp.grad
        grad_state = inp_grad[0, :self.num_states].detach().numpy()[:, None]
        grad_action = inp_grad[0, self.num_states:].detach().numpy()[:, None]
        return grad_state, grad_action
|
import os
from core.database import Database
from core.seed import create_tables
from telegram.ext import Updater, MessageHandler, CommandHandler, CallbackQueryHandler, Filters
from barcode import Code128
from barcode.writer import ImageWriter
from dotenv import load_dotenv
from core.clients import Clients, Client
from core.items import Items, Item
from core.purchases import Purchases
class CoasterBotHandler:
    """Telegram bot that manages clients, products, stock and purchases.

    Admin-only commands are guarded by :meth:`is_admin`; the admin is
    identified by the telegram id supplied at construction time.
    """

    def __init__(self, database, admin_telegram_id):
        """Store the database handle, admin id and repository objects."""
        self.database = database
        self.admin_telegram_id = admin_telegram_id
        self.clients = Clients(self.database)
        self.items = Items(self.database)
        self.purchases = Purchases(self.database)

    def is_admin(self, update, context):
        """Return True if the sender is the admin; otherwise notify and return False."""
        if int(update.message.from_user.id) != int(self.admin_telegram_id):
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Only admin is allowed to perform this action.")
            return False
        return True

    def add_product(self, update, context):
        """Admin: create a product from '/add_product name description EAN price'."""
        if not self.is_admin(update, context):
            return
        command_split = update.message.text.split(" ")
        if len(command_split) < 5:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="format: /add_product product description EAN_code price")
            return
        product_name = command_split[1].strip()
        product_description = command_split[2].strip()
        product_ean = command_split[3].strip()
        product_price = command_split[4].strip()
        product = Item.create(product_name, product_description, product_ean, product_price)
        self.items.persist(product)
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="The product {} is added.".format(product_name))

    def remove_product(self, update, context):
        """Admin: delete a product by name."""
        if not self.is_admin(update, context):
            return
        command_split = update.message.text.split(" ")
        if len(command_split) < 2:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="format: /remove_product productname")
            return
        product_name = command_split[1].strip()
        product = self.items.get_by_item_name(product_name)
        if not product:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text=f"The product with name {product_name} is not found.")
            # Bug fix: previously fell through and crashed on product.item_id.
            return
        self.items.remove(product.item_id)
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text=f"The product {product_name} is removed.")

    def change_price(self, update, context):
        """Admin: set a new price for an existing product."""
        if not self.is_admin(update, context):
            return
        command_split = update.message.text.split(" ")
        if len(command_split) < 3:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="format: /change_price productname new_price")
            return
        product_name = command_split[1].strip()
        product_new_price = command_split[2].strip()
        product = self.items.get_by_item_name(product_name)
        # Bug fix: guard against unknown products instead of crashing on None.
        if not product:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text=f"The product with name {product_name} is not found.")
            return
        product.price = product_new_price
        self.items.persist(product)
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text=f"The product {product_name}'s new price is {product_new_price}.")

    def get_balance(self, update, context):
        """Tell the sender their current balance."""
        telegram_id = update.message.from_user.id
        client = self.clients.get_by_telegram_id(telegram_id)
        if client:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Hello, {}, your balance is: {}".format(client.nickname,
                                                                                  round(client.balance, 2)))
        else:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No client with telegram id {} can be found".format(telegram_id))

    def reset_balance(self, update, context):
        """Admin: reset a client's balance to zero."""
        if not self.is_admin(update, context):
            return
        command_split = update.message.text.split(" ")
        if len(command_split) < 2:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Format: /reset_balance nickname")
            return
        client_name = command_split[1]
        client = self.clients.get_by_nickname(client_name)
        if client is not None:
            client.balance = 0
            self.clients.persist(client)
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text=f"Balance of {client_name} is reset.")
        else:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text=f"No client with name {client_name}.")

    def create_client(self, update, context):
        """Admin: register a new client with a nickname and telegram id."""
        if not self.is_admin(update, context):
            return
        command_split = update.message.text.split(" ")
        if len(command_split) < 3:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Format: /create_client nickname telegram_id")
            return
        client_name = command_split[1]
        client_telegram_id = command_split[2]
        new_client = Client.create(client_name, client_telegram_id)
        self.clients.persist(new_client)
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="Hello, {}! Have a drink, have a snack, it's all on you tonight!".format(
                                     new_client.nickname))

    def delete_client(self, update, context):
        """Admin: delete a client unless they still have a positive balance."""
        if not self.is_admin(update, context):
            return
        command_split = update.message.text.split(" ")
        if len(command_split) < 2:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Format: /delete_client nickname")
            return
        client_name = command_split[1]
        client = self.clients.get_by_nickname(client_name)
        if not client:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No client with nickname {}".format(client_name))
            return
        if client.balance > 0:
            # Cleanup: dropped a no-op .format() call on a placeholder-free string.
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Cannot delete a client with a positive balance.")
            return
        self.clients.remove(client_name)
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="Client with nickname {} is deleted.".format(
                                     client_name))

    @staticmethod
    def get_telegram_id(update, context):
        """Tell the sender their own telegram id."""
        telegram_id = update.message.from_user.id
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="Your telegram ID is: {}".format(telegram_id))

    def _send_barcode_image(self, context, chat_id, barcode):
        """Render (and cache on disk) the Code128 image for `barcode`, then send it."""
        filename = "barcodes/{}.png".format(barcode)
        if not os.path.isfile(filename):
            code = Code128(barcode, writer=ImageWriter())
            code.save("barcodes/{}".format(barcode))
        # Bug fix: close the file handle after sending (previously leaked).
        with open(filename, "rb") as photo:
            context.bot.send_photo(chat_id=chat_id, photo=photo)

    def get_barcode(self, update, context):
        """Send the requesting client their personal barcode image."""
        telegram_id = update.message.from_user.id
        client = self.clients.get_by_telegram_id(telegram_id)
        if client is not None:
            self._send_barcode_image(context, update.effective_chat.id, client.barcode)
        else:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="You currently are not a client.")

    def list_stock(self, update, context):
        """Admin: list every product's stock level."""
        if not self.is_admin(update, context):
            return
        items_list = self.items.list()
        if not items_list:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No products yet.")
            return
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="\n".join(["{}: {}".format(item.name, item.stock) for item in items_list]))

    def list_balances(self, update, context):
        """Admin: list every client's balance."""
        if not self.is_admin(update, context):
            return
        client_list = self.clients.list()
        if not client_list:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No clients yet.")
            return
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="\n".join([f"Client {client.nickname}: {round(client.balance, 2)}" for client in
                                                 client_list]))

    def list_purchases(self, update, context):
        """Send the requesting client a list of their purchases."""
        telegram_id = update.message.from_user.id
        client = self.clients.get_by_telegram_id(telegram_id)
        if client is not None:
            purchases_by_client = self.purchases.get_by_user_name(client.nickname)
            # TODO: Limit to a specific amount
            if not purchases_by_client:
                context.bot.send_message(chat_id=update.effective_chat.id,
                                         text="You currently don't have any purchases.")
                return
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="\n".join([
                                         f"{purchase.timestamp[:purchase.timestamp.find('T')]}:"
                                         f" {purchase.item_name}, {purchase.paid_price}"
                                         for
                                         purchase in purchases_by_client]))
        else:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="You currently are not a client.")

    def get_item_barcode(self, update, context):
        """Send the barcode image of the named item."""
        command_split = update.message.text.split(" ")
        if len(command_split) < 2:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Format: /get_item_barcode item")
            return
        item_name = command_split[1]
        item = self.items.get_by_item_name(item_name)
        if item is not None:
            self._send_barcode_image(context, update.effective_chat.id, item.barcode)
        else:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text=f"The item with name {item_name} currently does not exist.")

    def list_item_prices(self, update, context):
        """List every product's price (public command)."""
        items_list = self.items.list()
        if not items_list:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No products yet.")
            return
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="\n".join(["{}: {}".format(item.name, item.price) for item in items_list]))

    def add_stock(self, update, context):
        """Admin: increase a product's stock count."""
        if not self.is_admin(update, context):
            return
        command_split = update.message.text.split(" ")
        if len(command_split) < 3:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Format: /add_stock product_name quantity")
            return
        item_name = command_split[1]
        item_add_stock = int(command_split[2])
        item = self.items.get_by_item_name(item_name)
        if not item:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No product with name {} was found.".format(item_name))
            # Bug fix: previously fell through and crashed on item.stock.
            return
        item.stock += item_add_stock
        self.items.persist(item)
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="Item {} has current stock: {}".format(item.name, item.stock))

    def remove_stock(self, update, context):
        """Admin: decrease a product's stock count."""
        if not self.is_admin(update, context):
            return
        command_split = update.message.text.split(" ")
        if len(command_split) < 3:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Format: /remove_stock product_name quantity")
            return
        item_name = command_split[1]
        item_remove_stock = int(command_split[2])
        item = self.items.get_by_item_name(item_name)
        if not item:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No product with name {} was found.".format(item_name))
            # Bug fix: previously fell through and crashed on item.stock.
            return
        item.stock -= item_remove_stock
        self.items.persist(item)
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="Item {} has current stock: {}".format(item.name, item.stock))

    @staticmethod
    def help(update, context):
        """List the public and admin-only commands."""
        public_commands = ["/balance", "/telegram_id", "/get_barcode", "/get_item_barcode", "/list_purchases", "/list_prices", "/help"]
        admin_commands = ["/add_product", "/create_client", "/delete_client", "/list_stock", "/add_stock", "/reset_balance",
                          "/remove_product", "/list_balances", "/remove_stock", "/change_price"]
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="Current public commands are: {}\nCurrent admin-only commands are: {}".format(
                                     ", ".join(public_commands), ", ".join(admin_commands)))

    @staticmethod
    def unknown(update, context):
        """Fallback for unrecognized commands."""
        context.bot.send_message(chat_id=update.effective_chat.id,
                                 text="Sorry I don't know this command. "
                                      "Enter /help for more information on how to use this bot.")

    def run(self):
        """Register all command handlers and start polling for updates."""
        # Updater and dispatcher for Telegram bot
        updater = Updater(os.getenv("telegram_token"), use_context=True)
        dispatcher = updater.dispatcher
        # Add handlers for commands
        add_client_handler = CommandHandler('create_client', self.create_client)
        dispatcher.add_handler(add_client_handler)
        delete_client_handler = CommandHandler('delete_client', self.delete_client)
        dispatcher.add_handler(delete_client_handler)
        get_barcode_handler = CommandHandler('get_barcode', self.get_barcode)
        dispatcher.add_handler(get_barcode_handler)
        add_product_handler = CommandHandler('add_product', self.add_product)
        dispatcher.add_handler(add_product_handler)
        balance_handler = CommandHandler('balance', self.get_balance)
        dispatcher.add_handler(balance_handler)
        telegram_id_handler = CommandHandler('telegram_id', self.get_telegram_id)
        dispatcher.add_handler(telegram_id_handler)
        list_stock_handler = CommandHandler('list_stock', self.list_stock)
        dispatcher.add_handler(list_stock_handler)
        add_stock_handler = CommandHandler('add_stock', self.add_stock)
        dispatcher.add_handler(add_stock_handler)
        reset_balance_handler = CommandHandler('reset_balance', self.reset_balance)
        dispatcher.add_handler(reset_balance_handler)
        remove_product_handler = CommandHandler('remove_product', self.remove_product)
        dispatcher.add_handler(remove_product_handler)
        change_price_handler = CommandHandler('change_price', self.change_price)
        dispatcher.add_handler(change_price_handler)
        list_balances_handler = CommandHandler('list_balances', self.list_balances)
        dispatcher.add_handler(list_balances_handler)
        list_purchases_handler = CommandHandler('list_purchases', self.list_purchases)
        dispatcher.add_handler(list_purchases_handler)
        item_barcode_handler = CommandHandler('get_item_barcode', self.get_item_barcode)
        dispatcher.add_handler(item_barcode_handler)
        remove_stock_handler = CommandHandler('remove_stock', self.remove_stock)
        dispatcher.add_handler(remove_stock_handler)
        list_prices_handler = CommandHandler('list_prices', self.list_item_prices)
        dispatcher.add_handler(list_prices_handler)
        help_handler = CommandHandler('help', self.help)
        dispatcher.add_handler(help_handler)
        # Unknown command catchall handler
        unknown_handler = MessageHandler(Filters.command, self.unknown)
        dispatcher.add_handler(unknown_handler)
        # To poll for incoming commands
        updater.start_polling()
        # Block until a stop signal so the process handles Ctrl-C gracefully
        # (documented python-telegram-bot pattern; previously missing).
        updater.idle()
if __name__ == "__main__":
    # Load environment variables
    load_dotenv()
    # Create database, retrieve admin id
    # Expects `db_file` and `admin_telegram_id` in the environment/.env —
    # there are no fallbacks if either is missing.
    db = Database(os.getenv("db_file"))
    admin_id = os.getenv("admin_telegram_id")
    # Make sure all tables are instantiated
    create_tables(db)
    # Create bot handler
    handler = CoasterBotHandler(db, admin_id)
    # Run the bot
    handler.run()
|
# Generated by Django 2.1.1 on 2018-10-05 10:51
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the ApprovalGroup model.
    # NOTE(review): flagged initial=True while also depending on
    # '0013_auto_20180925_1334' — unusual combination (usually seen when an
    # app was split); confirm against the app's migration graph.
    initial = True
    dependencies = [
        ('django_workflow', '0013_auto_20180925_1334'),
    ]
    operations = [
        migrations.CreateModel(
            name='ApprovalGroup',
            fields=[
                # Explicit auto primary key (what Django would create implicitly).
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # M2M to Transition; reverse accessor on Transition is 'group'.
                ('transitions', models.ManyToManyField(related_name='group', to='django_workflow.Transition')),
            ],
        ),
    ]
|
"""
The main window for the GUI.
"""
from msl.qt import (
QtWidgets,
QtGui,
Qt,
Signal,
convert,
prompt,
application,
)
from msl.qt.utils import (
screen_geometry,
drag_drop_paths,
)
from msl.io import read
from .line_edit import LineEdit
from .. import logger
from .equipment import (
widgets,
find_widget,
)
from ..plugins import plugins
from .network import (
StartManager,
CreateClient,
StartEquipmentService,
StartService,
)
class MainWindow(QtWidgets.QMainWindow):
update_progress_bar = Signal(float) # a value in the range [0, 100]
show_indeterminate_progress_bar = Signal()
hide_progress_bar = Signal()
status_bar_message = Signal(str) # the message
def __init__(self, app, **kwargs):
"""Create the main application widget.
Parameters
----------
app : :class:`~photons.App`
The application instance.
kwargs
Passed to :class:`~QtWidgets.QMainWindow`.
"""
super(MainWindow, self).__init__(**kwargs)
self.app = app
app.added_connection.connect(self.on_added_connection)
app.removed_connection.connect(self.on_removed_connection)
# a list of all the docked widgets that are open
self._docked_widgets = []
# a list of all the plugins that are open
self._plugin_widgets = []
self.setWindowTitle('Single Photons')
self.setAcceptDrops(True)
self._drag_drop_root = None
self.setCorner(Qt.TopLeftCorner, Qt.TopDockWidgetArea)
self.setCorner(Qt.TopRightCorner, Qt.RightDockWidgetArea)
self.setCorner(Qt.BottomLeftCorner, Qt.LeftDockWidgetArea)
self.setCorner(Qt.BottomRightCorner, Qt.BottomDockWidgetArea)
# add a progress bar to the status bar
self._progress_bar = QtWidgets.QProgressBar()
self._progress_bar.setAlignment(Qt.AlignCenter)
self._progress_bar.setRange(0, 100)
self.statusBar().addPermanentWidget(self._progress_bar)
# connect the progress bar and status bar signals/slots
self.update_progress_bar.connect(self.on_update_progress_bar)
self.show_indeterminate_progress_bar.connect(self.on_show_indeterminate_progress_bar)
self.hide_progress_bar.connect(self.on_hide_progress_bar)
self.status_bar_message.connect(self.on_status_bar_message)
self.hide_progress_bar.emit()
menubar = self.menuBar()
# create the File menubar
exit_action = QtGui.QAction(convert.to_qicon('shell32|41'), 'Exit', self)
exit_action.setShortcut('Ctrl+Q')
exit_action.setStatusTip('Exit application')
exit_action.setToolTip('Exit application')
exit_action.triggered.connect(self.closeEvent)
self.file_menu = menubar.addMenu('File')
self.file_menu.addAction(exit_action)
self.file_menu.setToolTipsVisible(True)
# create the Connections menubar
self.connection_menu = menubar.addMenu('Connections')
for alias, record in sorted(self.app.equipment.items()):
if record.connection is not None:
action = QtGui.QAction(alias, self)
action.setStatusTip(f'Connect to {record.manufacturer} {record.model}')
action.setToolTip(f'{record.manufacturer} {record.model}')
action.setCheckable(True)
action.triggered.connect(lambda *args, a=action, r=record: self.on_connect_to_equipment(a, r))
self.connection_menu.addAction(action)
self.connection_menu.setToolTipsVisible(True)
# create the Network menubar
self.network_menu = menubar.addMenu('Network')
self.network_menu.setToolTipsVisible(True)
start_manager_action = QtGui.QAction('Start a Manager', self)
start_manager_action.setStatusTip('Start a Network Manager')
start_manager_action.setToolTip('Start a Network Manager')
start_manager_action.triggered.connect(lambda *args: StartManager(self))
self.network_menu.addAction(start_manager_action)
start_service_action = QtGui.QAction('Start a Service', self)
start_service_action.setStatusTip('Start a service')
start_service_action.setToolTip('Start a service')
start_service_action.triggered.connect(lambda *args: StartService(self))
self.network_menu.addAction(start_service_action)
start_equip_service_action = QtGui.QAction('Start an Equipment Service', self)
start_equip_service_action.setStatusTip('Start a service that interfaces with equipment')
start_equip_service_action.setToolTip('Start a service that interfaces with equipment')
start_equip_service_action.triggered.connect(lambda *args: StartEquipmentService(self))
self.network_menu.addAction(start_equip_service_action)
create_client_action = QtGui.QAction('Create a Client', self)
create_client_action.setStatusTip('Connect to a Network Manager as a Client')
create_client_action.setToolTip('Connect to a Network Manager as a Client')
create_client_action.triggered.connect(lambda *args: CreateClient(self))
self.network_menu.addAction(create_client_action)
# create the Widgets menubar
self.widgets_menu = menubar.addMenu('Widgets')
for alias, record in sorted(self.app.equipment.items()):
for w in widgets:
if w.matches(record):
if record.connection is not None:
action = QtGui.QAction(alias, self)
action.setStatusTip(f'Connect to {record.manufacturer} {record.model}')
action.setToolTip(f'{record.manufacturer} {record.model}')
action.setCheckable(True)
action.triggered.connect(lambda *args, a=action, r=record: self.on_show_widget(a, r))
self.widgets_menu.addAction(action)
self.widgets_menu.setToolTipsVisible(True)
# create the Plugins menubar
self.plugin_menu = menubar.addMenu('Plugins')
for cls, name, description in plugins:
action = QtGui.QAction(name, self)
action.setStatusTip(description)
action.setToolTip(description)
action.setCheckable(True)
action.triggered.connect(lambda *args, a=action, c=cls, n=name: self.on_show_plugin(a, c, n))
self.plugin_menu.addAction(action)
self.plugin_menu.setToolTipsVisible(True)
self.resize(screen_geometry().width()//4, self.statusBar().size().height())
def on_added_connection(self, alias):
"""Slot for the :obj:`~photons.app.App.added_connection` signal."""
for connection_action in self.connection_menu.actions():
if connection_action.text() == alias:
connection_action.setChecked(True)
break
def on_removed_connection(self, alias):
"""Slot for the :obj:`~photons.app.App.removed_connection` signal."""
for connection_action in self.connection_menu.actions():
if connection_action.text() == alias:
connection_action.setChecked(False)
break
def on_update_progress_bar(self, percentage: float) -> None:
    """Slot for the self.update_progress_bar.emit signal.

    Call this method if a process completion rate can be determined.
    Automatically makes the progress bar visible if it isn't already visible.

    Parameters
    ----------
    percentage : :class:`float`
        A value in the range [0, 100] that shows the status of a process.
    """
    bar = self._progress_bar
    if not bar.isVisible():
        # ensure a determinate 0-100 range before the bar becomes visible
        bar.setMaximum(100)
        bar.show()
    bar.setValue(percentage)
def on_show_indeterminate_progress_bar(self) -> None:
    """Slot for the self.show_indeterminate_progress_bar signal.

    Call this method if a process completion rate is unknown or if it is
    not necessary to indicate how long the process will take.
    """
    bar = self._progress_bar
    # a maximum of 0 puts the bar into its indeterminate/busy style
    bar.setMaximum(0)
    bar.show()
def on_hide_progress_bar(self) -> None:
    """Slot for the self.hide_progress_bar signal.

    Hide the progress bar.
    """
    bar = self._progress_bar
    bar.hide()
def on_status_bar_message(self, message) -> None:
    """Slot for the self.status_bar_message signal.

    Display a message in the status bar.

    Parameters
    ----------
    message : :class:`str`
        The message to display.
    """
    status_bar = self.statusBar()
    status_bar.showMessage(message)
def on_connect_to_equipment(self, action, record) -> None:
    """Slot -> Connect/Disconnect to/from the equipment.

    Parameters
    ----------
    action : :class:`QtGui.QAction`
        The menu action.
    record : :class:`~msl.equipment.record_types.EquipmentRecord`
        The equipment record.
    """
    if action.isChecked():
        # the menu item was just ticked -> establish the connection
        self.status_bar_message.emit(f'Connecting to {record.alias!r}...')
        self.show_indeterminate_progress_bar.emit()
        # flush pending events so the busy indicator is drawn before the
        # (potentially blocking) connection attempt below
        application().processEvents()
        try:
            self.app.connect_equipment(record.alias)
        except:  # noqa: E722 -- deliberately bare: the error is re-raised below
            # un-tick the menu item on any failure so the UI stays honest
            action.setChecked(False)
            raise
        finally:
            # always clear the transient status message and busy indicator
            self.status_bar_message.emit('')
            self.hide_progress_bar.emit()
    else:
        # the menu item was just un-ticked -> drop the connection
        self.app.disconnect_equipment(record.alias)
def on_show_widget(self, action, record) -> None:
    """Slot -> Show the widget for the equipment record.

    Parameters
    ----------
    action : :class:`QtGui.QAction`
        The menu action.
    record : :class:`~msl.equipment.record_types.EquipmentRecord`
        The equipment record.
    """
    if not action.isChecked():
        # if it was unchecked while the widget is visible then we want to re-check
        # the action in the menu and make the widget active
        action.setChecked(True)
        for dock in self._docked_widgets:
            if dock.widget().record is record:
                dock.setWindowState(Qt.WindowActive)
                dock.activateWindow()
                dock.show()
                break
        return
    # find the first registered widget class that supports this record
    for w in widgets:
        if w.matches(record):
            self.status_bar_message.emit(f'Creating widget for {record.alias!r}...')
            self.show_indeterminate_progress_bar.emit()
            # flush pending events so the busy indicator is drawn before the
            # (potentially slow) connection below
            application().processEvents()
            try:
                connection = self.app.connect_equipment(record.alias)
            except:  # noqa: E722 -- deliberately bare: the error is re-raised
                # connection failed -> leave the menu item unchecked
                action.setChecked(False)
                raise
            else:
                # wrap the equipment widget in a dock widget on this window
                dock = QtWidgets.QDockWidget(self)
                dock.setAllowedAreas(Qt.AllDockWidgetAreas)
                widget = w.cls(connection, parent=dock)
                widget.window_closing.connect(lambda a=action, d=dock: self.on_widget_closed(a, d))
                dock.setWindowTitle(widget.windowTitle())
                dock.setWidget(widget)
                # forward the dock's close event to the inner widget's handler
                dock.closeEvent = widget.closeEvent
                dock.topLevelChanged.connect(self.on_dock_top_level_changed)
                # alternate the dock area (top/left) so new widgets do not all
                # pile into the same place
                area = Qt.TopDockWidgetArea if len(self._docked_widgets) % 2 else Qt.LeftDockWidgetArea
                self.addDockWidget(area, dock)
                self._docked_widgets.append(dock)
                logger.debug(f'added {widget.__class__.__name__!r} as a docked widget')
            finally:
                self.status_bar_message.emit('')
                self.hide_progress_bar.emit()
            return
    # no registered widget class matched this record
    prompt.critical(f'There is no widget registered for\n\n{record}')
    action.setChecked(False)
def on_show_plugin(self, action, plugin, name) -> None:
    """Slot -> Show the Plugin.

    Parameters
    ----------
    action : :class:`QtGui.QAction`
        The menu action.
    plugin : :class:`~photons.plugin.Plugin`
        The Plugin class.
    name : :class:`str`
        The name of the Plugin.
    """
    if not action.isChecked():
        # if it was unchecked while the plugin is visible then we want to re-check
        # the action in the menu and make the widget active
        action.setChecked(True)
        for p in self._plugin_widgets:
            # NOTE(review): this compares stored plugin *instances* against the
            # plugin *class*, so it appears it can never match -- confirm intent
            if p is plugin:
                p.setWindowState(Qt.WindowActive)
                p.activateWindow()
                p.show()
                break
        return
    self.status_bar_message.emit(f'Starting plugin {name!r}...')
    self.show_indeterminate_progress_bar.emit()
    # instantiate the plugin with this window as its parent
    plug = plugin(self)
    self._plugin_widgets.append(plug)
    logger.debug(f'added {plug.__class__.__name__!r} as a plugin widget')
    plug.window_closing.connect(lambda a=action, p=plug: self.on_plugin_closed(a, p))
    if plug.show_plugin:
        plug.show()
    else:
        # the plugin opted out of being shown -> close it immediately
        plug.close()
    self.status_bar_message.emit('')
    self.hide_progress_bar.emit()
def on_widget_closed(self, action, dock) -> None:
    """Slot -> Called when a widget closes.

    Parameters
    ----------
    action : :class:`QtGui.QAction`
        The menu action.
    dock : :class:`QtWidgets.QDockWidget`
        The docked widget.
    """
    # un-tick the menu item, then forget and detach the dock widget
    action.setChecked(False)
    self._docked_widgets.remove(dock)
    self.removeDockWidget(dock)
    widget_name = dock.widget().__class__.__name__
    logger.debug(f'removed {widget_name!r} as a docked widget')
def on_plugin_closed(self, action, plugin) -> None:
    """Slot -> Called when a Plugin closes.

    Parameters
    ----------
    action : :class:`QtGui.QAction`
        The menu action.
    plugin : :class:`~photons.plugin.Plugin`
        The Plugin class.
    """
    # un-tick the menu item and drop the plugin from the bookkeeping list
    action.setChecked(False)
    self._plugin_widgets.remove(plugin)
    plugin_name = plugin.__class__.__name__
    logger.debug(f'removed {plugin_name!r} as a plugin widget')
def on_dock_top_level_changed(self, is_floating) -> None:
    """Slot -> Show the Minimum, Maximum and Close buttons when a docked widget becomes floating."""
    if not is_floating:
        return
    widget = self.sender()
    flags = (Qt.CustomizeWindowHint
             | Qt.Window
             | Qt.WindowMinimizeButtonHint
             | Qt.WindowMaximizeButtonHint
             | Qt.WindowCloseButtonHint)
    widget.setWindowFlags(flags)
    # setWindowFlags hides the widget, so show it again
    widget.show()
@staticmethod
def create_palette(name: str) -> QtGui.QPalette:
    """Create a :class:`QtGui.QPalette` based on a colour theme.

    Parameters
    ----------
    name : :class:`str`
        The name of the theme. Only 'dark' is recognized; any other
        value yields the default palette.

    Returns
    -------
    :class:`QtGui.QPalette`
        The palette.
    """
    palette = QtGui.QPalette()
    if name == 'dark':
        # colours taken from https://github.com/Jorgen-VikingGod/Qt-Frameless-Window-DarkStyle
        grey = QtGui.QColor(53, 53, 53)
        disabled_grey = QtGui.QColor(127, 127, 127)
        accent_blue = QtGui.QColor(42, 130, 218)
        # roles for the normal (active) colour group
        for role, colour in (
                (QtGui.QPalette.Window, grey),
                (QtGui.QPalette.WindowText, Qt.white),
                (QtGui.QPalette.Base, QtGui.QColor(42, 42, 42)),
                (QtGui.QPalette.AlternateBase, QtGui.QColor(66, 66, 66)),
                (QtGui.QPalette.ToolTipBase, Qt.white),
                (QtGui.QPalette.ToolTipText, grey),
                (QtGui.QPalette.Text, Qt.white),
                (QtGui.QPalette.Dark, QtGui.QColor(35, 35, 35)),
                (QtGui.QPalette.Shadow, QtGui.QColor(20, 20, 20)),
                (QtGui.QPalette.Button, grey),
                (QtGui.QPalette.ButtonText, Qt.white),
                (QtGui.QPalette.BrightText, Qt.red),
                (QtGui.QPalette.Link, accent_blue),
                (QtGui.QPalette.Highlight, accent_blue),
                (QtGui.QPalette.HighlightedText, Qt.white),
        ):
            palette.setColor(role, colour)
        # muted colours for the disabled colour group
        for role, colour in (
                (QtGui.QPalette.WindowText, disabled_grey),
                (QtGui.QPalette.Text, disabled_grey),
                (QtGui.QPalette.ButtonText, disabled_grey),
                (QtGui.QPalette.Highlight, QtGui.QColor(80, 80, 80)),
                (QtGui.QPalette.HighlightedText, disabled_grey),
        ):
            palette.setColor(QtGui.QPalette.Disabled, role, colour)
    return palette
def closeEvent(self, event) -> None:
    """Overrides :meth:`QtWidgets.QMainWindow.closeEvent`."""
    # confirm with the user before quitting if anything is still open
    checks = (
        (self._docked_widgets, 'There are docked widgets. Quit application?'),
        (self._plugin_widgets, 'There are Plugins open. Quit application?'),
    )
    for open_items, question in checks:
        if open_items and not prompt.yes_no(question):
            event.ignore()
            return
    application().quit()
def dragEnterEvent(self, event) -> None:
    """Overrides :meth:`QtWidgets.QMainWindow.dragEnterEvent`.

    Accept the drag only if the first dragged path can be read; the parsed
    root is cached for :meth:`dropEvent`.
    """
    paths = drag_drop_paths(event)
    if not paths:
        event.ignore()
        return
    try:
        self._drag_drop_root = read(paths[0])
    except Exception:
        # Unreadable/unsupported file -> reject the drop. The original used a
        # bare ``except:`` which would also swallow KeyboardInterrupt/SystemExit.
        event.ignore()
    else:
        event.accept()
def dropEvent(self, event) -> None:
    """Overrides :meth:`QtWidgets.QMainWindow.dropEvent`."""
    # plot the file that was cached by dragEnterEvent, without blocking
    root = self._drag_drop_root
    self.app.plot(file=root, block=False)
    event.accept()
|
print("Hello World, I am Farhan") |
import sys
import os
import gc
import musdb
import itertools
import torch
import museval
from functools import partial
import numpy as np
import random
import argparse
from xumx_slicq.transforms import make_filterbanks, NSGTBase, phasemix_sep, ComplexNorm
from tqdm import tqdm
import scipy
from scipy.signal import stft, istft
import json
from types import SimpleNamespace
eps = 1.e-10
def _fast_sdr(track, estimates_dct, target, device):
references = torch.cat([torch.unsqueeze(torch.tensor(source.audio.T, device=device), dim=0) for source_name, source in track.sources.items() if source_name == target])
estimates = torch.cat([torch.unsqueeze(est, dim=0) for est_name, est in estimates_dct.items() if est_name == target])
# compute SDR for one song
num = torch.sum(torch.square(references), dim=(1, 2)) + eps
den = torch.sum(torch.square(references - estimates), dim=(1, 2)) + eps
sdr_target = 10.0 * torch.log10(num / den)
return sdr_target
def ideal_mixphase(track, fwd, bwd, cnorm, device):
    """
    ideal performance of magnitude from estimated source + phase of mix
    which is the default umx strategy for separation

    Parameters
    ----------
    track : musdb track with ``audio`` (samples x channels) and ``sources``
    fwd : forward transform; takes a (1, channels, samples) tensor and
        returns the spectral coefficients (iterated as a list of blocks)
    bwd : inverse transform; takes coefficients plus the original length N
    cnorm : complex-norm operator (magnitude of the coefficients)
    device : torch device used for all tensors
    """
    # original sample count -- needed by the inverse transform
    N = track.audio.shape[0]
    audio = torch.tensor(track.audio.T, device=device)
    # unsqueeze to add (1,) batch dimension
    X = fwd(torch.unsqueeze(audio, dim=0))
    # Compute sources spectrograms
    P = {}
    # compute model as the sum of spectrograms; one accumulator per block
    model = [eps]*len(X)
    # parallelize this
    for name, source in track.sources.items():
        # compute spectrogram of target source:
        # magnitude of STFT
        src_coef = fwd(torch.unsqueeze(torch.tensor(source.audio.T, device=device), dim=0))
        P[name] = cnorm(src_coef)
        # store the original (complex) coefficients, not the magnitude, in the mix
        for i, src_coef_block in enumerate(src_coef):
            model[i] += src_coef_block + eps
    # now performs separation
    estimates = {}
    for name, source in track.sources.items():
        source_mag = P[name]
        #Yj = [None]*len(model)
        #for i, model_block in enumerate(model):
        # combine each source's magnitude with the phase of the summed mixture
        Yj = phasemix_sep(model, source_mag)
        # invert to time domain
        target_estimate = bwd(Yj, N)
        # set this as the source estimate (drop the batch dimension)
        estimates[name] = torch.squeeze(target_estimate, dim=0)
    return estimates
class TrackEvaluator:
    """Evaluates the ideal mix-phase oracle over a collection of musdb tracks."""

    def __init__(self, tracks, max_sllen, device="cuda"):
        # tracks: musdb tracks to evaluate
        # max_sllen: skip any NSGT whose slice length exceeds this bound
        # device: torch device string/index for all tensor work
        self.tracks = tracks
        self.max_sllen = max_sllen
        self.device = device

    def oracle(self, scale='cqlog', fmin=20.0, bins=12, gamma=25):
        """Return the mean oracle SDR per target (bass, drums, vocals, other)
        for one NSGT configuration, or all ``-inf`` if the transform is too big.
        """
        bins = int(bins)
        med_sdrs_bass = []
        med_sdrs_drums = []
        med_sdrs_vocals = []
        med_sdrs_other = []
        n = NSGTBase(scale, bins, fmin, device=self.device, gamma=gamma)
        # skip too big transforms; callers treat the all -inf tuple as "skipped"
        if n.sllen > self.max_sllen:
            return (
                float('-inf'),
                float('-inf'),
                float('-inf'),
                float('-inf'),
            )
        nsgt, insgt = make_filterbanks(n)
        cnorm = ComplexNorm().to(self.device)
        for track in tqdm(self.tracks):
            #print(f'track:\n\t{track.name}\n\t{track.chunk_duration}\n\t{track.chunk_start}')
            # NOTE(review): N is unused in this loop -- confirm it can be dropped
            N = track.audio.shape[0]
            ests = ideal_mixphase(track, nsgt.forward, insgt.forward, cnorm.forward, device=self.device)
            med_sdrs_bass.append(_fast_sdr(track, ests, target='bass', device=self.device))
            med_sdrs_drums.append(_fast_sdr(track, ests, target='drums', device=self.device))
            med_sdrs_vocals.append(_fast_sdr(track, ests, target='vocals', device=self.device))
            med_sdrs_other.append(_fast_sdr(track, ests, target='other', device=self.device))
            # release per-track memory before processing the next track
            del ests
            torch.cuda.empty_cache()
            gc.collect()
        # return 1 sdr per source
        return (
            torch.mean(torch.cat([torch.unsqueeze(med_sdr, dim=0) for med_sdr in med_sdrs_bass])),
            torch.mean(torch.cat([torch.unsqueeze(med_sdr, dim=0) for med_sdr in med_sdrs_drums])),
            torch.mean(torch.cat([torch.unsqueeze(med_sdr, dim=0) for med_sdr in med_sdrs_vocals])),
            torch.mean(torch.cat([torch.unsqueeze(med_sdr, dim=0) for med_sdr in med_sdrs_other])),
        )
def evaluate_single(f, params):
    """Evaluate one NSGT configuration and print per-target and mean SDR."""
    scores = f(scale=params['scale'], fmin=params['fmin'], bins=params['bins'], gamma=params['gamma'])
    bass, drums, vocals, other = scores
    print('bass, drums, vocals, other sdr! {0:.2f} {1:.2f} {2:.2f} {3:.2f}'.format(
        bass,
        drums,
        vocals,
        other,
    ))
    print('total sdr: {0:.2f}'.format((bass+drums+vocals+other)/4))
def optimize_many(f, params, n_iter, per_target):
    """Random search over NSGT parameters for the best oracle SDR.

    The per-target and total-score branches previously duplicated the whole
    sampling/reroll loop; that is factored into ``_sample_and_score`` below.

    Parameters
    ----------
    f : callable
        Scoring function (e.g. ``TrackEvaluator.oracle``) returning a
        (bass, drums, vocals, other) SDR tuple, or all ``-inf`` when the
        sampled transform exceeds the sllen limit.
    params : dict
        'scales' is a list of scale names; 'bins', 'fmin' and 'gamma' are
        (start, stop[, step]) tuples fed to numpy range/randint helpers.
    n_iter : int
        Number of accepted (non-rerolled) samples to evaluate.
    per_target : bool
        If True, track the best configuration per source; otherwise track
        the best average score.
    """
    fmins = list(np.arange(*params['fmin']))
    gammas = list(np.arange(*params['gamma']))

    def _sample_and_score():
        # Draw random configurations until one fits the sllen limit;
        # f signals a skipped transform by returning -inf for every target.
        while True:
            scale = random.choice(params['scales'])
            bins = np.random.randint(*params['bins'])
            fmin = random.choice(fmins)
            gamma = random.choice(gammas)
            scores = f(scale=scale, fmin=fmin, bins=bins, gamma=gamma)
            if all(score == float('-inf') for score in scores):
                print('reroll for sllen...')
                continue
            return (scale, bins, fmin, gamma), scores

    if per_target:
        targets = ('bass', 'drums', 'vocals', 'other')
        best = {target: (float('-inf'), None) for target in targets}
        for _ in tqdm(range(n_iter)):
            params_tup, scores = _sample_and_score()
            for target, score in zip(targets, scores):
                if score > best[target][0]:
                    best[target] = (score, params_tup)
                    print('good {0} sdr! {1}, {2}'.format(target, score, params_tup))
        print(f'best scores')
        for target in ('bass', 'drums', 'other', 'vocals'):
            score, param = best[target]
            print(f'{target}: \t{score}\t{param}')
    else:
        best_score_total = float('-inf')
        best_param_total = None
        for _ in tqdm(range(n_iter)):
            params_tup, scores = _sample_and_score()
            tot = sum(scores) / 4
            if tot > best_score_total:
                best_score_total = tot
                best_param_total = params_tup
                print('good total sdr! {0}, {1}'.format(best_score_total, best_param_total))
        print(f'best scores')
        print(f'total: \t{best_score_total}\t{best_param_total}')
# Command-line entry point: randomized (or single-shot) NSGT parameter search
# over the MUSDB18-HQ validation split.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Search NSGT configs for best ideal mask'
    )
    parser.add_argument(
        '--root',
        type=str,
        default=None,
        help='path to MUSDB18-HQ root'
    )
    parser.add_argument(
        '--bins',
        type=str,
        default='10,300',
        help='comma-separated range of bins to evaluate'
    )
    parser.add_argument(
        '--fmins',
        type=str,
        default='10,130,0.1',
        help='comma-separated range of fmin to evaluate'
    )
    parser.add_argument(
        '--gammas',
        type=str,
        default='0,100,0.1',
        help='comma-separated range of gamma to evaluate'
    )
    parser.add_argument(
        '--n-iter',
        type=int,
        default=60,
        help='number of iterations'
    )
    parser.add_argument(
        '--fscale',
        type=str,
        default='bark,mel,cqlog,vqlog',
        help='nsgt frequency scales, csv (choices: vqlog, cqlog, mel, bark)'
    )
    parser.add_argument(
        '--device',
        type=str,
        default='cuda',
        help='torch device (cpu vs cuda)'
    )
    parser.add_argument(
        "--cuda-device", type=int, default=-1, help="choose which gpu to train on (-1 = 'cuda' in pytorch)"
    )
    parser.add_argument(
        '--random-seed',
        type=int,
        default=42,
        help='rng seed to pick the same random 5 songs'
    )
    parser.add_argument(
        '--max-sllen',
        type=int,
        default=32768,
        help='maximum sllen above which to skip iterations'
    )
    parser.add_argument(
        '--single',
        action='store_true',
        help='evaluate single nsgt instead of randomized param search'
    )
    parser.add_argument(
        '--per-target',
        action='store_true',
        help='maximize each target separately'
    )
    args = parser.parse_args()

    # fixed seed so the same random draws happen on every run
    random.seed(args.random_seed)

    # pick an explicit CUDA device index, else fall back to the --device string
    if args.cuda_device >= 0:
        print(f'setting device to {torch.cuda.get_device_name(args.cuda_device)}')
        device = torch.device(args.cuda_device)
    else:
        device = args.device

    # initiate musdb (validation split of the train subset)
    mus = musdb.DB(root=args.root, subsets='train', split='valid', is_wav=True)

    if not args.single:
        # randomized search: each CLI string is a comma-separated range spec
        scales = args.fscale.split(',')
        bins = tuple([int(x) for x in args.bins.split(',')])
        fmins = tuple([float(x) for x in args.fmins.split(',')])
        gammas = tuple([float(x) for x in args.gammas.split(',')])
        print(f'Parameter ranges to evaluate:\n\tscales: {scales}\n\tbins: {bins}\n\tfmins: {fmins}\n\tgammas: {gammas}')
        print(f'Ignoring fscales that exceed sllen {args.max_sllen}')
        params = {
            'scales': scales,
            'bins': bins,
            'fmin': fmins,
            'gamma': gammas,
        }
        t = TrackEvaluator(mus.tracks, args.max_sllen, device=device)
        optimize_many(t.oracle, params, args.n_iter, args.per_target)
    else:
        # single evaluation: the same CLI flags are interpreted as scalars
        params = {
            'scale': args.fscale,
            'bins': int(args.bins),
            'fmin': float(args.fmins),
            'gamma': float(args.gammas),
        }
        print(f'Parameter to evaluate:\n\t{params}')
        t = TrackEvaluator(mus.tracks, args.max_sllen, device=device)
        evaluate_single(t.oracle, params)
|
import os
import zipfile
from glob import glob
from pathlib import Path
import boto3
import pandas as pd
import xarray as xr
import mkgu_packaging
from brainio_base.assemblies import BehavioralAssembly
from brainio_base.stimuli import StimulusSet
from brainio_collection.lookup import pwdb
from brainio_collection.knownfile import KnownFile as kf
from brainio_collection.assemblies import AssemblyModel, AssemblyStoreMap, AssemblyStoreModel
from brainio_collection.stimuli import ImageModel, AttributeModel, ImageMetaModel, StimulusSetModel, ImageStoreModel, \
StimulusSetImageMap, ImageStoreMap
def get_objectome(source_data_path):
    """Load the human-pool objectome behavioural data as a BehavioralAssembly."""
    data = pd.read_pickle(os.path.join(source_data_path, 'objectome24s100_humanpool.pkl'))
    data['truth'] = data['sample_obj']
    # flag the 240-image subset that has enough human trials
    subsample = pd.read_pickle(os.path.join(source_data_path, 'objectome24s100_imgsubsampled240_pandas.pkl'))
    data['enough_human_data'] = data['id'].isin(subsample.values[:, 0])
    return to_xarray(data)
def to_xarray(objectome):
    """Convert the objectome DataFrame into a 1-D BehavioralAssembly.

    The data variable is the subject's 'choice'; every original column is
    attached as a coordinate along the 'presentation' dimension.
    """
    columns = objectome.columns
    objectome = xr.DataArray(objectome['choice'],
                             coords={column: ('presentation', objectome[column]) for column in columns},
                             dims=['presentation'])
    # rename the 'id' coordinate to the conventional 'image_id' before indexing
    objectome = objectome.rename({'id': 'image_id'})
    # build a MultiIndex over all coordinates, substituting the renamed column
    objectome = objectome.set_index(presentation=[col if col != 'id' else 'image_id' for col in columns])
    objectome = BehavioralAssembly(objectome)
    return objectome
def load_stimuli(meta_assembly, source_stim_path):
    """Build the StimulusSet for the objectome images, labelled per image."""
    stimuli_paths = sorted(glob(os.path.join(source_stim_path, '*.png')))
    stimuli = StimulusSet({
        'image_current_local_file_path': stimuli_paths,
        'image_id': [os.path.splitext(os.path.basename(filepath))[0] for filepath in stimuli_paths],
        'image_path_within_store': [os.path.basename(filepath) for filepath in stimuli_paths],
    })
    # sample_obj and truth are identical, so sample_obj can serve as the label
    assert all(meta_assembly['sample_obj'].values == meta_assembly['truth'].values)
    label_lookup = dict(zip(meta_assembly['image_id'].values, meta_assembly['sample_obj'].values))
    stimuli['image_sample_obj'] = [label_lookup[image_id] for image_id in stimuli['image_id'].values]
    stimuli['image_label'] = stimuli['image_sample_obj']
    return stimuli
def load_responses(source_data_path):
    """Return (all, fitting, testing) assemblies split on the human-data flag."""
    objectome = get_objectome(source_data_path)
    fitting = objectome.sel(enough_human_data=False)
    testing = objectome.sel(enough_human_data=True)
    return objectome, fitting, testing
def create_image_zip(stimuli, target_zip_path):
    """Zip all stimulus images into ``target_zip_path`` and return the zip's SHA1."""
    os.makedirs(os.path.dirname(target_zip_path), exist_ok=True)
    with zipfile.ZipFile(target_zip_path, 'w') as target_zip:
        for image in stimuli.itertuples():
            target_zip.write(image.image_current_local_file_path,
                             arcname=image.image_path_within_store)
    return kf(target_zip_path).sha1
def write_netcdf(assembly, target_netcdf_file):
    """Flatten the assembly's indexes (in place) and write it to NetCDF."""
    index_names = list(assembly.indexes.keys())
    assembly.reset_index(index_names, inplace=True)
    assembly.to_netcdf(target_netcdf_file)
def add_stimulus_set_metadata_and_lookup_to_db(stimuli, stimulus_set_name, bucket_name, zip_file_name,
                                               image_store_unique_name, zip_sha1):
    """Register the stimulus set and its S3 zip store in the lookup database."""
    pwdb.connect(reuse_if_open=True)
    stim_set_model, _ = StimulusSetModel.get_or_create(name=stimulus_set_name)
    store_location = f"https://{bucket_name}.s3.amazonaws.com/{zip_file_name}"
    image_store, _ = ImageStoreModel.get_or_create(location_type="S3", store_type="zip",
                                                   location=store_location,
                                                   unique_name=image_store_unique_name,
                                                   sha1=zip_sha1)
    add_image_metadata_to_db(stimuli, stim_set_model, image_store)
    return stim_set_model
def add_image_metadata_to_db(stimuli, stim_set_model, image_store):
    """Create per-image database rows: set membership, store path and
    EAV metadata (sample object and label)."""
    pwdb.connect(reuse_if_open=True)
    eav_image_sample_obj, _ = AttributeModel.get_or_create(name="image_sample_obj", type="str")
    eav_image_label, _ = AttributeModel.get_or_create(name="image_label", type="str")
    for image in stimuli.itertuples():
        pw_image, _ = ImageModel.get_or_create(image_id=image.image_id)
        StimulusSetImageMap.get_or_create(stimulus_set=stim_set_model, image=pw_image)
        ImageStoreMap.get_or_create(image=pw_image, image_store=image_store,
                                    path=image.image_path_within_store)
        ImageMetaModel.get_or_create(image=pw_image, attribute=eav_image_sample_obj, value=str(image.image_sample_obj))
        ImageMetaModel.get_or_create(image=pw_image, attribute=eav_image_label, value=str(image.image_label))
def add_assembly_lookup(assembly_name, stim_set_model, bucket_name, target_netcdf_file, assembly_store_unique_name):
    """Register the assembly and its S3 NetCDF store in the lookup database."""
    netcdf_sha1 = kf(target_netcdf_file).sha1
    assembly_model, _ = AssemblyModel.get_or_create(name=assembly_name, assembly_class="BehavioralAssembly",
                                                    stimulus_set=stim_set_model)
    store_location = f"https://{bucket_name}.s3.amazonaws.com/{assembly_store_unique_name}.nc"
    store_model, _ = AssemblyStoreModel.get_or_create(assembly_type="netCDF",
                                                      location_type="S3",
                                                      location=store_location,
                                                      unique_name=assembly_store_unique_name,
                                                      sha1=netcdf_sha1)
    AssemblyStoreMap.get_or_create(assembly_model=assembly_model, assembly_store_model=store_model, role=assembly_name)
def upload_to_s3(source_file_path, bucket_name, target_s3_key):
    """Upload a local file to the given S3 bucket and key."""
    s3 = boto3.client('s3')
    s3.upload_file(source_file_path, bucket_name, target_s3_key)
def main():
    """Package the Rajalingham2018 objectome behavioural data.

    Builds public/private stimulus sets and assemblies, registers them in the
    lookup database, and uploads the zip/NetCDF artifacts to S3.
    """
    # source and target locations
    pkg_path = Path(mkgu_packaging.__file__).parent
    source_path = Path("/braintree/home/msch/share/objectome")
    source_data_path = source_path / 'data'
    source_stim_path = source_path / 'stim'
    target_path = pkg_path.parent / "objectome" / "out"
    target_bucket_name = "brainio-dicarlo"
    # naming for the public (fitting) artifacts
    assembly_name = "dicarlo.Rajalingham2018"
    public_stimulus_set_unique_name = "dicarlo.objectome.public"
    public_image_store_unique_name = "image_dicarlo_objectome_public"
    public_assembly_unique_name = "dicarlo.Rajalingham2018.public"
    public_assembly_store_unique_name = "assy_dicarlo_Rajalingham2018_public"
    public_target_zip_basename = public_image_store_unique_name + ".zip"
    public_target_zip_path = target_path / public_target_zip_basename
    public_target_netcdf_basename = public_assembly_store_unique_name + ".nc"
    public_target_netcdf_path = target_path / public_target_netcdf_basename
    public_target_zip_s3_key = public_target_zip_basename
    public_target_netcdf_s3_key = public_target_netcdf_basename
    # naming for the private (testing) artifacts
    private_stimulus_set_unique_name = "dicarlo.objectome.private"
    private_image_store_unique_name = "image_dicarlo_objectome_private"
    private_assembly_unique_name = "dicarlo.Rajalingham2018.private"
    private_assembly_store_unique_name = "assy_dicarlo_Rajalingham2018_private"
    private_target_zip_basename = private_image_store_unique_name + ".zip"
    private_target_zip_path = target_path / private_target_zip_basename
    private_target_netcdf_basename = private_assembly_store_unique_name + ".nc"
    private_target_netcdf_path = target_path / private_target_netcdf_basename
    private_target_zip_s3_key = private_target_zip_basename
    private_target_netcdf_s3_key = private_target_netcdf_basename
    # load and split the behavioural data (public = fitting, private = testing)
    [all_assembly, public_assembly, private_assembly] = load_responses(source_data_path)
    all_assembly.name = assembly_name
    public_assembly.name = public_assembly_unique_name
    private_assembly.name = private_assembly_unique_name
    # restrict the stimuli to the images each split actually uses
    all_stimuli = load_stimuli(all_assembly, source_stim_path)
    public_stimuli = all_stimuli[all_stimuli['image_id'].isin(public_assembly['image_id'].values)]
    private_stimuli = all_stimuli[all_stimuli['image_id'].isin(private_assembly['image_id'].values)]
    public_stimuli.name = public_stimulus_set_unique_name
    private_stimuli.name = private_stimulus_set_unique_name
    # hard-coded dataset sizes act as packaging sanity checks
    assert len(public_assembly) + len(private_assembly) == len(all_assembly) == 927296
    assert len(private_assembly) == 341785
    assert len(set(public_assembly['image_id'].values)) == len(public_stimuli) == 2160
    assert len(set(private_assembly['image_id'].values)) == len(private_stimuli) == 240
    assert set(all_stimuli['image_id'].values) == set(all_assembly['image_id'].values)
    assert set(public_stimuli['image_id'].values) == set(public_assembly['image_id'].values)
    assert set(private_stimuli['image_id'].values) == set(private_assembly['image_id'].values)
    assert len(set(private_assembly['choice'].values)) == len(set(public_assembly['choice'].values)) == 24
    print([assembly.name for assembly in [all_assembly, public_assembly, private_assembly]])
    # package + register the public split
    public_zip_sha1 = create_image_zip(public_stimuli, public_target_zip_path)
    public_stimulus_set_model = add_stimulus_set_metadata_and_lookup_to_db(public_stimuli, public_stimulus_set_unique_name, target_bucket_name, public_target_zip_basename, public_image_store_unique_name, public_zip_sha1)
    write_netcdf(public_assembly, public_target_netcdf_path)
    add_assembly_lookup(public_assembly_unique_name,public_stimulus_set_model,target_bucket_name,public_target_netcdf_path, public_assembly_store_unique_name)
    # package + register the private split
    private_zip_sha1 = create_image_zip(private_stimuli, private_target_zip_path)
    private_stimulus_set_model = add_stimulus_set_metadata_and_lookup_to_db(private_stimuli, private_stimulus_set_unique_name, target_bucket_name, private_target_zip_basename, private_image_store_unique_name, private_zip_sha1)
    write_netcdf(private_assembly, private_target_netcdf_path)
    add_assembly_lookup(private_assembly_unique_name,private_stimulus_set_model,target_bucket_name,private_target_netcdf_path, private_assembly_store_unique_name)
    # push the artifacts to S3
    print("uploading to S3")
    upload_to_s3(str(public_target_zip_path), target_bucket_name, public_target_zip_s3_key)
    upload_to_s3(str(public_target_netcdf_path), target_bucket_name, public_target_netcdf_s3_key)
    upload_to_s3(str(private_target_zip_path), target_bucket_name, private_target_zip_s3_key)
    upload_to_s3(str(private_target_netcdf_path), target_bucket_name, private_target_netcdf_s3_key)
    return [(public_assembly, public_stimuli), (private_assembly, private_stimuli)]
# Run the full packaging pipeline when invoked as a script.
if __name__ == '__main__':
    main()
|
from presidio_analyzer.predefined_recognizers import SpacyRecognizer
class StanzaRecognizer(SpacyRecognizer):
def __init__(self, **kwargs):
self.DEFAULT_EXPLANATION = self.DEFAULT_EXPLANATION.replace("SpaCy", "Stanza")
super().__init__(**kwargs)
|
import cv2
import pafy
import tensorflow as tf
import numpy as np
import glob
from hitnet import HitNet, ModelType, draw_disparity, draw_depth, CameraConfig
# Writer for the side-by-side video: left image | ground-truth depth | estimated depth.
out = cv2.VideoWriter('outpy2.avi', cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 30, (881*3, 400))

# Gather the DrivingStereo image triplets (left, right, ground-truth depth)
left_images = sorted(glob.glob('DrivingStereo images/left/*.jpg'))
right_images = sorted(glob.glob('DrivingStereo images/right/*.jpg'))
depth_images = sorted(glob.glob('DrivingStereo images/depth/*.png'))

# Select model type
model_type = ModelType.middlebury
# model_type = ModelType.flyingthings
# model_type = ModelType.eth3d

model_paths = {
    ModelType.middlebury: "models/middlebury_d400.pb",
    ModelType.flyingthings: "models/flyingthings_finalpass_xl.pb",
    ModelType.eth3d: "models/eth3d.pb",
}
model_path = model_paths[model_type]

camera_config = CameraConfig(0.546, 1000)
max_distance = 50

# Initialize model
hitnet_depth = HitNet(model_path, model_type, camera_config)

cv2.namedWindow("Estimated depth", cv2.WINDOW_NORMAL)
frames = slice(1500, 1700, 2)
for left_path, right_path, depth_path in zip(left_images[frames], right_images[frames], depth_images[frames]):
    left_img = cv2.imread(left_path)
    right_img = cv2.imread(right_path)
    # stored depth is divided by 256 -- appears to be the DrivingStereo scaling
    depth_img = cv2.imread(depth_path, cv2.IMREAD_UNCHANGED).astype(np.float32)/256

    # Estimate the depth
    disparity_map = hitnet_depth(left_img, right_img)
    depth_map = hitnet_depth.get_depth()

    color_disparity = draw_disparity(disparity_map)  # computed but not written to the video
    color_depth = draw_depth(depth_map, max_distance)
    color_real_depth = draw_depth(depth_img, max_distance)
    combined_image = np.hstack((left_img, color_real_depth, color_depth))

    out.write(combined_image)
    cv2.imshow("Estimated depth", combined_image)

    # Press key q to stop
    if cv2.waitKey(1) == ord('q'):
        break

out.release()
cv2.destroyAllWindows()
#! /usr/bin/python3
"""Challenge #2 in Python.

Find, in each sample list, the first element that occurs exactly once.
"""
# coding: utf-8
# Sample inputs: every element repeats except one ('c' and 5, respectively).
list_with_chars = [2, 'a', 'l', 3, 'l', 4, 'k', 2, 3, 4, 'a', 6,
                   'c', 4, 'm', 6, 'm', 'k', 9, 10, 9, 8, 7, 8, 10, 7]
list_with_numbers = [2, 3, 4, 2, 3, 5, 4, 6, 4, 6, 9, 10, 9, 8, 7, 8, 10, 7]
def search_list(list_provided):
    """Return (as a string) the first element occurring exactly once in the list.

    Returns None when every element is repeated or the list is empty.

    The previous implementation iterated ``range(len(list_provided) - 1)``,
    so a unique element sitting in the last position was never found; it also
    had an unreachable ``break`` after the ``return``. The whole list is
    scanned now.
    """
    for item in list_provided:
        if list_provided.count(item) == 1:
            return str(item)
    return None
# Both sample lists contain a unique element, so neither call returns None
# (None would break the string concatenation below).
print(search_list(list_with_numbers) + ', ' + search_list(list_with_chars))
|
from gym_quadrotor.dynamics import Euler
from gym_quadrotor.wrappers.angular_control import attitude_to_motor_control
from gym_quadrotor.dynamics.copter import DynamicsState, CopterParams
from gym_quadrotor.dynamics.dynamics import simulate_quadrotor
import numpy as np
import pytest
@pytest.mark.parametrize("yaw", np.linspace(0, 2*np.pi))
def test_angular_motor_commands(yaw):
startatt = [0.0, 0.0, yaw]
check_couple_direction(0, startatt)
check_couple_direction(1, startatt)
check_couple_direction(2, startatt)
def check_couple_direction(index, startat = None):
    """Apply a pure attitude command about one Euler axis and assert that the
    resulting attitude change is (almost) exclusively about that axis.

    Parameters
    ----------
    index : int
        Index of the Euler-angle component to excite (0, 1 or 2).
    startat : sequence of three floats, optional
        Initial attitude; if None the state's default attitude is used.
    """
    setup = CopterParams()
    copterstatus = DynamicsState()
    if startat is not None:
        copterstatus._attitude = Euler.from_numpy_array(startat)
    # command a magnitude of 0.25 about the selected axis only
    base = np.zeros(3)
    base[index] = 0.25
    control = attitude_to_motor_control(3.0, *base)
    copterstatus.desired_rotor_speeds = control * setup.max_rotor_speed
    # start the rotors at the desired speed so there is no spin-up transient
    copterstatus._rotorspeeds = copterstatus.desired_rotor_speeds
    start_attitude = np.copy(copterstatus.attitude._euler)
    # integrate 10 steps of 1 ms
    for i in range(10):
        simulate_quadrotor(setup, copterstatus, 0.001)
    delta = copterstatus.attitude._euler - start_attitude
    #assert(delta[index] > 0.0)
    # normalize so the excited axis has unit response, then require the
    # off-axis responses to be negligible (within 1%)
    nd = delta / delta[index]
    ref = np.zeros(3)
    ref[index] = 1
    assert abs(nd[0] - ref[0]) < 1e-2
    assert abs(nd[1] - ref[1]) < 1e-2
    assert abs(nd[2] - ref[2]) < 1e-2
|
import re
from dataclasses import InitVar, asdict, dataclass, field
from datetime import datetime
from typing import Dict, Optional
# Matches one postfix delivery-status log line, e.g.:
#   Aug 1 10:00:00 mail postfix/smtp[12345]: 677RGS0: to=<dummy@gmail.com>,
#   relay=local, delay=0.06, delays=0.06/0.01/0/0, dsn=2.0.0,
#   status=sent (delivered to maildir)
# The dsn dots are escaped: the previous `[0-9].[0-9].[0-9]` used bare `.`
# (any character), so garbage like "2a0b0" matched too.
# NOTE(review): assumes the day is padded with a single space ("Aug 1",
# not syslog's double-space "Aug  1") -- confirm against real log lines.
REGEX = r'(?P<month>[A-Z][a-z]{2}) (?P<day>[0-9]{,2}) ' \
    + r'(?P<time>[0-9]{2}:[0-9]{2}:[0-9]{2}) mail postfix/[a-z]+\[[0-9]+\]: ' \
    + r'(?P<mail_id>[A-Z0-9]+): to=<(?P<to_address>.*@.*)>, ' \
    + r'relay=(?P<relay>.*), delay=(?P<delay>[0-9.]+), ' \
    + r'delays=(?P<delays>[0-9][0-9/.]+), ' \
    + r'dsn=(?P<dsn>[0-9]\.[0-9]\.[0-9]), ' \
    + r'status=(?P<status>(sent|deferred|bounced)) \((?P<description>.*)\)'
PATTERN = re.compile(REGEX)
# A parsed log line: capture-group name -> captured string value.
ParseResultType = Dict[str, str]
def parse(target: str) -> Optional[ParseResultType]:
    """Parse a postfix maillog line that includes a send status.

    Args:
        target (str): one maillog line

    Returns:
        Optional[ParseResultType]: None when the line does not match the
        expected format, otherwise a dict such as::

            {
                'month': 'Aug',
                'day': '1',
                'time': '10:00:00',
                'mail_id': '677RGS0',
                'to_address': 'dummy@gmail.com',
                'relay': 'local',
                'delay': '0.06',
                'delays': '0.06/0.01/0/0',
                'dsn': '2.0.0',
                'status': 'sent',
                'description': 'delivered to maildir'
            }
    """
    matched = PATTERN.search(target)
    if matched is None:
        return None
    return ParseResult(**matched.groupdict()).to_dict()
@dataclass
class ParseResult:
    """Structured view of a parsed maillog line.

    The month/day/time init-only pieces are folded into a single
    ``datetime`` string (``%Y%m%d%H%M%S``) during ``__post_init__``.
    """

    month: InitVar[str]
    day: InitVar[str]
    time: InitVar[str]
    mail_id: str
    to_address: str
    relay: str
    delay: str
    delays: str
    dsn: str
    status: str
    description: str
    datetime: str = field(init=False)

    def __post_init__(self, month: str, day: str, time: str) -> None:
        # Derive the combined timestamp from the init-only fields.
        self.datetime = self.convert2dateime(month, day, time)

    def to_dict(self) -> ParseResultType:
        """Return all regular fields (including ``datetime``) as a dict."""
        return asdict(self)

    @staticmethod
    def convert2dateime(month: str, day: str, time: str) -> str:
        # Syslog lines carry no year, so assume the current one.
        # (The method name keeps its historical typo for API compatibility.)
        parsed = datetime.strptime(f'{month}{day}{time}', '%b%d%H:%M:%S')
        return parsed.replace(year=datetime.now().year).strftime('%Y%m%d%H%M%S')
|
# Minimal packaging configuration for the internal "shared" helper package.
from setuptools import setup, find_packages
setup(name='shared', version='1.3', packages=find_packages())
|
"""d2lib is a tool for retrieving information from Diablo 2 data files.
These files contain info about the character, items and their properties.
Currently supported:
.d2s - Diablo 2 save files;
.d2x - PlugY personal stash files;
.sss - PlugY shared stash files.
"""
__name__ = 'd2lib'
__version__ = '0.2.4'
__author__ = 'Artem Kustov'
__author_email__ = 'kust.artcom@gmail.com'
__license__ = 'MIT'
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from github_com.TheThingsNetwork.api.protocol.lorawan import device_pb2 as github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class DeviceManagerStub(object):
  # missing associated documentation comment in .proto file
  # Generated client stub: each attribute below is a callable for one
  # unary-unary RPC on the lorawan.DeviceManager service.
  pass

  def __init__(self, channel):
    """Constructor.

    Args:
      channel: A grpc.Channel.
    """
    # GetDevice: DeviceIdentifier -> Device
    self.GetDevice = channel.unary_unary(
        '/lorawan.DeviceManager/GetDevice',
        request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2.DeviceIdentifier.SerializeToString,
        response_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2.Device.FromString,
        )
    # SetDevice: Device -> google.protobuf.Empty
    self.SetDevice = channel.unary_unary(
        '/lorawan.DeviceManager/SetDevice',
        request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2.Device.SerializeToString,
        response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
    # DeleteDevice: DeviceIdentifier -> google.protobuf.Empty
    self.DeleteDevice = channel.unary_unary(
        '/lorawan.DeviceManager/DeleteDevice',
        request_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2.DeviceIdentifier.SerializeToString,
        response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
class DeviceManagerServicer(object):
  # missing associated documentation comment in .proto file
  # Generated server base class: subclass and override the methods below;
  # each default implementation reports UNIMPLEMENTED.
  pass

  def GetDevice(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def SetDevice(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def DeleteDevice(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_DeviceManagerServicer_to_server(servicer, server):
  # Generated registration helper: wires the servicer's methods into the
  # given grpc.Server under the lorawan.DeviceManager service name.
  rpc_method_handlers = {
      'GetDevice': grpc.unary_unary_rpc_method_handler(
          servicer.GetDevice,
          request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2.DeviceIdentifier.FromString,
          response_serializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2.Device.SerializeToString,
      ),
      'SetDevice': grpc.unary_unary_rpc_method_handler(
          servicer.SetDevice,
          request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2.Device.FromString,
          response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
      ),
      'DeleteDevice': grpc.unary_unary_rpc_method_handler(
          servicer.DeleteDevice,
          request_deserializer=github_dot_com_dot_TheThingsNetwork_dot_api_dot_protocol_dot_lorawan_dot_device__pb2.DeviceIdentifier.FromString,
          response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'lorawan.DeviceManager', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
|
"""Simple wrapper to upgrade the files by github URL"""
import json
import logging
import os
import re
import shutil
import subprocess
import urllib
from hashlib import md5
from typing import Tuple, List
import requests
import tensorflow as tf
# TODO: install file properly with `pip install -e .`
import sys
# Make sibling modules (e.g. `storage`) importable when run as a script.
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
from storage import FileStorage
from flask import (
    Flask, redirect, request, render_template, send_from_directory)
app = Flask(__name__)
class NotebookDownloadException(Exception):
    """Raised when a notebook cannot be fetched from GitHub.

    Args:
        message: human-readable explanation, also exposed as ``.message``.
    """

    def __init__(self, message):
        # Python 3 zero-argument super() (the file already requires 3.6+
        # for its f-strings).
        super().__init__(message)
        self.message = message
class ConvertionException(Exception):
    """Raised when tf_upgrade_v2 fails to convert a file.

    (The class name keeps its historical spelling for API compatibility.)

    Args:
        message: short human-readable error, exposed as ``.message``.
        details: full converter output (joined with ``<br>``), exposed
            as ``.details``.
    """

    def __init__(self, message, details):
        # Python 3 zero-argument super().
        super().__init__(message)
        self.message = message
        self.details = details
def download_file(requested_url: str) -> str:
    """Download a file's raw contents from a GitHub repository.

    Raises NotebookDownloadException when GitHub does not answer 200.
    """
    raw_url = f"https://github.com/{requested_url.replace('blob', 'raw')}"
    response = requests.get(raw_url)
    logging.info(f"Requested URL: {requested_url}")
    if response.status_code == 200:
        return response.text
    logging.info(f"Can not download {raw_url}")
    raise NotebookDownloadException("Can not download the file. Please, check the URL")
# TODO: Run conversion in temp folder,
# so we do not have issues with concurrent conversion
def convert_file(in_file: str, out_file: str) -> List[str]:
    """Upgrade file with tf_upgrade_v2.

    Args:
        in_file: path of the file to convert.
        out_file: path the converted file is written to.

    Returns:
        The converter's combined stdout/stderr, one string per line.

    Raises:
        ConvertionException: when tf_upgrade_v2 exits with a non-zero code.
    """
    # Pass an argument list (shell=False) so file names are never
    # interpreted by a shell -- avoids quoting and injection issues.
    command = ["tf_upgrade_v2", "--infile", in_file, "--outfile", out_file]
    process = subprocess.Popen(command,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    result_bytes = process.stdout.readlines()
    process.wait()
    result = [line.decode('utf-8') for line in result_bytes]
    if process.returncode:
        details = "<br>".join(result)
        raise ConvertionException("Can not convert the file", details)
    return result
def save_ipynb_from_py(folder: str, py_filename: str) -> str:
    """Save an .ipynb file built from a python file.

    The python source is embedded into `template.ipynb` as a JSON array of
    strings, so every line must be JSON-escaped first.

    Returns:
        The generated notebook's file name (not its full path).
    """
    full_filename = f"{folder}/{py_filename}"
    with open(full_filename) as pyfile:
        # Escape backslashes BEFORE quotes and newlines; otherwise a literal
        # backslash in the source (e.g. "\t" inside a string) would produce
        # an invalid or wrongly-decoded JSON escape in the notebook.
        code_lines = [line.replace("\\", "\\\\")
                          .replace('"', '\\"')
                          .replace("\n", "\\n")
                      for line in pyfile.readlines()]
    pycode = '",\n"'.join(code_lines)
    with open('template.ipynb') as template:
        template_body = template.read()
    ipynb_code = template_body.replace('{{TEMPLATE}}', pycode)
    new_filename = full_filename.replace('.py', '.ipynb')
    with open(new_filename, "w") as ipynb_file:
        ipynb_file.write(ipynb_code)
    return py_filename.replace('.py', '.ipynb')
def process_file(file_url: str) -> Tuple[str, Tuple[str, ...]]:
    """Process file with download, cache and upgrade.

    The cache folder is derived from the md5 of the URL; when it already
    exists, the download/convert steps are skipped entirely.

    Returns:
        (cache path, (original file name, converted file name)) -- for .py
        inputs the names refer to the generated .ipynb twins.
    """
    _, file_ext = os.path.splitext(file_url)
    folder_hash = md5(file_url.encode('utf-8')).hexdigest()
    path = f"/notebooks/{folder_hash}"
    original = f"original{file_ext}"
    converted = f"converted{file_ext}"
    # TODO: delete the folder completely if `force`
    if not os.path.exists(path):
        # Cache miss: download, convert and persist everything.
        file_content = download_file(file_url)
        os.mkdir(path)
        with open(f"{path}/{original}", "w") as original_file:
            original_file.write(file_content)
        try:
            output = convert_file(f"{path}/{original}", f"{path}/{converted}")
        except ConvertionException as error:
            # Drop the half-populated cache folder so a retry starts clean.
            shutil.rmtree(path)
            raise error
        with open(f"{path}/output", "w") as summary_output:
            summary_output.write('\n'.join(output))
        shutil.copy('report.txt', f"{path}/report")
        # persist `report.txt` to GCS
        storage = FileStorage()
        storage.save_file('report.txt', folder_hash)
        # found a python file, need to encode separately
        if original.endswith('.py'):
            result_filenames = []
            for py_file in [original, converted]:
                result_filenames.append(save_ipynb_from_py(path, py_file))
            assert len(result_filenames) == 2
            return path, tuple(result_filenames[:2])
    # Cache hit (or non-.py fresh conversion): the notebooks already exist,
    # only the names need to be derived.
    if original.endswith('.py'):
        return path, (original.replace('.py', '.ipynb'),
                      converted.replace('.py', '.ipynb'))
    return path, (original, converted)
def inject_nbdime(content: str, folder_hash: str) -> str:
    """Inject the upgrade-report lines just before nbdime's diff header."""
    replace_token = "<h3>Notebook Diff</h3>"
    position = content.find(replace_token)
    if position == -1:
        # Nothing to inject here, just return the content unchanged.
        return content
    report_path = f"/notebooks/{folder_hash}/report"
    with open(report_path) as summary_output:
        report_lines = [line for line in summary_output.readlines()
                        if line.strip() != '']
    return render_template("nbdime_inject.html",
                           before=content[:position],
                           report_lines=report_lines,
                           after=content[position:],
                           folder=folder_hash,
                           file='converted.ipynb',
                           tf_version=tf.version.VERSION)
@app.route("/")
def hello():
"""Index page with intro info."""
return render_template('index.html',
tf_version=tf.version.VERSION)
@app.route('/download/<path:folder>/<path:filename>')
def download(folder, filename):
    """Allow to download files from a conversion's cache folder."""
    # TODO: move all /notebooks to a single config
    uploads = os.path.join('/notebooks/', folder)
    # NOTE(review): Flask >= 2.0 renamed send_from_directory's `filename`
    # keyword to `path` -- confirm the pinned Flask version supports this.
    return send_from_directory(directory=uploads, filename=filename)
@app.route("/d/<path:path>", methods=['GET'])
def proxy(path):
"""Proxy request to index of `nbdime`"""
nbdime_url = os.environ.get('NBDIME_URL')
params = '&'.join([f"{k}={v}" for k, v in request.values.items()])
url = f"{nbdime_url}{path}?{params}"
logging.info(f"URL: {url}")
try:
response = urllib.request.urlopen(url)
content = response.read()
if b'notebooks' in content:
folder_hash = re.findall(r"/notebooks\/([^\/]+)/", url)[0]
try:
content = inject_nbdime(content.decode('utf-8'), folder_hash)
return content
except FileNotFoundError:
return ("The cache was invalidated meanwhile. "
"Please start by submitting the URL again.")
else:
return content
except urllib.error.URLError:
logging.error(f"Can not proxy nbdime for GET: {url}")
message = "Something went wrong, can not proxy nbdime"
return render_template('error.html', message=message), 502
@app.route("/d/<path:path>", methods=['POST'])
def proxy_api(path):
"""Proxy request to `nbdime` API"""
nbdime_url = os.environ.get('NBDIME_URL')
url = f"{nbdime_url}{path}"
try:
payload = json.dumps(request.json).encode()
headers = {'content-type': 'application/json'}
# dirty hack: seems like sometimes nbdime looses `content type`
# from `application/json` to `text/plain;charset=UTF-8`
if not request.json:
logging.warning(f"WARNING: somehow lost json payload {request.json}")
base = re.findall(r"base=([^\&]+)", request.referrer)[0]
remote = re.findall(r"remote=([^\&]+)", request.referrer)[0]
payload = json.dumps({'base': base, 'remote': remote})
payload = payload.replace('%2F', '/').encode('utf-8')
req = urllib.request.Request(url,
data=payload,
headers=headers)
resp = urllib.request.urlopen(req)
return resp.read()
except urllib.error.URLError:
logging.error(f"Can not proxy nbdime for POST: {url}")
message = "Something went wrong, can not proxy nbdime"
return render_template('error.html', message=message), 502
# TODO force refresh
@app.route('/<path:path>')
def catch_all(path):
    """Endpoint for all URLs from Github.

    Converts the referenced file (via cache) and redirects to the nbdime
    diff of the original vs. the upgraded version.
    """
    if not (path.endswith('.py') or path.endswith('.ipynb')):
        message = "Currently we only support `.py` and `.ipynb` files."
        return render_template('error.html', message=message), 501
    try:
        folder, files = process_file(path)
        url = f"/d/diff?base={folder}/{files[0]}&remote={folder}/{files[1]}"
        return redirect(url, code=302)
    except NotebookDownloadException as error:
        message = error.args[0]
        return render_template('error.html', message=message), 400
    except ConvertionException as error:
        logging.error(f"Can not convert for path {path}: {error.details}")
        return render_template('error.html',
                               message=error.message,
                               details=error.details), 400
if __name__ == "__main__":
app.run(debug=True, host="0.0.0.0")
|
"""Backward-compatibility version of FCNTL; export constants exported by
fcntl, and issue a deprecation warning.
"""
import warnings
warnings.warn("the FCNTL module is deprecated; please use fcntl",
DeprecationWarning)
# Export the constants known to the fcntl module:
from fcntl import *
# and *only* the constants:
__all__ = [s for s in dir() if s[0] in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"]
|
from dataclasses import dataclass
import psycopg2
from mangum.backends.base import WebSocketBackend
@dataclass
class PostgreSQLBackend(WebSocketBackend):
    """Store WebSocket connection scopes in a PostgreSQL table.

    NOTE(review): every public method closes the connection when it
    finishes, so an instance is single-use (presumably matching a
    per-event Lambda lifecycle) -- confirm against the mangum backend
    contract. `fetch` also assumes the id exists: `fetchone()[0]` raises
    TypeError for an unknown connection id. `self.dsn` is presumably
    declared on WebSocketBackend.
    """

    def __post_init__(self) -> None:
        # Connect eagerly and make sure the backing table exists.
        self.connection = psycopg2.connect(self.dsn, connect_timeout=5)
        self.cursor = self.connection.cursor()
        self.cursor.execute(
            "create table if not exists mangum_websockets (id varchar(64) primary key, initial_scope text)"
        )
        self.connection.commit()

    def create(self, connection_id: str, initial_scope: str) -> None:
        # Persist the initial ASGI scope for a new connection.
        self.cursor.execute(
            "insert into mangum_websockets values (%s, %s)",
            (connection_id, initial_scope),
        )
        self.connection.commit()
        self.connection.close()

    def fetch(self, connection_id: str) -> str:
        # Look up the stored scope for an existing connection.
        self.cursor.execute(
            "select initial_scope from mangum_websockets where id = %s",
            (connection_id,),
        )
        initial_scope = self.cursor.fetchone()[0]
        self.cursor.close()
        self.connection.close()
        return initial_scope

    def delete(self, connection_id: str) -> None:
        # Remove the connection's row once the socket is gone.
        self.cursor.execute(
            "delete from mangum_websockets where id = %s", (connection_id,)
        )
        self.connection.commit()
        self.cursor.close()
        self.connection.close()
|
def test_parse_json_urls_file(
json_urls_provider, expected_urls_in_json_file, expected_regexp_in_json_file
):
parsed_urls = set()
parsed_regexp_list = set()
for url_data in json_urls_provider:
parsed_urls.add(str(url_data.url))
parsed_regexp_list.add(url_data.regexp)
assert parsed_urls == expected_urls_in_json_file
assert parsed_regexp_list == expected_regexp_in_json_file
|
"""
TASK 4:
The telephone company want to identify numbers that might be doing
telephone marketing. Create a set of possible telemarketers:
these are numbers that make outgoing calls but never send texts,
receive texts or receive incoming calls.
Print a message:
"These numbers could be telemarketers: "
<list of numbers>
The list of numbers should be print out one per line in lexicographic order with no duplicates.
"""
# Read file into texts and calls.
import csv
with open('texts.csv', 'r') as f:
reader = csv.reader(f)
texts = list(reader)
with open('calls.csv', 'r') as f:
reader = csv.reader(f)
calls = list(reader)
normal_number = set()
suspicious_number = set()
for record in texts:
normal_number.add(record[0])
normal_number.add(record[1])
for record in calls:
normal_number.add(record[1])
for number in calls:
if (number[0] not in normal_number):
suspicious_number.add(number[0].replace(' ', ''))
print('These numbers could be telemarketers: ')
for number in sorted(suspicious_number):
print(number)
# Time complexity : O(n4 + n log n)
|
#!/usr/bin/env python3
"""
The p2p connection frontend
"""
import os
import json
import hashlib
import logging
from kademlia.network import Server
from twisted.internet import asyncioreactor
import asyncio
from twisted.internet.defer import Deferred
from asyncio.tasks import ensure_future
# asyncioreactor.install(eventloop=loop)
class P2PConnection:
    """Kademlia DHT frontend that bridges asyncio futures to twisted Deferreds."""

    def __init__(self, listenPort):
        # Verbose kademlia logging to stderr for debugging.
        handler = logging.StreamHandler()
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        log = logging.getLogger('kademlia')
        log.addHandler(handler)
        log.setLevel(logging.DEBUG)
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)
        # NOTE(review): a twisted reactor can only be installed once per
        # process, so presumably only one P2PConnection may ever be
        # constructed -- confirm with the callers.
        asyncioreactor.install(eventloop=self.loop)
        self.server = Server()
        self.server.listen(listenPort)

    def connect_p2p(self, bootstrap_address):
        # Bootstrap into the DHT via the given host (port 8468 is fixed).
        future = ensure_future(self.server.bootstrap([(bootstrap_address, 8468)]))
        d = Deferred.fromFuture(future)
        return d

    def get_key(self, content):
        """Return the SHA-256 hex digest used as the DHT key for `content`."""
        return hashlib.sha256(str.encode(content)).hexdigest()

    def set_chat_info(self, chatuuid, groupname):
        """Store the chat's metadata (currently just the group name) under its uuid."""
        chat_info = {
            "name": groupname
        }
        chat_info_str = json.dumps(chat_info)
        key = chatuuid

        def done(res, key):
            # Success callback: just report the stored key.
            print("Stored key:'{}' in network".format(key))

        d = self._send(key, chat_info_str)
        d.addCallback(done, key)
        d.addErrback(self.send_failed)
        return d

    def get_chat_info(self, chatuuid):
        """Fetch the chat metadata stored under `chatuuid` (as a Deferred)."""
        d = self.get(chatuuid)
        return d

    def _send(self, key, data):
        # Store `data` under `key`; wrap the asyncio future in a Deferred.
        print("Start storing key:'{}' in P2P-network".format(key))
        fut = ensure_future(self.server.set(key, data))
        d = Deferred.fromFuture(fut)
        # from twisted.internet import reactor
        # d.addTimeout(30, reactor, self.send_failed)
        return d

    def send(self, message):
        """Store `message` under its content hash; return (deferred, key)."""
        key = self.get_key(message)
        d = self._send(key, message)
        return d, key

    def send_failed(self, err):
        # TODO: Auto resend to network or ask user to resend?
        err.trap(TimeoutError)
        print('P2P send timed out')

    def get(self, key):
        """Retrieve the value stored under `key` as a Deferred."""
        fut = ensure_future(self.server.get(key))
        d = Deferred.fromFuture(fut)
        return d
|
# Databricks notebook source
# MAGIC %md
# MAGIC # Model Registry
# MAGIC
# MAGIC MLflow Model Registry is a collaborative hub where teams can share ML models, work together from experimentation to online testing and production, integrate with approval and governance workflows, and monitor ML deployments and their performance. This lesson explores how to manage models using the MLflow model registry.
# MAGIC
# MAGIC ##  In this lesson you:<br>
# MAGIC - Register a model using MLflow
# MAGIC - Deploy that model into production
# MAGIC - Update a model in production to new version including a staging phase for testing
# MAGIC - Archive and delete models
# MAGIC
# MAGIC ## Prerequisites
# MAGIC - Web browser: Chrome
# MAGIC - A cluster configured with **8 cores** and **DBR 7.0 ML**
# COMMAND ----------
# MAGIC %md
# MAGIC ##  Classroom-Setup
# MAGIC
# MAGIC For each lesson to execute correctly, please make sure to run the **`Classroom-Setup`** cell at the<br/>
# MAGIC start of each lesson (see the next cell) and the **`Classroom-Cleanup`** cell at the end of each lesson.
# COMMAND ----------
# MAGIC %run "./Includes/Classroom-Setup"
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC ### Model Registry
# MAGIC
# MAGIC The MLflow Model Registry component is a centralized model store, set of APIs, and UI, to collaboratively manage the full lifecycle of an MLflow Model. It provides model lineage (which MLflow Experiment and Run produced the model), model versioning, stage transitions (e.g. from staging to production), annotations (e.g. with comments, tags), and deployment management (e.g. which production jobs have requested a specific model version).
# MAGIC
# MAGIC Model registry has the following features:<br><br>
# MAGIC
# MAGIC * **Central Repository:** Register MLflow models with the MLflow Model Registry. A registered model has a unique name, version, stage, and other metadata.
# MAGIC * **Model Versioning:** Automatically keep track of versions for registered models when updated.
# MAGIC * **Model Stage:** Assigned preset or custom stages to each model version, like “Staging” and “Production” to represent the lifecycle of a model.
# MAGIC * **Model Stage Transitions:** Record new registration events or changes as activities that automatically log users, changes, and additional metadata such as comments.
# MAGIC * **CI/CD Workflow Integration:** Record stage transitions, request, review and approve changes as part of CI/CD pipelines for better control and governance.
# MAGIC
# MAGIC <div><img src="https://files.training.databricks.com/images/eLearning/ML-Part-4/model-registry.png" style="height: 400px; margin: 20px"/></div>
# MAGIC
# MAGIC <img alt="Side Note" title="Side Note" style="vertical-align: text-bottom; position: relative; height:1.75em; top:0.05em; transform:rotate(15deg)" src="https://files.training.databricks.com/static/images/icon-note.webp"/> See <a href="https://mlflow.org/docs/latest/registry.html" target="_blank">the MLflow docs</a> for more details on the model registry.
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC ### Registering a Model
# MAGIC
# MAGIC The following workflow will work with either the UI or in pure Python. This notebook will use pure Python.
# MAGIC
# MAGIC <img alt="Side Note" title="Side Note" style="vertical-align: text-bottom; position: relative; height:1.75em; top:0.05em; transform:rotate(15deg)" src="https://files.training.databricks.com/static/images/icon-note.webp"/> Explore the UI throughout this lesson by clicking the "Models" tab on the left-hand side of the screen.
# COMMAND ----------
# MAGIC %md
# MAGIC Confirm that you are using `mlflow` version 1.7 or higher.
# COMMAND ----------
from distutils.version import LooseVersion, StrictVersion
import mlflow
# NOTE(review): distutils is deprecated (removed in Python 3.12); on newer
# runtimes prefer packaging.version -- fine for the targeted DBR 7.0 ML.
assert StrictVersion(mlflow.__version__) >= StrictVersion("1.7.0"), "Update MLflow to version 1.7.0+"
# COMMAND ----------
# MAGIC %md
# MAGIC Train a model and log it to MLflow.
# COMMAND ----------
import mlflow
import mlflow.sklearn
import pandas as pd
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
from sklearn.model_selection import train_test_split

# Train a baseline random forest (100 trees, depth 5) on the Airbnb listing
# data and log the fitted model plus its test MSE to MLflow.
df = pd.read_csv("/dbfs/mnt/training/airbnb/sf-listings/airbnb-cleaned-mlflow.csv")
X_train, X_test, y_train, y_test = train_test_split(df.drop(["price"], axis=1), df[["price"]].values.ravel(), random_state=42)
rf = RandomForestRegressor(n_estimators=100, max_depth=5)
rf.fit(X_train, y_train)

with mlflow.start_run(run_name="RF Model") as run:
    mlflow.sklearn.log_model(rf, "model")
    mlflow.log_metric("mse", mean_squared_error(y_test, rf.predict(X_test)))
    # Keep the run id for registering this model later.
    runID = run.info.run_uuid
# COMMAND ----------
# MAGIC %md
# MAGIC Create a unique model name so you don't clash with other workspace users.
# COMMAND ----------
import uuid
# A random suffix avoids name collisions with other workspace users.
model_name = f"airbnb_rf_model_{uuid.uuid4().hex[:10]}"
model_name
# COMMAND ----------
# MAGIC %md
# MAGIC Register the model.
# COMMAND ----------
model_uri = "runs:/{run_id}/model".format(run_id=runID)
model_details = mlflow.register_model(model_uri=model_uri, name=model_name)
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC **Open the *Models* tab on the left of the screen to explore the registered model.** Note the following:<br><br>
# MAGIC
# MAGIC * It logged who trained the model and what code was used
# MAGIC * It logged a history of actions taken on this model
# MAGIC * It logged this model as a first version
# MAGIC
# MAGIC <div><img src="https://files.training.databricks.com/images/eLearning/ML-Part-4/model-registry-1.png" style="height: 400px; margin: 20px"/></div>
# COMMAND ----------
# MAGIC %md
# MAGIC Check the status. It will initially be in `PENDING_REGISTRATION` status.
# COMMAND ----------
from mlflow.tracking.client import MlflowClient

client = MlflowClient()
# Status stays PENDING_REGISTRATION until the registry finishes copying artifacts.
model_version_details = client.get_model_version(name=model_name, version=1)
model_version_details.status
# COMMAND ----------
# MAGIC %md
# MAGIC Now add a model description
# COMMAND ----------
# Model-level description (applies to all versions).
client.update_registered_model(
    name=model_details.name,
    description="This model forecasts Airbnb housing list prices based on various listing inputs."
)
# COMMAND ----------
# MAGIC %md
# MAGIC Add a version-specific description.
# COMMAND ----------
# Version-specific description (version 1 only).
client.update_model_version(
    name=model_details.name,
    version=model_details.version,
    description="This model version was built using sklearn."
)
# COMMAND ----------
# MAGIC %md
# MAGIC ### Deploying a Model
# MAGIC
# MAGIC The MLflow Model Registry defines several model stages: `None`, `Staging`, `Production`, and `Archived`. Each stage has a unique meaning. For example, `Staging` is meant for model testing, while `Production` is for models that have completed the testing or review processes and have been deployed to applications.
# MAGIC
# MAGIC Users with appropriate permissions can transition models between stages. In private preview, any user can transition a model to any stage. In the near future, administrators in your organization will be able to control these permissions on a per-user and per-model basis.
# MAGIC
# MAGIC If you have permission to transition a model to a particular stage, you can make the transition directly by using the `MlflowClient.update_model_version()` function. If you do not have permission, you can request a stage transition using the REST API; for example: ```%sh curl -i -X POST -H "X-Databricks-Org-Id: <YOUR_ORG_ID>" -H "Authorization: Bearer <YOUR_ACCESS_TOKEN>" https://<YOUR_DATABRICKS_WORKSPACE_URL>/api/2.0/preview/mlflow/transition-requests/create -d '{"comment": "Please move this model into production!", "model_version": {"version": 1, "registered_model": {"name": "power-forecasting-model"}}, "stage": "Production"}'
# MAGIC ```
# COMMAND ----------
# MAGIC %md
# MAGIC Now that you've learned about stage transitions, transition the model to the `Production` stage.
# COMMAND ----------
import time
# Registration is asynchronous; pause briefly before transitioning stages.
time.sleep(10) # In case the registration is still pending
# COMMAND ----------
# Promote version 1 straight to Production.
client.transition_model_version_stage(
    name=model_details.name,
    version=model_details.version,
    stage='Production',
)
# COMMAND ----------
# MAGIC %md
# MAGIC Fetch the model's current status.
# COMMAND ----------
# Re-read the version record to observe its new stage.
model_version_details = client.get_model_version(
    name=model_details.name,
    version=model_details.version,
)
print("The current model stage is: '{stage}'".format(stage=model_version_details.current_stage))
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC Fetch the latest model using a `pyfunc`. Loading the model in this way allows us to use the model regardless of the package that was used to train it.
# MAGIC
# MAGIC <img alt="Side Note" title="Side Note" style="vertical-align: text-bottom; position: relative; height:1.75em; top:0.05em; transform:rotate(15deg)" src="https://files.training.databricks.com/static/images/icon-note.webp"/> You can load a specific version of the model too.
# COMMAND ----------
import mlflow.pyfunc

# Load version 1 through a models:/ URI; pyfunc hides the training flavor.
model_version_uri = "models:/{model_name}/1".format(model_name=model_name)
print("Loading registered model version from URI: '{model_uri}'".format(model_uri=model_version_uri))
model_version_1 = mlflow.pyfunc.load_model(model_version_uri)
# COMMAND ----------
# MAGIC %md
# MAGIC Apply the model.
# COMMAND ----------
model_version_1.predict(X_test)
# COMMAND ----------
# MAGIC %md
# MAGIC ### Deploying a New Model Version
# MAGIC
# MAGIC The MLflow Model Registry enables you to create multiple model versions corresponding to a single registered model. By performing stage transitions, you can seamlessly integrate new model versions into your staging or production environments.
# COMMAND ----------
# MAGIC %md
# MAGIC Create a new model version and register that model when it's logged.
# COMMAND ----------
import mlflow
import mlflow.sklearn
import pandas as pd
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
from sklearn.model_selection import train_test_split

# Train a larger forest (300 trees, depth 10) on the same split.
df = pd.read_csv("/dbfs/mnt/training/airbnb/sf-listings/airbnb-cleaned-mlflow.csv")
X_train, X_test, y_train, y_test = train_test_split(df.drop(["price"], axis=1), df[["price"]].values.ravel(), random_state=42)
rf = RandomForestRegressor(n_estimators=300, max_depth=10)
rf.fit(X_train, y_train)

with mlflow.start_run(run_name="RF Model") as run:
    # Specify the `registered_model_name` parameter of the `mlflow.sklearn.log_model()`
    # function to register the model with the MLflow Model Registry. This automatically
    # creates a new model version
    mlflow.sklearn.log_model(
        sk_model=rf,
        artifact_path="sklearn-model",
        registered_model_name=model_name,
    )
    mlflow.log_metric("mse", mean_squared_error(y_test, rf.predict(X_test)))
    runID = run.info.run_uuid
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC Check the UI to see the new model version.
# MAGIC
# MAGIC <div><img src="https://files.training.databricks.com/images/eLearning/ML-Part-4/model-registry-2.png" style="height: 400px; margin: 20px"/></div>
# COMMAND ----------
# MAGIC %md
# MAGIC Use the search functionality to grab the latest model version.
# COMMAND ----------
# Find the newest version of the registered model. Version numbers come
# back from MLflow as strings, so compare them numerically: a plain max()
# over strings ranks "9" above "10".
model_version_infos = client.search_model_versions(f"name = '{model_name}'")
new_model_version = max(
    (model_version_info.version for model_version_info in model_version_infos),
    key=int,
)
# COMMAND ----------
# MAGIC %md
# MAGIC Add a description to this new version.
# COMMAND ----------
# Describe the new (300-tree) version.
client.update_model_version(
    name=model_name,
    version=new_model_version,
    description="This model version is a random forest containing 300 decision trees and a max depth of 10 that was trained in scikit-learn."
)
# COMMAND ----------
# MAGIC %md
# MAGIC Put this new model version into `Staging`
# COMMAND ----------
import time

# Poll until the new version has finished registering rather than sleeping a
# fixed amount of time: a blind sleep is race-prone when registration is slow,
# and wastes time when it is fast. Bounded so it cannot hang forever.
_deadline = time.time() + 60
while time.time() < _deadline:
    if client.get_model_version(model_name, new_model_version).status == "READY":
        break
    time.sleep(2)  # registration still pending; retry shortly
# COMMAND ----------
# Promote the new model version to the Staging stage.
client.transition_model_version_stage(
    name=model_name,
    version=new_model_version,
    stage="Staging",
)
# COMMAND ----------
# MAGIC %md
# MAGIC Since this model is now in staging, you can execute an automated CI/CD pipeline against it to test it before going into production. Once that is completed, you can push that model into production.
# COMMAND ----------
# Promote the validated model version from Staging into Production.
client.transition_model_version_stage(
    stage="Production",
    name=model_name,
    version=new_model_version,
)
# COMMAND ----------
# MAGIC %md
# MAGIC ### Archiving and Deleting
# MAGIC
# MAGIC You can now archive and delete old versions of the model.
# COMMAND ----------
# Move version 1 of the model out of active use by archiving it.
client.transition_model_version_stage(model_name, 1, stage="Archived")
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC Delete version 1.
# MAGIC
# MAGIC <img alt="Side Note" title="Side Note" style="vertical-align: text-bottom; position: relative; height:1.75em; top:0.05em; transform:rotate(15deg)" src="https://files.training.databricks.com/static/images/icon-note.webp"/> You cannot delete a model that is not first archived.
# COMMAND ----------
# Version 1 has been archived, so it can now be deleted permanently.
client.delete_model_version(model_name, version=1)
# COMMAND ----------
# MAGIC %md
# MAGIC Archive version 2 of the model too.
# COMMAND ----------
# Archive version 2 as well, a prerequisite for deleting the registered model.
client.transition_model_version_stage(model_name, 2, stage="Archived")
# COMMAND ----------
# MAGIC %md
# MAGIC Now delete the entire registered model.
# COMMAND ----------
# Remove the registered model and all of its remaining (archived) versions.
client.delete_registered_model(name=model_name)
# COMMAND ----------
# MAGIC %md
# MAGIC ## Review
# MAGIC **Question:** How does MLflow tracking differ from the model registry?
# MAGIC **Answer:** Tracking is meant for experimentation and development. The model registry is designed to take a model from tracking and put it through staging and into production. This is often the point at which a data engineer or a machine learning engineer takes responsibility for the deployment process.
# MAGIC
# MAGIC **Question:** Why do I need a model registry?
# MAGIC **Answer:** Just as MLflow tracking provides end-to-end reproducibility for the machine learning training process, a model registry provides reproducibility and governance for the deployment process. Since production systems are mission critical, components can be isolated with ACL's so only specific individuals can alter production models. Version control and CI/CD workflow integration is also a critical dimension of deploying models into production.
# MAGIC
# MAGIC **Question:** What can I do programmatically versus using the UI?
# MAGIC **Answer:** Most operations can be done using the UI or in pure Python. A model must be tracked using Python, but from that point on everything can be done either way. For instance, a model logged using the MLflow tracking API can then be registered using the UI and can then be pushed into production.
# COMMAND ----------
# MAGIC %md
# MAGIC ##  Classroom-Cleanup<br>
# MAGIC
# MAGIC Run the **`Classroom-Cleanup`** cell below to remove any artifacts created by this lesson.
# COMMAND ----------
# MAGIC %run "./Includes/Classroom-Cleanup"
# COMMAND ----------
# MAGIC %md
# MAGIC ##  Next Steps
# MAGIC
# MAGIC Start the labs for this lesson, [Model Management Lab]($./Labs/05-Lab)
# COMMAND ----------
# MAGIC %md
# MAGIC ## Additional Topics & Resources
# MAGIC
# MAGIC **Q:** Where can I find out more information on MLflow Model Registry?
# MAGIC **A:** Check out <a href="https://mlflow.org/docs/latest/registry.html" target="_blank">the MLflow documentation</a>
# COMMAND ----------
# MAGIC %md-sandbox
# MAGIC © 2020 Databricks, Inc. All rights reserved.<br/>
# MAGIC Apache, Apache Spark, Spark and the Spark logo are trademarks of the <a href="http://www.apache.org/">Apache Software Foundation</a>.<br/>
# MAGIC <br/>
# MAGIC <a href="https://databricks.com/privacy-policy">Privacy Policy</a> | <a href="https://databricks.com/terms-of-use">Terms of Use</a> | <a href="http://help.databricks.com/">Support</a>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.