content stringlengths 5 1.05M |
|---|
#!/usr/bin/env python
"""Print the Python type of each line read from the file named in argv[1]."""
import sys

# Iterate the file lazily, one line at a time.
with open(sys.argv[1], "r") as handle:
    for text_line in handle:
        print(type(text_line))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from dump_env import dump
def _create_parser():
parser = argparse.ArgumentParser()
parser.add_argument('-t', '--template', default='',
type=str, help='Adds template path')
parser.add_argument('-p', '--prefix', default='',
type=str, help='Adds prefix')
return parser
def main():
    """
    Runs dump-env script.

    Examples:
        Dump all environ variables::
            $ dump-env
        Dump all environ variables starting with `PIP_`::
            $ dump-env -p 'PIP_'
        Dump everything from `.env.template` plus all env variables with
        `SECRET_` prefix into a `.env` file::
            $ dump-env -p 'SECRET_' -t .env.template > .env
    """
    args = _create_parser().parse_args()
    # Emit one KEY=VALUE line per resolved variable.
    for key, value in dump(args.template, args.prefix).items():
        print('{0}={1}'.format(key, value))
|
import random
from src.hw3.col import o
from copy import deepcopy
"""
all : Seems like a Sample.
bin : Seems like a Col.
"""
class FFT():
    """Build a fast-and-frugal tree over `all.rows`.

    Each level peels off one leaf: the rows matched by the single "best"
    (plan rule) or "worst" (monitor rule) bin; remaining rows are either
    closed out as a default leaf or recursed on.  Completed branches are
    appended to the caller-supplied `branches` list; nothing is returned.

    NOTE(review): `all` and `bin` shadow builtins; they appear to be a
    Sample-like object and a Col-like range respectively -- confirm.
    """

    def __init__(self, all, conf, branch, branches, stop=None, level=0):
        self.my = conf
        # Default recursion cut-off grows with sample size and bin count.
        stop = stop or 2*len(all.rows)**self.my.bins
        tmp = all.clone()
        for row in all.rows:
            tmp.add(row)
        # Split columns into per-class distributions; after sorting,
        # tmp[0] is "best" and tmp[-1] is "rest".
        tmp = tmp.divs()
        tmp = sorted(tmp)
        best, rest = tmp[0], tmp[-1]
        # Discretize each (best, rest) column pair into candidate bins.
        bins = [bin for xbest, xrest in zip(best.x, rest.x)
                for bin in xbest.discretize(xrest, self.my)]
        bestIdea = None
        worstIdea = None
        # values() sorts ascending, so [-1] is the highest-scoring bin.
        bestValues = self.values("plan", bins)
        worstValues = self.values("monitor", bins)
        if len(bestValues) > 0 and len(worstValues) > 0:
            bestIdea = bestValues[-1][1]
            worstIdea = worstValues[-1][1]
        # Grow two branches: exit on the best idea (yes=1) and on the
        # worst idea (no=1).
        for yes, no, idea in [(1, 0, bestIdea), (0, 1, worstIdea)]:
            leaf, tree = all.clone(), all.clone()
            for row in all.rows:
                if self.match(idea, row):
                    leaf.add(row)
                else:
                    tree.add(row)
            b1 = deepcopy(branch)
            b1 += [o(at=idea.at, lo=idea.lo, hi=idea.hi,
                     type=yes, txt="if "+self.show(idea)+" then",
                     then=leaf.ys(), n=len(leaf.rows))]
            # Stop when few rows remain or depth exceeds a random 5-8 cap.
            if len(tree.rows) <= stop or level > random.randrange(5, 9, 1):
                b1 += [o(type=no, txt="  ", then=tree.ys(), n=len(tree.rows))]
                branches += [b1]
            else:
                # Recurse on the unmatched rows with the grown branch.
                FFT(tree, conf, b1, branches, stop=stop, level=level+1)

    def match(self, bin, row):
        """True when `row` falls inside `bin`'s range.

        Missing values ('?') always match; `bin.first`/`bin.last` mark
        open-ended edge ranges.
        """
        v = row[bin.at]  # NOTE(review): `at` looks like a column index -- confirm.
        if v == "?":
            return True  # missing values match every range
        elif bin.first:
            return v <= bin.hi
        elif bin.last:
            return v >= bin.lo
        else:
            return bin.lo <= v <= bin.hi

    def show(self, bin):
        """Human-readable condition string for a bin."""
        # lo == hi denotes a single (possibly symbolic) value.
        if bin.lo == bin.hi:
            return f"{bin.name} == {bin.lo}"
        elif bin.first:
            return f"{bin.name} <= {bin.hi}"
        elif bin.last:
            return f"{bin.name} >= {bin.lo}"
        else:
            return f"{bin.lo} <= {bin.name} <= {bin.hi}"

    def value(self, rule, bin):
        """Score a bin under `rule` ('plan', 'monitor' or 'novel').

        plan rewards bins where best outweighs rest; monitor rewards the
        opposite; novel rewards rarity.
        """
        s = self.my.support
        rules = o(plan=lambda b, r: b**s/(b+r) if b > r else 0,      # favor "best" rows
                  monitor=lambda b, r: r**s/(b+r) if r > b else 0,   # favor "rest" rows
                  novel=lambda b, r: 1/(b+r))                        # favor rare bins
        # Fall back to raw counts when either population total is zero.
        if bin.rests == 0 or bin.bests == 0:
            return rules[rule](bin.best, bin.rest)
        return rules[rule](bin.best/bin.bests, bin.rest/bin.rests)

    def values(self, rule, bins):
        """Return (score, bin) pairs with score > 0, sorted ascending by
        score -- callers take [-1] for the best bin."""
        bins = [(self.value(rule, bin), bin) for bin in bins]
        tmp = [(n, bin) for n, bin in bins if n > 0]
        # Sort ascending on the score (first tuple element).
        return sorted(tmp, key=lambda tuple: tuple[0])
from django import template
from django.conf import settings
from django.core.cache import cache
from django.db.models import Count
from django.template import Variable
from django.utils.safestring import mark_safe
from mptt.utils import get_cached_trees
from ..models import (
Post, Category
)
register = template.Library()
class CategoryNode(template.Node):
    """Render a nodelist once per category, depth-first, exposing the
    current `category` and its pre-rendered `children` in the context."""

    def __init__(self, nodes, roots):
        self.nodes = nodes
        self.roots = Variable(roots)
        self.tree_query_set = None

    def _render_category(self, context, category):
        context.push()
        # Render the whole subtree first so its markup can be injected
        # under this category as `children`.
        child_html = [self._render_category(context, child)
                      for child in category.get_children()]
        context['category'] = category
        context['children'] = mark_safe(''.join(child_html))
        rendered = self.nodes.render(context)
        context.pop()
        return rendered

    def render(self, context):
        roots = self.roots.resolve(context)
        return ''.join(self._render_category(context, root) for root in roots)
@register.tag
def navbar_blog_categories(parser, token):
    """Block tag: renders its body once per root category.

    Usage: {% navbar_blog_categories roots %} ... {% end_navbar_blog_categories %}
    """
    try:
        # split_contents() knows not to split quoted strings.
        tag_name, roots = token.split_contents()
    except ValueError:
        # Old message said the tag "does not require an additional
        # argument", which is the opposite of the actual contract.
        raise template.TemplateSyntaxError(
            '{} tag requires exactly one argument.'.format(token.split_contents()[0])
        )
    nodes = parser.parse(('end_navbar_blog_categories',))
    parser.delete_first_token()
    return CategoryNode(nodes, roots)
@register.simple_tag
def get_blog_category_roots(blog_slug):
    """Return the cached root category trees for the given blog slug."""
    cache_key = 'blog.templatetags.blog_tags.get_blog_category_roots({})'.format(blog_slug)
    cache_time = settings.CACHES['default']['TIMEOUT']
    roots = cache.get(cache_key)
    if not roots:
        try:
            roots = get_cached_trees(Category.objects
                                     .filter(blog__slug=blog_slug)
                                     .order_by('tree_id', 'lft'))
            cache.set(cache_key, roots, cache_time)
        # NOTE(review): .filter() never raises DoesNotExist (it returns an
        # empty queryset), so this except looks like dead code -- confirm.
        except Category.DoesNotExist:
            roots = None
    return roots
@register.tag
def blog_categories(parser, token):
    """Block tag: renders its body once per category in the given roots.

    Usage: {% blog_categories roots %} ... {% end_blog_categories %}
    """
    try:
        # split_contents() knows not to split quoted strings.
        tag_name, obj = token.split_contents()
    except ValueError:
        # Old message said the tag "does not require an additional
        # argument", which is the opposite of the actual contract.
        raise template.TemplateSyntaxError(
            '{} tag requires exactly one argument.'.format(token.split_contents()[0])
        )
    nodes = parser.parse(('end_blog_categories',))
    parser.delete_first_token()
    return CategoryNode(nodes, obj)
@register.simple_tag
def get_recent_posts(blog_slug, count=5):
    """Return the `count` most recently published posts for a blog, cached."""
    cache_key = 'blog.templatetags.blog_tags.get_recent_posts({})'.format(blog_slug)
    cache_time = settings.CACHES['default']['TIMEOUT']
    posts = cache.get(cache_key)
    if posts:
        return posts
    posts = (Post.objects
             .select_related('blog')
             .blog(blog_slug)
             .published()
             .order_by('-published')[:count])
    cache.set(cache_key, posts, cache_time)
    return posts
@register.simple_tag
def get_most_common_tags(blog_slug, min=1, count=10):
    """Return the `count` most common tags (used at least `min` times)
    on published, non-removed posts of the given blog, cached.

    NOTE: the `min` parameter name shadows the builtin but is part of
    the public keyword interface, so it is kept.
    """
    cache_key = 'blog.templatetags.blog_tags.get_most_common_tags({})'.format(blog_slug)
    cache_time = settings.CACHES['default']['TIMEOUT']
    tags = cache.get(cache_key)
    if tags:
        return tags
    # Restrict the count to visible posts of this blog only.
    extra_filters = {
        'post__blog__slug': blog_slug,
        'post__status': Post.STATUS_CHOICES.published,
        'post__is_removed': False,
    }
    tags = Post.tags.most_common(min, extra_filters)[:count]
    cache.set(cache_key, tags, cache_time)
    return tags
@register.simple_tag
def get_similar_posts(post, blog_slug, count=4):
    """Return up to `count` published posts sharing tags with *post*,
    ranked by shared-tag count, then recency.

    The cache key now includes the post id: the old key used only the
    blog slug, so every post in a blog shared one cached "similar" list.
    """
    cache_key = 'blog.templatetags.blog_tags.get_similar_posts({},{})'.format(
        blog_slug, post.id)
    cache_time = settings.CACHES['default']['TIMEOUT']
    posts = cache.get(cache_key)
    if not posts:
        posts = Post.objects.published() \
            .filter(tags__in=post.tags.values_list('id', flat=True)) \
            .exclude(id=post.id) \
            .annotate(same_tags=Count('tags')) \
            .order_by('-same_tags', '-published')[:count]
        cache.set(cache_key, posts, cache_time)
    return posts
|
import pytesseract
from PIL import Image
from io import BytesIO
def data_to_text(data: bytes, args: dict) -> str:
    """Run OCR on raw image bytes, honoring psm/oem/lang options in *args*."""
    params = format_url_params(args)
    image = Image.open(BytesIO(data))
    return pytesseract.image_to_string(
        image, config=params['config'], lang=params['lang'])
def format_url_params(args) -> dict:
    """Translate request query params into pytesseract keyword values.

    Returns a dict with a 'config' CLI-flag string (possibly empty, each
    flag followed by a trailing space) and a 'lang' code defaulting to 'eng'.
    """
    flags = []
    psm = args.get('psm')
    if psm:
        flags.append(f'--psm {psm} ')
    oem = args.get('oem')
    if oem:
        flags.append(f'--oem {oem} ')
    return {
        'config': ''.join(flags),
        'lang': args.get('lang') or 'eng',
    }
from django.apps import AppConfig
class UsageStatsAppConfig(AppConfig):
    """Django app configuration for the usage-statistics app."""
    name = 'topobank.usage_stats'

    def ready(self):
        # Imported lazily: .utils may touch models, which are only safe
        # to import once the app registry is fully populated.
        from .utils import register_metrics
        register_metrics()
|
from fastapi import FastAPI
from .routers import operators, users, redirect
app = FastAPI()

# Router wiring table: (router, URL prefix, OpenAPI tag).
# The redirect router comes last: any other /api request is forwarded to a
# "real" test server, which allows to start mocking only some things.
_ROUTER_TABLE = (
    (operators.router, "/api/operators", "operators"),
    (users.router, "/api/users", "users"),
    (redirect.router, "/api", "redirect"),
)

for _router, _prefix, _tag in _ROUTER_TABLE:
    app.include_router(
        _router,
        prefix=_prefix,
        tags=[_tag],
        responses={404: {"description": "Not found"}},
    )
|
# `import .rp_with_graphs as rp` is a SyntaxError -- relative imports
# must use the `from . import ...` form.
import functools
import random

import nose

from . import rp_with_graphs as rp
def make_votes_from_cand_stren(candidates, strengths, num_votes, noise=3):
    """Build `num_votes` ballots: each candidate scores its strength plus
    uniform noise in [-noise, noise], clamped to [0, len(candidates)]."""
    ceiling = len(candidates)
    ballots = []
    for _ in range(num_votes):
        ballots.append({
            cand: max(min(strengths[cand] + random.randint(-noise, noise),
                          ceiling), 0)
            for cand in candidates
        })
    return ballots
def make_candidates(num_candidates):
    """Return candidate ids 1..num_candidates plus a random strength
    (uniform in [1, num_candidates]) for each."""
    candidates = list(range(1, num_candidates + 1))
    strengths = {cand: random.randint(1, num_candidates)
                 for cand in candidates}
    return candidates, strengths
def make_random_candidates_votes(num_candidates, num_votes, noise=3):
    """Convenience wrapper: random candidates plus ballots derived from
    their strengths.  Returns (candidates, votes, strengths)."""
    candidates, strengths = make_candidates(num_candidates)
    ballots = make_votes_from_cand_stren(candidates, strengths,
                                         num_votes, noise=noise)
    return candidates, ballots, strengths
NUMTESTS = 250
FULLGRAPH = False


def helper_running(nc, nv, nt):
    """Run `nt` random elections (nc candidates, nv votes) and check the
    winner set is a non-empty subset of the candidates."""
    for _ in range(nt):
        cands, votes, _strengths = make_random_candidates_votes(nc, nv)
        winners = rp.run(cands, votes, full_graph=FULLGRAPH)
        assert len(winners) != 0 and winners.issubset(cands)
def test_running_small():
    """Smoke-test every (candidates, votes) combination in 1..5 x 1..5."""
    for nc in range(1, 6):
        for nv in range(1, 6):
            helper_running(nc, nv, NUMTESTS)


def test_running_large():
    """Smoke-test a few larger election sizes with fewer repetitions."""
    scaled = int(NUMTESTS ** .5)
    for nc in (10, 20):
        for nv in (30, 50, 200):
            helper_running(nc, nv, scaled)
def test_accuracy():
    """Statistical check: over many noisy elections, the objectively
    strongest candidates should win with high frequency."""
    NUMCAND = 10
    NUMVOTES = 100
    MARGIN = .75
    NOISE = 5
    SCALED = int(NUMTESTS / 10)
    for _ in range(SCALED):
        c, s = make_candidates(NUMCAND)
        win_stren = max(s.values())
        strongest = [c for c in s if s[c] == win_stren]
        winners = []
        for _ in range(SCALED):
            v = make_votes_from_cand_stren(c, s, NUMVOTES, noise=NOISE)
            winners.extend(rp.run(c, v, full_graph=FULLGRAPH))
        for candidate in strongest:
            # Per-candidate and aggregate win rates of the strongest set.
            win_odds = [winners.count(c)/SCALED for c in strongest]
            try:
                for i in win_odds:
                    assert(i > MARGIN/(len(strongest) ** 2))
                assert(sum(win_odds) > MARGIN)
            except AssertionError:
                # Dump diagnostics before failing.
                stren_list = sorted([(k, v) for k, v in s.items()],
                                    key=lambda x: -x[1])
                print("strengths")
                print(stren_list[:5], "(some omitted)")
                print("win incidence")
                print([(i, round(winners.count(i)/SCALED, 5))
                       for (i, _) in stren_list[:5]])
                print("cause of error")
                print(f"{candidate} with {winners.count(candidate)/SCALED}")
                print(f"margin was {MARGIN/len(strongest)}")
                print()
                # Re-raise the original assertion: the old code raised a
                # fresh, message-less AssertionError and lost the traceback.
                raise
def test_full_matches_fast():
    """Full-graph and fast implementations must agree on the winner set."""
    NUMCAND = 10
    NUMVOTES = 100
    SCALED = int(NUMTESTS / 10)
    for _ in range(SCALED):
        c, v, s = make_random_candidates_votes(NUMCAND, NUMVOTES)
        full = rp.run(c, v, True)
        fast = rp.run(c, v, False)
        try:
            assert(full == fast)
        except AssertionError:
            print(full, fast)
            # The old code swallowed the failure after printing, so this
            # test could never fail; re-raise to keep it meaningful.
            raise
def test_tied():
    """With zero noise, every maximally-strong candidate ties for the win."""
    for _ in range(NUMTESTS):
        cands, votes, strengths = make_random_candidates_votes(10, 10, noise=0)
        top = max(strengths.values())
        tied = [cand for cand in strengths if strengths[cand] == top]
        assert len(rp.run(cands, votes)) == len(tied)
def test_full_order():
    """full_order must place every candidate somewhere in the ranking."""
    for _ in range(NUMTESTS):
        cands, votes, _strengths = make_random_candidates_votes(10, 100)
        ranking = rp.full_order(cands, votes)
        combined = functools.reduce(set.union, ranking, set())
        print(ranking, combined, cands)
        assert set(cands) == combined


if __name__ == "__main__":
    nose.runmodule()
|
import docker

# Connect using the environment's Docker configuration (DOCKER_HOST, etc.).
client = docker.from_env()

# Report the id of each running container, then every local image object.
for running_container in client.containers.list():
    print(running_container.id)

for local_image in client.images.list():
    print(local_image)
|
"""
https://archive.ics.uci.edu/ml/datasets/Heart+Disease
1. #3 (age)
2. #4 (sex)
3. #9 (cp)
4. #10 (trestbps)
5. #12 (chol)
6. #16 (fbs)
7. #19 (restecg)
8. #32 (thalach)
9. #38 (exang)
10. #40 (oldpeak)
11. #41 (slope)
12. #44 (ca)
13. #51 (thal)
14. #58 (num) (the predicted attribute)
ex: ['62', '0', '1', '140', '0', '?', '0', '143', '0', '0', '?', '?', '3', '2']
Here I only choose feature 1~8 for example.
"""
with open("processed.cleveland.data") as f:
lines = f.readlines()
columns = ["age", "sex", "cp", "trestbps", "chol", "fbs", "restecg", "thalach", "target"]
features_targets = []
targets = []
for line in lines:
x = line.strip().split(',')
y = x[-1]
if '?' in x[:8]: # skip missing data
continue
ft = [float(i) for i in x[:8]+[y]]
# print(ft)
features_targets.append(ft)
"""
The first line of the CSV file must contain column names.
Each line after that contains one example from the dataset,
with values for each of the columns defined on the first line.
The pipe character ("|") deliminates separate feature values
in a list of feature values for a given feature.
"""
with open("test.csv", 'w') as f:
f.write(','.join(columns) + '\n')
for feature in features_targets:
feature = [str(i) for i in feature]
f.write(','.join(feature) + '\n')
# launch tensorboard and fill [git/]tensorboardX/examples/test.csv in the WIT page and see the data distribution.
exit()
# For interactive inference, you may need the data in tfrecord format.
# (Unreachable by default: the exit() above stops the script first.)
import tensorflow as tf


def to_examples(features_targets, columns=None):
    """Convert rows of floats into tf.train.Example protos, one float
    feature per column name (row order matches `columns` order)."""
    examples = []
    for row in features_targets:
        example = tf.train.Example()
        # NOTE(review): enumerate(None) raises if `columns` is omitted;
        # the only call site below always passes `columns`.
        for i, col in enumerate(columns):
            example.features.feature[col].float_list.value.append(row[i])
            # example.features.feature[col].bytes_list.value.append(row[col].encode('utf-8'))
        examples.append(example)
    return examples


# Serialize every example into a tfrecord file for the WIT page.
writer = tf.io.TFRecordWriter('test.tfrecord')
for example in to_examples(features_targets, columns):
    writer.write(example.SerializeToString())
writer.close()
# fill [git/]tensorboardX/examples/test.tfrecord in the WIT page and see the data distribution.
|
import pandas as pd
import pytest
from botocore.stub import ANY
from envparse import env
from faker import Faker
from seller_stats.category_list_updates import CategoryListUpdates
fake = Faker()
Faker.seed(0)


def make_categories(len_1, len_2, diff_count):
    """Build two category lists: `len_1` old ones, and `len_2` new ones
    of which `diff_count` differ from the old list.

    Shared entries are the *same* dict objects in both lists.
    """
    def fake_category():
        return {
            'wb_category_name': fake.company(),
            'wb_category_url': fake.url(),
        }

    # First list: the "old" categories.
    old = [fake_category() for _ in range(len_1)]
    # Second list starts with the entries that must match the old list...
    new = [old[i] for i in range(len_2 - diff_count)]
    # ...and is topped up with entries that must differ.
    new.extend(fake_category() for _ in range(len_2 - len(new)))
    return [old, new]
@pytest.fixture()
def comparator():
    """A bare comparator with no preloaded category lists."""
    return CategoryListUpdates()


@pytest.fixture()
def comparator_random():
    """Comparator over partially-overlapping lists (10 old, 20 new, 15 differ)."""
    old, new = make_categories(10, 20, 15)
    return CategoryListUpdates(old, new)


@pytest.fixture()
def comparator_empty():
    """Comparator over fully-overlapping lists (no diff expected)."""
    old, new = make_categories(10, 10, 0)
    return CategoryListUpdates(old, new)


def test_load_from_lists(comparator):
    old, new = make_categories(1, 2, 1)
    comparator.load_from_list(old, new)
    # Identity check: the comparator must keep the very same list objects.
    assert comparator.categories_old is old
    assert comparator.categories_new is new
@pytest.mark.parametrize('lists, diff_added_cnt, diff_removed_cnt, diff_full_cnt', [
    [make_categories(1, 2, 1), 1, 0, 1],  # one new category
    [make_categories(1, 2, 2), 2, 1, 3],  # all categories are new
    [make_categories(10, 10, 0), 0, 0, 0],  # all categories are old
    [make_categories(10, 5, 5), 5, 10, 15],  # fewer categories, all new
    [make_categories(10, 5, 0), 0, 5, 5],  # fewer categories, all old
    [make_categories(10, 15, 8), 8, 3, 11],  # more categories, partially new
])
def test_compare_two_lists_added_categories_count(comparator, lists, diff_added_cnt, diff_removed_cnt, diff_full_cnt):
    """Added/removed/full diff counts must match the constructed overlap."""
    comparator.load_from_list(lists[0], lists[1])
    comparator.calculate_diff()
    # Compare with == rather than `is`: identity only holds for interned
    # small CPython ints and silently fails for e.g. numpy/pandas integers.
    assert comparator.get_categories_count('added') == diff_added_cnt
    assert comparator.get_categories_count('removed') == diff_removed_cnt
    assert comparator.get_categories_count('full') == diff_full_cnt
def test_get_category_count_raises_exception(comparator_random):
    """get_categories_count without a type must raise a descriptive error."""
    comparator_random.calculate_diff()
    with pytest.raises(Exception) as excinfo:
        comparator_random.get_categories_count()
    assert 'type is not defined' in str(excinfo.value)


def test_get_categories_unique_count_raises_exception(comparator_random):
    """get_categories_unique_count without a type must raise as well."""
    comparator_random.calculate_diff()
    with pytest.raises(Exception) as excinfo:
        comparator_random.get_categories_unique_count()
    assert 'type is not defined' in str(excinfo.value)
def test_all_fields_present(comparator_random):
    """Every diff frame must expose exactly the declared columns.

    The old code compared the results of `list.sort()`, which returns
    None -- so all three asserts were `None == None` and could never
    fail.  `sorted()` returns the actual values to compare.
    """
    expected_columns = sorted(CategoryListUpdates._columns)
    comparator_random.calculate_diff()
    assert sorted(comparator_random.diff['added'].columns) == expected_columns
    assert sorted(comparator_random.diff['removed'].columns) == expected_columns
    assert sorted(comparator_random.diff['full'].columns) == expected_columns
@pytest.mark.parametrize('_type', [None, pd.DataFrame()])
def test_fill_types_with(_type):
    """Each diff slot must hold the very object passed to fill_types_with."""
    filled = CategoryListUpdates().fill_types_with(_type)
    for slot in ('added', 'removed', 'full'):
        assert filled[slot] is _type
# Expected URLs are wildberries search links with the category name
# percent-encoded (UTF-8) into the `search` query parameter.
@pytest.mark.parametrize('category_name, expected_url', [
    ['Спортивная обувь', 'https://www.wildberries.ru/catalog/0/search.aspx?search=%D0%A1%D0%BF%D0%BE%D1%80%D1%82%D0%B8%D0%B2%D0%BD%D0%B0%D1%8F%20%D0%BE%D0%B1%D1%83%D0%B2%D1%8C'],
    ['Распродажа обуви: -25% промокод', 'https://www.wildberries.ru/catalog/0/search.aspx?search=%D0%A0%D0%B0%D1%81%D0%BF%D1%80%D0%BE%D0%B4%D0%B0%D0%B6%D0%B0%20%D0%BE%D0%B1%D1%83%D0%B2%D0%B8%3A%20-25%25%20%D0%BF%D1%80%D0%BE%D0%BC%D0%BE%D0%BA%D0%BE%D0%B4'],
    ['Blu-Ray проигрыватели', 'https://www.wildberries.ru/catalog/0/search.aspx?search=Blu-Ray%20%D0%BF%D1%80%D0%BE%D0%B8%D0%B3%D1%80%D1%8B%D0%B2%D0%B0%D1%82%D0%B5%D0%BB%D0%B8'],
    ['Рюкзаки, сумки, чехлы', 'https://www.wildberries.ru/catalog/0/search.aspx?search=%D0%A0%D1%8E%D0%BA%D0%B7%D0%B0%D0%BA%D0%B8%2C%20%D1%81%D1%83%D0%BC%D0%BA%D0%B8%2C%20%D1%87%D0%B5%D1%85%D0%BB%D1%8B'],
])
def test_search_url_field_generator(category_name, expected_url):
    # Stateless helper, so no preloaded lists are needed.
    comparator = CategoryListUpdates()
    generated_url = comparator.generate_search_url(category_name)
    assert generated_url == expected_url


@pytest.mark.parametrize('_type', ['added', 'removed', 'full'])
def test_search_url_field_present(comparator_random, _type):
    # Every diff frame must carry the generated search-url column.
    comparator_random.calculate_diff()
    assert 'wb_category_search_url' in comparator_random.diff[_type].columns
    assert 'www.wildberries.ru' in comparator_random.diff[_type].iloc[0].at['wb_category_search_url']
# Category type is inferred from the URL path: /catalog/novinki/... gives
# 'Новинки' (new), /promotions/... gives 'Промо' (promo), anything else
# 'Обычная' (regular).
@pytest.mark.parametrize('category_url, expected_type', [
    ['https://www.wildberries.ru/catalog/novinki/chehly-dlya-prezervativov', 'Новинки'],
    ['https://www.wildberries.ru/promotions/novogodniy-promokod/gelevye-poloski', 'Промо'],
    ['https://www.wildberries.ru/catalog/krasota/uhod-za-kozhey/uhod-za-litsom/bandazhi-kosmeticheskie', 'Обычная'],
])
def test_category_type_field_generator(category_url, expected_type):
    comparator = CategoryListUpdates()
    generated_type = comparator.generate_category_type(category_url)
    assert generated_type == expected_type


@pytest.mark.parametrize('_type', ['added', 'removed', 'full'])
def test_category_type_field_present(comparator_random, _type):
    # Every diff frame must carry the generated category-type column.
    comparator_random.calculate_diff()
    assert 'wb_category_type' in comparator_random.diff[_type].columns
    assert comparator_random.diff[_type].iloc[0].at['wb_category_type'] in ['Новинки', 'Промо', 'Обычная']


def test_dump_to_s3_file_incorrect_params(comparator_random):
    # Dumping without a diff type must raise a descriptive error.
    comparator_random.calculate_diff()
    with pytest.raises(Exception) as execinfo:
        comparator_random.dump_to_s3_file()
    assert 'type is not defined' in str(execinfo.value)


def test_get_s3_file_name_incorrect_params(comparator_random):
    # Asking for a file name without a diff type must raise as well.
    comparator_random.calculate_diff()
    with pytest.raises(Exception) as execinfo:
        comparator_random.get_s3_file_name()
    assert 'type is not defined' in str(execinfo.value)
@pytest.mark.parametrize('_type', ['added', 'removed', 'full'])
def test_export_file_to_s3(comparator_random, s3_stub, _type):
    # Stub the single put_object call the dump performs against the
    # configured bucket; body and key are not pinned.
    s3_stub.add_response(
        'put_object',
        expected_params={
            'Bucket': env('AWS_S3_BUCKET_NAME'),
            'Body': ANY,
            'Key': ANY,
        },
        service_response={},
    )
    comparator_random.calculate_diff()
    comparator_random.dump_to_s3_file(_type=_type)
    # The generated object key must mention the diff type.
    assert _type in comparator_random.get_s3_file_name(_type=_type)


@pytest.mark.parametrize('_type, expected_prefix', [
    ['added', 'added_'],
    ['removed', 'removed_'],
    ['full', 'full_'],
])
def test_export_file_prefix(comparator_random, s3_stub, _type, expected_prefix):
    s3_stub.add_response(
        'put_object',
        expected_params={
            'Bucket': env('AWS_S3_BUCKET_NAME'),
            'Body': ANY,
            'Key': ANY,
        },
        service_response={},
    )
    comparator_random.calculate_diff()
    comparator_random.dump_to_s3_file(_type=_type)
    # File names are prefixed with the diff type and an underscore.
    assert expected_prefix in comparator_random.get_s3_file_name(_type=_type)
|
# Copyright 2018 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import re
from base64 import b64encode
from collections import namedtuple
from datetime import datetime
from io import StringIO
from os import environ, path
from textwrap import indent
from sys import stdout
import yaml
from nbconvert.exporters import Exporter
from nbconvert.filters import ipython2python
from .utils import (env_keys, iter_env_lines, parse_config_line,
parse_mount_line, normalize_name)
from .archive import parse_archive_line
from .config import (new_config, update_in, get_in, set_env, set_commands,
Volume, meta_keys)
from .import magic as magic_module
here = path.dirname(path.abspath(__file__))
Magic = namedtuple('Magic', 'name args lines is_cell')
magic_handlers = {} # name -> function
env_files = set()
archive_settings = {}
is_comment = re.compile(r'\s*#.*').match
# # nuclio: return
is_return = re.compile(r'#\s*nuclio:\s*return').search
# # nuclio: ignore
has_ignore = re.compile(r'#\s*nuclio:\s*ignore').search
has_start = re.compile(r'#\s*nuclio:\s*start-code').search
has_end = re.compile(r'#\s*nuclio:\s*end-code').search
handler_decl = 'def {}(context, event):'
indent_prefix = ' '
line_magic = '%nuclio'
cell_magic = '%' + line_magic
handlers = []
class MagicError(Exception):
    """Raised for malformed or unparsable %nuclio magic lines."""
    pass


def create_logger():
    """Build the 'nuclio.export' logger writing to stdout at INFO level."""
    stream_handler = logging.StreamHandler(stdout)
    stream_handler.setFormatter(
        logging.Formatter('[%(name)s] %(asctime)s %(message)s'))
    logger = logging.getLogger('nuclio.export')
    logger.addHandler(stream_handler)
    logger.setLevel(logging.INFO)
    return logger


log = create_logger()
class NuclioExporter(Exporter):
    """Export to nuclio handler"""

    # Add "File -> Download as" menu in the notebook
    export_from_notebook = 'Nuclio'

    @property
    def output_mimetype(self):
        # Archive builds (extra files bundled) are zipped; otherwise YAML.
        if archive_settings:
            return 'application/zip'
        else:
            return 'application/yaml'

    def _file_extension_default(self):
        """Return default file extension"""
        return '.yaml'

    def from_notebook_node(self, nb, resources=None, **kw):
        """Convert a notebook node into (nuclio YAML config, resources).

        Walks the code cells, translating %nuclio magics into config
        updates and collecting the remaining code into the handler
        source.  Honors `nuclio: ignore` / `start-code` / `end-code`
        markers embedded in cell comments.
        """
        config = new_config()
        nbname = name = get_in(resources, 'metadata.name')  # notebook name
        if name:
            config['metadata']['name'] = normalize_name(name)
        config['spec']['handler'] = handler_name()
        io = StringIO()
        print(header(), file=io)
        for cell in filter(is_code_cell, nb['cells']):
            code = cell['source']
            if has_ignore(code):
                continue
            if has_end(code):
                break
            if has_start(code):
                # if we see indication of start, we ignore all previous cells
                io = StringIO()
                print(header(), file=io)
            code = filter_comments(code)
            if not code.strip():
                continue
            lines = code.splitlines()
            if cell_magic in code:
                self.handle_cell_magic(lines, io, config)
                continue
            self.handle_code_cell(lines, io, config)
        process_env_files(env_files, config)
        py_code = io.getvalue()
        # An externally supplied handler file overrides the notebook code.
        handler_path = environ.get(env_keys.handler_path)
        if handler_path:
            with open(handler_path) as fp:
                py_code = fp.read()
        efiles = []
        if archive_settings:
            if archive_settings['notebook'] and nbname:
                archive_settings['files'] += [nbname + '.ipynb']
            efiles = ','.join(archive_settings['files'])
            config['metadata']['annotations'][meta_keys.extra_files] = efiles
        if env_keys.code_target_path in environ:
            # Write the code to an explicit target path instead of embedding.
            code_path = environ.get(env_keys.code_target_path)
            with open(code_path, 'w') as fp:
                fp.write(py_code)
                fp.close()
        elif efiles and env_keys.drop_nb_outputs not in environ:
            outputs = {'handler.py': py_code,
                       'function.yaml': gen_config(config)}
            # NOTE(review): `efiles` is a comma-joined *string* here, so
            # this loop iterates single characters, not file names --
            # looks like a bug; confirm the intended source list
            # (probably archive_settings['files']).
            for filename in efiles:
                with open(filename) as fp:
                    data = fp.read()
                outputs[filename] = data
            resources['outputs'] = outputs
        else:
            # Embed the source base64-encoded directly in the config.
            data = b64encode(py_code.encode('utf-8')).decode('utf-8')
            update_in(config, 'spec.build.functionSourceCode', data)
        config = gen_config(config)
        resources['output_extension'] = '.yaml'
        return config, resources

    def find_cell_magic(self, lines):
        """Return index of first line that has %%nuclio"""
        for i, line in enumerate(lines):
            if cell_magic in line:
                return i
        return -1

    def handle_cell_magic(self, lines, io, config):
        """Process a %%nuclio cell: dispatch to the registered handler
        and emit any code the handler returns."""
        i = self.find_cell_magic(lines)
        if i == -1:
            raise MagicError('cannot find {}'.format(cell_magic))
        lines = lines[i:]
        name, args = parse_magic_line(lines[0])
        magic = Magic(name, args, lines[1:], is_cell=True)
        handler = magic_handlers.get(magic.name)
        if not handler:
            # Known-but-unimplemented commands are skipped with a warning;
            # unknown commands are an error.
            if magic.name not in magic_module.commands:
                raise NameError(
                    'unknown nuclio command: {}'.format(magic.name))
            else:
                log.warning('skipping %s - not implemented', magic.name)
                code = ''
        else:
            code = handler(magic, config)
        if code:
            print(ipython2python(code), file=io)

    def handle_code_cell(self, lines, io, config):
        """Process an ordinary code cell: expand inline %nuclio magics
        and emit the remaining code verbatim."""
        buf = []
        for line in lines:
            if is_comment(line):
                continue
            if '%nuclio' not in line:
                # ignore command or magic commands (other than %nuclio)
                if not (line.startswith('!') or line.startswith('%')):
                    buf.append(line)
                continue
            # Flush accumulated code before handling the magic line.
            if buf:
                print(ipython2python('\n'.join(buf)), file=io)
                buf = []
            name, args = parse_magic_line(line)
            magic = Magic(name, args, [], is_cell=False)
            handler = magic_handlers.get(magic.name)
            if not handler:
                raise NameError(
                    'unknown nuclio command: {}'.format(magic.name))
            out = handler(magic, config)
            if out:
                print(ipython2python(out), file=io)
        if buf:
            print(ipython2python('\n'.join(buf)), file=io)
def header():
    """One-line generated-by banner including a timestamp."""
    return '# Generated by {} on {:%Y-%m-%d %H:%M}\n'.format(
        exporter_name(), datetime.now())


def exporter_name():
    """Dotted import path of the exporter class."""
    return '{}.{}'.format(NuclioExporter.__module__, NuclioExporter.__name__)


def gen_config(config):
    """Serialize *config* to YAML, prefixed with the generated-by banner."""
    return header() + yaml.dump(config, default_flow_style=False)
def parse_magic_line(line):
    """Parse a '%nuclio' command line.

    Returns (name, args) where args is the raw argument *string*, or
    None when the line contains no %nuclio magic.  Raises MagicError
    when the magic has no command word.

    The old doctest showed args as a list (['a="b"']) although the code
    returns the string after the command -- fixed to match reality:

    >>> parse_magic_line('%nuclio config a="b"')
    ('config', 'a="b"')
    """
    if line_magic not in line:
        return None
    line = line.strip()
    # Accept both line (%) and cell (%%) forms.
    match = re.search(r'^%?%nuclio\s+(\w+)\s*', line)
    if not match:
        raise MagicError(line)
    cmd = match.group(1)
    args = line[match.end():].strip()
    return cmd, args
def magic_handler(fn):
    """Register *fn* as the handler for the %nuclio command of the same name."""
    magic_handlers[fn.__name__] = fn
    return fn


def _strip_config_flags(argline):
    """Drop leading --config-only / -c flags from a magic argument line.

    Extracted because `env` and `cmd` duplicated this logic verbatim.
    """
    if argline.startswith('--config-only'):
        argline = argline.replace('--config-only', '').strip()
    if argline.startswith('-c'):
        argline = argline.replace('-c', '').strip()
    return argline


@magic_handler
def env(magic, config):
    """Handle `%nuclio env` -- set environment variables in the config.

    `--local-only`/`-l` lines affect only the local environment and are
    ignored by the exporter.
    """
    argline = magic.args.strip()
    if argline.startswith('--local-only') or argline.startswith('-l'):
        return ''
    argline = _strip_config_flags(argline)
    set_env(config, [argline] + magic.lines)
    return ''


@magic_handler
def cmd(magic, config):
    """Handle `%nuclio cmd` -- add build commands to the config."""
    argline = _strip_config_flags(magic.args.strip())
    set_commands(config, [argline] + magic.lines)
    return ''
@magic_handler
def env_file(magic, config):
    """Handle `%nuclio env_file` -- remember env files to process later."""
    for raw in [magic.args] + magic.lines:
        file_name = raw.strip()
        # Skip blank lines and comments.
        if file_name[:1] in ('', '#'):
            continue
        if not path.isfile(file_name):
            log.warning('skipping %s - not found', file_name)
            continue
        env_files.add(file_name)
    return ''


def process_env_files(env_files, config):
    """Apply env vars from the collected files (plus any files named in
    the env-files environment variable) to the config."""
    # %nuclio env_file magic will populate this
    listed = json.loads(environ.get(env_keys.env_files, '[]'))
    for fname in env_files | set(listed):
        with open(fname) as fp:
            set_env(config, iter_env_lines(fp))
def is_code_cell(cell):
    """True when the notebook cell holds code."""
    return cell['cell_type'] == 'code'


def is_code_line(line):
    """A code line is a non-empty line starting with neither # nor %."""
    stripped = line.strip()
    return stripped[:1] not in {'#', '%', ''}


def add_return(line):
    """Prefix the first word-like token of *line* with 'return ',
    preserving any leading indentation."""
    match = re.search(r'\w', line)
    if match is None:
        # Nothing to return from (blank or punctuation-only line).
        # TODO: raise?
        return line
    idx = match.start()
    return '{}return {}'.format(line[:idx], line[idx:])
@magic_handler
def handler(magic, config):
    """Handle `%%nuclio handler` -- wrap the cell body in a handler
    function and point the config's handler spec at it (unless the
    handler name is overridden via the environment)."""
    name = magic.args if magic.args else next_handler_name()
    if env_keys.handler_name not in environ:
        module, _ = config['spec']['handler'].split(':')
        config['spec']['handler'] = '{}:{}'.format(module, name)
    code = '\n'.join(magic.lines)
    return handler_code(name, code)


def handler_code(name, code):
    """Wrap *code* in a nuclio handler function called *name*.

    Lines tagged `# nuclio: return` get a `return` prefix; the last
    code line also gets one if it doesn't already mention return.
    """
    lines = [handler_decl.format(name)]
    code = indent(code, indent_prefix)
    for line in code.splitlines():
        if is_return(line):
            line = add_return(line)
        lines.append(line)
    # Add return to last code line (if not there)
    for i, line in enumerate(lines[::-1]):
        if not is_code_line(line):
            continue
        if 'return' not in line:
            lines[len(lines)-i-1] = add_return(line)
        break
    return '\n'.join(lines)
@magic_handler
def build(magic, config):
    # Build is performed by the CLI/notebook flow; nothing to export.
    return ''


@magic_handler
def deploy(magic, config):
    # Deploy is handled elsewhere; nothing to emit in exported code.
    return ''


@magic_handler
def help(magic, config):
    # NOTE(review): shadows the `help` builtin, but the function name
    # must match the %nuclio command name, so it stays.
    return ''


@magic_handler
def show(magic, config):
    # Display-only magic; no effect on exported code or config.
    return ''


@magic_handler
def mount(magic, config):
    """Handle `%nuclio mount <mount-point> <remote-path>` -- add a volume."""
    args, rest = parse_mount_line(magic.args)
    if len(rest) != 2:
        raise MagicError(
            '2 arguments must be provided (mount point and remote path)')
    volume = Volume(rest[0], rest[1], typ=args.type, name=args.name,
                    key=args.key, readonly=args.readonly)
    volume.render(config)
    return ''
@magic_handler
def add(magic, config):
    """Handle `%nuclio add` -- register extra files to bundle into the
    deployment archive (optionally including the notebook itself)."""
    global archive_settings
    args, rest = parse_archive_line(magic.args)
    files = args.file + magic.lines
    # Validate every listed file before committing the settings.
    for entry in files:
        if not path.isfile(entry.strip()):
            raise MagicError('file {} doesnt exist'.format(entry.strip()))
    archive_settings = {'files': files, 'notebook': args.add_notebook}
    return ''


@magic_handler
def config(magic, config):
    """Handle `%nuclio config` -- apply `key = value` / `key += value`
    lines to the function config."""
    for raw in [magic.args] + magic.lines:
        entry = raw.strip()
        # Skip blanks and comment lines.
        if not entry or entry[0] == '#':
            continue
        key, op, value = parse_config_line(entry)
        update_in(config, key, value, op == '+=')
    return ''


def next_handler_name():
    """Generate 'handler', 'handler_1', 'handler_2', ... per call."""
    name = 'handler_{}'.format(len(handlers)) if handlers else 'handler'
    handlers.append(name)
    return name
def module_name(py_file):
    """Module name for a python file path.

    >>> module_name('/path/to/handler.py')
    'handler'
    """
    stem, _ext = path.splitext(path.basename(py_file))
    return stem
def handler_name():
    """Return the 'module:function' handler spec, honoring the
    handler-path / handler-name environment overrides."""
    handler_path = environ.get(env_keys.handler_path)
    module = module_name(handler_path) if handler_path else 'handler'
    name = environ.get(env_keys.handler_name, 'handler')
    return '{}:{}'.format(module, name)


def filter_comments(code):
    """Strip whole-line comments from *code*."""
    kept = (line for line in code.splitlines() if not is_comment(line))
    return '\n'.join(kept)
|
from boa3.boa3 import Boa3
from boa3.exception import CompilerError, CompilerWarning
from boa3.model.builtin.builtin import Builtin
from boa3.neo.vm.opcode.Opcode import Opcode
from boa3.neo.vm.type.Integer import Integer
from boa3.neo.vm.type.String import String
from boa3_test.tests.boa_test import BoaTest
from boa3_test.tests.test_classes.TestExecutionException import TestExecutionException
from boa3_test.tests.test_classes.testengine import TestEngine
class TestException(BoaTest):
    """Tests for `raise` and `try/except/else/finally` support in the boa3 compiler.

    Each test hand-assembles the NeoVM bytecode expected from `Boa3.compile`
    and, where the contract compiles, runs it in a `TestEngine` to check the
    runtime behavior of both the non-raising and the raising code path.
    """
    # Folder (relative to the test resources root) holding the contracts under test.
    default_folder: str = 'test_sc/exception_test'
    # Serialized message NeoVM receives when an exception is raised without arguments.
    EXCEPTION_EMPTY_MESSAGE = String(Builtin.Exception.default_message).to_bytes()
    def test_raise_exception_empty_message(self):
        """`raise Exception()` compiles to PUSHDATA1(default message) + THROW."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x00'
            + b'\x01'
            + Opcode.LDARG0     # if arg0 < 0
            + Opcode.PUSH0
            + Opcode.LT
            + Opcode.JMPIFNOT
            + Integer(14).to_byte_array(signed=True, min_length=1)
            + Opcode.PUSHDATA1  # raise Exception
            + Integer(len(self.EXCEPTION_EMPTY_MESSAGE)).to_byte_array(signed=True, min_length=1)
            + self.EXCEPTION_EMPTY_MESSAGE
            + Opcode.THROW
            + Opcode.RET
        )
        path = self.get_contract_path('RaiseExceptionEmptyMessage.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        # positive argument: no exception raised
        self.run_smart_contract(engine, path, 'test_raise', 10)
        # negative argument: the contract must throw
        with self.assertRaises(TestExecutionException, msg=self.EXCEPTION_EMPTY_MESSAGE):
            self.run_smart_contract(engine, path, 'test_raise', -10)
    def test_raise_exception_with_message(self):
        """`raise Exception('...')` embeds the custom message in the PUSHDATA1."""
        exception_message = String('raised an exception').to_bytes()
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x00'
            + b'\x01'
            + Opcode.LDARG0     # if arg0 < 0
            + Opcode.PUSH0
            + Opcode.LT
            + Opcode.JMPIFNOT
            + Integer(24).to_byte_array(signed=True, min_length=1)
            + Opcode.PUSHDATA1  # raise Exception('raised an exception')
            + Integer(len(exception_message)).to_byte_array(signed=True, min_length=1)
            + exception_message
            + Opcode.THROW
            + Opcode.RET
        )
        path = self.get_contract_path('RaiseExceptionWithMessage.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        self.run_smart_contract(engine, path, 'test_raise', 10)
        # NOTE(review): msg here is probably meant to be exception_message
        # (cf. test_raise_exception_variable_message, which uses the custom
        # message) — confirm. msg only affects the failure report, not the check.
        with self.assertRaises(TestExecutionException, msg=self.EXCEPTION_EMPTY_MESSAGE):
            self.run_smart_contract(engine, path, 'test_raise', -10)
    def test_raise_exception_without_call(self):
        """`raise Exception` (class, not instance) compiles like an empty-message raise."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x00'
            + b'\x01'
            + Opcode.LDARG0     # if arg0 < 0
            + Opcode.PUSH0
            + Opcode.LT
            + Opcode.JMPIFNOT
            + Integer(14).to_byte_array(signed=True, min_length=1)
            + Opcode.PUSHDATA1  # raise Exception
            + Integer(len(self.EXCEPTION_EMPTY_MESSAGE)).to_byte_array(signed=True, min_length=1)
            + self.EXCEPTION_EMPTY_MESSAGE
            + Opcode.THROW
            + Opcode.RET
        )
        path = self.get_contract_path('RaiseExceptionWithoutCall.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        self.run_smart_contract(engine, path, 'test_raise', 10)
        with self.assertRaises(TestExecutionException, msg=self.EXCEPTION_EMPTY_MESSAGE):
            self.run_smart_contract(engine, path, 'test_raise', -10)
    def test_raise_variable_exception(self):
        """Raising an exception stored in a local variable loads it with LDLOC0."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.PUSHDATA1  # x = Exception
            + Integer(len(self.EXCEPTION_EMPTY_MESSAGE)).to_byte_array(signed=True, min_length=1)
            + self.EXCEPTION_EMPTY_MESSAGE
            + Opcode.STLOC0
            + Opcode.LDARG0     # if arg0 < 0
            + Opcode.PUSH0
            + Opcode.LT
            + Opcode.JMPIFNOT
            + Integer(4).to_byte_array(signed=True, min_length=1)
            + Opcode.LDLOC0     # raise x
            + Opcode.THROW
            + Opcode.RET
        )
        path = self.get_contract_path('RaiseVariableException.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        self.run_smart_contract(engine, path, 'test_raise', 10)
        with self.assertRaises(TestExecutionException, msg=self.EXCEPTION_EMPTY_MESSAGE):
            self.run_smart_contract(engine, path, 'test_raise', -10)
    def test_raise_exception_variable_message(self):
        """`raise Exception(x)` with x holding the message string."""
        message = 'raised an exception'
        exception_message = String(message).to_bytes()
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.PUSHDATA1  # x = 'raised an exception'
            + Integer(len(exception_message)).to_byte_array(signed=True, min_length=1)
            + exception_message
            + Opcode.STLOC0
            + Opcode.LDARG0     # if arg0 < 0
            + Opcode.PUSH0
            + Opcode.LT
            + Opcode.JMPIFNOT
            + Integer(24).to_byte_array(signed=True, min_length=1)
            + Opcode.PUSHDATA1  # raise Exception(x)
            + Integer(len(exception_message)).to_byte_array(signed=True, min_length=1)
            + exception_message
            + Opcode.THROW
            + Opcode.RET
        )
        path = self.get_contract_path('RaiseExceptionVariableMessage.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        self.run_smart_contract(engine, path, 'test_raise', 10)
        with self.assertRaises(TestExecutionException, msg=message):
            self.run_smart_contract(engine, path, 'test_raise', -10)
    def test_raise_specific_exception(self):
        """Specific exception types (e.g. ValueError) compile like plain Exception."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x00'
            + b'\x01'
            + Opcode.LDARG0     # if arg0 < 0
            + Opcode.PUSH0
            + Opcode.LT
            + Opcode.JMPIFNOT
            + Integer(14).to_byte_array(signed=True, min_length=1)
            + Opcode.PUSHDATA1  # raise ValueError
            + Integer(len(self.EXCEPTION_EMPTY_MESSAGE)).to_byte_array(signed=True, min_length=1)
            + self.EXCEPTION_EMPTY_MESSAGE
            + Opcode.THROW
            + Opcode.RET
        )
        path = self.get_contract_path('RaiseSpecificException.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        self.run_smart_contract(engine, path, 'test_raise', 10)
        with self.assertRaises(TestExecutionException, msg=self.EXCEPTION_EMPTY_MESSAGE):
            self.run_smart_contract(engine, path, 'test_raise', -10)
    def test_raise_mismatched_type(self):
        """Raising a non-exception value is a MismatchedTypes compiler error."""
        path = self.get_contract_path('RaiseMismatchedType.py')
        self.assertCompilerLogs(CompilerError.MismatchedTypes, path)
    def test_try_except_without_exception(self):
        """Bare `try/except` compiles to TRY ... JMP (except body) ... ENDTRY."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.TRY        # try:
            + Integer(7).to_byte_array(signed=True, min_length=1)    # jmp to exception
            + Integer(0).to_byte_array(signed=True, min_length=1)    # jmp to finally if exists
            + Opcode.LDARG0     # x = arg
            + Opcode.STLOC0
            + Opcode.JMP        # except:
            + Integer(5).to_byte_array(signed=True, min_length=1)
            + Opcode.DROP
            + Opcode.PUSH0      # x = 0
            + Opcode.STLOC0
            + Opcode.ENDTRY
            + Integer(2).to_byte_array(signed=True, min_length=1)
            + Opcode.LDLOC0     # return x
            + Opcode.RET
        )
        path = self.get_contract_path('TryExceptWithoutException.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'test_try_except', 10)
        self.assertEqual(10, result)
        result = self.run_smart_contract(engine, path, 'test_try_except', -110)
        self.assertEqual(-110, result)
    def test_try_except_base_exception(self):
        """`except BaseException` produces the same bytecode as a bare except."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.TRY        # try:
            + Integer(7).to_byte_array(signed=True, min_length=1)    # jmp to exception
            + Integer(0).to_byte_array(signed=True, min_length=1)    # jmp to finally if exists
            + Opcode.LDARG0     # x = arg
            + Opcode.STLOC0
            + Opcode.JMP        # except BaseException:
            + Integer(5).to_byte_array(signed=True, min_length=1)
            + Opcode.DROP
            + Opcode.PUSH0      # x = 0
            + Opcode.STLOC0
            + Opcode.ENDTRY
            + Integer(2).to_byte_array(signed=True, min_length=1)
            + Opcode.LDLOC0     # return x
            + Opcode.RET
        )
        path = self.get_contract_path('TryExceptBaseException.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'test_try_except', 10)
        self.assertEqual(10, result)
        result = self.run_smart_contract(engine, path, 'test_try_except', -110)
        self.assertEqual(-110, result)
    def test_try_except_specific_exception(self):
        """`except ValueError` compiles (with a UsingSpecificException warning)."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.TRY        # try:
            + Integer(7).to_byte_array(signed=True, min_length=1)    # jmp to exception
            + Integer(0).to_byte_array(signed=True, min_length=1)    # jmp to finally if exists
            + Opcode.LDARG0     # x = arg
            + Opcode.STLOC0
            + Opcode.JMP        # except ValueError:
            + Integer(5).to_byte_array(signed=True, min_length=1)
            + Opcode.DROP
            + Opcode.PUSH0      # x = 0
            + Opcode.STLOC0
            + Opcode.ENDTRY
            + Integer(2).to_byte_array(signed=True, min_length=1)
            + Opcode.LDLOC0     # return x
            + Opcode.RET
        )
        path = self.get_contract_path('TryExceptSpecificException.py')
        output = self.assertCompilerLogs(CompilerWarning.UsingSpecificException, path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'test_try_except', 10)
        self.assertEqual(10, result)
        result = self.run_smart_contract(engine, path, 'test_try_except', -110)
        self.assertEqual(-110, result)
    def test_try_except_with_name(self):
        """`except ... as name` is not supported and must log a compiler error."""
        path = self.get_contract_path('TryExceptWithName.py')
        self.assertCompilerLogs(CompilerError.NotSupportedOperation, path)
    def test_try_except_finally(self):
        """`try/except/finally`: finally body runs via ENDTRY jump + ENDFINALLY."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0
            + Opcode.PUSH4
            + Opcode.DIV
            + Opcode.STLOC0
            + Opcode.TRY        # try:
            + Integer(9).to_byte_array(signed=True, min_length=1)    # jmp to exception
            + Integer(15).to_byte_array(signed=True, min_length=1)   # jmp to finally if exists
            + Opcode.LDLOC0     # x += arg
            + Opcode.LDARG0
            + Opcode.ADD
            + Opcode.STLOC0
            + Opcode.JMP        # except ValueError:
            + Integer(6).to_byte_array(signed=True, min_length=1)
            + Opcode.DROP
            + Opcode.LDLOC0     # x = -x
            + Opcode.NEGATE
            + Opcode.STLOC0
            + Opcode.ENDTRY
            + Integer(7).to_byte_array(signed=True, min_length=1)
            + Opcode.LDLOC0     # finally
            + Opcode.PUSH2      # x *= 2
            + Opcode.MUL
            + Opcode.STLOC0
            + Opcode.ENDFINALLY
            + Opcode.LDLOC0     # return x
            + Opcode.RET
        )
        path = self.get_contract_path('TryExceptFinally.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'test_try_except', 10)
        self.assertEqual(24, result)
        result = self.run_smart_contract(engine, path, 'test_try_except', -110)
        self.assertEqual(-274, result)
    def test_try_except_else(self):
        """`try/except/else`: else body is reached by jumping over the except body."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.TRY        # try:
            + Integer(7).to_byte_array(signed=True, min_length=1)    # jmp to exception
            + Integer(0).to_byte_array(signed=True, min_length=1)    # jmp to finally if exists
            + Opcode.LDARG0     # x = arg
            + Opcode.STLOC0
            + Opcode.JMP        # except BaseException:
            + Integer(7).to_byte_array(signed=True, min_length=1)
            + Opcode.DROP
            + Opcode.PUSH0      # x = 0
            + Opcode.STLOC0
            + Opcode.JMP        # else:
            + Integer(5).to_byte_array(signed=True, min_length=1)
            + Opcode.LDARG0     # x = -arg
            + Opcode.NEGATE
            + Opcode.STLOC0
            + Opcode.ENDTRY
            + Integer(2).to_byte_array(signed=True, min_length=1)
            + Opcode.LDLOC0     # return x
            + Opcode.RET
        )
        path = self.get_contract_path('TryExceptElse.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'test_try_except', 10)
        self.assertEqual(-10, result)
        result = self.run_smart_contract(engine, path, 'test_try_except', -110)
        self.assertEqual(110, result)
    def test_try_except_else_finally(self):
        """Full `try/except/else/finally` chain with all four bodies emitted."""
        expected_output = (
            Opcode.INITSLOT     # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0
            + Opcode.PUSH4
            + Opcode.DIV
            + Opcode.STLOC0
            + Opcode.TRY        # try:
            + Integer(9).to_byte_array(signed=True, min_length=1)    # jmp to exception
            + Integer(21).to_byte_array(signed=True, min_length=1)   # jmp to finally if exists
            + Opcode.LDLOC0     # x += arg
            + Opcode.LDARG0
            + Opcode.ADD
            + Opcode.STLOC0
            + Opcode.JMP        # except ValueError:
            + Integer(8).to_byte_array(signed=True, min_length=1)
            + Opcode.DROP
            + Opcode.LDLOC0     # x = -x
            + Opcode.NEGATE
            + Opcode.STLOC0
            + Opcode.JMP        # else:
            + Integer(6).to_byte_array(signed=True, min_length=1)
            + Opcode.LDLOC0     # x += arg
            + Opcode.LDARG0
            + Opcode.ADD
            + Opcode.STLOC0
            + Opcode.ENDTRY
            + Integer(7).to_byte_array(signed=True, min_length=1)
            + Opcode.LDLOC0     # finally
            + Opcode.PUSH2      # x *= 2
            + Opcode.MUL
            + Opcode.STLOC0
            + Opcode.ENDFINALLY
            + Opcode.LDLOC0     # return x
            + Opcode.RET
        )
        path = self.get_contract_path('TryExceptElseFinally.py')
        output = Boa3.compile(path)
        self.assertEqual(expected_output, output)
        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'test_try_except', 10)
        self.assertEqual(44, result)
        result = self.run_smart_contract(engine, path, 'test_try_except', -110)
        self.assertEqual(-494, result)
|
import io
import os
import sys
from configparser import ConfigParser
import click
from beets import config
from beets.util import confit
from summertunes.cli.run_mpv import run_mpv
from summertunes.cli.run_serve import run_serve
from summertunes.config_defaults import CONFIG_DEFAULTS
import beetsplug.web
config.resolve()
# Default 'beet web' port: honor the beets web plugin config, falling back to 8337.
CONFIG_DEFAULTS['beets_web_port'] = config['web']['port'].get(8337)
# Overlay values from the user's [summertunes] section of the beets config.
for k in CONFIG_DEFAULTS:
    try:
        CONFIG_DEFAULTS[k] = config['summertunes'][k].get()
    except confit.NotFoundError:
        # Key not configured by the user; keep the built-in default.
        pass
PATH_APP_DIR = click.get_app_dir('summertunes')
PATH_CONFIG = os.path.join(click.get_app_dir('summertunes'), 'summertunes.conf')
# Shared click settings: -h help alias, plus per-subcommand option defaults.
CONTEXT_SETTINGS = dict(
    help_option_names=['-h', '--help'],
    default_map={
        'mpv': CONFIG_DEFAULTS,
        'serve': CONFIG_DEFAULTS,
    }
)
def option_mpv_websocket_port(func):
    """Decorator adding the shared --mpv-websocket-port option (default 3001)."""
    decorator = click.option(
        '--mpv-websocket-port', default=3001, help='Port to expose mpv websocket on'
    )
    return decorator(func)
# Top-level click group; subcommands (mpv, serve) attach via @cli.command().
# The docstring below is the CLI help text, so it must stay user-facing.
@click.group(context_settings=CONTEXT_SETTINGS)
@click.option(
    '--no-config-prompt', default=False, is_flag=True,
    help='If passed, never ask to create a config if none exists')
@click.pass_context
def cli(ctx, no_config_prompt):
    """
    Summertunes is a web interface for the Beets local music database and mpv
    audio/video player that gives you an iTunes-like experience in your web
    browser.
    To run Summertunes, you'll need to run two commands in separate
    terminals:
    1. 'beet web', using the latest version of Beets (at this time, unreleased
    HEAD)
    2. 'summertunes mpv', which runs mpv and exposes its socket interface over
    a websocket.
    """
    # NOTE(review): no_config_prompt is accepted but not used in this body —
    # confirm whether downstream code reads it from the click context.
@cli.command()
@option_mpv_websocket_port
@click.option(
    '--mpv-socket-path', default='/tmp/mpv_socket',
    help="Path to use for mpv's UNIX socket")
def mpv(mpv_websocket_port, mpv_socket_path):
    """Run an instance of mpv, configured to be reachable by 'summertunes serve'"""
    # Thin wrapper: all work happens in run_mpv (bridges the UNIX socket to a websocket).
    run_mpv(mpv_websocket_port, mpv_socket_path)
# fix: was `@cli.command([])` — the stray `[]` positional only "worked" because
# click falls back to the function name when the given name is falsy.
@cli.command()
@click.option(
    '--dev-server-port', default=3000, help='Port to expose server on')
@click.option(
    '--beets-web-port', default=8337, help="Port that 'beet web' is running on")
@click.option(
    '--last-fm-api-key', default=None,
    help='last.fm API key for fetching album art')
@click.option(
    '--dev/--no-dev', default=False,
    help='If true, run using "npm start" instead of Python static file server. Default False.')
# NOTE(review): the help text below says "Default True" but default=False —
# confirm which default is intended.
@click.option(
    '--mpv-enabled/--no-mpv-enabled', default=False,
    help="""If true, tell the client how to find the mpv websocket. Default
    True. Use --no-mpv-enabled if you are not running mpv.""")
@option_mpv_websocket_port
def serve(dev_server_port, beets_web_port, last_fm_api_key, dev, mpv_enabled, mpv_websocket_port):
    """Serve the Summertunes web interface"""
    # Thin wrapper: all CLI options are forwarded positionally to run_serve.
    run_serve(dev_server_port, beets_web_port, last_fm_api_key, dev, mpv_enabled, mpv_websocket_port)
|
from __future__ import absolute_import, unicode_literals
from celery import shared_task
# from rest_framework import status
# from rest_framework.response import Response
import sys
import os
sys.path.append('..')
from process_pdf import read
__all__ = ['process_info']
@shared_task
def log(data):
    """Celery task: print *data* to the worker's stdout for debugging."""
    print(data)
@shared_task
def read_pdf(data):
    """Celery task: read the PDF referenced by *data* and return its pages.

    *data* is expected to be a path-like string; the first two '/'-separated
    segments are dropped and the rest is re-rooted at the current working
    directory.
    """
    # NOTE(review): calling log(...) directly runs it synchronously in this
    # worker rather than dispatching with log.delay(data) — confirm intended.
    log(data)
    # Presumably strips a URL/media prefix (e.g. '/media/') before re-rooting
    # at cwd — TODO confirm against the caller's path format.
    return read(os.getcwd() + '/' + '/'.join(data.split('/')[2:]))
@shared_task
def read_job_description(data):
    """Celery task: tokenize the job description on single spaces.

    Note: split(' ') (unlike split()) keeps empty strings for consecutive
    spaces and does not split on other whitespace.
    """
    return data.split(' ')
def process_info(data):
    """Extract resume and job-description tokens from the request payload.

    Reads the PDF at data['resume'], flattens its per-page word lists into a
    single list, and tokenizes data['description'] into words.
    """
    pages = read_pdf(data['resume'])
    flattened = [token for page in pages for token in page]
    return {
        'resume_data': flattened,
        'job_data': read_job_description(data['description']),
    }
|
# Read a sentence and print it uppercased with all spaces removed.
frase = input("Escreva uma frase: ")
# fix: the original loop variable was named `chr`, shadowing the builtin chr();
# a join over a generator also avoids quadratic string concatenation.
fraseNova = "".join(letra.upper() for letra in frase if letra != " ")
print(fraseNova)
#https://pt.stackoverflow.com/q/340130/101
|
import tkinter as tk
from tkinter import ttk
from thonny import get_workbench, ui_utils
import traceback
from thonny.ui_utils import ems_to_pixels, CommonDialog
class ConfigurationDialog(CommonDialog):
    """Thonny options dialog hosting one notebook tab per registered settings page."""
    # Remembered across openings so the dialog reopens on the same tab.
    last_shown_tab_index = 0
    def __init__(self, master, page_records):
        """Build the dialog.

        page_records: iterable of (key, title, page_class, order) tuples;
        pages are instantiated sorted by (order, key).
        """
        super().__init__(master)
        width = ems_to_pixels(53)
        height = ems_to_pixels(43)
        self.geometry("%dx%d" % (width, height))
        self.title(_("Thonny options"))
        self.columnconfigure(0, weight=1)
        self.rowconfigure(0, weight=1)
        main_frame = ttk.Frame(self)  # otherwise there is wrong color background with clam
        main_frame.grid(row=0, column=0, sticky=tk.NSEW)
        main_frame.columnconfigure(0, weight=1)
        main_frame.rowconfigure(0, weight=1)
        self._notebook = ttk.Notebook(main_frame)
        self._notebook.grid(row=0, column=0, columnspan=3, sticky=tk.NSEW, padx=10, pady=10)
        self._ok_button = ttk.Button(main_frame, text=_("OK"), command=self._ok, default="active")
        self._cancel_button = ttk.Button(main_frame, text=_("Cancel"), command=self._cancel)
        self._ok_button.grid(row=1, column=1, padx=(0, 11), pady=(0, 10))
        self._cancel_button.grid(row=1, column=2, padx=(0, 11), pady=(0, 10))
        self._page_records = []
        for key, title, page_class, order in sorted(page_records, key=lambda r: (r[3], r[0])):
            try:
                # Spacer frame provides the tab's padding around the page widget.
                spacer = ttk.Frame(self)
                spacer.rowconfigure(0, weight=1)
                spacer.columnconfigure(0, weight=1)
                page = page_class(spacer)
                page.key = key
                self._page_records.append((key, title, page))
                page.grid(sticky=tk.NSEW, pady=(15, 10), padx=15)
                self._notebook.add(spacer, text=title)
            except Exception:
                # A broken page must not prevent the rest of the dialog from opening.
                traceback.print_exc()
        self.bind("<Return>", self._ok, True)
        self.bind("<Escape>", self._cancel, True)
        self._notebook.select(self._notebook.tabs()[ConfigurationDialog.last_shown_tab_index])
    def select_page(self, key):
        """Bring the page registered under *key* to the front.

        NOTE(review): assumes notebook tab order matches _page_records order,
        which holds as long as failed pages raised before _notebook.add().
        """
        for i, tab in enumerate(self._notebook.tabs()):
            if self._page_records[i][0] == key:
                self._notebook.select(tab)
    def _ok(self, event=None):
        """Apply every page; a page returning False vetoes closing the dialog."""
        for key, title, page in self._page_records:
            try:
                if page.apply() == False:
                    return
            except Exception:
                get_workbench().report_exception("Error when applying options in " + title)
        self.destroy()
    def _cancel(self, event=None):
        """Notify every page of cancellation, then close the dialog."""
        for key, title, page in self._page_records:
            try:
                page.cancel()
            except Exception:
                get_workbench().report_exception("Error when cancelling options in " + title)
        self.destroy()
    def destroy(self):
        # Persist the active tab index so the next opening restores it.
        ConfigurationDialog.last_shown_tab_index = self._notebook.index(self._notebook.select())
        super().destroy()
class ConfigurationPage(ttk.Frame):
    """Example dummy implementation of a configuration page.

    Configuration pages are not required to inherit from this class (any
    widget works), but they must have a constructor taking the master
    widget as its single parameter.
    """
    def __init__(self, master):
        ttk.Frame.__init__(self, master)
    def add_checkbox(
        self, flag_name, description, row=None, column=0, padx=0, pady=0, columnspan=1, tooltip=None
    ):
        """Add a Checkbutton bound to the workbench variable *flag_name*."""
        var = get_workbench().get_variable(flag_name)
        widget = ttk.Checkbutton(self, text=description, variable=var)
        widget.grid(
            row=row, column=column, columnspan=columnspan, sticky=tk.W, padx=padx, pady=pady
        )
        if tooltip is not None:
            ui_utils.create_tooltip(widget, tooltip)
    def add_combobox(
        self, option_name, values, row=None, column=0, padx=0, pady=0, columnspan=1, width=None
    ):
        """Add a read-only Combobox bound to the workbench variable *option_name*."""
        var = get_workbench().get_variable(option_name)
        widget = ttk.Combobox(
            self,
            exportselection=False,
            textvariable=var,
            state="readonly",
            height=15,
            width=width,
            values=values,
        )
        widget.grid(
            row=row, column=column, columnspan=columnspan, sticky=tk.W, padx=padx, pady=pady
        )
    def add_entry(self, option_name, row=None, column=0, pady=0, padx=0, columnspan=1, **kw):
        """Add an Entry bound to the workbench variable *option_name*."""
        var = get_workbench().get_variable(option_name)
        widget = ttk.Entry(self, textvariable=var, **kw)
        widget.grid(row=row, column=column, columnspan=columnspan, sticky=tk.W, padx=padx, pady=pady)
    def apply(self):
        """Return False when the page holds invalid input and the dialog
        should not be closed; any other return value counts as success."""
        return None
    def cancel(self):
        """Hook called when the dialog gets cancelled."""
        return None
|
# links.py
'''
The elements of genealogy are those things which get their own database table, for the most part, and are endowed with unique ID numbers. The
element category names are listed in the constant `ELEMENTS`. The most
key elements such as persons, places, events, notes, images and others
will be referenced in one or more specialized junction tables or foreign
key tables. More straightforward elements can all be lumped together in
one junction table in the database called `links_links`. The two plurals
in the table name indicate that it's a many-to-many table. For example, a
to-do can be linked to any number of projects and a project can be linked
to any number of to-dos. In order to do this, a separate row in a database table is used for each link. So in links_links, generally there will be only two values besides the ID, and these values will be foreign keys, i.e. references to primary keys or ID numbers from primary tables.
There are also tables that store one-to-many relationships, such as
`finding_places` which associates one finding (conclusion: event or
attribute) with a whole nested hierarchy of places. Elements with
one-to-one relationships can be stored together in a general table,
as values or as foreign keys referencing a primary key in a primary
table.
Primary tables exist for each of the elements so that each unique member
of each element category will have its own primary key or unique ID number
within that category. To link two elements together, they get their own
row in links_links or another junction table, and the other columns in the
table will all be null except for links_links_id, a primary key which might
never be used.
Some special elements exist which have two sets of names because I didn't
want to type long words. Conclusions and assertions are basic design
elements which exist separate from each other, but conclusions can
optionally be based on assertions while assertions must be based on
citations which must be based on sources. Sources can be linked to
repositories optionally. Conclusions are separated into events and
attributes in the GUI depending on whether or not the element is
officially associated with a date (event) or not (attribute). Attributes
can become events by having a date added and events can be changed to
attributes by having the date removed or added optionally to a
`particulars` or `note` column where it won't be officially recognized as
a date. In the code, conclusions are called `findings` and assertions are
called `claims`.
Some elements are pre-loaded with types in primary type tables, while some
are completely open-ended so aren't linked to types, such as projects and to-dos which can be linked to each other and anything else, but are not linked to any underlying primary type table. Type tables never have a foreign key in them, but their primary key is always used as a foreign key in other tables. Primary type tables underlie many somewhat primary tables such as the finding table which stores events and attributes. It includes an event_type_id column for foreign keys. The findings_persons table includes foreign key references to finding table IDs and kin type IDs (or "relationships" as they're called in ELEMENTS.)
Event types are used equally for conclusions and assertions (findings and claims.)
There's such a thing as splitting hairs too much. Treebard doesn't track
jurisdiction types such as "county", "parish", "township", "city", etc. as it
would be nearly impossible to do it well but adds nothing to the research.
I could think of no reason to classify sources by type. Is it a census or a gravestone? Isn't it obvious? But I might re-think this when I start working
on that part of the design. I think it would be silly to have types for citations. Is it a page number or a line number or a chapter number? Just say it, that's what citations are. "Volume 5, Chapter 7, page 162".
On the other hand, while the word "county" is part of the place name in Ireland and the US, in England it's just "Leicester", not "Leicester County". So jurisdictional categories would add some detail, but I don't think they would add detail that should be used to perform logic operations. A column could be added to the place table for jurisdiction, but it would just be a detail to display, not the basis for Treebard to draw any conclusions. Similarly, gender exists as a category but in our modern times we can no longer get away with using gender to assign spousal terminology. The user has to be given free rein to assign his own descriptions to a relationship, so relationships (called "kin" in the code) are one category that the user can add new members to as he sees fit.
Treebard recognizes three genders: male, female and unknown. If another is added for the purpose of political correctness, it would be something non-descript such as "other" so as to not take sides or tie the design to the political arguments popular in 2021. We recognize biologically-imposed categories but don't use gender to enforce anything except the roles of biological mother and father in regards to offspring.
'''
# Element categories that get their own primary database table and unique IDs
# (see the module docstring above for how they are linked via junction tables).
ELEMENTS = (
    "persons", "places", "assertions", "events", "attributes", "citations", "names", "sources", "projects", "to-dos", "contacts", "images", "notes", "repositories", "reports", "charts", "media", "relationships", "roles")
from .config import ADDRESS, PORT
from .service import Service
import click
@click.command('mui-server')
@click.option('--port', '-p',
              type=int, default=PORT,
              help=f'Set service port number. Defaults to {PORT}.')
@click.option('--address', '-a',
              type=str, default=ADDRESS,
              help=f'Set service IP address. Defaults to "{ADDRESS}".')
def mui_server(port, address):
    """
    Starts a service which exposes a RESTful API to the Multiply UI.
    """
    # Service.start() runs the server; this call does not return until shutdown.
    service = Service(address, port)
    service.start()
def main(args=None):
    """Programmatic entry point: forward *args* to the click command's main()."""
    mui_server.main(args=args)
if __name__ == '__main__':
    main()
|
import pandas as pd
import numpy as np
import config
import os
import joblib
from sklearn import metrics
from sklearn import preprocessing
from xgboost import XGBClassifier
from lightgbm import LGBMClassifier
from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier, GradientBoostingClassifier
import time
import warnings
from sklearn.model_selection import StratifiedKFold
warnings.filterwarnings('ignore')
def data_cleaning(train, test):
    """Clean and label-encode the census-income train/test frames.

    Replaces the dataset's ' ?' missing-value marker with NaN, maps the
    target `y` to {0, 1}, fills categorical NaNs with 'NONE', and label
    encodes every non-numeric feature column.

    Args:
        train: raw training DataFrame (must contain column 'y').
        test: raw test DataFrame (same feature columns, no 'y' required).

    Returns:
        (train_df, test_df): cleaned copies; the inputs are not mutated.
    """
    train_df = train.copy()
    test_df = test.copy()
    # Numeric columns are left untouched; everything else is categorical.
    num_feas = ['age', 'wage per hour', 'capital gains', 'capital losses',
                'dividends from stocks', 'num persons worked for employer',
                'own business or self employed', 'weeks worked in year']
    ## only for this competition, mapping them to 0 and 1 as suggested by the organizer
    ## the dataset has labelled all NaNs with ' ?' already
    train_df[train_df == ' ?'] = np.nan
    test_df[test_df == ' ?'] = np.nan
    train_df.y = train_df.y.map({' - 50000.': 0, ' 50000+.': 1})
    ## extract all features
    features = [i for i in train_df.columns if i not in ('y', 'kfold', 'id')]
    for col in features:
        if col not in num_feas:
            # BUG FIX: fill NaNs *before* casting to str. astype(str) turns NaN
            # into the literal string 'nan', so the original
            # .astype(str).fillna('NONE') never filled anything.
            train_df.loc[:, col] = train_df[col].fillna('NONE').astype(str)
            test_df.loc[:, col] = test_df[col].fillna('NONE').astype(str)
    ## label encode the categorical features
    for col in features:
        if col not in num_feas:
            lbl = preprocessing.LabelEncoder()
            # BUG FIX: fit on train + test so categories that only appear in
            # the test set do not crash transform() with an unseen-label error.
            lbl.fit(pd.concat([train_df[col], test_df[col]], axis=0))
            train_df.loc[:, col] = lbl.transform(train_df[col])
            test_df.loc[:, col] = lbl.transform(test_df[col])
    return train_df, test_df
def stacking(X, y, X_submission):
    """Build level-1 stacking features from 9 classifiers with 5-fold CV.

    For each classifier: out-of-fold predicted probabilities become the
    train-side meta-feature; the mean of the per-fold predictions on
    X_submission becomes the test-side meta-feature. Both matrices are
    written to CSV under input/.

    Args:
        X: training feature matrix (numpy array).
        y: binary training labels (0/1).
        X_submission: test feature matrix to predict for submission.

    Returns:
        None (results are persisted to CSV as a side effect).
    """
    # 5-fold cross-validation
    n_folds = 5
    nseed = 42
    # class balance: negative/positive ratio, used for scale_pos_weight
    spw = float(len(y)-sum(y))/float(sum(y))
    clfs = [LGBMClassifier(n_estimators=200, learning_rate=0.1, max_depth=8, num_leaves=51, min_child_weight=2.5,
                subsample=0.8, subsample_freq=1, colsample_bytree=0.8, objective='binary', reg_alpha=1e-05, reg_lambda=0.8,
                scale_pos_weight=spw, metric='auc', boosting='gbdt', seed=nseed, n_jobs=-1),
            LGBMClassifier(n_estimators=100, learning_rate=0.01, max_depth=8, num_leaves=51, min_child_weight=2.5,
                subsample=0.7, subsample_freq=1, colsample_bytree=0.7, objective='binary', reg_alpha=1e-05, reg_lambda=1,
                scale_pos_weight=spw, metric='auc', boosting='gbdt', seed=nseed, n_jobs=-1),
            RandomForestClassifier(n_estimators=500, max_depth=8, bootstrap=True, min_samples_leaf=50,
                oob_score=True, class_weight='balanced', criterion='gini', random_state=nseed, n_jobs=-1),
            RandomForestClassifier(n_estimators=500, max_depth=8, bootstrap=True, min_samples_leaf=50,
                oob_score=True, class_weight='balanced', criterion='entropy', random_state=nseed, n_jobs=-1),
            ExtraTreesClassifier(n_estimators=500, max_depth=8, min_samples_leaf=50, class_weight='balanced',
                criterion='gini', random_state=nseed, n_jobs=-1),
            ExtraTreesClassifier(n_estimators=500, max_depth=8, min_samples_leaf=50, class_weight='balanced',
                criterion='entropy', random_state=nseed, n_jobs=-1),
            GradientBoostingClassifier(learning_rate=0.01, max_depth=8, max_features='sqrt', n_estimators=500,
                subsample=0.8, min_samples_split=50, min_samples_leaf=10, random_state=nseed),
            XGBClassifier(n_estimators=500, learning_rate=0.1, max_depth=6, min_child_weight=4.5, gamma=0.5,
                subsample=0.6, colsample_bytree=0.6, objective='binary:logistic', reg_alpha=1e-05, reg_lambda=1,
                scale_pos_weight=spw, eval_metric='auc', seed=nseed, n_jobs=-1),
            XGBClassifier(n_estimators=200, learning_rate=0.01, max_depth=8, min_child_weight=2.5, gamma=0.1,
                subsample=0.7, colsample_bytree=0.6, objective='binary:logistic', reg_alpha=1e-05, reg_lambda=1,
                scale_pos_weight=spw, eval_metric='auc', seed=nseed, n_jobs=-1)
            ]
    clf_name = ['lgb1','lgb2','rfc1','rfc2','etc1','etc2','gbt','xgb1','xgb2']
    print("Creating train and test sets for stacking.")
    dataset_blend_train = np.zeros((X.shape[0], len(clfs)))
    dataset_blend_test = np.zeros((X_submission.shape[0], len(clfs)))
    # NOTE(review): random_state has no effect when shuffle=False, and newer
    # scikit-learn versions raise ValueError for this combination — confirm
    # the pinned scikit-learn version supports it.
    skf = StratifiedKFold(n_splits=n_folds, shuffle=False, random_state=nseed)
    for j, clf in enumerate(clfs):
        print('train %d cls: %s' % (j, clf))
        bbtic = time.time()
        # per-fold predictions on the submission set, averaged afterwards
        dataset_blend_test_j = np.zeros((X_submission.shape[0], n_folds))
        for i, (train, test) in enumerate(skf.split(X, y)):
            print("Fold", i)
            X_train = X[train]
            y_train = y[train]
            X_test = X[test]
            y_test = y[test]
            clf.fit(X_train, y_train)
            # out-of-fold probabilities become the level-1 training feature
            dataset_blend_train[test, j] = clf.predict_proba(X_test)[:, 1]
            dataset_blend_test_j[:, i] = clf.predict_proba(X_submission)[:, 1]
        dataset_blend_test[:, j] = dataset_blend_test_j.mean(1)
        # checkpoint after every classifier so partial progress survives a crash
        pd.DataFrame(dataset_blend_train, columns=clf_name).to_csv('input/dataset_stack_train.csv', index=None)
        pd.DataFrame(dataset_blend_test, columns=clf_name).to_csv('input/dataset_stack_test.csv', index=None)
        print('%d cls %s cost %ds' % (j, clf.__class__.__name__, time.time() - bbtic))
    return None
# --- script entry: clean the raw data, persist it, then build stacking features ---
train = pd.read_csv('input/train.csv').set_index('id')
test = pd.read_csv('input/test_no_label.csv').set_index('id')
train_clean, test_clean = data_cleaning(train, test)
train_clean.to_csv('input/cleaned_train_label_encoded.csv', index=False)
test_clean.to_csv('input/cleaned_test_label_encoded.csv', index=False)
y = train_clean.y.values
# NOTE(review): drop('y') keeps a 'kfold' column if one exists — confirm the
# feature matrix columns match what the classifiers were tuned on.
X = train_clean.drop('y',axis=1).values
X_submission = test_clean.values
stacking(X,y,X_submission)
|
# Copyright 2021 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
from recipe_engine import post_process
# This recipe test runs under both Python 2 and Python 3.
PYTHON_VERSION_COMPATIBILITY = "PY2+3"
# Recipe modules this test depends on.
DEPS = [
    'assertions',
]
def RunSteps(api):
    # assertCountEqual compares element multiplicities, ignoring order and
    # container type, so a list and a tuple with the same elements pass.
    api.assertions.assertCountEqual([0, 1], (1, 0))
def GenTests(api):
    # Single test case: the assertion in RunSteps must succeed; the
    # expectation JSON is dropped since the status check is sufficient.
    yield api.test(
        'basic',
        api.post_process(post_process.StatusSuccess),
        api.post_process(post_process.DropExpectation),
    )
|
"""Artifact TQL Filter"""
# standard library
from enum import Enum
# first-party
from tcex.api.tc.v3.api_endpoints import ApiEndpoints
from tcex.api.tc.v3.filter_abc import FilterABC
from tcex.api.tc.v3.tql.tql import Tql
from tcex.api.tc.v3.tql.tql_operator import TqlOperator
from tcex.api.tc.v3.tql.tql_type import TqlType
class ArtifactFilter(FilterABC):
"""Filter Object for Artifacts"""
    @property
    def _api_endpoint(self) -> str:
        """Return the API endpoint path used for artifact TQL queries."""
        return ApiEndpoints.ARTIFACTS.value
    def analytics_score(self, operator: Enum, analytics_score: int) -> None:
        """Filter Analytics Score based on **analyticsScore** keyword.

        Args:
            operator: The operator enum for the filter.
            analytics_score: The intel score of the artifact.
        """
        self._tql.add_filter('analyticsScore', operator, analytics_score, TqlType.INTEGER)
    def case_id(self, operator: Enum, case_id: int) -> None:
        """Filter Case ID based on **caseId** keyword.

        Args:
            operator: The operator enum for the filter.
            case_id: The ID of the case associated with this artifact.
        """
        self._tql.add_filter('caseId', operator, case_id, TqlType.INTEGER)
def date_added(self, operator: Enum, date_added: str) -> None:
"""Filter Date Added based on **dateAdded** keyword.
Args:
operator: The operator enum for the filter.
date_added: The date the artifact was added to the system.
"""
date_added = self.utils.any_to_datetime(date_added).strftime('%Y-%m-%dT%H:%M:%S')
self._tql.add_filter('dateAdded', operator, date_added, TqlType.STRING)
@property
def has_case(self):
"""Return **CaseFilter** for further filtering."""
# first-party
from tcex.api.tc.v3.cases.case_filter import CaseFilter
cases = CaseFilter(Tql())
self._tql.add_filter('hasCase', TqlOperator.EQ, cases, TqlType.SUB_QUERY)
return cases
@property
def has_group(self):
"""Return **GroupFilter** for further filtering."""
# first-party
from tcex.api.tc.v3.groups.group_filter import GroupFilter
groups = GroupFilter(Tql())
self._tql.add_filter('hasGroup', TqlOperator.EQ, groups, TqlType.SUB_QUERY)
return groups
@property
def has_indicator(self):
"""Return **IndicatorFilter** for further filtering."""
# first-party
from tcex.api.tc.v3.indicators.indicator_filter import IndicatorFilter
indicators = IndicatorFilter(Tql())
self._tql.add_filter('hasIndicator', TqlOperator.EQ, indicators, TqlType.SUB_QUERY)
return indicators
@property
def has_note(self):
"""Return **NoteFilter** for further filtering."""
# first-party
from tcex.api.tc.v3.notes.note_filter import NoteFilter
notes = NoteFilter(Tql())
self._tql.add_filter('hasNote', TqlOperator.EQ, notes, TqlType.SUB_QUERY)
return notes
@property
def has_task(self):
"""Return **TaskFilter** for further filtering."""
# first-party
from tcex.api.tc.v3.tasks.task_filter import TaskFilter
tasks = TaskFilter(Tql())
self._tql.add_filter('hasTask', TqlOperator.EQ, tasks, TqlType.SUB_QUERY)
return tasks
def id(self, operator: Enum, id: int) -> None: # pylint: disable=redefined-builtin
"""Filter ID based on **id** keyword.
Args:
operator: The operator enum for the filter.
id: The ID of the artifact.
"""
self._tql.add_filter('id', operator, id, TqlType.INTEGER)
def indicator_active(self, operator: Enum, indicator_active: bool) -> None:
"""Filter Active Status based on **indicatorActive** keyword.
Args:
operator: The operator enum for the filter.
indicator_active: A flag indicating whether or not the artifact is active.
"""
self._tql.add_filter('indicatorActive', operator, indicator_active, TqlType.BOOLEAN)
def note_id(self, operator: Enum, note_id: int) -> None:
"""Filter Note ID based on **noteId** keyword.
Args:
operator: The operator enum for the filter.
note_id: The ID of the note associated with this artifact.
"""
self._tql.add_filter('noteId', operator, note_id, TqlType.INTEGER)
def source(self, operator: Enum, source: str) -> None:
"""Filter Source based on **source** keyword.
Args:
operator: The operator enum for the filter.
source: The source of the artifact.
"""
self._tql.add_filter('source', operator, source, TqlType.STRING)
def summary(self, operator: Enum, summary: str) -> None:
"""Filter Summary based on **summary** keyword.
Args:
operator: The operator enum for the filter.
summary: The summary of the artifact.
"""
self._tql.add_filter('summary', operator, summary, TqlType.STRING)
def task_id(self, operator: Enum, task_id: int) -> None:
"""Filter Task ID based on **taskId** keyword.
Args:
operator: The operator enum for the filter.
task_id: The ID of the task associated with this artifact.
"""
self._tql.add_filter('taskId', operator, task_id, TqlType.INTEGER)
def type(self, operator: Enum, type: str) -> None: # pylint: disable=redefined-builtin
"""Filter typeName based on **type** keyword.
Args:
operator: The operator enum for the filter.
type: The type name of the artifact.
"""
self._tql.add_filter('type', operator, type, TqlType.STRING)
def type_name(self, operator: Enum, type_name: str) -> None:
"""Filter typeName based on **typeName** keyword.
Args:
operator: The operator enum for the filter.
type_name: The type name of the artifact.
"""
self._tql.add_filter('typeName', operator, type_name, TqlType.STRING)
|
# Cell rendering: index EMPTY -> ' ', J1 -> 'O', J2 -> 'X'.
symbols = [' ', 'O', 'X']
EMPTY = 0
J1 = 1
J2 = 2
NB_CELLS = 9  # 3x3 board stored as a flat, row-major list

class grid:
    """A 3x3 tic-tac-toe board.

    Cells hold EMPTY, J1 or J2 and are indexed 0..8 row-major.

    Fixes vs. previous version:
    - removed the class-level ``cells = []`` (a shared mutable class
      attribute that shadowed the instance attribute);
    - the dangling string literals before ``display``/``winner``/``gameOver``
      are now real docstrings;
    - the eight winning lines are tabulated instead of hand-unrolled.
    """

    # The eight winning alignments: rows, columns, diagonals.
    _LINES = (
        (0, 1, 2), (3, 4, 5), (6, 7, 8),
        (0, 3, 6), (1, 4, 7), (2, 5, 8),
        (0, 4, 8), (2, 4, 6),
    )

    def __init__(self):
        # Fresh per-instance board; every cell starts empty.
        self.cells = [EMPTY] * NB_CELLS

    def play(self, player, cellNum):
        """Put `player`'s mark into the free cell `cellNum` (0..8)."""
        assert 0 <= cellNum < NB_CELLS
        assert self.cells[cellNum] == EMPTY
        self.cells[cellNum] = player

    def display(self):
        """Display the state of the game.

        Example of output :
        -------
        |O| |X|
        -------
        |X|O| |
        -------
        | | |O|
        -------
        """
        print("-------------")
        for i in range(3):
            print("|", symbols[self.cells[i*3]], "|", symbols[self.cells[i*3+1]], "|", symbols[self.cells[i*3+2]], "|")
            print("-------------")

    def winner(self, player):
        """Test if 'player' wins the game (owns a full row, column or diagonal)."""
        assert player == J1 or player == J2
        return any(all(self.cells[c] == player for c in line)
                   for line in self._LINES)

    def gameOver(self):
        """Return the state of the game: -1 if the game is not over,
        EMPTY if DRAW; J1 if player 1 wins and J2 if player 2 wins.
        """
        if self.winner(J1):
            return J1
        if self.winner(J2):
            return J2
        # Any empty cell left means play continues.
        if EMPTY in self.cells:
            return -1
        return EMPTY
|
# Echo the integers 0..4 and report their running total once five
# values have been printed.
z = 0
i = 0
# The i < 10 bound is never reached: the body breaks as soon as i hits 5.
while i < 10:
    print(i)
    z, i = z + i, i + 1
    if i == 5:
        print("Total I :", z)
        break
|
# Build the front-end project in ../front and copy its output into ./dist.
import os, shutil
# Run the yarn build in the front-end directory (assumes yarn is on PATH
# and this script runs from a sibling directory — TODO confirm).
os.system("cd "+ os.path.join("..", "front") +" && yarn build")
# Best-effort removal of a previous build output; the bare except makes a
# missing ./dist a no-op (NOTE(review): it also hides permission errors).
try:
    shutil.rmtree("dist")
except:
    pass
# Copy the fresh build artifacts into place.
shutil.copytree(os.path.join("..", "front", "build"), "dist")
|
import FWCore.ParameterSet.Config as cms
# CMSSW configuration for the L1TPFCaloProducer EDProducer.
# Consumes HCAL trigger-primitive digis (HF enabled, barrel disabled below)
# plus HGCal 3D clusters, and clusters/links them into calo clusters.
pfClustersFromCombinedCaloHF = cms.EDProducer("L1TPFCaloProducer",
    debug = cms.untracked.int32(0),
    ecalCandidates = cms.VInputTag(),
    # ECAL-side clustering parameters (no ecalCandidates are supplied here).
    ecalClusterer = cms.PSet(
        energyShareAlgo = cms.string('fractions'),
        energyWeightedPosition = cms.bool(True),
        grid = cms.string('phase2'),
        minClusterEt = cms.double(0.5),
        seedEt = cms.double(0.5),
        zsEt = cms.double(0.4)
    ),
    emCorrector = cms.string(''),
    # Hadronic calibration file (110X-era HF corrections).
    hadCorrector = cms.string('L1Trigger/Phase2L1ParticleFlow/data/hfcorr_110X.root'),
    hadCorrectorEmfMax = cms.double(-1.0),
    hcCorrector = cms.string(''),
    hcalCandidates = cms.VInputTag(cms.InputTag("hgcalBackEndLayer2Producer","HGCalBackendLayer2Processor3DClustering")),
    # HCAL-side clustering parameters.
    hcalClusterer = cms.PSet(
        energyShareAlgo = cms.string('fractions'),
        energyWeightedPosition = cms.bool(True),
        grid = cms.string('phase2'),
        minClusterEt = cms.double(0.8),
        seedEt = cms.double(0.5),
        zsEt = cms.double(0.4)
    ),
    hcalDigis = cms.VInputTag(cms.InputTag("simHcalTriggerPrimitiveDigis")),
    hcalDigisBarrel = cms.bool(False),
    hcalDigisHF = cms.bool(True),
    hcalHGCTowers = cms.VInputTag(),
    hcalHGCTowersHadOnly = cms.bool(False),
    # Linker step combining EM and hadronic clusters.
    linker = cms.PSet(
        algo = cms.string('flat'),
        energyShareAlgo = cms.string('fractions'),
        energyWeightedPosition = cms.bool(True),
        grid = cms.string('phase2'),
        hoeCut = cms.double(0.1),
        minClusterEt = cms.double(1.0),
        minHadronEt = cms.double(1.0),
        minHadronRawEt = cms.double(1.0),
        minPhotonEt = cms.double(1.0),
        noEmInHGC = cms.bool(False),
        seedEt = cms.double(1.0),
        zsEt = cms.double(0.0)
    ),
    phase2barrelCaloTowers = cms.VInputTag(),
    # Resolution parametrization; etaBins cover the forward region only.
    resol = cms.PSet(
        etaBins = cms.vdouble(3.5, 4.0, 4.5, 5.0),
        kind = cms.string('calo'),
        offset = cms.vdouble(-1.125, 1.22, 1.514, 1.414),
        scale = cms.vdouble(0.868, 0.159, 0.148, 0.194)
    )
)
|
import random
import string
from typing import Any, Dict, Set
def custom_name_generate(prohibited_names: Set[str], dict_names: Dict[str, Any]) -> str:
    """Return a random name of the form ``custom_<10 alphanumerics>``.

    The name is guaranteed to appear neither in ``prohibited_names`` nor as a
    key of ``dict_names``. Gives up after 1000 attempts.

    Raises:
        RuntimeError: if no acceptable name was found within 1000 tries.
    """
    alphabet = string.ascii_letters + string.digits
    attempts = 0
    while attempts < 1000:
        candidate = "custom_" + "".join(random.choice(alphabet) for _ in range(10))  # nosec
        if candidate not in prohibited_names and candidate not in dict_names:
            return candidate
        attempts += 1
    raise RuntimeError("Cannot generate proper names")  # pragma: no cover
|
from aws_cdk import (
    Stack,
    aws_iam as iam,
)
from constructs import Construct
class RoleArnStack(Stack):
    """Example stack showing ways to reference a permissions-boundary policy
    and attach it to a role.
    """
    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        # CDK will automatically fill out the rest of the ARN with the context provided
        # by the stack. Neat!
        permissions_boundary_policy_arn = Stack.of(self).format_arn(
            region="",  # IAM policies have no region in the ARN so make this empty
            service="iam",
            resource="policy",
            resource_name="PermissionsBoundaryPolicy",
        )
        # Here's a better method. We can make a Policy construct, and get the ARN from that
        permissions_boundary_policy = iam.Policy.from_policy_name(
            self,
            "PermissionsBoundaryPolicyFromPolicyName",
            "PermissionsBoundaryPolicy"
        )
        # NOTE(review): despite the variable name, this assigns the policy
        # *name*, not an ARN; and both assignments to
        # permissions_boundary_policy_arn are never used afterwards — confirm
        # whether they are intentionally illustrative or leftovers.
        permissions_boundary_policy_arn = permissions_boundary_policy.policy_name
        example_role = iam.Role(
            self,
            "ExampleRole",
            assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
            description="A fun example role",
            # We don't need the ARN actually, CDK magic will fill it in from the construct
            # Note that the Perm Boundary Aspect overrides this.
            permissions_boundary=permissions_boundary_policy,
        )
        # Let's be dangerous and give it full admin access
        example_role.add_managed_policy(
            iam.ManagedPolicy.from_aws_managed_policy_name("AdministratorAccess")
        )
|
# Package metadata for the `ignoramus` CLI tool.
# Fix: replaced `from setuptools import *` (wildcard import) with the two
# names actually used, and strip requirement lines so trailing newlines or
# blank lines in requirements.txt don't leak into install_requires.
from setuptools import setup, find_packages

with open('requirements.txt', 'r') as F:
    requirements = [line.strip() for line in F if line.strip()]

setup(name='ignoramus',
      version='0.1',
      description='A command line tool for generating .gitignore files.',
      url='https://github.com/cornjuliox/ignoramus',
      author='Enrico Jr Tuvera',
      author_email='enricojr.tuvera@gmail.com',
      packages=find_packages(),
      include_package_data=True,
      install_requires=requirements,
      zip_safe=False,
      # Expose an `ignoramus` console command mapped to ignoramus.app:supermain.
      entry_points='''
      [console_scripts]
      ignoramus=ignoramus.app:supermain
      ''')
|
# Split a document image into its text lines: threshold, dilate horizontally
# so each line's glyphs merge into one contour, then crop each line and
# re-stack the crops (with 1px white separators) into a single image.
import cv2
import numpy as np
from imutils import contours
# Load image, grayscale, blur, Otsu's threshold
image = cv2.imread('1.png')
original = image.copy()
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray, (3,3), 0)
thresh = cv2.threshold(blur, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)[1]
invert = 255 - thresh
height, width = image.shape[:2]
# Dilate with a horizontal kernel to connect text contours
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (10,2))
dilate = cv2.dilate(thresh, kernel, iterations=2)
# Extract each line contour
lines = []
cnts = cv2.findContours(dilate, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# findContours returns 2 values on OpenCV 2/4 and 3 values on OpenCV 3;
# this handles both layouts.
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
(cnts, _) = contours.sort_contours(cnts, method="top-to-bottom")
for c in cnts:
    x,y,w,h = cv2.boundingRect(c)
    # Draw the detected line band on the visualization image (full width).
    cv2.rectangle(image, (0, y), (width, y+h), (36,255,12), 2)
    # Crop the line from the untouched original and store it as grayscale.
    line = original[y:y+h, 0:width]
    line = cv2.cvtColor(line, cv2.COLOR_BGR2GRAY)
    lines.append(line)
# Append white space in between each line
space = np.ones((1, width), dtype=np.uint8) * 255
result = np.zeros((0, width), dtype=np.uint8)
result = np.concatenate((result, space), axis=0)
for line in lines:
    result = np.concatenate((result, line), axis=0)
    result = np.concatenate((result, space), axis=0)
cv2.imshow('result', result)
cv2.imshow('image', image)
cv2.imshow('dilate', dilate)
cv2.waitKey()
|
# http://stackoverflow.com/a/12465861/5288758
from builtins import range
import sys
from PyQt4 import QtGui
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
import matplotlib.pyplot as plt
import random
class Window(QtGui.QDialog):
    """Qt dialog embedding a matplotlib figure, its navigation toolbar and a
    'Plot' button that redraws the figure with fresh random data.
    """
    def __init__(self, parent=None):
        super(Window, self).__init__(parent)
        # a figure instance to plot on
        self.figure = plt.figure()
        # this is the Canvas Widget that displays the `figure`
        # it takes the `figure` instance as a parameter to __init__
        self.canvas = FigureCanvas(self.figure)
        # this is the Navigation widget
        # it takes the Canvas widget and a parent
        self.toolbar = NavigationToolbar(self.canvas, self)
        # Just some button connected to `plot` method
        self.button = QtGui.QPushButton('Plot')
        self.button.clicked.connect(self.plot)
        # set the layout
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.toolbar)
        layout.addWidget(self.canvas)
        layout.addWidget(self.button)
        self.setLayout(layout)
    def plot(self):
        ''' plot some random stuff '''
        # random data
        data = [random.random() for i in range(10)]
        # Fix: Axes.hold() was deprecated in matplotlib 2.0 and later removed,
        # so `ax.hold(False)` raises AttributeError on modern matplotlib.
        # Clearing the figure gives the same "discard the old graph" behavior
        # on every matplotlib version.
        self.figure.clear()
        # create an axis
        ax = self.figure.add_subplot(111)
        # plot data
        ax.plot(data, '*-')
        # refresh canvas
        self.canvas.draw()
if __name__ == '__main__':
    # Standard Qt bootstrap: create the application, show the dialog, and
    # hand control to the Qt event loop; exit with its return code.
    app = QtGui.QApplication(sys.argv)
    main = Window()
    main.show()
    sys.exit(app.exec_())
# coding:utf8
import torch
from torch import nn
from collections import namedtuple
import torch.nn.functional as F
# One beam-search hypothesis: LSTM state, cumulative log-probability,
# per-step log-probabilities, most recent word id, and the full word-id
# sequence generated so far.
BeamCandidate = namedtuple('BeamCandidate',
                           ['state', 'log_prob_sum', 'log_prob_seq', 'last_word_id', 'word_id_seq'])
class Decoder(nn.Module):
    """LSTM-cell language decoder over a word vocabulary.

    ``forward`` dispatches on the ``mode`` kwarg to ``forward_xe``
    (cross-entropy training with optional scheduled sampling) or
    ``forward_rl`` (sequence sampling for RL training); ``sample`` runs
    beam-search decoding for a single feature vector.
    """
    def __init__(self, idx2word, settings):
        """idx2word: list mapping word id -> token (must contain '<PAD>' and
        '<UNK>'); settings: dict with 'emb_dim', 'fc_feat_dim',
        'rnn_hid_dim' and 'dropout_p'.
        """
        super(Decoder, self).__init__()
        self.idx2word = idx2word
        self.pad_id = idx2word.index('<PAD>')
        self.unk_id = idx2word.index('<UNK>')
        # <SOS>/<EOS> fall back to the pad id when absent from the vocabulary.
        self.sos_id = idx2word.index('<SOS>') if '<SOS>' in idx2word else self.pad_id
        self.eos_id = idx2word.index('<EOS>') if '<EOS>' in idx2word else self.pad_id
        self.vocab_size = len(idx2word)
        self.word_embed = nn.Sequential(nn.Embedding(self.vocab_size, settings['emb_dim'],
                                                     padding_idx=self.pad_id),
                                        nn.ReLU(),
                                        nn.Dropout(settings['dropout_p']))
        # Projects the raw feature vector into the word-embedding space so it
        # can serve as the LSTM cell's first input.
        self.fc_embed = nn.Sequential(nn.Linear(settings['fc_feat_dim'], settings['emb_dim']),
                                      nn.ReLU(),
                                      nn.Dropout(settings['dropout_p']))
        self.rnn = nn.LSTMCell(settings['emb_dim'], settings['rnn_hid_dim'])
        self.rnn_drop = nn.Dropout(settings['dropout_p'])
        self.classifier = nn.Linear(settings['rnn_hid_dim'], self.vocab_size)
    def forward(self, *args, **kwargs):
        # Dispatch to forward_<mode>; default mode is cross-entropy ('xe').
        mode = kwargs.get('mode', 'xe')
        if 'mode' in kwargs:
            del kwargs['mode']
        return getattr(self, 'forward_' + mode)(*args, **kwargs)
    def _forward_step(self, it, state):
        """Run one decoding step: word ids ``it`` + LSTM ``state`` ->
        (log-probabilities over the vocabulary, new state)."""
        word_embs = self.word_embed(it)  # bs*word_emb
        state = self.rnn(word_embs, state)  # bs*rnn_hid
        output = self.rnn_drop(state[0])
        output = self.classifier(output)  # bs*vocab
        logprobs = F.log_softmax(output, dim=1)  # bs*vocab
        return logprobs, state
    def forward_xe(self, fc_feats, captions, ss_prob=0.0):
        """Teacher-forced decoding; with probability ``ss_prob`` per example,
        feed a word sampled from the previous prediction instead of the
        ground truth (scheduled sampling). Returns per-step log-probs."""
        batch_size = fc_feats.size(0)
        fc_feats = self.fc_embed(fc_feats)  # bz*emb_dim
        # Initialize the LSTM state by feeding the features as the first input.
        state = self.rnn(fc_feats)
        outputs = []
        for i in range(captions.size(1) - 1):
            if self.training and i >= 1 and ss_prob > 0.0:  # otherwise no need to sample
                sample_prob = fc_feats.new(batch_size).uniform_(0, 1)
                sample_mask = sample_prob < ss_prob
                if sample_mask.sum() == 0:
                    it = captions[:, i].clone()  # bs
                else:
                    sample_ind = sample_mask.nonzero().view(-1)
                    it = captions[:, i].clone()  # bs
                    prob_prev = outputs[i - 1].detach().exp()  # bs*vocab, fetch prev distribution
                    # Replace the masked positions with words sampled from the
                    # model's own previous distribution.
                    it.index_copy_(0, sample_ind, torch.multinomial(prob_prev, 1).view(-1).index_select(0, sample_ind))
            else:
                it = captions[:, i].clone()  # bs
            logprobs, state = self._forward_step(it, state)
            outputs.append(logprobs)
        outputs = torch.stack(outputs, dim=1)  # [bs, max_len, vocab_size]
        return outputs
    def forward_rl(self, fc_feats, sample_max, max_seq_len):
        """Free-running decoding for RL: greedy if ``sample_max`` else
        multinomial sampling. Returns (sequences, their log-probs, and a
        mask that is 1 up to and including each sequence's <EOS>)."""
        batch_size = fc_feats.size(0)
        fc_feats = self.fc_embed(fc_feats)  # bz*emb_dim
        state = self.rnn(fc_feats)
        seq = fc_feats.new_zeros((batch_size, max_seq_len), dtype=torch.long)
        seq_logprobs = fc_feats.new_zeros((batch_size, max_seq_len))
        seq_masks = fc_feats.new_zeros((batch_size, max_seq_len))
        it = fc_feats.new_zeros(batch_size, dtype=torch.long).fill_(self.sos_id)  # first input <SOS>
        unfinished = it == self.sos_id
        for t in range(max_seq_len):
            logprobs, state = self._forward_step(it, state)
            if sample_max:
                sample_logprobs, it = torch.max(logprobs, 1)
            else:
                prob_prev = torch.exp(logprobs)
                it = torch.multinomial(prob_prev, 1)
                sample_logprobs = logprobs.gather(1, it)  # gather the logprobs at sampled positions
            it = it.view(-1).long()
            sample_logprobs = sample_logprobs.view(-1)
            seq_masks[:, t] = unfinished
            # Zero out words for already-finished sequences (pad id is 0 here).
            it = it * unfinished.type_as(it)  # bs
            seq[:, t] = it
            seq_logprobs[:, t] = sample_logprobs
            unfinished = unfinished * (it != self.eos_id)
            if unfinished.sum() == 0:
                break
        return seq, seq_logprobs, seq_masks
    def sample(self, fc_feat, max_seq_len=16, beam_size=3, decoding_constraint=1):
        """Beam-search decode one feature vector; returns the beam's captions
        (as strings) and their summed log-probabilities, best first."""
        self.eval()
        fc_feat = fc_feat.view(1, -1)  # 1*2048
        fc_feat = self.fc_embed(fc_feat)  # 1*emb_dim
        state = self.rnn(fc_feat)
        # state, log_prob_sum, log_prob_seq, last_word_id, word_id_seq
        candidates = [BeamCandidate(state, 0., [], self.sos_id, [])]
        for t in range(max_seq_len):
            tmp_candidates = []
            end_flag = True
            for candidate in candidates:
                state, log_prob_sum, log_prob_seq, last_word_id, word_id_seq = candidate
                if t > 0 and last_word_id == self.eos_id:
                    # Finished hypothesis: carry it forward unchanged.
                    tmp_candidates.append(candidate)
                else:
                    end_flag = False
                    it = fc_feat.new_tensor([last_word_id], dtype=torch.long)
                    logprobs, state = self._forward_step(it, state)  # [1, vocab_size]
                    logprobs = logprobs.squeeze(0)
                    if self.pad_id != self.eos_id:
                        logprobs[self.pad_id] += float('-inf')  # do not generate <PAD>, <SOS> and <UNK>
                        logprobs[self.sos_id] += float('-inf')
                        logprobs[self.unk_id] += float('-inf')
                    if decoding_constraint:  # do not generate last step word
                        logprobs[last_word_id] += float('-inf')
                    output_sorted, index_sorted = torch.sort(logprobs, descending=True)
                    # Expand this hypothesis with its top-`beam_size` words.
                    for k in range(beam_size):
                        log_prob, word_id = output_sorted[k], index_sorted[k]  # tensor, tensor
                        log_prob = float(log_prob)
                        word_id = int(word_id)
                        tmp_candidates.append(BeamCandidate(state, log_prob_sum + log_prob,
                                                            log_prob_seq + [log_prob],
                                                            word_id, word_id_seq + [word_id]))
            # Keep the best `beam_size` hypotheses by total log-probability.
            candidates = sorted(tmp_candidates, key=lambda x: x.log_prob_sum, reverse=True)[:beam_size]
            if end_flag:
                break
        # captions, scores
        captions = [' '.join([self.idx2word[idx] for idx in candidate.word_id_seq if idx != self.eos_id])
                    for candidate in candidates]
        scores = [candidate.log_prob_sum for candidate in candidates]
        return captions, scores
    def get_optim_and_crit(self, lr, weight_decay=0):
        """Return an Adam optimizer over this module's parameters and the
        matching masked cross-entropy criterion."""
        return torch.optim.Adam(self.parameters(), lr=lr, weight_decay=weight_decay), \
               XECriterion()
class XECriterion(nn.Module):
def __init__(self):
super(XECriterion, self).__init__()
def forward(self, pred, target, lengths):
max_len = max(lengths)
mask = pred.new_zeros(len(lengths), max_len)
for i, l in enumerate(lengths):
mask[i, :l] = 1
loss = - pred.gather(2, target.unsqueeze(2)).squeeze(2) * mask
loss = torch.sum(loss) / torch.sum(mask)
return loss
|
# Generated by Django 3.2.5 on 2021-11-26 12:15
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers
# NOTE(review): auto-generated initial migration for the calendar app —
# keep edits here to comments only; schema changes belong in new migrations.
class Migration(migrations.Migration):
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('cmscontexts', '0011_alter_webpath_options'),
        ('cmspublications', '0020_auto_20211103_1100'),
        ('taggit', '0003_taggeditem_add_unique_index'),
    ]
    operations = [
        # Calendar: named, sluggable container with audit fields.
        migrations.CreateModel(
            name='Calendar',
            fields=[
                ('id', models.AutoField(auto_created=True,
                 primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('is_active', models.BooleanField(default=False)),
                ('name', models.CharField(max_length=256)),
                ('slug', models.SlugField(unique=True)),
                ('description', models.TextField(
                    blank=True, default='', max_length=2048)),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='calendar_created_by', to=settings.AUTH_USER_MODEL)),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='calendar_modified_by', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Calendars',
                'ordering': ['name'],
            },
        ),
        # Event: dated item optionally backed by a cmspublications publication.
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True,
                 primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('order', models.IntegerField(blank=True, default=10, null=True)),
                ('is_active', models.BooleanField(default=False)),
                ('date_start', models.DateTimeField()),
                ('date_end', models.DateTimeField()),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='event_created_by', to=settings.AUTH_USER_MODEL)),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='event_modified_by', to=settings.AUTH_USER_MODEL)),
                ('publication', models.ForeignKey(blank=True, null=True,
                 on_delete=django.db.models.deletion.SET_NULL, to='cmspublications.publication')),
                ('tags', taggit.managers.TaggableManager(help_text='A comma-separated list of tags.',
                 through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags')),
            ],
            options={
                'verbose_name_plural': 'Calendar events',
                'ordering': ['date_start'],
            },
        ),
        # CalendarLocalization: per-language name/description for a Calendar.
        migrations.CreateModel(
            name='CalendarLocalization',
            fields=[
                ('id', models.AutoField(auto_created=True,
                 primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('order', models.IntegerField(blank=True, default=10, null=True)),
                ('is_active', models.BooleanField(default=False)),
                ('language', models.CharField(choices=[('ar', 'Arabic'), ('en', 'English'), ('es', 'Spanish'), (
                    'fr', 'French'), ('it', 'Italian'), ('pt', 'Portuguese')], default='en', max_length=12)),
                ('name', models.CharField(blank=True, default='', max_length=256)),
                ('description', models.TextField(
                    blank=True, default='', max_length=2048)),
                ('calendar', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE, to='unicms_calendar.calendar')),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='calendarlocalization_created_by', to=settings.AUTH_USER_MODEL)),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='calendarlocalization_modified_by', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Calendar Localization',
            },
        ),
        # CalendarContext: places a Calendar into a webpath/template section;
        # the `section` choices below mirror the CMS page template slots.
        migrations.CreateModel(
            name='CalendarContext',
            fields=[
                ('id', models.AutoField(auto_created=True,
                 primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('order', models.IntegerField(blank=True, default=10, null=True)),
                ('is_active', models.BooleanField(default=False)),
                ('section', models.CharField(blank=True, choices=[('pre-head', 'Pre-Head'), ('head', 'Head'), ('menu-1', 'Menu-1'), ('menu-2', 'Menu-2'), ('menu-3', 'Menu-3'), ('menu-4', 'Menu-4'), ('banner', 'Banner'), ('slider-2', 'Slider-2'), ('pre-footer', 'Pre-Footer'), ('footer', 'Footer'), ('post-footer', 'Post-Footer'), ('breadcrumbs', 'Breadcrumbs'), ('section-1', (('1-top-a', 'Section 1 - Top A'), ('1-top-b', 'Section 1 - Top B'), ('1-top-c', 'Section 1 - Top C'), ('1-mid-top-a', 'Section 1 - Middle Top A'), ('1-mid-top-b', 'Section 1 - Middle Top B'), ('1-mid-top-c', 'Section 1 - Middle Top C'), ('1-left-a', 'Section 1 - Left A'), ('1-left-b', 'Section 1 - Left B'), ('1-center-top-a', 'Section 1 - Center Top A'), ('1-center-top-b', 'Section 1 - Center Top B'), ('1-center-top-c', 'Section 1 - Center Top C'), ('1-center-content', 'Section 1 - Center Content'), ('1-center-bottom-a', 'Section 1 - Center Bottom A'), ('1-center-bottom-b', 'Section 1 - Center Bottom B'), ('1-center-bottom-c', 'Section 1 - Center Bottom C'), ('1-right-a', 'Section 1 - Right A'), ('1-right-b', 'Section 1 - Right B'), ('1-mid-bottom-a', 'Section 1 - Middle Bottom A'), ('1-mid-bottom-b', 'Section 1 - Middle Bottom B'), ('1-mid-bottom-c', 'Section 1 - Middle Bottom C'), ('1-bottom-a', 'Section 1 - Bottom A'), ('1-bottom-b', 'Section 1 - Bottom B'), ('1-bottom-c', 'Section 1 - Bottom C'))), ('section-2', (('2-top-a', 'Section 2 - Top A'), ('2-top-b', 'Section 2 - Top B'), ('2-top-c', 'Section 2 - Top C'), ('2-mid-top-a', 'Section 2 - Middle Top A'), ('2-mid-top-b', 'Section 2 - Middle Top B'), ('2-mid-top-c', 'Section 2 - Middle Top C'), ('2-left-a', 'Section 2 - Left A'), ('2-left-b', 'Section 2 - Left B'), ('2-center-top-a', 'Section 2 - Center Top A'), ('2-center-top-b', 'Section 2 - Center Top B'), (
                    '2-center-top-c', 'Section 2 - Center Top C'), ('2-center-content', 'Section 2 - Center Content'), ('2-center-bottom-a', 'Section 2 - Center Bottom A'), ('2-center-bottom-b', 'Section 2 - Center Bottom B'), ('2-center-bottom-c', 'Section 2 - Center Bottom C'), ('2-right-a', 'Section 2 - Right A'), ('2-right-b', 'Section 2 - Right B'), ('2-mid-bottom-a', 'Section 2 - Middle Bottom A'), ('2-mid-bottom-b', 'Section 2 - Middle Bottom B'), ('2-mid-bottom-c', 'Section 2 - Middle Bottom C'), ('2-bottom-a', 'Section 2 - Bottom A'), ('2-bottom-b', 'Section 2 - Bottom B'), ('2-bottom-c', 'Section 2 - Bottom C'))), ('section-3', (('3-top-a', 'Section 3 - Top A'), ('3-top-b', 'Section 3 - Top B'), ('3-top-c', 'Section 3 - Top C'), ('3-mid-top-a', 'Section 3 - Middle Top A'), ('3-mid-top-b', 'Section 3 - Middle Top B'), ('3-mid-top-c', 'Section 3 - Middle Top C'), ('3-left-a', 'Section 3 - Left A'), ('3-left-b', 'Section 3 - Left B'), ('3-center-top-a', 'Section 3 - Center Top A'), ('3-center-top-b', 'Section 3 - Center Top B'), ('3-center-top-c', 'Section 3 - Center Top C'), ('3-center-content', 'Section 3 - Center Content'), ('3-center-bottom-a', 'Section 3 - Center Bottom A'), ('3-center-bottom-b', 'Section 3 - Center Bottom B'), ('3-center-bottom-c', 'Section 3 - Center Bottom C'), ('3-right-a', 'Section 3 - Right A'), ('3-right-b', 'Section 3 - Right B'), ('3-mid-bottom-a', 'Section 3 - Middle Bottom A'), ('3-mid-bottom-b', 'Section 3 - Middle Bottom B'), ('3-mid-bottom-c', 'Section 3 - Middle Bottom C'), ('3-bottom-a', 'Section 3 - Bottom A'), ('3-bottom-b', 'Section 3 - Bottom B'), ('3-bottom-c', 'Section 3 - Bottom C')))], help_text='Specify the container section in the template where this block would be rendered.', max_length=60, null=True)),
                ('calendar', models.ForeignKey(
                    on_delete=django.db.models.deletion.PROTECT, to='unicms_calendar.calendar')),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='calendarcontext_created_by', to=settings.AUTH_USER_MODEL)),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='calendarcontext_modified_by', to=settings.AUTH_USER_MODEL)),
                ('webpath', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE, to='cmscontexts.webpath')),
            ],
            options={
                'verbose_name_plural': 'Calendar Contexts',
                'ordering': ['webpath__fullpath', 'order'],
            },
        ),
        # CalendarEvent: M2M-style link between Calendar and Event, unique per pair.
        migrations.CreateModel(
            name='CalendarEvent',
            fields=[
                ('id', models.AutoField(auto_created=True,
                 primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('order', models.IntegerField(blank=True, default=10, null=True)),
                ('is_active', models.BooleanField(default=False)),
                ('calendar', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE, to='unicms_calendar.calendar')),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='calendarevent_created_by', to=settings.AUTH_USER_MODEL)),
                ('event', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE, to='unicms_calendar.event')),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                 related_name='calendarevent_modified_by', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Calendar event relations',
                'ordering': ['calendar__pk', 'event__date_start'],
                'unique_together': {('event', 'calendar')},
            },
        ),
    ]
|
# Question 6
# Festival budget for 2000 is announced INR 10000/- and every year it will increase by 10%. write a recursive function to calculate total festival expense till 2020.
def fes(year, budget):
    """Return the TOTAL festival expense from `year` through 2020 inclusive.

    `budget` is the expense in `year`; each following year costs 10% more.

    Fixes vs. previous version: the old code added a flat 10 (not 10%) per
    year and returned only the final year's budget instead of accumulating
    the yearly expenses, contradicting the problem statement above.
    """
    if year == 2020:
        # Base case: last year contributes just its own budget.
        return budget
    # This year's budget plus the total of the remaining years at +10%.
    return budget + fes(year + 1, budget * 1.1)
print(fes(2000, 10000))
|
# Copyright 2013-2021 The Salish Sea MEOPAR contributors
# and The University of British Columbia
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set of parameters to calculate Si from Live Ocean NO3 and to correct NO3 values
"""
def set_parameters(parameter_set):
    """Set the parameters given the parameter set name
    arg: string: parameter_set
    returns a dictionary of parameters
    """
    # Original base parameter set: NO3 taken directly from Live Ocean,
    # Si obtained from NO3 via a linear fit.
    v201702 = {
        "NO3": {"smax": 100.0, "nmax": 120.0},
        "Si": {"a": 6.46, "b": 1.35, "c": 0.0, "sigma": 1.0, "tsa": 29},
    }
    # Corrects the highest Live Ocean NO3 values and improves the Si
    # parametrization by including salinity.
    v201905 = {
        "NO3": {"smax": 25.880, "nmax": 46.050},
        "Si": {"a": 1.756, "b": 1.556, "c": -7.331, "sigma": 1.631, "tsa": 32.4929},
    }
    # Unknown names raise KeyError, matching the previous behavior.
    return {"v201702": v201702, "v201905": v201905}[parameter_set]
|
# coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
import pprint
import six
class LiveEncoding(object):
    """Generated Bitmovin SDK model describing a live encoding endpoint
    (stream key, encoder IP and RTMP application). All three attributes are
    optional at construction and validated as strings by their setters.
    """
    @poscheck_model
    def __init__(self,
                 stream_key=None,
                 encoder_ip=None,
                 application=None):
        # type: (string_types, string_types, string_types) -> None
        self._stream_key = None
        self._encoder_ip = None
        self._application = None
        self.discriminator = None
        # Route every provided value through its property setter so the
        # string-type validation below applies at construction time too.
        if stream_key is not None:
            self.stream_key = stream_key
        if encoder_ip is not None:
            self.encoder_ip = encoder_ip
        if application is not None:
            self.application = application
    @property
    def openapi_types(self):
        # Attribute name -> OpenAPI type, used by to_dict().
        types = {
            'stream_key': 'string_types',
            'encoder_ip': 'string_types',
            'application': 'string_types'
        }
        return types
    @property
    def attribute_map(self):
        # Python attribute name -> JSON (camelCase) field name.
        attributes = {
            'stream_key': 'streamKey',
            'encoder_ip': 'encoderIp',
            'application': 'application'
        }
        return attributes
    @property
    def stream_key(self):
        # type: () -> string_types
        """Gets the stream_key of this LiveEncoding.
        Stream key of the live encoder (required)
        :return: The stream_key of this LiveEncoding.
        :rtype: string_types
        """
        return self._stream_key
    @stream_key.setter
    def stream_key(self, stream_key):
        # type: (string_types) -> None
        """Sets the stream_key of this LiveEncoding.
        Stream key of the live encoder (required)
        :param stream_key: The stream_key of this LiveEncoding.
        :type: string_types
        """
        if stream_key is not None:
            if not isinstance(stream_key, string_types):
                raise TypeError("Invalid type for `stream_key`, type has to be `string_types`")
        self._stream_key = stream_key
    @property
    def encoder_ip(self):
        # type: () -> string_types
        """Gets the encoder_ip of this LiveEncoding.
        IP address of the live encoder (required)
        :return: The encoder_ip of this LiveEncoding.
        :rtype: string_types
        """
        return self._encoder_ip
    @encoder_ip.setter
    def encoder_ip(self, encoder_ip):
        # type: (string_types) -> None
        """Sets the encoder_ip of this LiveEncoding.
        IP address of the live encoder (required)
        :param encoder_ip: The encoder_ip of this LiveEncoding.
        :type: string_types
        """
        if encoder_ip is not None:
            if not isinstance(encoder_ip, string_types):
                raise TypeError("Invalid type for `encoder_ip`, type has to be `string_types`")
        self._encoder_ip = encoder_ip
    @property
    def application(self):
        # type: () -> string_types
        """Gets the application of this LiveEncoding.
        This will indicate the application 'live'
        :return: The application of this LiveEncoding.
        :rtype: string_types
        """
        return self._application
    @application.setter
    def application(self, application):
        # type: (string_types) -> None
        """Sets the application of this LiveEncoding.
        This will indicate the application 'live'
        :param application: The application of this LiveEncoding.
        :type: string_types
        """
        if application is not None:
            if not isinstance(application, string_types):
                raise TypeError("Invalid type for `application`, type has to be `string_types`")
        self._application = application
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Serialize each declared attribute under its JSON name, skipping
        # None values and empty lists; nested models are recursed via to_dict.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if value is None:
                continue
            if isinstance(value, list):
                if len(value) == 0:
                    continue
                result[self.attribute_map.get(attr)] = [y.value if isinstance(y, Enum) else y for y in [x.to_dict() if hasattr(x, "to_dict") else x for x in value]]
            elif hasattr(value, "to_dict"):
                result[self.attribute_map.get(attr)] = value.to_dict()
            elif isinstance(value, Enum):
                result[self.attribute_map.get(attr)] = value.value
            elif isinstance(value, dict):
                result[self.attribute_map.get(attr)] = {k: (v.to_dict() if hasattr(v, "to_dict") else v) for (k, v) in value.items()}
            else:
                result[self.attribute_map.get(attr)] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, LiveEncoding):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
# -*- encoding=utf-8 -*-
# Copyright 2016 David Cary; licensed under the Apache License, Version 2.0
"""RCV related classes and functions for exceptions and errors
"""
import sys
import traceback
class _RcvError(Exception):
"""A base class for Exceptions related to RCV
This class is not intended for direct use, just used as a base class
for other exceptions.
"""
def __init__(self, message, other_values=[], base_exception=None):
"""Initialize with a base message, other values, and an exception
Arguments:
----------
message
A string identifying the general nature of the error.
other_values
A list or tuple of typically tuple pairs, usually each pair consists
of a label and a value. The list or tuple may contain items that
are not tuple pairs.
Default value: an empty list
base_exception
An exception is subsumed by this one.
Default value: None
"""
Exception.__init__(self, message)
self.message = str(message)
self.other_values = other_values
self.base_exception = base_exception
self.base_exception_description = ''
if (self.base_exception is not None and
isinstance(self.base_exception, Exception)):
self.base_exception_description = describe_exc(self.base_exception)
def _show_base_exception(self):
"""Select what to show about the base_exception"""
return str(self.base_exception)
def __str__(self):
"""Convert to a string"""
result = [self.message]
if self.other_values:
result.append(self._other_values_as_str(self.other_values))
if self.base_exception:
result.append(indent_message(self._show_base_exception()))
result = '\n'.join(result)
return result
def _other_values_as_str(self, other_values, indent_by=2):
result = []
if type(other_values) in (list, tuple):
for value_item in other_values:
if type(value_item) in (list, tuple) and len(value_item) == 2:
value = value_item[1]
result.append('{:25}= {}'.
format(str(value_item[0]), repr(value)))
else:
result.append(repr(value_item))
else:
result.append(repr(other_values))
result = '\n'.join(result)
result = indent_message(result, indent_by)
return result
class RcvValueError(_RcvError):
    """An exception class to identify invalid argument data.

    Used for argument data that is normally supplied from outside the
    package.
    """

    def __init__(self, message, other_values=(), base_exception=None):
        """Initialize with a base message, other values, and an exception.

        See the base class for a description of the arguments.  Default
        value for other_values is an empty tuple (bug fix: previously a
        shared mutable list); for base_exception, None.
        """
        _RcvError.__init__(self, message, other_values, base_exception)
class RcvImplementationError(_RcvError):
    """An error class for possible RCV implementation errors."""

    def __init__(self, message, other_values=(), base_exception=None):
        """Initialize with a base message, other values, and an exception.

        See the base class for a description of the arguments.  Default
        value for other_values is an empty tuple (bug fix: previously a
        shared mutable list); for base_exception, None.
        """
        _RcvError.__init__(self, message, other_values, base_exception)

    def _show_base_exception(self):
        """Show the full traceback description captured at construction time."""
        return str(self.base_exception_description)
def describe_exc(exc):
    """Create a multi-line description of an exception and its traceback.

    Must be called while *exc* is being handled (inside an ``except``
    block): the traceback comes from ``sys.exc_info()``, not from *exc*.
    """
    parts = []
    parts.append('Exception description:')
    ex_type, ex_value, ex_tb = sys.exc_info()
    # str(type) looks like "<class 'pkg.Name'>"; splitting on the quote
    # extracts the dotted class name
    parts.append(' %s:' % str(ex_type).split('\'')[1])
    parts.append(indent_message(str(exc), 4))
    formatted_tb = traceback.format_tb(ex_tb)
    parts.append('Raised at:')
    # [:-1] on each formatted frame/stack strips the trailing newline
    parts.append(formatted_tb[-1][:-1])
    parts.append('Caught at:')
    formatted_stack = traceback.format_stack()
    # -2 skips describe_exc's own frame
    parts.append(formatted_stack[-2][:-1])
    parts.append('Stack before try:')
    parts.append(''.join(traceback.format_stack()[:-2])[:-1])
    parts.append('Stack after try:')
    parts.append(''.join(formatted_tb)[:-1])
    # drop the traceback reference to avoid a frame reference cycle
    del ex_tb
    parts.append('END Exception description')
    # indent everything between the 3-line header and the final sentinel
    indented_parts = (parts[:3] +
                      [indent_message(part) for part in parts[3:-1]] +
                      [parts[-1]])
    result = "\n".join(indented_parts)
    return result
def indent_message(message, indent_by=2):
    """Indent every line of *message* by *indent_by* spaces.

    A trailing newline is preserved without indenting the empty final line.
    """
    pad = ' ' * indent_by
    if message.endswith('\n'):
        core, tail = message[:-1], '\n'
    else:
        core, tail = message, ''
    return pad + core.replace('\n', '\n' + pad) + tail
|
from collections import deque
class Solution:
    def reorderList(self, head: Optional[ListNode]) -> None:
        """Reorder a singly linked list L0→L1→…→Ln into L0→Ln→L1→Ln-1→… .

        Collects the nodes into a deque, then rebuilds the list by taking
        alternately from the front and the back.  Modifies the list in
        place and returns its head.
        """
        # Bug fix: the old guard `not (head or head.next)` evaluated
        # head.next when head was None, raising AttributeError on an
        # empty list.
        if head is None:
            return None
        nodes = deque()
        node = head
        while node:
            nodes.append(node)
            node = node.next
        dummy = tail = ListNode()
        from_front = True
        while nodes:
            nxt = nodes.popleft() if from_front else nodes.pop()
            nxt.next = None  # detach so no cycle can form
            tail.next = nxt
            tail = nxt
            from_front = not from_front
        return dummy.next
|
import numpy as np
from numpy.testing import assert_equal
import hypothesis
from hypothesis import given
from hypothesis import strategies as st
from hypothesis.extra import numpy as hynp
@given(hynp.arrays(np.int64, 10),
       st.integers(),
       st.integers(),
       )
def test_clip_scalar_ints(arr, amin, amax):
    """np.clip with scalar int bounds matches min(amax, max(arr, amin))."""
    clipped = np.clip(arr, amin, amax)
    # clip must preserve the input shape
    assert clipped.shape == arr.shape
    # amin may exceed amax; we don't check that, only the documented
    # identity from numpy/core/code_generators/ufunc_docstrings.py
    # (see also the related discussion in NumPy gh-12519)
    reference = np.minimum(amax, np.maximum(arr, amin))
    # compare as lists: object dtype can appear in the output and
    # break array comparison helpers
    assert clipped.tolist() == reference.tolist()
# next, move to the case of floats & arrays
# for the clip limits
@given(hynp.arrays(np.float64, 10),
       hynp.arrays(np.float64, 10),
       hynp.arrays(np.float64, 10),
       )
def test_clip_array_floats(arr, amin, amax):
    """np.clip with float-array bounds matches min(amax, max(arr, amin))."""
    clipped = np.clip(arr, amin, amax)
    # clip must preserve the input shape
    assert clipped.shape == arr.shape
    reference = np.minimum(amax, np.maximum(arr, amin))
    assert_equal(clipped, reference)
# next, mix floats and ints & scalars & arrays
@given(hynp.arrays(np.int64, 10),
       st.floats(),
       hynp.arrays(np.int32, 10),
       )
def test_clip_mixed_arr_scalar(arr, amin, amax):
    """np.clip with mixed int/float, scalar/array bounds obeys the identity."""
    clipped = np.clip(arr, amin, amax)
    # clip must preserve the input shape
    assert clipped.shape == arr.shape
    reference = np.minimum(amax, np.maximum(arr, amin))
    assert_equal(clipped, reference)
# try timedelta64 ('m8') clipping
@given(hynp.arrays(np.int64, 10),
       st.integers(min_value=np.iinfo(np.int64).min,
                   max_value=np.iinfo(np.int64).max),
       st.integers(min_value=np.iinfo(np.int64).min,
                   max_value=np.iinfo(np.int64).max),
       )
def test_clip_timedelta64(arr, amin, amax):
    """np.clip on timedelta64 ('m8') data obeys the min/max identity."""
    # reinterpret the int64 data as generic timedelta64
    arr = np.array(arr.tolist(), dtype='m8')
    clipped = np.clip(arr, amin, amax)
    # clip must preserve the input shape
    assert clipped.shape == arr.shape
    # the usual equivalence condition for clip()
    reference = np.minimum(amax, np.maximum(arr, amin))
    assert_equal(clipped, reference)
|
# test_constants.py
# Copyright 2012 Roger Marsh
# Licence: See LICENCE (BSD licence)
"""constants tests"""
import unittest
from .. import constants
class Constants(unittest.TestCase):
    def test_001_constants_001(self):
        """The module exposes exactly the documented names with their bit values."""
        public_names = sorted(
            name
            for name in dir(constants)
            if not name.startswith("__") and not name.endswith("__")
        )
        self.assertEqual(
            public_names,
            [
                "ALTDOWN",
                "CAPSLOCKDOWN",
                "CONTROLDOWN",
                "NUMLOCKDOWN",
                "SHIFTDOWN",
                "START_MSWINDOWS",
            ],
        )
        expected_values = {
            "ALTDOWN": 8,
            "CAPSLOCKDOWN": 2,
            "CONTROLDOWN": 4,
            "NUMLOCKDOWN": 16,
            "SHIFTDOWN": 1,
            "START_MSWINDOWS": 64,
        }
        for name, value in expected_values.items():
            self.assertEqual(getattr(constants, name), value)
if __name__ == "__main__":
    # run this module's tests directly with the default loader/runner
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(Constants)
    unittest.TextTestRunner().run(suite)
|
from bspider.config.default_settings import EXCHANGE_NAME
from bspider.core import BaseMonitor, Project
from bspider.utils.sign import Sign
from .async_downloader import AsyncDownloader
class DownloaderMonitor(BaseMonitor):
    """Monitor process for the downloader side of bspider.

    Binds to the second configured exchange and hands each project an
    AsyncDownloader worker.
    """
    # second entry of EXCHANGE_NAME is presumably the downloader
    # exchange — TODO confirm against bspider.config.default_settings
    exchange = EXCHANGE_NAME[1]

    def get_work_obj(self, project: Project, sign: Sign):
        """Build the worker object that processes *project*'s downloads."""
        return AsyncDownloader(project, sign, self.log_fn)
|
# CVS keyword expansion placeholder for the file revision
Version = '$Header$'
from Components.Sources.Source import Source
from Components.Sources.ServiceList import ServiceList
from Components.config import config
from Tools.Directories import resolveFilename, SCOPE_CONFIG, SCOPE_HDD
from enigma import eServiceReference
from re import sub
from time import strftime, localtime, time
class WAPfunctions(Source):
    """enigma2 web-interface Source backing the WAP timer-edit forms (Python 2).

    Each instance serves exactly one of the helper functions below,
    selected by ``func`` at construction time; handleCommand() dispatches
    the request parameters to it and caches the outcome in ``self.result``
    (exposed via the ``list`` and ``text`` properties).
    """
    # selector values for the ``func`` constructor argument
    LISTTIME = 0
    REPEATED = 1
    SERVICELIST = 2
    OPTIONLIST = 3
    FILLVALUE = 4
    LOCATIONLIST = 5
    TAGLIST = 6
    DELETEOLD = 7
    # column indices of the default 3-tuple rows in self.result
    lut = { "Name":0,
            "Value":1,
            "Selected":2
          }

    def __init__(self, session, func=LISTTIME):
        """Remember the session and which helper this instance serves."""
        self.func = func
        Source.__init__(self)
        self.session = session
        # placeholder until handleCommand() has run
        self.result = ( "unknown command (%s)" % (self.func), )

    def handleCommand(self, cmd):
        """Dispatch *cmd* (the request parameters) to the selected helper."""
        print "WAPfunctions: handleCommand", cmd
        if self.func is self.LISTTIME:
            self.result = self.fillListTime(cmd)
        elif self.func is self.REPEATED:
            self.result = self.fillRepeated(cmd)
        elif self.func is self.SERVICELIST:
            self.result = self.serviceList(cmd)
        elif self.func is self.OPTIONLIST:
            self.result = self.fillOptionList(cmd)
        elif self.func is self.FILLVALUE:
            self.result = self.fillValue(cmd)
        elif self.func is self.LOCATIONLIST:
            self.result = self.locationList(cmd)
        elif self.func is self.TAGLIST:
            self.result = self.tagList(cmd)
        elif self.func is self.DELETEOLD:
            self.result = self.deleteOldSaved(cmd)
        else:
            self.result = ( "unknown command cmd(%s) self.func(%s)" % (cmd, self.func), )

    def fillListTime(self, param):
        """Build the option rows for one date/time <select> of the timer form.

        *param* maps the field name being filled (smin/shour/emin/ehour/
        sday/day/smonth/month/syear/year) to its current selection;
        'begin'/'end' carry the timer's epoch times and set the defaults.
        """
        print "fillListTime", param
        input = 0    # NOTE: shadows the builtin `input` (legacy py2 code)
        start = 1
        end = 1
        timeNow = time()
        timePlusTwo = timeNow + 7200    # default end = begin + 2 hours
        if 'begin' in param:
            begin = param['begin'] or 0
            begin = int(begin)
            del param['begin']
            if begin > 0:
                timeNow = begin
        if 'end' in param:
            end = param['end'] or 0
            end = int(end)
            del param['end']
            if end > 0:
                timePlusTwo = end
        # default selections derived from the begin/end timestamps
        t = {}
        t["sday"] = t["day"] = strftime("%d", localtime(timeNow))
        t["smonth"] = t["month"] = strftime("%m", localtime(timeNow))
        t["syear"] = t["year"] = strftime("%Y", localtime(timeNow))
        t["smin"] = strftime("%M", localtime(timeNow))
        t["shour"] = strftime("%H", localtime(timeNow))
        t["emin"] = strftime("%M", localtime(timePlusTwo))
        t["ehour"] = strftime("%H", localtime(timePlusTwo))
        # the single non-sRef parameter names the field being filled
        key = ""
        for i in param:
            p = str(i)
            if p != "sRef" and param[p] != None:
                key = p
        # value range offered by the <select> for that field
        if key == "smin" or key == "emin" :
            start = 0
            end = 59
        elif key == "shour" or key == "ehour":
            start = 0
            end = 23
        elif key == "day" or key == "sday":
            start = 1
            end = 31
        elif key == "month" or key == "smonth":
            start = 1
            end = 12
        else:
            # year (or unknown) field: offer current value .. value + 2
            start = int(t[key])
            end = int(t[key]) + 2
        # the currently selected value; symbolic values map to the defaults
        if param[key] == "now" or param[key] == "end" or param[key] == "begin":
            input = int(t[key])
        else:
            input = param[key] or 0
            input = int(input)
        self.result = self.fillOptionListAny(input, start, end)
        return self.result

    def fillOptionListAny(self, input, start, end):
        """Rows [label, value, selected-flag] for start..end, zero-padded labels."""
        returnList = []
        for i in range(start, end + 1, 1):
            returnList1 = []
            j = str(i)
            if len(j) == 1:
                j = "0%s" % j
            returnList1.extend((j, i))
            if i == input:
                returnList1.append("selected")
            else:
                returnList1.append("")
            returnList.append(returnList1)
        return returnList

    def fillRepeated(self, param):
        """Decode the repeat-days bitmask into checkbox rows.

        Bit layout: Mo=1, Tu=2, We=4, Th=8, Fr=16, Sa=32, Su=64; 31 means
        Mo-Fr and 127 means Mo-Su.  Each matched bit is subtracted before
        the remaining days are tested.
        """
        print "fillRepeated", param
        repeated = param or 0
        repeated = int(repeated)
        # this result uses 4 columns, so override the class-level lut
        self.lut = {"Name":0
            , "Value":1
            , "Description":2
            , "Selected":3
            }
        mo = ["mo", 1, "Mo "]#"Monday"]
        tu = ["tu", 2, "Tu "]#"Tuesday"]
        we = ["we", 4, "We "]#"Wednesday"]
        th = ["th", 8, "Th "]#"Thursday"]
        fr = ["fr", 16, "Fr "]#"Friday"]
        sa = ["sa", 32, "Sa "]#"Saturday"]
        su = ["su", 64, "Su "]#"Sunday"]
        mf = ["mf", 31, "Mo-Fr"]
        ms = ["ms", 127, "Mo-Su"]
        # exact 127 -> only the Mo-Su shortcut is checked
        if repeated == 127:
            repeated = repeated - 127
            ms.append("checked")
        else:
            ms.append("")
        if repeated >= 64:
            repeated = repeated - 64
            su.append("checked")
        else:
            su.append("")
        if repeated >= 32:
            repeated = repeated - 32
            sa.append("checked")
        else:
            sa.append("")
        # exact 31 (after Sa/Su were peeled off) -> Mo-Fr shortcut
        if repeated == 31:
            repeated = repeated - 31
            mf.append("checked")
        else:
            mf.append("")
        if repeated >= 16:
            repeated = repeated - 16
            fr.append("checked")
        else:
            fr.append("")
        if repeated >= 8:
            repeated = repeated - 8
            th.append("checked")
        else:
            th.append("")
        if repeated >= 4:
            repeated = repeated - 4
            we.append("checked")
        else:
            we.append("")
        if repeated >= 2:
            repeated = repeated - 2
            tu.append("checked")
        else:
            tu.append("")
        if repeated == 1:
            repeated = repeated - 1
            mo.append("checked")
        else:
            mo.append("")
        return [
            mo,
            tu,
            we,
            th,
            fr,
            sa,
            su,
            mf,
            ms,
            ]

    def serviceListOne(self, bouquet, selref):
        """Rows [name, ref, selected-flag] for the services of one bouquet.

        Sets self.sRefFound when *selref* appears in the bouquet.
        """
        ref = eServiceReference(bouquet)
        self.servicelist = ServiceList(ref, command_func=self.getServiceList, validate_commands=False)
        self.servicelist.setRoot(ref)
        returnList = []
        for (ref2, name) in self.servicelist.getServicesAsList():
            print "ref2: (", ref2, ") name: (", name, ")"
            returnListPart = [
                name,
                ref2
                ]
            if ref2 == str(selref):
                returnListPart.append("selected")
                self.sRefFound = 1
            else:
                returnListPart.append("")
            returnList.append(returnListPart)
        return returnList

    def serviceList(self, param):
        """Service rows for one bouquet, or for all TV+radio bouquets when
        no bouquet is given; prepends the raw sRef when it was not found."""
        print "serviceList: ", param
        sRef = str(param["sRef"])
        bouquet = str(param["bouquet"])
        self.sRefFound = 0
        if bouquet == '':
            # no bouquet selected: walk every TV and radio bouquet,
            # inserting a "-- name --" separator row per bouquet
            returnList = []
            bouquet = '1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "bouquets.tv" ORDER BY bouquet'
            ref = eServiceReference(bouquet)
            self.servicelist = ServiceList(ref, command_func=self.getServiceList, validate_commands=False)
            self.servicelist.setRoot(ref)
            for (ref2, name) in self.servicelist.getServicesAsList():
                part = self.serviceListOne(ref2, sRef)
                if part:
                    returnList = returnList + [["-- " + name + " --", "<" + name + ">", ""]] + part
            bouquet = '1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "bouquets.radio" ORDER BY bouquet'
            ref = eServiceReference(bouquet)
            self.servicelist = ServiceList(ref, command_func=self.getServiceList, validate_commands=False)
            self.servicelist.setRoot(ref)
            for (ref2, name) in self.servicelist.getServicesAsList():
                part = self.serviceListOne(ref2, sRef)
                if part:
                    returnList = returnList + [["-- " + name + " --", "<" + name + ">", ""]] + part
        else:
            returnList = self.serviceListOne(bouquet, sRef)
        # keep an externally supplied sRef selectable even if unknown
        if self.sRefFound == 0 and sRef != '':
            returnListPart = ["Inserted", sRef, "selected"]
            returnList = [returnListPart] + returnList
        #print returnList
        return returnList

    def getServiceList(self, ref):
        """command_func callback used by ServiceList to change its root."""
        self.servicelist.root = ref

    def locationList(self, param):
        """Movie-directory rows, with *param* (or the HDD default) selected."""
        print "locationList", param
        dirname = param
        lst = config.movielist.videodirs.value
        if not dirname:
            dirname = resolveFilename(SCOPE_HDD)
        if not dirname in lst:
            lst = [dirname] + lst
        returnList = [[lst[i], i, dirname == lst[i] and "selected" or ""] for i in range(len(lst))]
        return returnList

    def tagList(self, param):
        """Movie-tag rows from the 'movietags' config file, *param* selected."""
        print "tagList", param
        tag = param
        try:
            file = open(resolveFilename(SCOPE_CONFIG, "movietags"))
            taglist = [x.rstrip() for x in file]
            while "" in taglist:
                taglist.remove("")
            file.close()
        except IOError, ioe:
            # no movietags file yet: start with an empty list
            taglist = []
        if not tag in taglist:
            taglist = [tag] + taglist
        # always offer an empty "no tag" entry at the end
        if not "" in taglist:
            taglist.append("")
        returnList = [[taglist[i], i, tag == taglist[i] and "selected" or ""] for i in range(len(taglist))]
        return returnList

    def fillOptionList(self, param):
        """Option rows for the 'justplay' or 'afterevent' timer selects."""
        print "fillOptionList", param
        if "justplay" in param:
            number = param["justplay"] or 0
            number = int(number)
            return (
                ("Record", 0, number == 0 and "selected" or ""),
                ("Zap", 1, number == 1 and "selected" or "")
                )
        elif "afterevent" in param:
            number = param["afterevent"] or 0
            number = int(number)
            return (
                ("Nothing", 0, number == 0 and "selected" or ""),
                ("Standby", 1, number == 1 and "selected" or ""),
                ("Deepstandby/Shutdown", 2, number == 2 and "selected" or ""),
                ("Auto", 3, number == 3 and "selected" or "")
                )
        else:
            return ()

    def deleteOldSaved(self, param):
        """Hidden-field rows used when saving replaces an existing timer."""
        print "deleteOldSaved", param
        returnList = [
            ("deleteOldOnSave", param["deleteOldOnSave"], ""),
            ("command", param["command"], "")
            ]
        if int(param["deleteOldOnSave"]) == 1:
            # carry the old timer's identity so it can be removed on save
            returnList.extend((
                ("channelOld", param["sRef"], ""),
                ("beginOld", param["begin"], ""),
                ("endOld", param["end"], "")
                ))
        return returnList

    def fillValue(self, param):
        """Single passthrough row echoing *param* back to the template."""
        print "fillValue: ", param
        return (("", param, ""),)

    def getText(self):
        # assumes self.result is a (result, text) pair — only valid for
        # helpers that produce 2-tuples; TODO confirm callers
        (result, text) = self.result
        return text

    def filterXML(self, item):
        # NOTE(review): this looks like HTML-entity escaping whose entities
        # were lost in transit ("&" -> "&amp;" etc.); as written every
        # replacement is a no-op — restore from upstream before relying on it.
        item = item.replace("&", "&").replace("<", "<").replace('"', '"').replace(">", ">")
        return item

    text = property(getText)
    # NOTE: class attribute `list` shadows the builtin within this class body
    list = property(lambda self: self.result)
|
import os
from flask import Flask
from flask_smorest import Api
from .characters.rest import blp_characters
from .comments.rest import blp_comments
from .db import db
from .episodes.rest import blueprint_episodes
class Config:
    """flask-smorest settings: API metadata plus Swagger and ReDoc UIs under /api."""
    API_TITLE = "Rick and Morty API"
    API_VERSION = "v1"
    OPENAPI_VERSION = "3.0.2"
    # serve the generated OpenAPI spec and doc UIs under /api
    OPENAPI_URL_PREFIX = "/api"
    OPENAPI_SWAGGER_UI_PATH = "/swagger"
    OPENAPI_SWAGGER_UI_VERSION = "3.24.2"
    OPENAPI_SWAGGER_UI_URL = "https://cdnjs.cloudflare.com/ajax/libs/swagger-ui/3.24.2/"
    OPENAPI_REDOC_PATH = "redoc"
def init_db(app: Flask) -> None:
    """Setup database.

    Uses DATABASE_URL when set, otherwise a local SQLite file; binds the
    shared SQLAlchemy instance and creates all tables on the first request.
    """
    app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get("DATABASE_URL", "sqlite:///data.db")
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    db.init_app(app)

    # NOTE(review): before_first_request was removed in Flask 2.3 —
    # verify the pinned Flask version before upgrading.
    @app.before_first_request
    def create_tables() -> None:
        db.create_all()
def create_app() -> Flask:
    """Create and configure the Flask application."""
    application = Flask(__name__)
    application.config.from_object(Config)
    api = Api(application)
    # register all resource blueprints (order preserved)
    for blueprint in (blp_characters, blueprint_episodes, blp_comments):
        api.register_blueprint(blueprint)
    init_db(application)
    return application
# module-level WSGI entry point (used by servers that import `app`)
app = create_app()

if __name__ == "__main__":
    # development server only
    app.run(debug=True)
|
# -*- coding: utf-8 -*-
"""
flask.app
~~~~~~~~~
App for flask-snippets
:copyright: (c) 2013 by fsp.
:license: BSD.
"""
from flask import Flask
app = Flask(__name__)
# NOTE(review): debug mode and a hard-coded secret key are acceptable for
# this local snippets app but must never reach production.
app.debug = True
app.secret_key = 'flask-snippets-fsp'

if __name__ == "__main__":
    app.run()
|
import os
import shutil
import pytest
import sys
# enable pytester's `testdir` fixture for the fixtures below
pytest_plugins = "pytester"
@pytest.fixture
def exes(testdir, request):
    """
    Returns a fixture that can be used to obtain the executables found
    in the same directory as the test requesting it. The executables
    are copied first to testdir's tmpdir location to ensure tests don't
    interfere with each other.
    """
    class _Executables:
        def get(self, name, new_name=None):
            # copy <test dir>/<name> into the tmpdir, optionally renaming it
            target = new_name if new_name else os.path.basename(name)
            src = os.path.join(request.node.fspath.dirname, self.exe_name(name))
            dst = testdir.tmpdir.join(self.exe_name(target))
            shutil.copy(str(src), str(dst))
            return str(dst)

        def exe_name(self, name):
            # Windows executables carry a .exe suffix
            return name + ".exe" if sys.platform.startswith("win") else name

    return _Executables()
|
from django.db import models
from django.contrib.postgres import fields as postgres_fields
class Diary(models.Model):
    """Diary row backed by the ``diaries`` table."""
    id = models.PositiveIntegerField(primary_key=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    # revision counter — presumably for optimistic locking; TODO confirm
    version = models.BigIntegerField(default=0)
    # Bug fix: default='{}' made new rows default to the JSON *string* "{}";
    # a callable default gives each row its own empty JSON object.
    # (Requires a new migration for the changed default.)
    data = postgres_fields.JSONField(default=dict)

    class Meta:
        db_table = 'diaries'
|
import os
import subprocess
import sys
from pathlib import Path
from Download import FileDownload, YesNoQuestion
from io import BytesIO
from urllib.request import urlopen
from zipfile import ZipFile
# Get vulkan sdk from environment variables
# if it exists we don't need to install vulkan
VK_SDK = os.environ.get('VULKAN_SDK')  # None when no SDK is installed
# installer download location and the exact SDK version Opium is pinned to
VK_SDK_INSTALLER_URL = 'https://sdk.lunarg.com/sdk/download/1.3.204.1/windows/vulkan_sdk.exe'
OP_VULKAN_VERSION = '1.3.204.1'
# where the downloaded installer is written, relative to the repo root
VK_SDK_EXE_PATH = 'Opium/external/VulkanSDK/VulkanSDK.exe'
def Install_VK_SDK():
    """Download the pinned Vulkan SDK installer and launch it interactively."""
    print(f'Downloading {VK_SDK_INSTALLER_URL} to {VK_SDK_EXE_PATH}')
    FileDownload(VK_SDK_INSTALLER_URL, VK_SDK_EXE_PATH)
    print("Finished!")
    print("Now Vulkan SDK installer will start...")
    # hand off to the graphical installer (os.startfile is Windows-only)
    os.startfile(os.path.abspath(VK_SDK_EXE_PATH))
    print("Re-run this script after installation")
def Install_VK_Prompt():
    """Offer to install the SDK; exit the script after starting the installer."""
    print("Would you like to install the Vulkan SDK")
    if YesNoQuestion():
        Install_VK_SDK()
        quit()
def Check_VK_SDK():
    """Return True when VULKAN_SDK exists and matches the pinned version.

    Otherwise prompt the user to install it and return False.
    """
    if VK_SDK is None:
        print("Could not find a Vulkan SDK in environment variables!")
        Install_VK_Prompt()
        return False
    if OP_VULKAN_VERSION not in VK_SDK:
        print(f"Located Vulkan SDK at {VK_SDK}")
        print(f"Version of Vulkan SDK does not match! (Opium requires {OP_VULKAN_VERSION})")
        Install_VK_Prompt()
        return False
    print(f"Correct Vulkan SDK is located at {VK_SDK}")
    return True
# -*- coding: utf-8 -*-
import logging
if __name__ == '__main__':
    # configure a default logging handler only when run as a script
    logging.basicConfig()
_log = logging.getLogger(__name__)
import unittest
import A
import B
class Trac43_1(unittest.TestCase):
    def testCode(self):
        """B.b1's a2elt element accepts and returns A.a2 instances (trac #43)."""
        first = A.a2('a2m-value', 'legal')
        self.assertEqual('a2m-value', first.a2member)
        self.assertEqual(B.bst.legal, first.a2b)
        wrapper = B.b1(first, 'legal')
        self.assertEqual(wrapper.a2elt, first)
        # replacing the element must preserve both member and enum values
        replacement = A.a2('anotherValue', 'legal')
        wrapper.a2elt = replacement
        self.assertEqual('anotherValue', wrapper.a2elt.a2member)
        self.assertEqual(B.bst.legal, wrapper.a2elt.a2b)
# NOTE(review): executes at import time — there is no __main__ guard.
# PyXB's generated test modules do this deliberately; confirm before
# adding a guard.
unittest.main()
|
import queue
from unittest import TestCase
from aworker.task import Worker
from aworker.queue.memory import MemoryQueue
from aworker.serializers import PickleSerializer
from aworker.exceptions import DuplicateTaskError
def x2(x):
    """Double *x* (synchronous task used by the worker tests)."""
    return 2 * x
async def async_x2(x):
    """Double *x* (asynchronous task used by the worker tests)."""
    return 2 * x
class WorkerTestCase(TestCase):
    """Exercise task registration and queueing against an in-memory queue."""

    def setUp(self):
        # fresh queue/worker pair per test
        self.queue_instance = queue.Queue()
        self.mq = MemoryQueue(config=dict(queue=self.queue_instance))
        self.worker = Worker(self.mq)

    def test_register(self):
        """register() binds sync and async callables under the given names."""
        for task_name, fn in (('x2', x2), ('ax2', async_x2)):
            task = self.worker.register(task_name)(fn)
            self.assertEqual(task.task_name, task_name)
            self.assertIn(task_name, self.worker.tasks)

    def test_register_duplicate(self):
        """Registering the same name twice raises DuplicateTaskError."""
        self.worker.register('x2')(x2)
        with self.assertRaises(DuplicateTaskError):
            self.worker.register('x2')(async_x2)

    def test_memory_queue(self):
        """later() pushes the returned task info onto the backing queue unchanged."""
        sync_task = self.worker.register('x2')(x2)
        async_task = self.worker.register('ax2')(async_x2)
        for task, arg in ((sync_task, 4), (async_task, 3)):
            scheduled = task.later(arg)
            self.assertEqual(scheduled, self.queue_instance.get())

    def test_pickled_memory_queue(self):
        """With a PickleSerializer the queued payload unpacks to the task info."""
        self.worker.queue.serializer = PickleSerializer()
        task = self.worker.register('x2')(x2)
        scheduled = task.later(4)
        raw = self.queue_instance.get()
        self.assertEqual(PickleSerializer().unpack(raw), scheduled)
|
import sys
import os
import argparse
import onnx
import json
import mxnet as mx
from onnx import helper
from onnx import TensorProto
from onnx import numpy_helper
import onnxruntime
import cv2
print('mxnet version:', mx.__version__)
print('onnx version:', onnx.__version__)
# NOTE(review): these are lexicographic string comparisons — '1.10' < '1.8';
# acceptable only while versions stay below x.10.
assert mx.__version__ >= '1.8', 'mxnet version should >= 1.8'
assert onnx.__version__ >= '1.2.1', 'onnx version should >= 1.2.1'
import numpy as np
from mxnet.contrib import onnx as onnx_mxnet
def create_map(graph_member_list):
    """Index ONNX graph members (nodes / inputs / initializers) by their .name."""
    return {member.name: member for member in graph_member_list}
parser = argparse.ArgumentParser(description='convert mxnet model to onnx')
# general
parser.add_argument('params', default='./r100a/model-0000.params', help='mxnet params to load.')
parser.add_argument('output', default='./r100a.onnx', help='path to write onnx model.')
parser.add_argument('--eps', default=1.0e-8, type=float, help='eps for weights.')
parser.add_argument('--input-shape', default='3,112,112', help='input shape.')
parser.add_argument('--check', action='store_true')
parser.add_argument('--input-mean', default=0.0, type=float, help='input mean for checking.')
parser.add_argument('--input-std', default=1.0, type=float, help='input std for checking.')
args = parser.parse_args()

# prepend the batch dimension: (1, C, H, W)
input_shape = (1,) + tuple( [int(x) for x in args.input_shape.split(',')] )

# derive "<prefix>-symbol.json" and the epoch from "<prefix>-<epoch>.params"
params_file = args.params
pos = params_file.rfind('-')
prefix = params_file[:pos]
epoch = int(params_file[pos+1:pos+5])
sym_file = prefix + "-symbol.json"
assert os.path.exists(sym_file)
assert os.path.exists(params_file)
sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)

# collect BatchNorm layers declared with fix_gamma=True: mxnet ignores the
# stored gamma for those, so it must be forced to 1.0 before export
nodes = json.loads(sym.tojson())['nodes']
bn_fixgamma_list = []
for nodeid, node in enumerate(nodes):
    if node['op'] == 'BatchNorm':
        attr = node['attrs']
        fix_gamma = False
        if attr is not None and 'fix_gamma' in attr:
            if str(attr['fix_gamma']).lower()=='true':
                fix_gamma = True
        if fix_gamma:
            bn_fixgamma_list.append(node['name'])
        #print(node, fix_gamma)
print('fixgamma list:', bn_fixgamma_list)
layer = None
#layer = 'conv_2_dw_relu' #for debug
if layer is not None:
    # truncate the network at an intermediate layer for debugging
    all_layers = sym.get_internals()
    sym = all_layers[layer + '_output']

# cast all weights to float32, zero out near-zero values (|w| < eps) and
# force gamma to 1.0 for the fix_gamma BatchNorm layers collected above
eps = args.eps
arg = {}
aux = {}
invalid = 0
ac = 0
for k in arg_params:
    v = arg_params[k]
    nv = v.asnumpy()
    nv = nv.astype(np.float32)
    #print(k, nv.shape)
    if k.endswith('_gamma'):
        # '_gamma' suffix is 6 chars; the rest is the BatchNorm layer name
        bnname = k[:-6]
        if bnname in bn_fixgamma_list:
            nv[:] = 1.0
    ac += nv.size
    invalid += np.count_nonzero(np.abs(nv)<eps)
    nv[np.abs(nv) < eps] = 0.0
    arg[k] = mx.nd.array(nv, dtype='float32')
arg_params = arg
# NOTE(review): `invalid`/`ac` count the zeroed weights but are never
# reported — presumably leftover debug counters.
invalid = 0
ac = 0
for k in aux_params:
    v = aux_params[k]
    nv = v.asnumpy().astype(np.float32)
    ac += nv.size
    invalid += np.count_nonzero(np.abs(nv)<eps)
    nv[np.abs(nv) < eps] = 0.0
    aux[k] = mx.nd.array(nv, dtype='float32')
aux_params = aux

# export to ONNX (opset 11) with the cleaned parameters
all_args = {}
all_args.update(arg_params)
all_args.update(aux_params)
converted_model_path = onnx_mxnet.export_model(sym, all_args, [input_shape], np.float32, args.output, opset_version=11)
model = onnx.load(args.output)
graph = model.graph
input_map = create_map(graph.input)
node_map = create_map(graph.node)
init_map = create_map(graph.initializer)

#fix PRelu issue
# reshape each PRelu slope initializer from (C,) to (C,1,1) so it
# broadcasts over NCHW activations as onnxruntime expects
for input_name in input_map.keys():
    if input_name.endswith('_gamma'):
        node_name = input_name[:-6]
        if not node_name in node_map:
            continue
        node = node_map[node_name]
        if node.op_type!='PRelu':
            continue
        _input_shape = input_map[input_name].type.tensor_type.shape.dim
        input_dim_val=_input_shape[0].dim_value
        graph.initializer.remove(init_map[input_name])
        weight_array = numpy_helper.to_array(init_map[input_name])
        b=[]
        for w in weight_array:
            b.append(w)
        new_nv = helper.make_tensor(input_name, TensorProto.FLOAT, [input_dim_val,1,1], b)
        graph.initializer.extend([new_nv])

# drop graph inputs that are really initializers (weights), leaving only
# the actual data input; also verify everything ended up as float32
for init_name in init_map.keys():
    weight_array = numpy_helper.to_array(init_map[init_name])
    assert weight_array.dtype==np.float32
    if init_name in input_map:
        graph.input.remove(input_map[init_name])

#support batch-inference
# make the batch dimension symbolic so any batch size is accepted
graph.input[0].type.tensor_type.shape.dim[0].dim_param = 'None'
onnx.save(model, args.output)
#start to check correctness
if args.check:
    # random image, preprocessed identically for both runtimes
    im_size = tuple(input_shape[2:])+(3,)
    img = np.random.randint(0, 256, size=im_size, dtype=np.uint8)
    input_size = tuple(input_shape[2:4][::-1])
    input_std = args.input_std
    input_mean = args.input_mean
    #print(img.shape, input_size)
    img = cv2.dnn.blobFromImage(img, 1.0/input_std, input_size, (input_mean, input_mean, input_mean), swapRB=True)
    # mxnet forward pass with the *original* (uncleaned) parameters
    ctx = mx.cpu()
    model = mx.mod.Module(symbol=sym, context=ctx, label_names = None)
    model.bind(for_training=False, data_shapes=[('data', input_shape)])
    _, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch) #reload original params
    model.set_params(arg_params, aux_params)
    data = mx.nd.array(img)
    db = mx.io.DataBatch(data=(data,))
    model.forward(db, is_train=False)
    x1 = model.get_outputs()[-1].asnumpy()
    # onnxruntime forward pass on the exported model
    session = onnxruntime.InferenceSession(args.output, None)
    input_name = session.get_inputs()[0].name
    output_name = session.get_outputs()[0].name
    x2 = session.run([output_name], {input_name : img})[0]
    # print both outputs for manual comparison (no numeric tolerance check)
    print(x1.shape, x2.shape)
    print(x1.flatten()[:20])
    print(x2.flatten()[:20])
|
# Purge-EdgeCast configuration constants.
# NOTE(review): `hex` shadows the builtin hex() at module scope; renaming
# would change this module's public interface, so it is only flagged here.
hex='hhhh'
token='aaaa-bbbb-ccccccccccc-ddddddd-eeee'  # API auth token (placeholder)
base_url = 'http://cdn.mydomain.com'  # CDN base URL whose objects get purged
platform = 'small' # small|large|flash|adn
lambda_region = 'us-east-1'  # region the Lambda function runs in
sourcebucket = 'mybucket'  # S3 bucket watched for changes
sdb_region = 'us-east-1'  # SimpleDB region
sdb_domain = 'purge-edgecast'  # SimpleDB domain holding state
sdb_item='bucket-map'  # SimpleDB item with the bucket mapping
# Generated by Django 3.0.7 on 2020-06-05 04:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a required FileField ``fileobject`` to the existing ``file`` model."""

    dependencies = [
        ('files', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='file',
            name='fileobject',
            # default=0 only backfills existing rows during this migration;
            # preserve_default=False drops it from the field afterwards
            field=models.FileField(default=0, upload_to=''),
            preserve_default=False,
        ),
    ]
|
def score(word):
    """Return the Scrabble score of *word* (case-insensitive).

    Characters that are not Scrabble letters (spaces, digits, punctuation)
    contribute 0, matching the original implementation's final else branch.
    """
    # standard English Scrabble letter values, grouped by score
    groups = {
        "AEIOULNRST": 1,
        "DG": 2,
        "BCMP": 3,
        "FHVWY": 4,
        "K": 5,
        "JX": 8,
        "QZ": 10,
    }
    points = {letter: value for letters, value in groups.items() for letter in letters}
    return sum(points.get(letter, 0) for letter in word.upper())
|
from .db import db
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
class User(db.Model, UserMixin):
    """Application user account (Flask-Login compatible via UserMixin)."""
    __tablename__ = 'users'

    id = db.Column(db.Integer, primary_key = True)
    first_name = db.Column(db.String(40), nullable = True)
    last_name = db.Column(db.String(40), nullable = True)
    user_name = db.Column(db.String(40), nullable = False, unique = True)
    email = db.Column(db.String(255), nullable = False, unique = True)
    phone_number = db.Column(db.String(30), nullable = True, unique = True)
    bio = db.Column(db.Text, nullable=True)
    # accumulated distance credit — presumably miles; TODO confirm units
    mileage = db.Column(db.Integer, nullable = False, default = 0)
    # only the werkzeug hash is stored; see the `password` property below
    hashed_password = db.Column(db.String(255), nullable = False)

    booking_calendar = db.relationship('BookingCalendar', back_populates='user')
    review = db.relationship('Review', back_populates='user')

    @property
    def password(self):
        # returns the stored *hash*, never the plaintext password
        return self.hashed_password

    @password.setter
    def password(self, password):
        # hash on assignment so plaintext is never persisted
        self.hashed_password = generate_password_hash(password)

    def check_password(self, password):
        """True when *password* matches the stored hash."""
        return check_password_hash(self.password, password)

    def to_dict(self):
        """Public JSON-safe representation (excludes email, phone, and hash)."""
        return {
            "first_name": self.first_name,
            "last_name": self.last_name,
            "username": self.user_name,
            "bio": self.bio,
            "mileage": self.mileage
        }
    # TODO: add a setter property to update mileage
|
import torch
import logging
def calculate_accuracy_prototypes(y_pred, y_train, plabels):
    """Accuracy (%) of nearest-prototype classification.

    *y_pred* holds distances to each prototype (smaller is closer); the
    predicted class is the label of the closest prototype.
    """
    with torch.no_grad():
        nearest = torch.argmin(y_pred, axis=1)
        hits = torch.sum(y_train == plabels[nearest])
        return torch.true_divide(hits, len(y_pred)) * 100
def predict_label(y_pred, plabels):
    """Label of the closest prototype for each row of distances in *y_pred*."""
    with torch.no_grad():
        nearest = torch.argmin(y_pred, 1)
        return plabels[nearest]
def setup_logger(args, name="datl"):
    """Configure root logging to a file plus stderr and return the logging module.

    The log file is ``<args.log_path>log_<name>_<source>_<target>.txt`` where
    source/target are the second-to-last '/'-separated components of
    args.source_dir / args.target_dir.
    """
    # Remove handlers left by earlier basicConfig calls so the new
    # configuration takes effect (iterate a copy while mutating).
    # (Fix: this was a list comprehension used only for its side effects.)
    for handler in logging.root.handlers[:]:
        logging.root.removeHandler(handler)
    dataset = str(args.source_dir.split("/")[-2]) + "_" + str(args.target_dir.split("/")[-2])
    handlers = [
        logging.FileHandler(args.log_path + "log_" + name + "_" + dataset + ".txt"),
        logging.StreamHandler(),
    ]
    logging.basicConfig(level=logging.INFO, format=' %(message)s', handlers=handlers)
    return logging
## Find a key in a row-wise and column-wise sorted matrix.
## Start the search from the top-right corner element.
def search_matrix(mat, key, n, m):
    """Staircase search in an n x m matrix sorted row-wise and column-wise.

    Starts at the top-right corner: moving left discards larger values,
    moving down discards smaller ones, giving O(n + m) time.
    Returns (row, col) when found, -1 otherwise.

    BUGFIX: the original indexed an undefined name `arr` instead of the
    parameter `mat`, raising NameError on every call.
    """
    i = 0
    j = m - 1
    while i >= 0 and i < n and j >= 0 and j < m:
        if mat[i][j] == key:
            return i, j
        elif mat[i][j] > key:
            j -= 1  # everything below in this column is even larger
        else:
            i += 1  # everything to the left in this row is even smaller
    return -1
## Driver code: read n, m, the key, then n rows of m integers.
if __name__ == '__main__':
    n, m = list(map(int, input().split()))
    key = int(input())
    # BUGFIX: the original wrapped every row in an extra list
    # (`[[list(map(...))]]`), building an n x 1 x m structure that
    # search_matrix could not index; one row per input line is intended.
    mat = [list(map(int, input().split())) for i in range(n)]
    print('index of key', search_matrix(mat, key, n, m))
|
import pyqrcode
# .PNG
# Encode the target URL as a QR code and write it to qrcode.png
# (scale=8 renders each QR module as an 8x8 pixel square).
source = "www.github.com/diablo/"
url = pyqrcode.create(source)
url.png('qrcode.png', scale = 8)
url.show()
# .SVG
# source = "www.github.com/CorvoDev/"
# url = pyqrcode.create(source)
# url.svg('qrcode.svg', scale = 8)
# url.show()
|
import pytest, sys
from pytest_in_60_minutes import mathFunc
from pytest_in_60_minutes.mathFunc import StudentDB
################################################
# Test the "add" function
################################################
# skip only when python version is older than version 3.3
@pytest.mark.skipif(sys.version_info < (3,3), reason="do not run number add test")
def testAdd():
    """add(): two ints, the single-argument default, and floats."""
    assert mathFunc.add(7,3) == 10
    assert mathFunc.add(7) == 9  # add(7) == 9 implies a default second operand of 2
    assert mathFunc.add(10.5, 25.5) == 36.0
    print(">>>>>>>", mathFunc.add(7,3))
@pytest.mark.strings
def testAddStrings():
    """add() concatenates strings and returns a str."""
    result = mathFunc.add("Hello", " World!")
    assert result == "Hello World!"
    assert type(result) is str # String type
    assert "Heldo" not in result  # deliberate misspelling: must not appear
@pytest.mark.parametrize('value1, value2, result',
                         [(7,3,10),
                          ("Hello", " World!", "Hello World!"),
                          (10.5, 25.5, 36)
                          ])
def testAddByGroup(value1, value2, result):
    """Parametrized add() cases covering ints, strings and floats."""
    assert mathFunc.add(value1, value2) == result
################################################
# Test the "multiply" function
################################################
@pytest.mark.number
def testMultiply():
    """multiply(): two ints and the single-argument default."""
    assert mathFunc.multiply(5,5) == 25
    assert mathFunc.multiply(5) == 10  # multiply(5) == 10 implies a default factor of 2
    assert mathFunc.multiply(7) == 14
@pytest.mark.strings
def testMultiplyStrings():
    """multiply() repeats a string, returning a str."""
    assert mathFunc.multiply("Hello ", 3) == "Hello Hello Hello "
    result = mathFunc.multiply("Hello ")
    assert result == "Hello Hello "
    assert type(result) is str # String type
    assert "Hello" in result
################################################
# Test for name matching in StudentDB json database
# using the pytest fixture
################################################
# scope="module" allows setup and teardown to be executed only one time
@pytest.fixture(scope="module")
def db():
    """Module-scoped StudentDB connected to data.json; closed at teardown."""
    print("----setup----")
    db = StudentDB()
    db.connect("data.json")
    yield db
    print("----teardown----")
    db.close()
def testScottData(db):
    """Scott's record: id 1, passing result."""
    scottData = db.getData("Scott")
    assert scottData["id"] == 1
    assert scottData["name"] == "Scott"
    assert scottData["result"] == "pass"
def testMarkData(db):
    """Mark's record: id 2, failing result.

    Local renamed from the copy-pasted `scottData` to `markData` for
    consistency with the record actually fetched.
    """
    markData = db.getData("Mark")
    assert markData["id"] == 2
    assert markData["name"] == "Mark"
    assert markData["result"] == "fail"
|
"""Lens serializers."""
# Rest framework
from rest_framework import serializers
# Models
from apps.products.models import Lens
from apps.products.models.features import Feature
# Serializers
from .features import FeatureModelSerializer
class LensModelSerializer(serializers.ModelSerializer):
    """Lens model serializer."""
    # Nested, read-only list of the lens's features.
    feature = FeatureModelSerializer(read_only=True, many=True)
    class Meta:
        """Meta class."""
        model = Lens
        fields = (
            'name',
            'manufacturer',
            'sku',
            'brand',
            'lens_aperture',
            'focal_distance',
            'feature',
        )
class CreateLenSerializer(serializers.Serializer):
    """Create lens product serializer."""
    name = serializers.CharField(max_length=60)
    manufacturer = serializers.CharField(max_length=60)
    sku = serializers.CharField(max_length=20)
    brand = serializers.CharField(max_length=20)
    lens_aperture = serializers.CharField(max_length=7)
    focal_distance = serializers.CharField(max_length=20)
    def create(self, validated_data):
        """Persist and return a new Lens built from the validated payload."""
        lens = Lens.objects.create(**validated_data)
        return lens
class AddLenFeatureSerializer(serializers.Serializer):
    """Add feature serializer."""
    name = serializers.CharField(max_length=50)
    description = serializers.CharField(max_length=255)
    def create(self, validated_data):
        """Create a Feature and attach it to the lens passed in via the
        serializer context (key 'len'). Returns the lens, not the feature."""
        feature = Feature.objects.create(**validated_data)
        lens = self.context['len']
        lens.feature.add(feature)
        return lens
# %% [markdown]
# # Pre-sail checklist - Ocean protocol component status
# With simulated Kubernetes endpoints deployed, this script will make a simple
# HTTP request to each, and report the status returned.
#
# %%
# Standard imports
import requests
import os
import logging
from pathlib import Path
from mantaray_utilities import logging as manta_logging
#%%
# For this test, set the configuration environment variable for kubernetes.
# here it is hard-coded for IPython execution, but in general, it is set in your system environment.
# manta_config.name_deployment_type()
# os.environ['USE_K8S_CLUSTER'] = 'true'
manta_logging.logger.setLevel('INFO')
# Get the configuration file path for this environment; fail fast if missing.
OCEAN_CONFIG_PATH = Path(os.environ['OCEAN_CONFIG_PATH'])
assert OCEAN_CONFIG_PATH.exists()
logging.info("OCEAN_CONFIG_PATH:{}".format(OCEAN_CONFIG_PATH))
import configparser
config = configparser.ConfigParser()
config.read(OCEAN_CONFIG_PATH)
# %%
# The endpoints (microservices) are defined in the below dictionary
#%%
# For now, the endpoints are hard-coded by the dev-ops team.
endpoints_dict = {
    'aquarius': config['resources']['aquarius.url'],
    'brizo': config['resources']['brizo.url'],
    'Ethereum node': config['keeper-contracts']['keeper.url'],
    'secret_store': config['keeper-contracts']['secret_store.url'],
}
# Swagger UI pages exposed by the aquarius and brizo services.
swagger_pages = dict()
swagger_pages['aquarius Swagger documentation'] = endpoints_dict['aquarius'] + '/api/v1/docs/'
swagger_pages['brizo Swagger documentation'] = endpoints_dict['brizo'] + '/api/v1/docs/'
def check_endpoint(endpoint_name, endpoint_url, verb='GET', timeout=None):
    """HTTP Request on the given URL; returns the Response object.

    Parameters:
        endpoint_name: label used in the debug log line.
        endpoint_url: URL to hit.
        verb: HTTP method, default 'GET'.
        timeout: optional seconds before giving up. None (the default)
            keeps the original wait-forever behaviour; pass a number to
            avoid hanging on an unresponsive endpoint.
    """
    res = requests.request(verb, endpoint_url, timeout=timeout)
    logging.debug("{} : returns {}".format(endpoint_name, res.status_code))
    return res
# %%
# The microservices for MetaData storage (aquarius) and for service negotiation (brizo) have Swagger documentation :)
#%%
print("Aquarius MetaData storage API:", swagger_pages['aquarius Swagger documentation'])
print("Brizo Access API:", swagger_pages['brizo Swagger documentation'])
# %% [markdown]
# Finally, we will iterate over the endpoint and check for a response
#
#%%
flag_fail = False
for endpoint in endpoints_dict:
    with manta_logging.LoggerCritical():
        print("Checking {} at {}".format(endpoint, endpoints_dict[endpoint]))
        try:
            res = check_endpoint(endpoint, endpoints_dict[endpoint])
            # JSON responses from Ocean components expose software/version.
            if 'Content-Type' in res.headers:
                if res.headers['Content-Type'] == 'application/json':
                    if 'software' in res.json().keys() and 'version' in res.json().keys():
                        print("\t Success: {} v{}".format(res.json()['software'], res.json()['version']))
                    else:
                        print("\t Success")
                else:
                    print("\t Success")
        # BUGFIX: the original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit; catch Exception so user
        # interrupts still propagate.
        except Exception:
            flag_fail = True
            print('\t Failed!')
if flag_fail:
    print("Failure in a component, please contact an administrator on our Gitter channel - https://gitter.im/oceanprotocol/Lobby")
|
from django.urls import path
from musica import views
# URL namespace, enables reversing as 'musica:musicadetail'.
app_name='musica'
urlpatterns = [
    # Song detail page addressed by numeric id plus slug.
    path('<int:id>/<slug:slug_da_musica>',views.index,name='musicadetail')
]
import sys
sys.path.append("..")
sys.path.append("../main")
from main import utils
import unittest
import random
import numpy as np
class Cepstrum_pitch_estim_test(unittest.TestCase):
    """Checks the cepstrum pitch estimator against known speaker recordings.

    The original placed bare strings at class level between the methods;
    they are now proper docstrings on the class and on each test.
    """

    def test_01(self):
        """Male speaker (bdl): estimated pitch must fall in 60-170 Hz."""
        pitch_men = utils.cepstrum_pitch_estim("data/bdl_a")
        self.assertTrue(pitch_men >= 60, "")
        self.assertTrue(pitch_men <= 170, "")

    def test_02(self):
        """Female speaker (slt): estimated pitch must fall in 171-300 Hz."""
        pitch_women = utils.cepstrum_pitch_estim("data/slt_a")
        self.assertTrue(pitch_women >= 171, "")
        self.assertTrue(pitch_women <= 300, "")
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2017-2018 Jagoba Pérez-Gómez
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from flask import Flask, send_from_directory, session
from flask_oauthlib.client import OAuth
from flask_mail import Mail
from logging.handlers import SMTPHandler
from neomodel import config
from src.application.config import NEO4J_CONFIG, GOOGLE_OAUTH_ID, GOOGLE_OAUTH_SECRET
from src.infrastructure.dependency_injector import DependencyInjector
from web.routing.admin import admin_page
from web.routing.client import client_page
from web.routing.api import api_page
from web.routing.user_authentication import user_authentication
import logging
import os
def download_dependencies():
    """Download the NLTK corpora (stopwords, punkt) the app needs."""
    import nltk
    nltk.download('stopwords')
    nltk.download('punkt')
def setup_database():
    """Point neomodel at the Neo4j instance.

    Uses a local bolt connection when DEBUG == 'True', otherwise the
    GrapheneDB credentials from the environment. Raises KeyError if the
    expected environment variables are missing.
    """
    if os.environ['DEBUG'] == 'True':
        config.DATABASE_URL = 'bolt://%s:%s@localhost:7687' % (NEO4J_CONFIG['user'], NEO4J_CONFIG['passwd'])
    else:
        BOLT_URL = os.environ['GRAPHENEDB_BOLT_URL']
        BOLT_USER = os.environ['GRAPHENEDB_BOLT_USER']
        BOLT_PASSWORD = os.environ['GRAPHENEDB_BOLT_PASSWORD']
        # Strip the scheme ('bolt://') before rebuilding the URL with credentials.
        config.DATABASE_URL = 'bolt://%s:%s@%s' % (BOLT_USER, BOLT_PASSWORD, BOLT_URL[BOLT_URL.find('//') + 2:])
def setup_email(app):
    """Configure Flask-Mail against Gmail over SSL and stash the Mail object.

    NOTE(review): MAIL_PORT is set to the string '465' rather than the
    integer 465 — Flask-Mail conventionally takes an int; confirm this
    works with the deployed version.
    """
    app.config['MAIL_SERVER'] = 'smtp.gmail.com'
    app.config['MAIL_PORT'] = '465'
    app.config['MAIL_USE_SSL'] = True
    app.config['MAIL_USERNAME'] = os.environ['MAIL_USERNAME']
    app.config['MAIL_PASSWORD'] = os.environ['MAIL_PASSWORD']
    mail = Mail(app)
    # Stored in app.config so other modules can send mail via app.config['email'].
    app.config['email'] = mail
def setup_logging(app):
    """Attach a stream handler to the Flask logger and configure stdlib logging.

    Log level is DEBUG when the DEBUG env var is 'True', WARNING otherwise.
    The SMTP error-mail handler is currently disabled (commented out).
    """
    # Set up Flask logging
    app.config['PROPAGATE_EXCEPTIONS'] = True
    formatter = logging.Formatter(
        '[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s'
    )
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    app.logger.addHandler(stream_handler)
    if os.environ['DEBUG'] == 'True':
        app.logger.setLevel(logging.DEBUG)
    else:
        app.logger.setLevel(logging.WARNING)
    # mail_handler = SMTPHandler(
    #     mailhost='smtp',
    #     fromaddr=app.config['MAIL_USERNAME'],
    #     credentials=app.config['MAIL_PASSWORD'],
    #     toaddrs='jagobapg@protonmail.com',
    #     subject='[FATAL ERROR] Restaurant Review App'
    # )
    # mail_handler.setFormatter(formatter)
    # mail_handler.setLevel(logging.CRITICAL)
    # app.logger.addHandler(mail_handler)
    # Set up Python logging
    logging.basicConfig(
        format='%(levelname)s:%(message)s',
        level=logging.ERROR
    )
def setup_google_oauth(app):
    """Register Google as an OAuth remote app (email scope) and return it.

    The remote app is also stored under app.config['GOOGLE'] for reuse.
    """
    app.config['GOOGLE_ID'] = GOOGLE_OAUTH_ID
    app.config['GOOGLE_SECRET'] = GOOGLE_OAUTH_SECRET
    oauth = OAuth(app)
    google = oauth.remote_app(
        'google',
        consumer_key=app.config.get('GOOGLE_ID'),
        consumer_secret=app.config.get('GOOGLE_SECRET'),
        request_token_params={
            'scope': 'email'
        },
        base_url='https://www.googleapis.com/oauth2/v1/',
        request_token_url=None,
        access_token_method='POST',
        access_token_url='https://accounts.google.com/o/oauth2/token',
        authorize_url='https://accounts.google.com/o/oauth2/auth',
    )
    app.config['GOOGLE'] = google
    return google
def register_blueprints(app):
    """Attach every route blueprint to the app under its URL prefix."""
    mounts = (
        (admin_page, '/admin'),
        (client_page, '/'),
        (api_page, '/api'),
        (user_authentication, '/auth'),
    )
    for blueprint, prefix in mounts:
        app.register_blueprint(blueprint, url_prefix=prefix)
def load_basic_data(app):
    """Run the basic-structure fixture loader obtained from the DI container."""
    dependency_injector = app.config['dependency_injector']
    loader = dependency_injector.get('app.fixtures.basic_structure_loader')
    loader.execute()
# Initialise database and application
setup_database()
download_dependencies()
app = Flask(__name__)
app.secret_key = os.environ['SECRET']
setup_logging(app)
setup_email(app)
google = setup_google_oauth(app)
register_blueprints(app)
app.config['dependency_injector'] = DependencyInjector()
load_basic_data(app)
@google.tokengetter
def get_google_oauth_token():
    """Return the Google OAuth token cached in the session, if any."""
    return session.get('google_token')
# General requests
@app.route('/static/<path:path>')
def get_static(path):
    """Serve static assets; note they live in the 'js' directory."""
    return send_from_directory('js', path)
@app.route('/images/<path:path>')
def get_images(path):
    """Serve image files from static/images."""
    return send_from_directory('static/images', path)
@app.route('/file/<path:path>')
def get_file(path):
    """Serve downloadable files from static/files."""
    return send_from_directory('static/files', path)
|
import torch
import numpy as np
import torch.nn as nn
from vae_network import *
import torch.nn.functional as F
from scipy.special import softmax
class NAS():
    """Genetic-algorithm neural architecture search for a conditional VAE.

    Each candidate is encoded by 0/1 masks selecting active encoder/decoder
    layers, per-layer neuron counts, and a latent dimension. Fitness is the
    inverse of the mean test loss (BCE + KL) after a short training run.

    Fixes over the original:
    - bit_flipping used np.invert (bitwise NOT: 0 -> -1, 1 -> -2), so every
      mutated mask violated the layer constraints and was discarded.
    - creep_mutation used randint(-3, 3) whose exclusive upper bound made a
      +3 step impossible.
    - child variables were undefined (NameError) whenever cr_prob or
      mut_prob < 1; children now default to their parents.
    - decoder_child_2 was reset based on a re-check of encoder_child_2.
    - latent_dim_child_2 was never constraint-checked before use.
    """

    def __init__(self, input_dim, c_dim, size, layer_upper_bound = 5, neuron_upper_bound = 1000,
                 neuron_lower_bound = 50, latent_upper_bound = 100, latent_lower_bound = 20,
                 trainloader = None, testloader = None):
        self.input_dim = input_dim
        self.cond_dim = c_dim
        self.size = size  # population size
        self.layer_upper_bound = layer_upper_bound
        self.neuron_upper_bound = neuron_upper_bound
        self.neuron_lower_bound = neuron_lower_bound
        # Reserve room for the condition vector inside the latent bound.
        self.latent_upper_bound = latent_upper_bound - c_dim
        self.latent_lower_bound = latent_lower_bound
        # 0/1 masks: which of the candidate layers are active.
        self.encoder_layer_population = np.random.randint(0, 2, (size, self.layer_upper_bound))
        self.decoder_layer_population = np.random.randint(0, 2, (size, self.layer_upper_bound))
        self.encoder_neuron_population = np.random.randint(self.neuron_lower_bound, self.neuron_upper_bound, (size, self.layer_upper_bound))
        self.decoder_neuron_population = np.random.randint(self.neuron_lower_bound, self.neuron_upper_bound, (size, self.layer_upper_bound))
        self.latent_population = np.random.randint(self.latent_lower_bound, self.latent_upper_bound, size)
        self.trainloader = trainloader
        self.testloader = testloader
        self.fitness = []
        self.best_model = None

    def check_layer_constraints(self, candidate):
        """A layer mask is valid iff every gene is 0 or 1."""
        if min(candidate) < 0 or max(candidate) > 1:
            return False
        return True

    def check_neuron_constraints(self, candidate):
        """Neuron counts must all lie within [lower, upper]."""
        if min(candidate) >= self.neuron_lower_bound and max(candidate) <= self.neuron_upper_bound:
            return True
        return False

    def check_latent_constraints(self, candidate):
        """Latent dimension must lie within [lower, upper]."""
        if candidate >= self.latent_lower_bound and candidate <= self.latent_upper_bound:
            return True
        return False

    def bit_flipping(self, child):
        """Flip each 0/1 gene independently with probability 0.5."""
        mutated_child = []
        for gene in child:
            if np.random.uniform(0, 1) > 0.5:
                # 1 - gene performs the intended 0<->1 flip (np.invert did not).
                mutated_child.append(1 - gene)
            else:
                mutated_child.append(gene)
        return mutated_child

    def creep_mutation(self, child):
        """Add one small integer perturbation in [-3, 3] to all genes."""
        creep = np.random.randint(-3, 4)  # upper bound exclusive -> include +3
        return child + creep

    def uniform_crossover(self, parent_1, parent_2):
        """Exchange genes between two parents via a random boolean mask."""
        mask = np.random.uniform(0, 1, len(parent_1)) > 0.5
        child_1 = (mask * parent_1) + (~mask * parent_2)
        child_2 = (mask * parent_2) + (~mask * parent_1)
        return child_1, child_2

    def check_fitness(self, model, epochs = 2, verbose = False):
        """Train `model` briefly and return 1 / mean test loss (higher = fitter)."""
        ## Loss function for VAE: reconstruction BCE plus KL divergence.
        def loss_function(recon_x, x, mu, logvar):
            BCE = F.binary_cross_entropy(recon_x, x.view(-1, 784), reduction='sum')
            KLD = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
            return BCE + KLD
        optimizer = torch.optim.Adam(model.parameters(), lr= 0.001)
        for epoch in range(epochs):
            running_loss = 0.0
            for i, data in enumerate(self.trainloader):
                x, y = data
                #x, y = x.cuda(), y.cuda()
                optimizer.zero_grad()
                recon, mu, logvar = model(x, y)
                loss = loss_function(recon, x, mu, logvar)
                loss.backward()
                optimizer.step()
                running_loss += loss.item()
            if verbose:
                print("Epoch {}, Loss {}".format(epoch, running_loss/len(self.trainloader)))
        test_loss = 0
        model.eval()
        for i, data in enumerate(self.testloader):
            x, y = data
            #x, y = x.cuda(), y.cuda()
            recon, mu, logvar = model(x, y)
            loss = loss_function(recon, x, mu, logvar)
            test_loss += loss.item()
        return 1/(test_loss/len(self.testloader))

    ## Decide to stop genetic algorithm or not
    def early_stop(self, trend, num_values = 30):
        """Stop when the best fitness has been flat for the last `num_values` entries."""
        fit = np.array(trend[-num_values:])
        key = fit[0]
        if np.sum(fit==key) == len(fit):
            return True
        return False

    def start(self, n_generations = 250, n_mating = 1, cr_prob = 1.0, mut_prob = 1.0, verbose = True, warmup = 100):
        """Run the GA; returns the per-generation best-fitness trail."""
        fitness_trail = []
        trend = []
        ## Fitness calculation for the initial candidates.
        for i in range(self.size):
            temp_model = Network(self.input_dim, self.encoder_layer_population[i], self.decoder_layer_population[i],
                                 self.encoder_neuron_population[i], self.decoder_neuron_population[i], self.latent_population[i],
                                 self.cond_dim)
            self.fitness.append(self.check_fitness(temp_model))
        ## Beginning the genetic algorithm.
        for gen in range(n_generations):
            for mate in range(n_mating):
                # Fitness-proportionate (softmax) parent selection, two distinct parents.
                index_1 = np.random.choice([*range(self.size)], p = softmax(self.fitness))
                index_2 = np.random.choice([*range(self.size)], p = softmax(self.fitness))
                while index_1 == index_2:
                    index_2 = np.random.choice([*range(self.size)], p = softmax(self.fitness))
                ## Loading the mating pool.
                encoder_parent_1 = self.encoder_layer_population[index_1]
                encoder_parent_2 = self.encoder_layer_population[index_2]
                decoder_parent_1 = self.decoder_layer_population[index_1]
                decoder_parent_2 = self.decoder_layer_population[index_2]
                encoder_neuron_parent_1 = self.encoder_neuron_population[index_1]
                encoder_neuron_parent_2 = self.encoder_neuron_population[index_2]
                decoder_neuron_parent_1 = self.decoder_neuron_population[index_1]
                decoder_neuron_parent_2 = self.decoder_neuron_population[index_2]
                latent_dim_parent_1 = self.latent_population[index_1]
                latent_dim_parent_2 = self.latent_population[index_2]
                ## Performing crossover and mutation.
                cr_p = np.random.uniform(0,1)
                mut_p = np.random.uniform(0,1)
                # Default every child to its parent so all child variables are
                # defined even when crossover/mutation is skipped.
                encoder_child_1, encoder_child_2 = encoder_parent_1, encoder_parent_2
                decoder_child_1, decoder_child_2 = decoder_parent_1, decoder_parent_2
                encoder_neuron_child_1, encoder_neuron_child_2 = encoder_neuron_parent_1, encoder_neuron_parent_2
                decoder_neuron_child_1, decoder_neuron_child_2 = decoder_neuron_parent_1, decoder_neuron_parent_2
                latent_dim_child_1, latent_dim_child_2 = latent_dim_parent_1, latent_dim_parent_2
                if cr_p < cr_prob:
                    encoder_child_1, encoder_child_2 = self.uniform_crossover(encoder_parent_1, encoder_parent_2)
                    decoder_child_1, decoder_child_2 = self.uniform_crossover(decoder_parent_1, decoder_parent_2)
                    encoder_neuron_child_1, encoder_neuron_child_2 = self.uniform_crossover(encoder_neuron_parent_1, encoder_neuron_parent_2)
                    decoder_neuron_child_1, decoder_neuron_child_2 = self.uniform_crossover(decoder_neuron_parent_1, decoder_neuron_parent_2)
                if mut_p < mut_prob:
                    encoder_child_1 = self.bit_flipping(encoder_child_1)
                    encoder_child_2 = self.bit_flipping(encoder_child_2)
                    decoder_child_1 = self.bit_flipping(decoder_child_1)
                    decoder_child_2 = self.bit_flipping(decoder_child_2)
                    encoder_neuron_child_1 = self.creep_mutation(encoder_neuron_child_1)
                    encoder_neuron_child_2 = self.creep_mutation(encoder_neuron_child_2)
                    decoder_neuron_child_1 = self.creep_mutation(decoder_neuron_child_1)
                    decoder_neuron_child_2 = self.creep_mutation(decoder_neuron_child_2)
                    latent_dim_child_1 = self.creep_mutation(latent_dim_parent_1)
                    latent_dim_child_2 = self.creep_mutation(latent_dim_parent_2)
                ## Revert any child that violates its constraints to its parent.
                if not self.check_layer_constraints(encoder_child_1):
                    encoder_child_1 = encoder_parent_1
                if not self.check_layer_constraints(encoder_child_2):
                    encoder_child_2 = encoder_parent_2
                if not self.check_neuron_constraints(encoder_neuron_child_1):
                    encoder_neuron_child_1 = encoder_neuron_parent_1
                if not self.check_neuron_constraints(encoder_neuron_child_2):
                    encoder_neuron_child_2 = encoder_neuron_parent_2
                if not self.check_layer_constraints(decoder_child_1):
                    decoder_child_1 = decoder_parent_1
                if not self.check_layer_constraints(decoder_child_2):
                    decoder_child_2 = decoder_parent_2
                if not self.check_neuron_constraints(decoder_neuron_child_1):
                    decoder_neuron_child_1 = decoder_neuron_parent_1
                if not self.check_neuron_constraints(decoder_neuron_child_2):
                    decoder_neuron_child_2 = decoder_neuron_parent_2
                if not self.check_latent_constraints(latent_dim_child_1):
                    latent_dim_child_1 = latent_dim_parent_1
                if not self.check_latent_constraints(latent_dim_child_2):
                    latent_dim_child_2 = latent_dim_parent_2
                ## Creating and evaluating the new models; steady-state
                ## replacement of the currently worst individual.
                temp_model = Network(self.input_dim, encoder_child_1, decoder_child_1, encoder_neuron_child_1,
                                     decoder_neuron_child_1, latent_dim_child_1, self.cond_dim)
                child_fitness_1 = self.check_fitness(temp_model)
                if child_fitness_1 > min(self.fitness):
                    replace_index = self.fitness.index(min(self.fitness))
                    self.encoder_layer_population[replace_index] = encoder_child_1
                    self.decoder_layer_population[replace_index] = decoder_child_1
                    self.encoder_neuron_population[replace_index] = encoder_neuron_child_1
                    self.decoder_neuron_population[replace_index] = decoder_neuron_child_1
                    self.latent_population[replace_index] = latent_dim_child_1
                    self.fitness[replace_index] = child_fitness_1
                temp_model = Network(self.input_dim, encoder_child_2, decoder_child_2, encoder_neuron_child_2,
                                     decoder_neuron_child_2, latent_dim_child_2, self.cond_dim)
                child_fitness_2 = self.check_fitness(temp_model)
                if child_fitness_2 > min(self.fitness):
                    replace_index = self.fitness.index(min(self.fitness))
                    self.encoder_layer_population[replace_index] = encoder_child_2
                    self.decoder_layer_population[replace_index] = decoder_child_2
                    self.encoder_neuron_population[replace_index] = encoder_neuron_child_2
                    self.decoder_neuron_population[replace_index] = decoder_neuron_child_2
                    self.latent_population[replace_index] = latent_dim_child_2
                    self.fitness[replace_index] = child_fitness_2
            if verbose:
                print("Generation {} completed. Best fitness value is {}".format(gen,max(self.fitness)))
            fitness_trail.append(max(self.fitness))
            if gen > warmup:
                if self.early_stop(fitness_trail):
                    print("Early stopping initiated")
                    break
        ## Save the best model.
        best_index = self.fitness.index(max(self.fitness))
        self.best_model = Network(self.input_dim, self.encoder_layer_population[best_index], self.decoder_layer_population[best_index],
                                  self.encoder_neuron_population[best_index], self.decoder_neuron_population[best_index],
                                  self.latent_population[best_index], self.cond_dim)
        return fitness_trail
import os
import sys
# Make the bundled endpoints-proto-datastore library importable.
ENDPOINTS_PROJECT_DIR = os.path.join(os.path.dirname(__file__),
                                     'endpoints-proto-datastore')
sys.path.append(ENDPOINTS_PROJECT_DIR)
|
# encoding: UTF-8
from datetime import datetime
from pymongo import Connection
from pymongo.errors import *
from eventEngine import *
# Constant definitions (CTP-style single-character string codes)
OFFSET_OPEN = '0' # open position
OFFSET_CLOSE = '1' # close position
DIRECTION_BUY = '0' # buy
DIRECTION_SELL = '1' # sell
PRICETYPE_LIMIT = '2' # limit-price order
########################################################################
class Tick:
    """Market tick data snapshot for one instrument."""
    #----------------------------------------------------------------------
    def __init__(self, symbol):
        """Constructor"""
        self.symbol = symbol # instrument code
        self.openPrice = 0 # OHLC
        self.highPrice = 0
        self.lowPrice = 0
        self.lastPrice = 0
        self.volume = 0 # traded volume
        self.openInterest = 0 # open interest
        self.upperLimit = 0 # limit-up price
        self.lowerLimit = 0 # limit-down price
        self.time = '' # update time and millisecond
        self.ms= 0
        self.bidPrice1 = 0 # market depth (5 levels)
        self.bidPrice2 = 0
        self.bidPrice3 = 0
        self.bidPrice4 = 0
        self.bidPrice5 = 0
        self.askPrice1 = 0
        self.askPrice2 = 0
        self.askPrice3 = 0
        self.askPrice4 = 0
        self.askPrice5 = 0
        self.bidVolume1 = 0
        self.bidVolume2 = 0
        self.bidVolume3 = 0
        self.bidVolume4 = 0
        self.bidVolume5 = 0
        self.askVolume1 = 0
        self.askVolume2 = 0
        self.askVolume3 = 0
        self.askVolume4 = 0
        self.askVolume5 = 0
########################################################################
class Trade(object):
    """Trade (fill) data object."""
    #----------------------------------------------------------------------
    def __init__(self, symbol):
        """Constructor"""
        self.symbol = symbol # instrument code
        self.orderRef = '' # order reference
        self.tradeID = '' # trade id
        self.direction = None # direction
        self.offset = None # open/close offset
        self.price = 0 # fill price
        self.volume = 0 # fill volume
########################################################################
class Order(object):
    """Order data object."""
    #----------------------------------------------------------------------
    def __init__(self, symbol):
        """Constructor"""
        self.symbol = symbol # instrument code
        self.orderRef = '' # order reference
        self.direction = None # direction
        self.offset = None # open/close offset
        self.price = 0 # limit price
        self.volumeOriginal = 0 # original order volume
        self.volumeTraded = 0 # volume already filled
        self.insertTime = '' # order submission time
        self.cancelTime = '' # cancellation time
        self.frontID = 0 # front machine id
        self.sessionID = 0 # session id
        self.status = '' # order status code
########################################################################
class StopOrder(object):
    """
    Local stop order.
    Used to chase the market automatically once price breaks a given level,
    i.e. the usual conditional/stop-loss order, simulated in this engine.
    """
    #----------------------------------------------------------------------
    def __init__(self, symbol, direction, offset, price, volume, strategy):
        """Constructor"""
        self.symbol = symbol
        self.direction = direction
        self.offset = offset
        self.price = price # trigger price, not the fill price
        self.volume = volume
        self.strategy = strategy # the strategy that owns this stop order
########################################################################
class StrategyEngine(object):
"""策略引擎"""
#----------------------------------------------------------------------
def __init__(self, eventEngine, mainEngine, backtesting=False):
"""Constructor"""
self.__eventEngine = eventEngine
self.mainEngine = mainEngine
self.backtesting = backtesting # 是否在进行回测
# 获取代表今日的datetime
t = datetime.today()
self.today = t.replace(hour=0, minute=0, second=0, microsecond=0)
# 保存所有报单数据的字典
self.__dictOrder = {}
# 保存策略对象的字典
# key为策略名称
# value为策略对象
self.dictStrategy = {}
# 保存合约代码和策略对象映射关系的字典
# key为合约代码
# value为交易该合约的策略列表
self.__dictSymbolStrategy = {}
# 保存报单编号和策略对象映射关系的字典
# key为报单编号
# value为策略对象
self.__dictOrderRefStrategy = {}
# 保存合约代码和相关停止单的字典
# key为合约代码
# value为该合约相关的停止单列表
self.__dictStopOrder = {}
# MongoDB数据库相关
self.__mongoConnected = False
self.__mongoConnection = None
self.__mongoTickDB = None
# 调用函数
self.__connectMongo()
self.__registerEvent()
#----------------------------------------------------------------------
def createStrategy(self, strategyName, strategySymbol, strategyClass, strategySetting):
"""创建策略"""
strategy = strategyClass(strategyName, strategySymbol, self)
self.dictStrategy[strategyName] = strategy
strategy.loadSetting(strategySetting)
# 订阅合约行情,注意这里因为是CTP,所以ExchangeID可以忽略
self.mainEngine.subscribe(strategySymbol, None)
# 注册策略监听
self.registerStrategy(strategySymbol, strategy)
#----------------------------------------------------------------------
def __connectMongo(self):
"""连接MongoDB数据库"""
try:
self.__mongoConnection = Connection()
self.__mongoConnected = True
self.__mongoTickDB = self.__mongoConnection['TickDB']
self.writeLog('策略引擎连接MongoDB成功')
except ConnectionFailure:
self.writeLog('策略引擎连接MongoDB失败')
#----------------------------------------------------------------------
def __recordTick(self, data):
"""将Tick数据插入到MongoDB中"""
if self.__mongoConnected:
symbol = data['InstrumentID']
data['date'] = self.today
self.__mongoTickDB[symbol].insert(data)
#----------------------------------------------------------------------
def loadTick(self, symbol, startDate, endDate=None):
"""从MongoDB中读取Tick数据"""
if self.__mongoConnected:
collection = self.__mongoTickDB[symbol]
# 如果输入了读取TICK的最后日期
if endDate:
cx = collection.find({'date':{'$gte':startDate, '$lte':endDate}})
else:
cx = collection.find({'date':{'$gte':startDate}})
return cx
else:
return None
#----------------------------------------------------------------------
def updateMarketData(self, event):
"""行情更新"""
data = event.dict_['data']
symbol = data['InstrumentID']
# 检查是否存在交易该合约的策略
if symbol in self.__dictSymbolStrategy:
# 创建TICK数据对象并更新数据
tick = Tick(symbol)
tick.openPrice = data['OpenPrice']
tick.highPrice = data['HighestPrice']
tick.lowPrice = data['LowestPrice']
tick.lastPrice = data['LastPrice']
tick.volume = data['Volume']
tick.openInterest = data['OpenInterest']
tick.upperLimit = data['UpperLimitPrice']
tick.lowerLimit = data['LowerLimitPrice']
tick.time = data['UpdateTime']
tick.ms = data['UpdateMillisec']
tick.bidPrice1 = data['BidPrice1']
tick.bidPrice2 = data['BidPrice2']
tick.bidPrice3 = data['BidPrice3']
tick.bidPrice4 = data['BidPrice4']
tick.bidPrice5 = data['BidPrice5']
tick.askPrice1 = data['AskPrice1']
tick.askPrice2 = data['AskPrice2']
tick.askPrice3 = data['AskPrice3']
tick.askPrice4 = data['AskPrice4']
tick.askPrice5 = data['AskPrice5']
tick.bidVolume1 = data['BidVolume1']
tick.bidVolume2 = data['BidVolume2']
tick.bidVolume3 = data['BidVolume3']
tick.bidVolume4 = data['BidVolume4']
tick.bidVolume5 = data['BidVolume5']
tick.askVolume1 = data['AskVolume1']
tick.askVolume2 = data['AskVolume2']
tick.askVolume3 = data['AskVolume3']
tick.askVolume4 = data['AskVolume4']
tick.askVolume5 = data['AskVolume5']
# 首先检查停止单是否需要发出
self.__processStopOrder(tick)
# 将该TICK数据推送给每个策略
for strategy in self.__dictSymbolStrategy[symbol]:
strategy.onTick(tick)
# 将数据插入MongoDB数据库,实盘建议另开程序记录TICK数据
if not self.backtesting:
self.__recordTick(data)
#----------------------------------------------------------------------
def __processStopOrder(self, tick):
"""处理停止单"""
symbol = tick.symbol
lastPrice = tick.lastPrice
upperLimit = tick.upperLimit
lowerLimit = tick.lowerLimit
# 如果当前有该合约上的止损单
if symbol in self.__dictStopOrder:
# 获取止损单列表
listSO = self.__dictStopOrder[symbol] # SO:stop order
# 准备一个空的已发止损单列表
listSent = []
for so in listSO:
# 如果是买入停止单,且最新成交价大于停止触发价
if so.direction == DIRECTION_BUY and lastPrice >= so.price:
# 以当日涨停价发出限价单买入
ref = self.sendOrder(symbol, DIRECTION_BUY, so.offset,
upperLimit, so.volume, strategy)
# 触发策略的止损单发出更新
so.strategy.onStopOrder(ref)
# 将该止损单对象保存到已发送列表中
listSent.append(so)
# 如果是卖出停止单,且最新成交价小于停止触发价
elif so.direction == DIRECTION_SELL and lastPrice <= so.price:
ref = self.sendOrder(symbol, DIRECTION_SELL, so.offset,
lowerLimit, so.volume, strategy)
so.strategy.onStopOrder(ref)
listSent.append(so)
# 从停止单列表中移除已经发单的停止单对象
if listSent:
for so in listSent:
listSO.remove(so)
# 检查停止单列表是否为空,若为空,则从停止单字典中移除该合约代码
if not listSO:
del self.__dictStopOrder[symbol]
#----------------------------------------------------------------------
def updateOrder(self, event):
"""报单更新"""
data = event.dict_['data']
orderRef = data['OrderRef']
# 检查是否存在监听该报单的策略
if orderRef in self.__dictOrderRefStrategy:
# 创建Order数据对象
order = Order(data['InstrumentID'])
order.orderRef = data['OrderRef']
order.direction = data['Direction']
order.offset = data['CombOffsetFlag']
order.price = data['LimitPrice']
order.volumeOriginal = data['VolumeTotalOriginal']
order.volumeTraded = data['VolumeTraded']
order.insertTime = data['InsertTime']
order.cancelTime = data['CancelTime']
order.frontID = data['FrontID']
order.sessionID = data['SessionID']
order.status = data['OrderStatus']
# 推送给策略
strategy = self.__dictOrderRefStrategy[orderRef]
strategy.onOrder(order)
# 记录该Order的数据
self.__dictOrder[orderRef] = data
#----------------------------------------------------------------------
def updateTrade(self, event):
"""成交更新"""
data = event.dict_['data']
orderRef = data['OrderRef']
if orderRef in self.__dictOrderRefStrategy:
# 创建Trade数据对象
trade = Trade(data['InstrumentID'])
trade.orderRef = orderRef
trade.tradeID = data['TradeID']
trade.direction = data['Direction']
trade.offset = data['OffsetFlag']
trade.price = data['Price']
trade.volume = data['Volume']
# 推送给策略
strategy = self.__dictOrderRefStrategy[orderRef]
strategy.onTrade(trade)
#----------------------------------------------------------------------
def sendOrder(self, symbol, direction, offset, price, volume, strategy):
"""
发单(仅允许限价单)
symbol:合约代码
direction:方向,DIRECTION_BUY/DIRECTION_SELL
offset:开平,OFFSET_OPEN/OFFSET_CLOSE
price:下单价格
volume:下单手数
strategy:策略对象
"""
contract = self.mainEngine.selectInstrument(symbol)
if contract:
ref = self.mainEngine.sendOrder(symbol,
contract['ExchangeID'],
price,
PRICETYPE_LIMIT,
volume,
direction,
offset)
self.__dictOrderRefStrategy[ref] = strategy
return ref
#----------------------------------------------------------------------
def cancelOrder(self, orderRef):
"""
撤单
"""
order = self.__dictOrder[orderRef]
symbol = order['InstrumentID']
contract = self.mainEngine.selectInstrument(symbol)
if contract:
self.mainEngine.cancelOrder(symbol,
contract['ExchangeID'],
orderRef,
order['FrontID'],
order['SessionID'])
#----------------------------------------------------------------------
def __registerEvent(self):
"""注册事件监听"""
self.__eventEngine.register(EVENT_MARKETDATA, self.updateMarketData)
self.__eventEngine.register(EVENT_ORDER, self.updateOrder)
self.__eventEngine.register(EVENT_TRADE ,self.updateTrade)
#----------------------------------------------------------------------
def writeLog(self, log):
"""写日志"""
event = Event(type_=EVENT_LOG)
event.dict_['log'] = log
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def registerStrategy(self, symbol, strategy):
    """Subscribe *strategy* to TICK data for *symbol*.

    Keeps one listener list per symbol; a strategy is never registered
    twice for the same symbol.
    """
    # setdefault replaces the original try/except-KeyError dance: it
    # returns the existing list, or inserts and returns a fresh one.
    listStrategy = self.__dictSymbolStrategy.setdefault(symbol, [])
    # Guard against duplicate registration.
    if strategy not in listStrategy:
        listStrategy.append(strategy)
#----------------------------------------------------------------------
def placeStopOrder(self, symbol, direction, offset, price, volume, strategy):
    """
    Place a stop order (simulated locally by this engine, not sent to
    the exchange).

    Note: *price* is the stop order's trigger price, not a limit price.
    Returns the StopOrder object so the caller can cancel it later.
    """
    so = StopOrder(symbol, direction, offset, price, volume, strategy)
    # setdefault replaces the original try/except-KeyError list creation.
    self.__dictStopOrder.setdefault(symbol, []).append(so)
    return so
#----------------------------------------------------------------------
def cancelStopOrder(self, so):
    """Withdraw a pending local stop order; silently ignore unknown ones."""
    listSO = self.__dictStopOrder.get(so.symbol)
    if listSO is None:
        # No stop orders for this symbol (original swallowed the KeyError).
        return
    if so in listSO:
        listSO.remove(so)
    # Drop the per-symbol list once it is empty.
    if not listSO:
        del self.__dictStopOrder[so.symbol]
#----------------------------------------------------------------------
def startAll(self):
    """Start every registered strategy."""
    # Iterate over a snapshot so strategies may safely mutate the registry.
    for strategy in tuple(self.dictStrategy.values()):
        strategy.start()
#----------------------------------------------------------------------
def stopAll(self):
    """Stop every registered strategy."""
    # Iterate over a snapshot so strategies may safely mutate the registry.
    for strategy in tuple(self.dictStrategy.values()):
        strategy.stop()
########################################################################
class StrategyTemplate(object):
    """Strategy template (base class).

    Subclasses implement the on* callbacks; the strategy engine drives
    them with market / order / trade events.  The ``trading`` flag gates
    all order-placement helpers (buy/cover/sell/short), which now share a
    single private ``_order`` router instead of four copy-pasted bodies.
    """
    #----------------------------------------------------------------------
    def __init__(self, name, symbol, engine):
        """Constructor"""
        self.name = name        # strategy name (must be unique)
        self.symbol = symbol    # contract the strategy trades
        self.engine = engine    # owning strategy engine
        self.trading = False    # whether trading is enabled
    #----------------------------------------------------------------------
    def onTick(self, tick):
        """Market data (tick) update."""
        raise NotImplementedError
    #----------------------------------------------------------------------
    def onTrade(self, trade):
        """Trade (fill) update."""
        raise NotImplementedError
    #----------------------------------------------------------------------
    def onOrder(self, order):
        """Order status update."""
        raise NotImplementedError
    #----------------------------------------------------------------------
    def onStopOrder(self, orderRef):
        """Local stop order update."""
        raise NotImplementedError
    #----------------------------------------------------------------------
    def onBar(self, o, h, l, c, volume, time):
        """K-line (bar) data update."""
        raise NotImplementedError
    #----------------------------------------------------------------------
    def start(self):
        """
        Enable trading.

        Simplest possible implementation: just flip self.trading.
        Override for more elaborate startup behaviour.
        """
        self.trading = True
        self.engine.writeLog(self.name + '开始运行')
    #----------------------------------------------------------------------
    def stop(self):
        """
        Disable trading (counterpart of start()).
        """
        self.trading = False
        self.engine.writeLog(self.name + '停止运行')
    #----------------------------------------------------------------------
    def loadSetting(self, setting):
        """
        Load configuration; *setting* is usually a dict of parameters.
        """
        raise NotImplementedError
    #----------------------------------------------------------------------
    def _order(self, direction, offset, price, volume, stopOrder):
        """Shared order router for buy/cover/sell/short: local stop order
        or a direct limit order via the engine."""
        if stopOrder:
            # Stop orders are simulated locally by the engine; *price* is
            # the trigger price.
            return self.engine.placeStopOrder(self.symbol, direction, offset,
                                              price, volume, self)
        return self.engine.sendOrder(self.symbol, direction, offset,
                                     price, volume, self)
    #----------------------------------------------------------------------
    def buy(self, price, volume, stopOrder=False):
        """Buy to open a long position; returns the order reference (or
        StopOrder), or None when trading is disabled."""
        if not self.trading:
            return None
        return self._order(DIRECTION_BUY, OFFSET_OPEN, price, volume, stopOrder)
    #----------------------------------------------------------------------
    def cover(self, price, volume, stopOrder=False):
        """Buy to close a short position; returns the order reference (or
        StopOrder), or None when trading is disabled."""
        if not self.trading:
            return None
        return self._order(DIRECTION_BUY, OFFSET_CLOSE, price, volume, stopOrder)
    #----------------------------------------------------------------------
    def sell(self, price, volume, stopOrder=False):
        """Sell to close a long position; returns the order reference (or
        StopOrder), or None when trading is disabled."""
        if not self.trading:
            return None
        return self._order(DIRECTION_SELL, OFFSET_CLOSE, price, volume, stopOrder)
    #----------------------------------------------------------------------
    def short(self, price, volume, stopOrder=False):
        """Sell to open a short position; returns the order reference (or
        StopOrder), or None when trading is disabled."""
        if not self.trading:
            return None
        return self._order(DIRECTION_SELL, OFFSET_OPEN, price, volume, stopOrder)
    #----------------------------------------------------------------------
    def cancelOrder(self, orderRef):
        """Cancel a working order via the engine."""
        self.engine.cancelOrder(orderRef)
    #----------------------------------------------------------------------
    def cancelStopOrder(self, so):
        """Cancel a pending local stop order via the engine."""
        self.engine.cancelStopOrder(so)
|
# -*- coding:utf-8 -*-
import argparse
import random
from random import randint
import os
import numpy as np
import shutil
def clc(dir):
    """Delete every regular file directly inside *dir* (clear the folder).

    Skips subdirectories instead of crashing on them (the original called
    os.remove unconditionally, which raises on a directory); does not
    recurse.
    """
    for name in os.listdir(dir):
        c_path = os.path.join(dir, name)
        if os.path.isfile(c_path):
            os.remove(c_path)
# Command-line interface: -i/--input points at a dataset root containing an
# "Annotations" folder of sequentially numbered XML files.
parser = argparse.ArgumentParser(description='Get the data info')
parser.add_argument('-i', '--input',help='path of data', default='/home/syh/pascal_voc/')
args = parser.parse_args()
if __name__ == '__main__':
    # Create <root>/<split>/Annotations/ for each split, then empty them so
    # reruns start from a clean state.
    folder_list= ["train","val","test"]
    base_dir = os.path.join(args.input)
    for i in range(3):
        folder_name = folder_list[i]
        xml_dir = base_dir + folder_name + "/Annotations/"
        if not os.path.exists(xml_dir):
            # print(xml_dir)
            os.makedirs(xml_dir)
    train_dir = base_dir + folder_list[0] + "/Annotations/"
    val_dir = base_dir + folder_list[1] + "/Annotations/"
    test_dir = base_dir + folder_list[2] + "/Annotations/"
    clc(train_dir)
    clc(test_dir)
    clc(val_dir)
    # Split sizes: 1/scale of the files to val, another 1/scale to test, the
    # remainder to train (scale=4 -> 25% / 25% / 50%).
    num = len(os.listdir(os.path.join(base_dir,'Annotations')))
    scale = 4
    val_num = num // scale
    test_num = num // scale
    # NOTE(review): assumes annotations are named with "%08d" zero padding
    # and numbered 1..num with no gaps -- TODO confirm against the dataset.
    total = np.arange(1,num+1)
    valList = random.sample(range(1, num+1), val_num) #val random select
    # print(sorted(valList))
    # move val_num val pics to splited folder
    for i in range(0,val_num):
        xmlfile = ("%08d" % (valList[i]))
        file_dir = base_dir + "/Annotations/"+'/'+ xmlfile +'.xml'
        shutil.copy(file_dir,val_dir)
    # take away val ids from total ids
    total_val = [] # total ids minus val ids, ids left stay here
    for i in range(0,num):
        if total[i] not in valList:
            total_val.append(total[i])
    # Sample positions into the remaining (non-val) id list, then map those
    # positions back to actual file ids.
    testlist_temp = random.sample(range(0, num-val_num), test_num) #test random select
    # print(sorted(testlist_temp))
    testList = []
    for k in range(0,test_num):
        testList.append(total_val[testlist_temp[k]])
    # print(sorted(testList))
    # move test_num test pics to splited folder
    for i in range(0,test_num):
        xmlfile = ("%08d" % (testList[i]))
        file_dir = base_dir + "/Annotations/"+'/'+ xmlfile +'.xml'
        shutil.copy(file_dir,test_dir)
    # take away test ids from total_val ids
    total_val_test = [] # total ids minus val&test ids, ids left stay here
    for i in range(0,num-val_num):
        if total_val[i] not in testList:
            total_val_test.append(total_val[i])
    # print(sorted(total_val_test))
    # move lest train pics to splited folder
    for i in range(0,num-val_num-test_num):
        xmlfile = ("%08d" % (total_val_test[i]))
        file_dir = base_dir + "/Annotations/"+'/'+ xmlfile +'.xml'
        shutil.copy(file_dir,train_dir)
    # Summary counts for a quick sanity check of the split.
    print('train:', len(os.listdir(train_dir)))
    print('val:', len(os.listdir(val_dir)))
    print('test:', len(os.listdir(test_dir)))
'''
python voc2coco/split_data.py -i /home/syh/cornernet-lite/data/voc/
'''
# %%
import pandas as pd
from tensorflow.keras.layers import DenseFeatures
from tftabular.models import TabNetRegressor
from tftabular.utils import df_to_dataset, get_feature
# %%
# Column names per the UCI abalone dataset documentation; the fifth
# attribute is "Whole weight" (the original misspelled it "While weight").
names = ['Sex', 'Length', 'Diameter', 'Height', 'Whole weight', 'Shucked weight', 'Viscera weight', 'Shell weight', 'Rings']
data = pd.read_csv('https://archive.ics.uci.edu/ml/machine-learning-databases/abalone/abalone.data',
                   names = names,
                   dtype = {k: 'float32' for k in names if k != 'Sex'})
# %%
def get_labels(x: pd.Series) -> pd.Series:
    """
    Encode the values of *x* as unique integer codes (0-based), keeping
    the original name and index -- suitable for embedding lookups.
    """
    codes, _levels = pd.factorize(x)
    return pd.Series(codes, name=x.name, index=x.index)
# Build features X (Sex label-encoded and shifted to 1-based for the
# embedding layer; column names snake_cased) and target y (Rings).
X, y = (data
        .drop(columns='Rings')
        .assign(Sex = lambda df: df.Sex
                                   .pipe(get_labels)
                                   .add(1)
                                   .astype('int32'))
        .rename(columns=lambda s: '_'.join(s.lower()
                                            .split())),
        data.Rings)
# Simple 50/50 split in row order (no shuffling).
split = data.shape[0] // 2
X_train, X_valid, y_train, y_valid = X.iloc[:split, :], X.iloc[-split:, :], y.iloc[:split], y.iloc[-split:]
train, valid = df_to_dataset(X_train, y_train), df_to_dataset(X_valid, y_valid)
# %%
# DataFrame.iteritems() was deprecated and removed in pandas 2.0;
# DataFrame.items() is the supported equivalent.
columns = [get_feature(f) for k, f in X_train.items()]
feature_column = DenseFeatures(columns, trainable=True)
model = TabNetRegressor(feature_column=feature_column,
                        virtual_batch_size=None)
model.compile('adam', 'mse')
model.fit(train, epochs=5)
# %%
prediction = model.predict(valid)
explanations = model.explain(dict(X_valid))
features = model.transform(dict(X_valid))
import unittest
from request_parsers import keystroke
class KeystrokeTest(unittest.TestCase):
    """Tests for keystroke.parse_keystroke.

    The original repeated the full 7-field message dict in every test;
    the make_message / make_message_without helpers keep each test down
    to the fields it actually cares about.
    """
    # Intentionally violating style conventions so that we can parallel the
    # self.assertEqual method.
    # pylint: disable=no-self-use
    # pylint: disable=invalid-name
    def assertKeystrokesEqual(self, expected, actual):
        """Assert that two keystroke.Keystroke instances compare equal."""
        if expected != actual:
            raise AssertionError('%s != %s' % (expected, actual))

    @staticmethod
    def make_message(**overrides):
        """Return a valid keystroke message dict; kwargs override fields."""
        message = {
            'metaKey': False,
            'altKey': False,
            'shiftKey': False,
            'ctrlKey': False,
            'altGraphKey': False,
            'key': 'A',
            'code': 'KeyA',
        }
        message.update(overrides)
        return message

    @classmethod
    def make_message_without(cls, field):
        """Return a valid keystroke message dict with *field* removed."""
        message = cls.make_message()
        del message[field]
        return message

    def test_parses_valid_keystroke_message(self):
        self.assertKeystrokesEqual(
            keystroke.Keystroke(left_meta_modifier=False,
                                left_alt_modifier=False,
                                left_shift_modifier=False,
                                left_ctrl_modifier=False,
                                right_alt_modifier=False,
                                key='A',
                                code='KeyA'),
            keystroke.parse_keystroke(self.make_message()))

    def test_parses_valid_keystroke_message_with_all_modifiers_pushed(self):
        self.assertKeystrokesEqual(
            keystroke.Keystroke(left_meta_modifier=True,
                                left_alt_modifier=True,
                                left_shift_modifier=True,
                                left_ctrl_modifier=True,
                                right_alt_modifier=True,
                                key='A',
                                code='KeyA'),
            keystroke.parse_keystroke(
                self.make_message(metaKey=True,
                                  altKey=True,
                                  shiftKey=True,
                                  ctrlKey=True,
                                  altGraphKey=True)))

    def test_parses_left_ctrl_key(self):
        self.assertKeystrokesEqual(
            keystroke.Keystroke(left_meta_modifier=False,
                                left_alt_modifier=False,
                                left_shift_modifier=False,
                                left_ctrl_modifier=True,
                                right_alt_modifier=False,
                                key='Control',
                                code='ControlLeft'),
            keystroke.parse_keystroke(
                self.make_message(ctrlKey=True,
                                  key='Control',
                                  code='ControlLeft')))

    def test_parses_right_ctrl_key(self):
        self.assertKeystrokesEqual(
            keystroke.Keystroke(
                left_meta_modifier=False,
                left_alt_modifier=False,
                left_shift_modifier=False,
                # For simplicity, we store right Ctrl modifier in
                # left_ctrl_modifier since there's no right version in
                # keystroke.Keystroke.
                left_ctrl_modifier=True,
                right_alt_modifier=False,
                key='Control',
                code='ControlRight'),
            keystroke.parse_keystroke(
                self.make_message(ctrlKey=True,
                                  key='Control',
                                  code='ControlRight')))

    def test_rejects_invalid_meta_modifier(self):
        with self.assertRaises(keystroke.InvalidModifierKeyError):
            keystroke.parse_keystroke(self.make_message(metaKey='banana'))

    def test_rejects_invalid_alt_modifier(self):
        with self.assertRaises(keystroke.InvalidModifierKeyError):
            keystroke.parse_keystroke(self.make_message(altKey='banana'))

    def test_rejects_invalid_shift_modifier(self):
        with self.assertRaises(keystroke.InvalidModifierKeyError):
            keystroke.parse_keystroke(self.make_message(shiftKey='banana'))

    def test_rejects_invalid_ctrl_modifier(self):
        with self.assertRaises(keystroke.InvalidModifierKeyError):
            keystroke.parse_keystroke(self.make_message(ctrlKey='banana'))

    def test_rejects_invalid_alt_graph_modifier(self):
        with self.assertRaises(keystroke.InvalidModifierKeyError):
            keystroke.parse_keystroke(self.make_message(altGraphKey='banana'))

    def test_rejects_float_keycode_value(self):
        with self.assertRaises(keystroke.InvalidKeyCodeError):
            keystroke.parse_keystroke(self.make_message(code=1.25))

    def test_rejects_missing_meta_key_value(self):
        with self.assertRaises(keystroke.MissingFieldErrorError):
            keystroke.parse_keystroke(self.make_message_without('metaKey'))

    def test_rejects_missing_alt_key_value(self):
        with self.assertRaises(keystroke.MissingFieldErrorError):
            keystroke.parse_keystroke(self.make_message_without('altKey'))

    def test_rejects_missing_alt_graph_key_value(self):
        with self.assertRaises(keystroke.MissingFieldErrorError):
            keystroke.parse_keystroke(self.make_message_without('altGraphKey'))

    def test_rejects_missing_shift_key_value(self):
        with self.assertRaises(keystroke.MissingFieldErrorError):
            keystroke.parse_keystroke(self.make_message_without('shiftKey'))

    def test_rejects_missing_ctrl_key_value(self):
        with self.assertRaises(keystroke.MissingFieldErrorError):
            keystroke.parse_keystroke(self.make_message_without('ctrlKey'))

    def test_rejects_missing_key_value(self):
        with self.assertRaises(keystroke.MissingFieldErrorError):
            keystroke.parse_keystroke(self.make_message_without('key'))

    def test_rejects_missing_code_value(self):
        with self.assertRaises(keystroke.MissingFieldErrorError):
            keystroke.parse_keystroke(self.make_message_without('code'))

    def test_rejects_too_long_code_value(self):
        with self.assertRaises(keystroke.InvalidKeyCodeError):
            keystroke.parse_keystroke(self.make_message(code='A' * 31))
|
#!/usr/bin/python3
# You can follow this installation instructions to get your RPi set up:
# https://gist.github.com/ageitgey/1ac8dbe8572f3f533df6269dab35df65
import os
import cv2
import time
import threading
import requests
import json
import settings
import sys
import time
import argparse
RECOGNITION_HOST = settings.RECOGNITION_HOST
TMP_IMG = settings.TMP_IMG
FACECAM_DATA_DIR = settings.FACECAM_DATA_DIR
WEBCAM_LOCKFILE = settings.WEBCAM_LOCKFILE
WEBCAM_ERRFILE = settings.WEBCAM_ERRFILE
WEBCAM_LOGFILE = settings.WEBCAM_LOGFILE
class Recognition:
    """
    Continuously capture webcam frames, send them to a remote face
    recognition service, and record when each recognized person was first
    and last seen.

    :param height: height frames are scaled to before recognition (=240)
    :param width: width frames are scaled to before recognition (=320)
    :param n_camera: index of the camera to use (=0)
    :param video: whether to show a live video window (=True)
    :param resize: downscale camera frames for better performance (=True)
    """
    def __init__(self,height=240, width=320, n_camera=0, video=True, resize=True):
        # Camera settings
        self.height = height
        self.width = width
        self.camera = cv2.VideoCapture(n_camera)
        self.video = video
        self.resize = resize
    def start_recognize(self):
        # Entry point: run the capture/recognize loop until 'q' is pressed
        # or the lock file appears.
        self.run()
    def draw_box(self, name, frame, location):
        # location is (top, right, left, bottom), as passed from run().
        top, right, left, bottom = location
        cv2.rectangle(frame, (left, top), (right, bottom), (0, 0, 255), 2)
        # Draw a label with a name below the face
        cv2.rectangle(frame, (left, bottom - 35),
                      (right, bottom), (0, 0, 255), cv2.FILLED)
        font = cv2.FONT_HERSHEY_DUPLEX
        cv2.putText(frame, name, (left + 6, bottom - 6),
                    font, 1.0, (255, 255, 255), 1)
    def run(self):
        """Main capture loop; on exit writes first/last-seen timestamps to
        recognized.log in FACECAM_DATA_DIR."""
        print(time.ctime())
        counter = 0  # NOTE(review): never used
        process_frame = True
        # name -> [first_seen_ts, last_seen_ts] (unix seconds)
        self.names = {}
        while True:
            ret, frame = self.camera.read()
            if self.resize:
                height, width = frame.shape[:2]
                h_scale = self.height/height
                w_scale = self.width/width
                newX, newY = frame.shape[1]*w_scale, frame.shape[0]*h_scale
                # Resize frame of video to 1/4 size for faster face recognition processing
                small_frame = cv2.resize(frame, (int(newX), int(newY)))
            # Only process every other frame of video to save time
            if process_frame:
                # Temporarily save the current frame to disk.
                if self.resize:
                    cv2.imwrite(TMP_IMG, small_frame)
                else:
                    cv2.imwrite(TMP_IMG, frame)
                # Send the temporary image to the recognition server.
                predictions = send_image(TMP_IMG)
                for person in predictions:
                    name, (top, right, bottom, left) = person['label'], person['location']
                    print("encontrei {name} em ({left},{top})".format(
                        name=name, left=left, top=top))
                    if name != 'desconhecido':
                        # Record the person's entry timestamp on first sight.
                        if name not in self.names:
                            self.names[name] = [0, 0]
                            self.names[name][0] = int(time.time())
                        else:
                            # Update the exit (last seen) timestamp.
                            self.names[name][1] = int(time.time())
                    if self.resize:
                        # Map the detection box back to full-frame coordinates.
                        top = int(top / h_scale)
                        left = int(left / w_scale)
                        right = int(right / w_scale)
                        bottom = int(bottom / h_scale)
                    if self.video:
                        # Draw a box around the face
                        self.draw_box(name, frame, (top, right, left, bottom))
            process_frame = not process_frame
            # Display the resulting image
            if self.video:
                cv2.imshow('Video', frame)
            # Hit 'q' on the keyboard to quit!
            if cv2.waitKey(1) & 0xFF == ord('q') or os.path.exists(WEBCAM_LOCKFILE):
                break
        # Release handle to the webcam
        self.camera.release()
        cv2.destroyAllWindows()
        # Persist first/last-seen times in a simple "name:start,end" log.
        recognized_file = os.path.join(FACECAM_DATA_DIR,'recognized.log')
        with open(recognized_file, 'w') as f:
            for name in self.names:
                current = self.names[name]
                f.write("{}:{},{}\n".format(name,current[0],current[1]))
        print(time.ctime())
def send_image(image_path):
    """POST the image at *image_path* to the recognition service and return
    the parsed JSON list of predictions."""
    url = 'http://{RECOGNITION_HOST}:5000/api/recognition/svm'.format(
        RECOGNITION_HOST=RECOGNITION_HOST)
    # Close the file handle deterministically (the original leaked it).
    with open(image_path, 'rb') as image_file:
        r = requests.post(url, files={'file': image_file})
    return json.loads(r.text)
def main():
    """Parse command-line options and run the face recognizer."""
    parser = argparse.ArgumentParser(
        description='Programa para reconhecer alunos com a camera')
    parser.add_argument('--camera', dest='n_camera', type=int, default=0,
                        nargs='?', help='número da webcam a ser usada, 0 por padrão ')
    parser.add_argument('--video', dest='use_video', action='store_const', default=False,
                        const=True, help='indicar para exibir vídeo capturado, falso por padrão', required=False)
    arguments = parser.parse_args()
    recognizer = Recognition(n_camera=arguments.n_camera,
                             video=arguments.use_video)
    recognizer.start_recognize()
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
from .locations import LocationModelSerializer, ImageModelSerializer, RatingModelSerializer

# Declare the re-exported serializers as this package's public API.  The
# original used a bare no-op tuple expression, which only silenced
# "unused import" linters; __all__ is the idiomatic equivalent.
__all__ = ['LocationModelSerializer', 'ImageModelSerializer', 'RatingModelSerializer']
|
import re

# Load the target species list once; one "Genus_species" entry per line.
# (The original called open(...).readlines() and leaked the file handle.)
with open("Bacteria_Species_extremo37.txt") as species_file:
    listofspecies = species_file.readlines()

# do_print holds the FASTA-style header (">" + matched species line) while we
# wait for the sequence line that follows a matching header; False means
# "current header did not match".
do_print = False
with open("Bac16s.faa") as f:
    for line in f:
        if do_print:
            # NOTE(review): do_print still carries the trailing newline from
            # readlines(), so header and sequence print on separate lines
            # (FASTA-like two-line records) — confirm this output is intended.
            print(do_print + line.strip())
            do_print = False
            continue
        for x in listofspecies:
            names = x.split("_")
            # re.search suffices for a boolean match test; the original used
            # re.findall and discarded the list it built. Short-circuit `and`
            # preserves the original nested-if evaluation order.
            if re.search(names[0].strip(), line) and re.search(names[1].strip(), line):
                do_print = ">" + x
                break
|
#
# PySNMP MIB module Nortel-Magellan-Passport-BgpMIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-Magellan-Passport-BgpMIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:17:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint")
vrIp, vrIpIndex = mibBuilder.importSymbols("Nortel-Magellan-Passport-IpMIB", "vrIp", "vrIpIndex")
Counter32, DisplayString, StorageType, Gauge32, Unsigned32, Integer32, RowStatus = mibBuilder.importSymbols("Nortel-Magellan-Passport-StandardTextualConventionsMIB", "Counter32", "DisplayString", "StorageType", "Gauge32", "Unsigned32", "Integer32", "RowStatus")
HexString, NonReplicated, Hex, AsciiString, IntegerSequence = mibBuilder.importSymbols("Nortel-Magellan-Passport-TextualConventionsMIB", "HexString", "NonReplicated", "Hex", "AsciiString", "IntegerSequence")
passportMIBs, = mibBuilder.importSymbols("Nortel-Magellan-Passport-UsefulDefinitionsMIB", "passportMIBs")
vrIndex, = mibBuilder.importSymbols("Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, MibIdentifier, iso, TimeTicks, ObjectIdentity, ModuleIdentity, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Unsigned32, Bits, Counter64, NotificationType, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibIdentifier", "iso", "TimeTicks", "ObjectIdentity", "ModuleIdentity", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Unsigned32", "Bits", "Counter64", "NotificationType", "IpAddress")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
bgpMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141))
vrIpBgp = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21))
vrIpBgpRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 1), )
if mibBuilder.loadTexts: vrIpBgpRowStatusTable.setStatus('mandatory')
vrIpBgpRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"))
if mibBuilder.loadTexts: vrIpBgpRowStatusEntry.setStatus('mandatory')
vrIpBgpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpRowStatus.setStatus('mandatory')
vrIpBgpComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpComponentName.setStatus('mandatory')
vrIpBgpStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpStorageType.setStatus('mandatory')
vrIpBgpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: vrIpBgpIndex.setStatus('mandatory')
vrIpBgpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100), )
if mibBuilder.loadTexts: vrIpBgpProvTable.setStatus('mandatory')
vrIpBgpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"))
if mibBuilder.loadTexts: vrIpBgpProvEntry.setStatus('mandatory')
vrIpBgpBgpIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpBgpIdentifier.setStatus('mandatory')
vrIpBgpLocalAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpLocalAs.setStatus('mandatory')
vrIpBgpDefaultLocalPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)).clone(144)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpDefaultLocalPreference.setStatus('mandatory')
vrIpBgpDefaultMultiExitDisc = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)).clone(4294967294)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpDefaultMultiExitDisc.setStatus('mandatory')
vrIpBgpRouteThrottleLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 1000)).clone(250)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpRouteThrottleLimit.setStatus('mandatory')
vrIpBgpRouteThrottleInter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 30)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpRouteThrottleInter.setStatus('mandatory')
vrIpBgpRouteReflector = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2))).clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpRouteReflector.setStatus('mandatory')
vrIpBgpRouteReflectorCluster = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 100, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpRouteReflectorCluster.setStatus('mandatory')
vrIpBgpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101), )
if mibBuilder.loadTexts: vrIpBgpOperTable.setStatus('mandatory')
vrIpBgpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"))
if mibBuilder.loadTexts: vrIpBgpOperEntry.setStatus('mandatory')
vrIpBgpTableVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpTableVersion.setStatus('mandatory')
vrIpBgpInMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpInMsgs.setStatus('mandatory')
vrIpBgpInErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpInErrors.setStatus('mandatory')
vrIpBgpInErrorMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpInErrorMsgs.setStatus('mandatory')
vrIpBgpOutMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutMsgs.setStatus('mandatory')
vrIpBgpOutDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutDiscards.setStatus('mandatory')
vrIpBgpOutErrorMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutErrorMsgs.setStatus('mandatory')
vrIpBgpIndbSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 101, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbSize.setStatus('mandatory')
vrIpBgpStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 102), )
if mibBuilder.loadTexts: vrIpBgpStateTable.setStatus('mandatory')
vrIpBgpStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 102, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"))
if mibBuilder.loadTexts: vrIpBgpStateEntry.setStatus('mandatory')
vrIpBgpAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 102, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpAdminState.setStatus('mandatory')
vrIpBgpOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 102, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOperationalState.setStatus('mandatory')
vrIpBgpUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 102, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpUsageState.setStatus('mandatory')
vrIpBgpAdminControlTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 103), )
if mibBuilder.loadTexts: vrIpBgpAdminControlTable.setStatus('mandatory')
vrIpBgpAdminControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 103, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"))
if mibBuilder.loadTexts: vrIpBgpAdminControlEntry.setStatus('mandatory')
vrIpBgpSnmpAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 103, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpSnmpAdminStatus.setStatus('mandatory')
vrIpBgpOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 106), )
if mibBuilder.loadTexts: vrIpBgpOperStatusTable.setStatus('mandatory')
vrIpBgpOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 106, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"))
if mibBuilder.loadTexts: vrIpBgpOperStatusEntry.setStatus('mandatory')
vrIpBgpSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 106, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpSnmpOperStatus.setStatus('mandatory')
vrIpBgpPeer = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2))
vrIpBgpPeerRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 1), )
if mibBuilder.loadTexts: vrIpBgpPeerRowStatusTable.setStatus('mandatory')
vrIpBgpPeerRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpPeerPeerAddressIndex"))
if mibBuilder.loadTexts: vrIpBgpPeerRowStatusEntry.setStatus('mandatory')
vrIpBgpPeerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerRowStatus.setStatus('mandatory')
vrIpBgpPeerComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerComponentName.setStatus('mandatory')
vrIpBgpPeerStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerStorageType.setStatus('mandatory')
vrIpBgpPeerPeerAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 1, 1, 10), IpAddress())
if mibBuilder.loadTexts: vrIpBgpPeerPeerAddressIndex.setStatus('mandatory')
vrIpBgpPeerProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10), )
if mibBuilder.loadTexts: vrIpBgpPeerProvTable.setStatus('mandatory')
vrIpBgpPeerProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpPeerPeerAddressIndex"))
if mibBuilder.loadTexts: vrIpBgpPeerProvEntry.setStatus('mandatory')
vrIpBgpPeerPeerAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerPeerAs.setStatus('mandatory')
vrIpBgpPeerLocalAddressConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 2), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerLocalAddressConfigured.setStatus('mandatory')
vrIpBgpPeerKeepAliveConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 21845)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerKeepAliveConfigured.setStatus('mandatory')
vrIpBgpPeerHoldTimeConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 4), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 65535), )).clone(90)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerHoldTimeConfigured.setStatus('mandatory')
vrIpBgpPeerConnectRetryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(120)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerConnectRetryTime.setStatus('mandatory')
vrIpBgpPeerMinAsOrigTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(15)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerMinAsOrigTime.setStatus('mandatory')
vrIpBgpPeerMinRouteAdvTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerMinRouteAdvTime.setStatus('mandatory')
vrIpBgpPeerDefaultInAggMed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)).clone(4294967295)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerDefaultInAggMed.setStatus('mandatory')
vrIpBgpPeerIsRouteReflectorClient = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 10, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2))).clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpPeerIsRouteReflectorClient.setStatus('mandatory')
vrIpBgpPeerStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 11), )
if mibBuilder.loadTexts: vrIpBgpPeerStateTable.setStatus('mandatory')
vrIpBgpPeerStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 11, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpPeerPeerAddressIndex"))
if mibBuilder.loadTexts: vrIpBgpPeerStateEntry.setStatus('mandatory')
vrIpBgpPeerAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerAdminState.setStatus('mandatory')
vrIpBgpPeerOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerOperationalState.setStatus('mandatory')
vrIpBgpPeerUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 11, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerUsageState.setStatus('mandatory')
vrIpBgpPeerOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12), )
if mibBuilder.loadTexts: vrIpBgpPeerOperTable.setStatus('mandatory')
vrIpBgpPeerOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpPeerPeerAddressIndex"))
if mibBuilder.loadTexts: vrIpBgpPeerOperEntry.setStatus('mandatory')
vrIpBgpPeerConnectionState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("idle", 1), ("connect", 2), ("active", 3), ("openSent", 4), ("openConfirm", 5), ("established", 6))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerConnectionState.setStatus('mandatory')
vrIpBgpPeerBgpIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerBgpIdentifier.setStatus('mandatory')
vrIpBgpPeerVersionNegotiated = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerVersionNegotiated.setStatus('mandatory')
vrIpBgpPeerHoldTimeNegotiated = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 6), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 65535), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerHoldTimeNegotiated.setStatus('mandatory')
vrIpBgpPeerKeepAliveNegotiated = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 21845))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerKeepAliveNegotiated.setStatus('mandatory')
vrIpBgpPeerLocalAddressUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 8), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerLocalAddressUsed.setStatus('mandatory')
vrIpBgpPeerLocalPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerLocalPort.setStatus('mandatory')
vrIpBgpPeerRemotePort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 10), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerRemotePort.setStatus('mandatory')
vrIpBgpPeerLastError = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 11), HexString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerLastError.setStatus('mandatory')
vrIpBgpPeerConnectionEstablishedTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 12), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerConnectionEstablishedTime.setStatus('mandatory')
vrIpBgpPeerConnectionEstablishedTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerConnectionEstablishedTransitions.setStatus('mandatory')
vrIpBgpPeerInUpdateElapsedTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 14), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerInUpdateElapsedTime.setStatus('mandatory')
vrIpBgpPeerInMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerInMsgs.setStatus('mandatory')
vrIpBgpPeerInUpdates = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerInUpdates.setStatus('mandatory')
vrIpBgpPeerInErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerInErrors.setStatus('mandatory')
vrIpBgpPeerInErrorMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerInErrorMsgs.setStatus('mandatory')
vrIpBgpPeerOutMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerOutMsgs.setStatus('mandatory')
vrIpBgpPeerOutUpdates = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerOutUpdates.setStatus('mandatory')
vrIpBgpPeerOutDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerOutDiscards.setStatus('mandatory')
vrIpBgpPeerOutErrorMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerOutErrorMsgs.setStatus('mandatory')
vrIpBgpPeerInRoutes = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 2, 12, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpPeerInRoutes.setStatus('mandatory')
vrIpBgpImport = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3))
vrIpBgpImportRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 1), )
if mibBuilder.loadTexts: vrIpBgpImportRowStatusTable.setStatus('mandatory')
vrIpBgpImportRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpImportIndex"))
if mibBuilder.loadTexts: vrIpBgpImportRowStatusEntry.setStatus('mandatory')
vrIpBgpImportRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportRowStatus.setStatus('mandatory')
vrIpBgpImportComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpImportComponentName.setStatus('mandatory')
vrIpBgpImportStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpImportStorageType.setStatus('mandatory')
vrIpBgpImportIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: vrIpBgpImportIndex.setStatus('mandatory')
vrIpBgpImportProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10), )
if mibBuilder.loadTexts: vrIpBgpImportProvTable.setStatus('mandatory')
vrIpBgpImportProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpImportIndex"))
if mibBuilder.loadTexts: vrIpBgpImportProvEntry.setStatus('mandatory')
vrIpBgpImportPeerAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportPeerAs.setStatus('mandatory')
vrIpBgpImportPeerIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 2), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportPeerIpAddress.setStatus('mandatory')
vrIpBgpImportOriginAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportOriginAs.setStatus('mandatory')
vrIpBgpImportOriginProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("any", 0), ("igp", 1), ("egp", 2), ("incomplete", 3))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportOriginProtocol.setStatus('mandatory')
vrIpBgpImportUsageFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("use", 1), ("ignore", 2), ("exclude", 3))).clone('use')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportUsageFlag.setStatus('mandatory')
vrIpBgpImportLocalPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportLocalPreference.setStatus('mandatory')
vrIpBgpImportPreferredOver = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(6, 70))).clone(namedValues=NamedValues(("overIntOspf", 6), ("underIntOspf", 70))).clone('underIntOspf')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportPreferredOver.setStatus('mandatory')
vrIpBgpImportAsPathExpression = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 8), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportAsPathExpression.setStatus('mandatory')
vrIpBgpImportCommunityExpression = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 9), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportCommunityExpression.setStatus('mandatory')
vrIpBgpImportExpressPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 10), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportExpressPreference.setStatus('mandatory')
vrIpBgpImportAppendCommunity = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 10, 1, 11), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportAppendCommunity.setStatus('mandatory')
vrIpBgpImportNet = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2))
vrIpBgpImportNetRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 1), )
if mibBuilder.loadTexts: vrIpBgpImportNetRowStatusTable.setStatus('mandatory')
# --- vrIpBgpImportNet: networks attached to a BGP import policy ---------------
# pysnmp-generated SMI objects (Nortel-Magellan-Passport-BgpMIB).  Each entry is
# indexed by vrIndex / vrIpIndex / vrIpBgpIndex / vrIpBgpImportIndex /
# vrIpBgpImportNetIndex.  The OID tuples are generated data — do not edit.
vrIpBgpImportNetRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpImportIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpImportNetIndex"))
if mibBuilder.loadTexts: vrIpBgpImportNetRowStatusEntry.setStatus('mandatory')
# Row-status columns: RowStatus is writable (row creation/deletion); the
# component name and storage type are read-only bookkeeping columns.
vrIpBgpImportNetRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportNetRowStatus.setStatus('mandatory')
vrIpBgpImportNetComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpImportNetComponentName.setStatus('mandatory')
vrIpBgpImportNetStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpImportNetStorageType.setStatus('mandatory')
# Index column (not-accessible by SMI convention, hence no setMaxAccess call).
vrIpBgpImportNetIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: vrIpBgpImportNetIndex.setStatus('mandatory')
# Provisioning table: the network prefix (IP address) and its mask length (1-32).
vrIpBgpImportNetProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 10), )
if mibBuilder.loadTexts: vrIpBgpImportNetProvTable.setStatus('mandatory')
vrIpBgpImportNetProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpImportIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpImportNetIndex"))
if mibBuilder.loadTexts: vrIpBgpImportNetProvEntry.setStatus('mandatory')
vrIpBgpImportNetPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 10, 1, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportNetPrefix.setStatus('mandatory')
vrIpBgpImportNetLength = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 3, 2, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpImportNetLength.setStatus('mandatory')
# --- vrIpBgpExport: BGP export (redistribution) policies ----------------------
# Subtree root ...21.4.  Row-status table for creating/deleting export policies,
# followed by the provisioning table holding the policy attributes.
vrIpBgpExport = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4))
vrIpBgpExportRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 1), )
if mibBuilder.loadTexts: vrIpBgpExportRowStatusTable.setStatus('mandatory')
vrIpBgpExportRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpExportIndex"))
if mibBuilder.loadTexts: vrIpBgpExportRowStatusEntry.setStatus('mandatory')
vrIpBgpExportRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportRowStatus.setStatus('mandatory')
vrIpBgpExportComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpExportComponentName.setStatus('mandatory')
vrIpBgpExportStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpExportStorageType.setStatus('mandatory')
vrIpBgpExportIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: vrIpBgpExportIndex.setStatus('mandatory')
# Provisioning columns: peer selection, source-protocol filter, and the BGP
# path attributes to attach to exported routes.
vrIpBgpExportProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10), )
if mibBuilder.loadTexts: vrIpBgpExportProvTable.setStatus('mandatory')
vrIpBgpExportProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpExportIndex"))
if mibBuilder.loadTexts: vrIpBgpExportProvEntry.setStatus('mandatory')
vrIpBgpExportPeerAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportPeerAs.setStatus('mandatory')
vrIpBgpExportPeerIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 2), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportPeerIpAddress.setStatus('mandatory')
# Source routing protocol the export rule matches; defaults to 'all'.
vrIpBgpExportProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("all", 1), ("egp", 2), ("rip", 3), ("ospfInternal", 4), ("ospfExternal", 5), ("staticLocal", 6), ("staticRemote", 7), ("bgpInternal", 8), ("bgpExternal", 9))).clone('all')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportProtocol.setStatus('mandatory')
vrIpBgpExportEgpAsId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportEgpAsId.setStatus('mandatory')
vrIpBgpExportBgpAsId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportBgpAsId.setStatus('mandatory')
vrIpBgpExportOspfTag = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 6), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)).clone(4294967295)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportOspfTag.setStatus('mandatory')
vrIpBgpExportRipInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 7), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportRipInterface.setStatus('mandatory')
vrIpBgpExportRipNeighbor = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 8), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportRipNeighbor.setStatus('mandatory')
# Whether matching routes are advertised ('send', the default) or suppressed.
vrIpBgpExportAdvertiseStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("send", 1), ("block", 2))).clone('send')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportAdvertiseStatus.setStatus('mandatory')
vrIpBgpExportMultiExitDisc = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 10), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportMultiExitDisc.setStatus('mandatory')
vrIpBgpExportSendMultiExitDiscToEbgp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("false", 1), ("true", 2))).clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportSendMultiExitDiscToEbgp.setStatus('mandatory')
vrIpBgpExportAsPathExpression = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 12), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportAsPathExpression.setStatus('mandatory')
vrIpBgpExportCommunityExpression = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 13), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportCommunityExpression.setStatus('mandatory')
vrIpBgpExportExpressPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 14), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportExpressPreference.setStatus('mandatory')
vrIpBgpExportSendCommunity = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 15), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportSendCommunity.setStatus('mandatory')
# Note the non-contiguous column id (200) — taken directly from the MIB.
vrIpBgpExportInsertDummyAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 10, 1, 200), IntegerSequence()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportInsertDummyAs.setStatus('mandatory')
# --- vrIpBgpExportNet: networks attached to a BGP export policy ---------------
# Mirrors the ImportNet structure: a row-status table plus a prov table with
# prefix/length, indexed down to vrIpBgpExportNetIndex.
vrIpBgpExportNet = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2))
vrIpBgpExportNetRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 1), )
if mibBuilder.loadTexts: vrIpBgpExportNetRowStatusTable.setStatus('mandatory')
vrIpBgpExportNetRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpExportIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpExportNetIndex"))
if mibBuilder.loadTexts: vrIpBgpExportNetRowStatusEntry.setStatus('mandatory')
vrIpBgpExportNetRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportNetRowStatus.setStatus('mandatory')
vrIpBgpExportNetComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpExportNetComponentName.setStatus('mandatory')
vrIpBgpExportNetStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpExportNetStorageType.setStatus('mandatory')
vrIpBgpExportNetIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: vrIpBgpExportNetIndex.setStatus('mandatory')
vrIpBgpExportNetProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 10), )
if mibBuilder.loadTexts: vrIpBgpExportNetProvTable.setStatus('mandatory')
vrIpBgpExportNetProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpExportIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpExportNetIndex"))
if mibBuilder.loadTexts: vrIpBgpExportNetProvEntry.setStatus('mandatory')
vrIpBgpExportNetPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 10, 1, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportNetPrefix.setStatus('mandatory')
vrIpBgpExportNetLength = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 4, 2, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpExportNetLength.setStatus('mandatory')
# --- vrIpBgpAs: per-autonomous-system configuration ---------------------------
# Row-status table keyed by vrIpBgpAsIndex plus a one-column prov table holding
# the AS weight (0-255, default 128).
vrIpBgpAs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5))
vrIpBgpAsRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 1), )
if mibBuilder.loadTexts: vrIpBgpAsRowStatusTable.setStatus('mandatory')
vrIpBgpAsRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAsIndex"))
if mibBuilder.loadTexts: vrIpBgpAsRowStatusEntry.setStatus('mandatory')
vrIpBgpAsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAsRowStatus.setStatus('mandatory')
vrIpBgpAsComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpAsComponentName.setStatus('mandatory')
vrIpBgpAsStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpAsStorageType.setStatus('mandatory')
vrIpBgpAsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: vrIpBgpAsIndex.setStatus('mandatory')
vrIpBgpAsProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 10), )
if mibBuilder.loadTexts: vrIpBgpAsProvTable.setStatus('mandatory')
vrIpBgpAsProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAsIndex"))
if mibBuilder.loadTexts: vrIpBgpAsProvEntry.setStatus('mandatory')
vrIpBgpAsWeight = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 5, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAsWeight.setStatus('mandatory')
# --- vrIpBgpAggregate: route aggregation configuration ------------------------
# Keyed by the aggregate's prefix (IpAddress) and length (0-32) rather than an
# integer index; the nested AggregateNet subtree lists the contributing routes.
vrIpBgpAggregate = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6))
vrIpBgpAggregateRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 1), )
if mibBuilder.loadTexts: vrIpBgpAggregateRowStatusTable.setStatus('mandatory')
vrIpBgpAggregateRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAggregatePrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAggregateLengthIndex"))
if mibBuilder.loadTexts: vrIpBgpAggregateRowStatusEntry.setStatus('mandatory')
vrIpBgpAggregateRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateRowStatus.setStatus('mandatory')
vrIpBgpAggregateComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpAggregateComponentName.setStatus('mandatory')
vrIpBgpAggregateStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpAggregateStorageType.setStatus('mandatory')
vrIpBgpAggregatePrefixIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 1, 1, 10), IpAddress())
if mibBuilder.loadTexts: vrIpBgpAggregatePrefixIndex.setStatus('mandatory')
vrIpBgpAggregateLengthIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32)))
if mibBuilder.loadTexts: vrIpBgpAggregateLengthIndex.setStatus('mandatory')
# --- vrIpBgpAggregateNet: component networks of an aggregate ------------------
vrIpBgpAggregateNet = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2))
vrIpBgpAggregateNetRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 1), )
if mibBuilder.loadTexts: vrIpBgpAggregateNetRowStatusTable.setStatus('mandatory')
vrIpBgpAggregateNetRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAggregatePrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAggregateLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAggregateNetIndex"))
if mibBuilder.loadTexts: vrIpBgpAggregateNetRowStatusEntry.setStatus('mandatory')
vrIpBgpAggregateNetRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetRowStatus.setStatus('mandatory')
vrIpBgpAggregateNetComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpAggregateNetComponentName.setStatus('mandatory')
vrIpBgpAggregateNetStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpAggregateNetStorageType.setStatus('mandatory')
vrIpBgpAggregateNetIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: vrIpBgpAggregateNetIndex.setStatus('mandatory')
# Prov table: the component prefix/length, source-protocol match, and whether
# the component is aggregated or advertised alongside the aggregate.
vrIpBgpAggregateNetProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10), )
if mibBuilder.loadTexts: vrIpBgpAggregateNetProvTable.setStatus('mandatory')
vrIpBgpAggregateNetProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAggregatePrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAggregateLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpAggregateNetIndex"))
if mibBuilder.loadTexts: vrIpBgpAggregateNetProvEntry.setStatus('mandatory')
vrIpBgpAggregateNetPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1, 1), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetPrefix.setStatus('mandatory')
vrIpBgpAggregateNetLength = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetLength.setStatus('mandatory')
vrIpBgpAggregateNetProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("all", 1), ("egp", 2), ("rip", 3), ("ospfInternal", 4), ("ospfExternal", 5), ("staticLocal", 6), ("staticRemote", 7), ("bgpInternal", 8), ("bgpExternal", 9))).clone('all')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetProtocol.setStatus('mandatory')
vrIpBgpAggregateNetEgpAsId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetEgpAsId.setStatus('mandatory')
vrIpBgpAggregateNetBgpAsId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetBgpAsId.setStatus('mandatory')
vrIpBgpAggregateNetOspfTag = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1, 6), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)).clone(4294967295)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetOspfTag.setStatus('mandatory')
vrIpBgpAggregateNetRipInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1, 7), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetRipInterface.setStatus('mandatory')
vrIpBgpAggregateNetAction = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 6, 2, 10, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("aggregate", 1), ("advertise", 2))).clone('aggregate')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vrIpBgpAggregateNetAction.setStatus('mandatory')
# --- vrIpBgpIndb: inbound BGP route database (operational, read-only) ---------
# Routes learned from peers, keyed by prefix/length/peer.  All columns are
# read-only (including RowStatus), since entries are created by the router.
vrIpBgpIndb = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7))
vrIpBgpIndbRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 1), )
if mibBuilder.loadTexts: vrIpBgpIndbRowStatusTable.setStatus('mandatory')
vrIpBgpIndbRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbPeerIndex"))
if mibBuilder.loadTexts: vrIpBgpIndbRowStatusEntry.setStatus('mandatory')
vrIpBgpIndbRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbRowStatus.setStatus('mandatory')
vrIpBgpIndbComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbComponentName.setStatus('mandatory')
vrIpBgpIndbStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbStorageType.setStatus('mandatory')
vrIpBgpIndbPrefixIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 1, 1, 10), IpAddress())
if mibBuilder.loadTexts: vrIpBgpIndbPrefixIndex.setStatus('mandatory')
vrIpBgpIndbLengthIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32)))
if mibBuilder.loadTexts: vrIpBgpIndbLengthIndex.setStatus('mandatory')
vrIpBgpIndbPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 1, 1, 12), IpAddress())
if mibBuilder.loadTexts: vrIpBgpIndbPeerIndex.setStatus('mandatory')
# Operational table: the received BGP path attributes for each in-db route.
vrIpBgpIndbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10), )
if mibBuilder.loadTexts: vrIpBgpIndbOperTable.setStatus('mandatory')
vrIpBgpIndbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbPeerIndex"))
if mibBuilder.loadTexts: vrIpBgpIndbOperEntry.setStatus('mandatory')
vrIpBgpIndbOrigin = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("igp", 1), ("egp", 2), ("incomplete", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbOrigin.setStatus('mandatory')
vrIpBgpIndbInLocaldb = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("false", 1), ("true", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbInLocaldb.setStatus('mandatory')
vrIpBgpIndbNextHop = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 6), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbNextHop.setStatus('mandatory')
vrIpBgpIndbLocalPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbLocalPreference.setStatus('mandatory')
vrIpBgpIndbCalcLocalPref = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbCalcLocalPref.setStatus('mandatory')
vrIpBgpIndbMultiExitDiscriminator = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbMultiExitDiscriminator.setStatus('mandatory')
vrIpBgpIndbAtomicAggregate = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("lessSpecificRouteNotSelected", 1), ("lessSpecificRouteSelected", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbAtomicAggregate.setStatus('mandatory')
vrIpBgpIndbAggregatorAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 11), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbAggregatorAs.setStatus('mandatory')
vrIpBgpIndbAggregatorAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 12), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbAggregatorAddr.setStatus('mandatory')
vrIpBgpIndbAsPath = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 13), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(2, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbAsPath.setStatus('mandatory')
vrIpBgpIndbUnknownAttributes = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 14), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbUnknownAttributes.setStatus('mandatory')
vrIpBgpIndbCommunityPath = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 15), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbCommunityPath.setStatus('mandatory')
vrIpBgpIndbAsOriginatorId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 10, 1, 16), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbAsOriginatorId.setStatus('mandatory')
# Route-reflector cluster list: one extra index level (the cluster-id value).
# Table id 798 is taken from the MIB (Indb uses 798; Localdb below uses 797).
vrIpBgpIndbRrClusterListTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 798), )
if mibBuilder.loadTexts: vrIpBgpIndbRrClusterListTable.setStatus('mandatory')
vrIpBgpIndbRrClusterListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 798, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbPeerIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndbRrClusterListValue"))
if mibBuilder.loadTexts: vrIpBgpIndbRrClusterListEntry.setStatus('mandatory')
vrIpBgpIndbRrClusterListValue = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 7, 798, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpIndbRrClusterListValue.setStatus('mandatory')
# --- vrIpBgpLocaldb: local BGP route database (operational, read-only) --------
# Same structure as Indb (prefix/length/peer index, read-only columns) but for
# routes in the local database; note it has no InLocaldb column, so the oper
# column ids are shifted down by one relative to Indb.
vrIpBgpLocaldb = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8))
vrIpBgpLocaldbRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 1), )
if mibBuilder.loadTexts: vrIpBgpLocaldbRowStatusTable.setStatus('mandatory')
vrIpBgpLocaldbRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbPeerIndex"))
if mibBuilder.loadTexts: vrIpBgpLocaldbRowStatusEntry.setStatus('mandatory')
vrIpBgpLocaldbRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbRowStatus.setStatus('mandatory')
vrIpBgpLocaldbComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbComponentName.setStatus('mandatory')
vrIpBgpLocaldbStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbStorageType.setStatus('mandatory')
vrIpBgpLocaldbPrefixIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 1, 1, 10), IpAddress())
if mibBuilder.loadTexts: vrIpBgpLocaldbPrefixIndex.setStatus('mandatory')
vrIpBgpLocaldbLengthIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32)))
if mibBuilder.loadTexts: vrIpBgpLocaldbLengthIndex.setStatus('mandatory')
vrIpBgpLocaldbPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 1, 1, 12), IpAddress())
if mibBuilder.loadTexts: vrIpBgpLocaldbPeerIndex.setStatus('mandatory')
vrIpBgpLocaldbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10), )
if mibBuilder.loadTexts: vrIpBgpLocaldbOperTable.setStatus('mandatory')
vrIpBgpLocaldbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbPeerIndex"))
if mibBuilder.loadTexts: vrIpBgpLocaldbOperEntry.setStatus('mandatory')
vrIpBgpLocaldbOrigin = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("igp", 1), ("egp", 2), ("incomplete", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbOrigin.setStatus('mandatory')
vrIpBgpLocaldbNextHop = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 5), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbNextHop.setStatus('mandatory')
vrIpBgpLocaldbLocalPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbLocalPreference.setStatus('mandatory')
vrIpBgpLocaldbMultiExitDiscriminator = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbMultiExitDiscriminator.setStatus('mandatory')
vrIpBgpLocaldbAtomicAggregate = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("lessSpecificRouteNotSelected", 1), ("lessSpecificRouteSelected", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbAtomicAggregate.setStatus('mandatory')
vrIpBgpLocaldbAggregatorAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbAggregatorAs.setStatus('mandatory')
vrIpBgpLocaldbAggregatorAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 10), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbAggregatorAddr.setStatus('mandatory')
vrIpBgpLocaldbAsPath = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 11), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(2, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbAsPath.setStatus('mandatory')
vrIpBgpLocaldbUnknownAttributes = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 12), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbUnknownAttributes.setStatus('mandatory')
vrIpBgpLocaldbCommunityPath = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 13), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbCommunityPath.setStatus('mandatory')
vrIpBgpLocaldbAsOriginatorId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 10, 1, 14), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbAsOriginatorId.setStatus('mandatory')
# Route-reflector cluster list for Localdb (table id 797 per the MIB).
vrIpBgpLocaldbRrClusterListTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 797), )
if mibBuilder.loadTexts: vrIpBgpLocaldbRrClusterListTable.setStatus('mandatory')
vrIpBgpLocaldbRrClusterListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 797, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbPeerIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpLocaldbRrClusterListValue"))
if mibBuilder.loadTexts: vrIpBgpLocaldbRrClusterListEntry.setStatus('mandatory')
vrIpBgpLocaldbRrClusterListValue = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 8, 797, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpLocaldbRrClusterListValue.setStatus('mandatory')
vrIpBgpOutdb = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9))
vrIpBgpOutdbRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 1), )
if mibBuilder.loadTexts: vrIpBgpOutdbRowStatusTable.setStatus('mandatory')
vrIpBgpOutdbRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 1, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbPeerIndex"))
if mibBuilder.loadTexts: vrIpBgpOutdbRowStatusEntry.setStatus('mandatory')
vrIpBgpOutdbRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbRowStatus.setStatus('mandatory')
vrIpBgpOutdbComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbComponentName.setStatus('mandatory')
vrIpBgpOutdbStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbStorageType.setStatus('mandatory')
vrIpBgpOutdbPrefixIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 1, 1, 10), IpAddress())
if mibBuilder.loadTexts: vrIpBgpOutdbPrefixIndex.setStatus('mandatory')
vrIpBgpOutdbLengthIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32)))
if mibBuilder.loadTexts: vrIpBgpOutdbLengthIndex.setStatus('mandatory')
vrIpBgpOutdbPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 1, 1, 12), IpAddress())
if mibBuilder.loadTexts: vrIpBgpOutdbPeerIndex.setStatus('mandatory')
vrIpBgpOutdbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10), )
if mibBuilder.loadTexts: vrIpBgpOutdbOperTable.setStatus('mandatory')
vrIpBgpOutdbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbPeerIndex"))
if mibBuilder.loadTexts: vrIpBgpOutdbOperEntry.setStatus('mandatory')
vrIpBgpOutdbOrigin = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("igp", 1), ("egp", 2), ("incomplete", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbOrigin.setStatus('mandatory')
vrIpBgpOutdbNextHop = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 5), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbNextHop.setStatus('mandatory')
vrIpBgpOutdbLocalPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbLocalPreference.setStatus('mandatory')
vrIpBgpOutdbMultiExitDiscriminator = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbMultiExitDiscriminator.setStatus('mandatory')
vrIpBgpOutdbAtomicAggregate = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("lessSpecificRouteNotSelected", 1), ("lessSpecificRouteSelected", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbAtomicAggregate.setStatus('mandatory')
vrIpBgpOutdbAggregatorAs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbAggregatorAs.setStatus('mandatory')
vrIpBgpOutdbAggregatorAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 10), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbAggregatorAddr.setStatus('mandatory')
vrIpBgpOutdbAsPath = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 11), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(2, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbAsPath.setStatus('mandatory')
vrIpBgpOutdbUnknownAttributes = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 12), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbUnknownAttributes.setStatus('mandatory')
vrIpBgpOutdbCommunityPath = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 13), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbCommunityPath.setStatus('mandatory')
vrIpBgpOutdbAsOriginatorId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 10, 1, 14), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbAsOriginatorId.setStatus('mandatory')
vrIpBgpOutdbRrClusterListTable = MibTable((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 799), )
if mibBuilder.loadTexts: vrIpBgpOutdbRrClusterListTable.setStatus('mandatory')
vrIpBgpOutdbRrClusterListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 799, 1), ).setIndexNames((0, "Nortel-Magellan-Passport-VirtualRouterMIB", "vrIndex"), (0, "Nortel-Magellan-Passport-IpMIB", "vrIpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbPrefixIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbLengthIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbPeerIndex"), (0, "Nortel-Magellan-Passport-BgpMIB", "vrIpBgpOutdbRrClusterListValue"))
if mibBuilder.loadTexts: vrIpBgpOutdbRrClusterListEntry.setStatus('mandatory')
vrIpBgpOutdbRrClusterListValue = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 2, 4, 1, 100, 6, 21, 9, 799, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vrIpBgpOutdbRrClusterListValue.setStatus('mandatory')
bgpGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141, 1))
bgpGroupBE = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141, 1, 5))
bgpGroupBE01 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141, 1, 5, 2))
bgpGroupBE01A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141, 1, 5, 2, 2))
bgpCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141, 3))
bgpCapabilitiesBE = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141, 3, 5))
bgpCapabilitiesBE01 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141, 3, 5, 2))
bgpCapabilitiesBE01A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 2, 4, 2, 141, 3, 5, 2, 2))
mibBuilder.exportSymbols("Nortel-Magellan-Passport-BgpMIB", vrIpBgpAggregateNetRowStatusTable=vrIpBgpAggregateNetRowStatusTable, vrIpBgpPeerBgpIdentifier=vrIpBgpPeerBgpIdentifier, vrIpBgpProvTable=vrIpBgpProvTable, vrIpBgpPeerRowStatus=vrIpBgpPeerRowStatus, vrIpBgpOutErrorMsgs=vrIpBgpOutErrorMsgs, vrIpBgpExportNetIndex=vrIpBgpExportNetIndex, vrIpBgpOutdbRrClusterListTable=vrIpBgpOutdbRrClusterListTable, bgpCapabilitiesBE01A=bgpCapabilitiesBE01A, vrIpBgpPeerAdminState=vrIpBgpPeerAdminState, vrIpBgpAggregateNetOspfTag=vrIpBgpAggregateNetOspfTag, vrIpBgpIndbOperTable=vrIpBgpIndbOperTable, vrIpBgpAggregateNetStorageType=vrIpBgpAggregateNetStorageType, vrIpBgpAggregateNetLength=vrIpBgpAggregateNetLength, vrIpBgpPeerKeepAliveNegotiated=vrIpBgpPeerKeepAliveNegotiated, vrIpBgpImportUsageFlag=vrIpBgpImportUsageFlag, vrIpBgpImportLocalPreference=vrIpBgpImportLocalPreference, vrIpBgpExportAdvertiseStatus=vrIpBgpExportAdvertiseStatus, vrIpBgpIndbPeerIndex=vrIpBgpIndbPeerIndex, vrIpBgpOutdbRowStatusEntry=vrIpBgpOutdbRowStatusEntry, vrIpBgpImportOriginProtocol=vrIpBgpImportOriginProtocol, vrIpBgpImportNetStorageType=vrIpBgpImportNetStorageType, vrIpBgpLocaldbComponentName=vrIpBgpLocaldbComponentName, vrIpBgpLocaldbOperTable=vrIpBgpLocaldbOperTable, vrIpBgpImport=vrIpBgpImport, vrIpBgpExportRowStatusTable=vrIpBgpExportRowStatusTable, vrIpBgpOutdbNextHop=vrIpBgpOutdbNextHop, vrIpBgpPeerPeerAddressIndex=vrIpBgpPeerPeerAddressIndex, vrIpBgpIndbCommunityPath=vrIpBgpIndbCommunityPath, vrIpBgpPeerLocalAddressUsed=vrIpBgpPeerLocalAddressUsed, vrIpBgpStateTable=vrIpBgpStateTable, vrIpBgpPeerOperationalState=vrIpBgpPeerOperationalState, vrIpBgpAdminControlEntry=vrIpBgpAdminControlEntry, bgpGroupBE=bgpGroupBE, vrIpBgpSnmpAdminStatus=vrIpBgpSnmpAdminStatus, vrIpBgpOutdbRrClusterListValue=vrIpBgpOutdbRrClusterListValue, vrIpBgpAs=vrIpBgpAs, vrIpBgpImportNetRowStatusEntry=vrIpBgpImportNetRowStatusEntry, vrIpBgpLocaldbPrefixIndex=vrIpBgpLocaldbPrefixIndex, 
vrIpBgpLocaldbAggregatorAddr=vrIpBgpLocaldbAggregatorAddr, vrIpBgpOperStatusEntry=vrIpBgpOperStatusEntry, vrIpBgpRouteReflector=vrIpBgpRouteReflector, vrIpBgpExportNetRowStatusTable=vrIpBgpExportNetRowStatusTable, vrIpBgpLocaldbPeerIndex=vrIpBgpLocaldbPeerIndex, vrIpBgpRouteThrottleLimit=vrIpBgpRouteThrottleLimit, vrIpBgpExportOspfTag=vrIpBgpExportOspfTag, vrIpBgpAsWeight=vrIpBgpAsWeight, vrIpBgpPeerPeerAs=vrIpBgpPeerPeerAs, vrIpBgpOutdbRowStatusTable=vrIpBgpOutdbRowStatusTable, vrIpBgpAsStorageType=vrIpBgpAsStorageType, vrIpBgpExportNetStorageType=vrIpBgpExportNetStorageType, vrIpBgpAggregateNetProvEntry=vrIpBgpAggregateNetProvEntry, vrIpBgpIndbAggregatorAs=vrIpBgpIndbAggregatorAs, vrIpBgpAsRowStatus=vrIpBgpAsRowStatus, vrIpBgpIndbAtomicAggregate=vrIpBgpIndbAtomicAggregate, vrIpBgpPeerOutErrorMsgs=vrIpBgpPeerOutErrorMsgs, vrIpBgpAggregateRowStatusTable=vrIpBgpAggregateRowStatusTable, vrIpBgpLocaldbMultiExitDiscriminator=vrIpBgpLocaldbMultiExitDiscriminator, bgpGroupBE01=bgpGroupBE01, vrIpBgpPeerStateTable=vrIpBgpPeerStateTable, vrIpBgpPeerProvTable=vrIpBgpPeerProvTable, vrIpBgpImportPeerIpAddress=vrIpBgpImportPeerIpAddress, vrIpBgpIndbInLocaldb=vrIpBgpIndbInLocaldb, vrIpBgpProvEntry=vrIpBgpProvEntry, vrIpBgpLocaldbLocalPreference=vrIpBgpLocaldbLocalPreference, vrIpBgpImportComponentName=vrIpBgpImportComponentName, vrIpBgpRouteReflectorCluster=vrIpBgpRouteReflectorCluster, vrIpBgpLocaldbNextHop=vrIpBgpLocaldbNextHop, vrIpBgpOperEntry=vrIpBgpOperEntry, vrIpBgpLocaldbStorageType=vrIpBgpLocaldbStorageType, vrIpBgpLocaldbAsPath=vrIpBgpLocaldbAsPath, vrIpBgpOutDiscards=vrIpBgpOutDiscards, vrIpBgpImportAppendCommunity=vrIpBgpImportAppendCommunity, vrIpBgpIndbSize=vrIpBgpIndbSize, vrIpBgpIndbRrClusterListEntry=vrIpBgpIndbRrClusterListEntry, vrIpBgpIndbNextHop=vrIpBgpIndbNextHop, vrIpBgpImportNetRowStatus=vrIpBgpImportNetRowStatus, vrIpBgpExportExpressPreference=vrIpBgpExportExpressPreference, vrIpBgpAdminState=vrIpBgpAdminState, 
vrIpBgpOperationalState=vrIpBgpOperationalState, vrIpBgpExportProvEntry=vrIpBgpExportProvEntry, vrIpBgpPeerHoldTimeConfigured=vrIpBgpPeerHoldTimeConfigured, vrIpBgpOutdbStorageType=vrIpBgpOutdbStorageType, vrIpBgpPeerInRoutes=vrIpBgpPeerInRoutes, vrIpBgpPeerConnectionEstablishedTime=vrIpBgpPeerConnectionEstablishedTime, vrIpBgpImportExpressPreference=vrIpBgpImportExpressPreference, vrIpBgpInErrors=vrIpBgpInErrors, vrIpBgpLocaldbLengthIndex=vrIpBgpLocaldbLengthIndex, vrIpBgpAggregateNetProtocol=vrIpBgpAggregateNetProtocol, vrIpBgpIndbUnknownAttributes=vrIpBgpIndbUnknownAttributes, vrIpBgpOutdbRrClusterListEntry=vrIpBgpOutdbRrClusterListEntry, vrIpBgpAggregateComponentName=vrIpBgpAggregateComponentName, vrIpBgpAsIndex=vrIpBgpAsIndex, vrIpBgpExportRowStatusEntry=vrIpBgpExportRowStatusEntry, vrIpBgpAggregateNetIndex=vrIpBgpAggregateNetIndex, vrIpBgpAsRowStatusTable=vrIpBgpAsRowStatusTable, vrIpBgpOutdb=vrIpBgpOutdb, vrIpBgpExportSendMultiExitDiscToEbgp=vrIpBgpExportSendMultiExitDiscToEbgp, vrIpBgpPeerOutMsgs=vrIpBgpPeerOutMsgs, vrIpBgpLocaldbAsOriginatorId=vrIpBgpLocaldbAsOriginatorId, vrIpBgpDefaultMultiExitDisc=vrIpBgpDefaultMultiExitDisc, vrIpBgp=vrIpBgp, vrIpBgpOutdbLengthIndex=vrIpBgpOutdbLengthIndex, vrIpBgpImportStorageType=vrIpBgpImportStorageType, vrIpBgpAggregateNetAction=vrIpBgpAggregateNetAction, vrIpBgpLocaldbRowStatusTable=vrIpBgpLocaldbRowStatusTable, vrIpBgpExportNetRowStatusEntry=vrIpBgpExportNetRowStatusEntry, vrIpBgpPeerRowStatusTable=vrIpBgpPeerRowStatusTable, vrIpBgpLocaldbUnknownAttributes=vrIpBgpLocaldbUnknownAttributes, vrIpBgpLocaldb=vrIpBgpLocaldb, vrIpBgpOutdbAsOriginatorId=vrIpBgpOutdbAsOriginatorId, vrIpBgpAsProvEntry=vrIpBgpAsProvEntry, vrIpBgpExportRowStatus=vrIpBgpExportRowStatus, vrIpBgpOutdbAggregatorAs=vrIpBgpOutdbAggregatorAs, vrIpBgpLocaldbRowStatus=vrIpBgpLocaldbRowStatus, bgpMIB=bgpMIB, vrIpBgpIndbAggregatorAddr=vrIpBgpIndbAggregatorAddr, vrIpBgpLocaldbRrClusterListTable=vrIpBgpLocaldbRrClusterListTable, 
vrIpBgpImportOriginAs=vrIpBgpImportOriginAs, vrIpBgpExportNetProvEntry=vrIpBgpExportNetProvEntry, vrIpBgpExportStorageType=vrIpBgpExportStorageType, vrIpBgpIndbOperEntry=vrIpBgpIndbOperEntry, vrIpBgpPeerDefaultInAggMed=vrIpBgpPeerDefaultInAggMed, vrIpBgpUsageState=vrIpBgpUsageState, vrIpBgpLocaldbRrClusterListEntry=vrIpBgpLocaldbRrClusterListEntry, vrIpBgpStorageType=vrIpBgpStorageType, vrIpBgpIndbRowStatusEntry=vrIpBgpIndbRowStatusEntry, vrIpBgpPeerMinAsOrigTime=vrIpBgpPeerMinAsOrigTime, vrIpBgpPeerVersionNegotiated=vrIpBgpPeerVersionNegotiated, vrIpBgpPeerOutUpdates=vrIpBgpPeerOutUpdates, vrIpBgpPeerOperEntry=vrIpBgpPeerOperEntry, vrIpBgpRowStatus=vrIpBgpRowStatus, vrIpBgpImportRowStatusTable=vrIpBgpImportRowStatusTable, vrIpBgpPeerUsageState=vrIpBgpPeerUsageState, vrIpBgpIndbComponentName=vrIpBgpIndbComponentName, vrIpBgpIndbAsPath=vrIpBgpIndbAsPath, vrIpBgpOutdbRowStatus=vrIpBgpOutdbRowStatus, vrIpBgpAggregateNetProvTable=vrIpBgpAggregateNetProvTable, bgpCapabilitiesBE01=bgpCapabilitiesBE01, vrIpBgpRouteThrottleInter=vrIpBgpRouteThrottleInter, vrIpBgpPeerIsRouteReflectorClient=vrIpBgpPeerIsRouteReflectorClient, vrIpBgpImportPeerAs=vrIpBgpImportPeerAs, vrIpBgpAggregate=vrIpBgpAggregate, vrIpBgpPeerStateEntry=vrIpBgpPeerStateEntry, vrIpBgpOutdbLocalPreference=vrIpBgpOutdbLocalPreference, vrIpBgpLocalAs=vrIpBgpLocalAs, vrIpBgpOutdbOperTable=vrIpBgpOutdbOperTable, vrIpBgpBgpIdentifier=vrIpBgpBgpIdentifier, vrIpBgpPeerRemotePort=vrIpBgpPeerRemotePort, vrIpBgpAggregateRowStatusEntry=vrIpBgpAggregateRowStatusEntry, vrIpBgpAggregateNetRowStatusEntry=vrIpBgpAggregateNetRowStatusEntry, vrIpBgpOutdbPeerIndex=vrIpBgpOutdbPeerIndex, vrIpBgpPeer=vrIpBgpPeer, vrIpBgpPeerOutDiscards=vrIpBgpPeerOutDiscards, vrIpBgpExportCommunityExpression=vrIpBgpExportCommunityExpression, vrIpBgpIndbRrClusterListValue=vrIpBgpIndbRrClusterListValue, vrIpBgpLocaldbAggregatorAs=vrIpBgpLocaldbAggregatorAs, vrIpBgpOutMsgs=vrIpBgpOutMsgs, vrIpBgpAggregateNetRowStatus=vrIpBgpAggregateNetRowStatus, 
vrIpBgpPeerInUpdates=vrIpBgpPeerInUpdates, vrIpBgpOperStatusTable=vrIpBgpOperStatusTable, vrIpBgpIndbCalcLocalPref=vrIpBgpIndbCalcLocalPref, vrIpBgpAsProvTable=vrIpBgpAsProvTable, vrIpBgpOutdbComponentName=vrIpBgpOutdbComponentName, vrIpBgpOutdbMultiExitDiscriminator=vrIpBgpOutdbMultiExitDiscriminator, vrIpBgpTableVersion=vrIpBgpTableVersion, vrIpBgpImportNetRowStatusTable=vrIpBgpImportNetRowStatusTable, vrIpBgpExportNet=vrIpBgpExportNet, bgpGroup=bgpGroup, vrIpBgpAggregateNetBgpAsId=vrIpBgpAggregateNetBgpAsId, vrIpBgpAsComponentName=vrIpBgpAsComponentName, vrIpBgpPeerComponentName=vrIpBgpPeerComponentName, vrIpBgpAggregatePrefixIndex=vrIpBgpAggregatePrefixIndex, vrIpBgpImportAsPathExpression=vrIpBgpImportAsPathExpression, vrIpBgpLocaldbOrigin=vrIpBgpLocaldbOrigin, vrIpBgpPeerInUpdateElapsedTime=vrIpBgpPeerInUpdateElapsedTime, vrIpBgpOutdbAsPath=vrIpBgpOutdbAsPath, vrIpBgpExportPeerAs=vrIpBgpExportPeerAs, vrIpBgpPeerInErrors=vrIpBgpPeerInErrors, vrIpBgpExportRipNeighbor=vrIpBgpExportRipNeighbor, vrIpBgpIndbRowStatusTable=vrIpBgpIndbRowStatusTable, vrIpBgpIndbOrigin=vrIpBgpIndbOrigin, vrIpBgpPeerLocalPort=vrIpBgpPeerLocalPort, vrIpBgpIndex=vrIpBgpIndex, vrIpBgpImportRowStatus=vrIpBgpImportRowStatus, vrIpBgpPeerOperTable=vrIpBgpPeerOperTable, vrIpBgpAggregateStorageType=vrIpBgpAggregateStorageType, vrIpBgpImportNet=vrIpBgpImportNet, vrIpBgpOutdbAtomicAggregate=vrIpBgpOutdbAtomicAggregate, vrIpBgpOutdbAggregatorAddr=vrIpBgpOutdbAggregatorAddr, vrIpBgpIndbStorageType=vrIpBgpIndbStorageType, vrIpBgpExportProvTable=vrIpBgpExportProvTable, vrIpBgpLocaldbRowStatusEntry=vrIpBgpLocaldbRowStatusEntry, vrIpBgpImportNetComponentName=vrIpBgpImportNetComponentName, vrIpBgpExportNetPrefix=vrIpBgpExportNetPrefix, vrIpBgpAdminControlTable=vrIpBgpAdminControlTable, vrIpBgpExport=vrIpBgpExport, vrIpBgpExportIndex=vrIpBgpExportIndex, vrIpBgpDefaultLocalPreference=vrIpBgpDefaultLocalPreference, vrIpBgpAggregateNetEgpAsId=vrIpBgpAggregateNetEgpAsId, 
vrIpBgpIndbLocalPreference=vrIpBgpIndbLocalPreference, vrIpBgpImportNetPrefix=vrIpBgpImportNetPrefix, vrIpBgpAggregateNetPrefix=vrIpBgpAggregateNetPrefix, vrIpBgpPeerStorageType=vrIpBgpPeerStorageType, vrIpBgpImportIndex=vrIpBgpImportIndex, vrIpBgpImportNetProvTable=vrIpBgpImportNetProvTable, vrIpBgpLocaldbOperEntry=vrIpBgpLocaldbOperEntry, vrIpBgpImportNetIndex=vrIpBgpImportNetIndex, vrIpBgpInMsgs=vrIpBgpInMsgs, vrIpBgpExportBgpAsId=vrIpBgpExportBgpAsId, vrIpBgpImportProvEntry=vrIpBgpImportProvEntry, vrIpBgpOutdbOperEntry=vrIpBgpOutdbOperEntry, vrIpBgpExportProtocol=vrIpBgpExportProtocol, vrIpBgpExportAsPathExpression=vrIpBgpExportAsPathExpression, vrIpBgpExportSendCommunity=vrIpBgpExportSendCommunity, vrIpBgpOutdbUnknownAttributes=vrIpBgpOutdbUnknownAttributes, vrIpBgpComponentName=vrIpBgpComponentName, vrIpBgpLocaldbCommunityPath=vrIpBgpLocaldbCommunityPath, bgpCapabilitiesBE=bgpCapabilitiesBE, vrIpBgpIndb=vrIpBgpIndb, vrIpBgpLocaldbRrClusterListValue=vrIpBgpLocaldbRrClusterListValue, vrIpBgpExportNetComponentName=vrIpBgpExportNetComponentName, vrIpBgpIndbMultiExitDiscriminator=vrIpBgpIndbMultiExitDiscriminator, vrIpBgpPeerHoldTimeNegotiated=vrIpBgpPeerHoldTimeNegotiated, vrIpBgpRowStatusTable=vrIpBgpRowStatusTable, vrIpBgpIndbAsOriginatorId=vrIpBgpIndbAsOriginatorId, vrIpBgpPeerConnectionState=vrIpBgpPeerConnectionState, vrIpBgpPeerProvEntry=vrIpBgpPeerProvEntry, vrIpBgpPeerLocalAddressConfigured=vrIpBgpPeerLocalAddressConfigured, vrIpBgpExportComponentName=vrIpBgpExportComponentName, vrIpBgpExportMultiExitDisc=vrIpBgpExportMultiExitDisc, vrIpBgpExportNetRowStatus=vrIpBgpExportNetRowStatus, vrIpBgpPeerInMsgs=vrIpBgpPeerInMsgs, vrIpBgpAggregateNetRipInterface=vrIpBgpAggregateNetRipInterface, vrIpBgpAggregateNet=vrIpBgpAggregateNet, vrIpBgpPeerLastError=vrIpBgpPeerLastError, vrIpBgpPeerInErrorMsgs=vrIpBgpPeerInErrorMsgs, vrIpBgpIndbLengthIndex=vrIpBgpIndbLengthIndex, vrIpBgpIndbRrClusterListTable=vrIpBgpIndbRrClusterListTable, 
vrIpBgpOutdbPrefixIndex=vrIpBgpOutdbPrefixIndex, vrIpBgpLocaldbAtomicAggregate=vrIpBgpLocaldbAtomicAggregate, vrIpBgpOutdbOrigin=vrIpBgpOutdbOrigin, vrIpBgpExportNetLength=vrIpBgpExportNetLength, vrIpBgpPeerKeepAliveConfigured=vrIpBgpPeerKeepAliveConfigured, vrIpBgpPeerConnectionEstablishedTransitions=vrIpBgpPeerConnectionEstablishedTransitions, vrIpBgpImportProvTable=vrIpBgpImportProvTable, vrIpBgpExportEgpAsId=vrIpBgpExportEgpAsId, vrIpBgpImportPreferredOver=vrIpBgpImportPreferredOver, vrIpBgpExportInsertDummyAs=vrIpBgpExportInsertDummyAs, vrIpBgpPeerConnectRetryTime=vrIpBgpPeerConnectRetryTime, vrIpBgpImportNetLength=vrIpBgpImportNetLength, vrIpBgpAggregateLengthIndex=vrIpBgpAggregateLengthIndex, vrIpBgpImportCommunityExpression=vrIpBgpImportCommunityExpression, vrIpBgpOutdbCommunityPath=vrIpBgpOutdbCommunityPath, vrIpBgpPeerMinRouteAdvTime=vrIpBgpPeerMinRouteAdvTime)
mibBuilder.exportSymbols("Nortel-Magellan-Passport-BgpMIB", vrIpBgpAggregateRowStatus=vrIpBgpAggregateRowStatus, vrIpBgpIndbRowStatus=vrIpBgpIndbRowStatus, vrIpBgpImportRowStatusEntry=vrIpBgpImportRowStatusEntry, vrIpBgpRowStatusEntry=vrIpBgpRowStatusEntry, vrIpBgpImportNetProvEntry=vrIpBgpImportNetProvEntry, vrIpBgpStateEntry=vrIpBgpStateEntry, vrIpBgpOperTable=vrIpBgpOperTable, vrIpBgpInErrorMsgs=vrIpBgpInErrorMsgs, vrIpBgpExportPeerIpAddress=vrIpBgpExportPeerIpAddress, vrIpBgpPeerRowStatusEntry=vrIpBgpPeerRowStatusEntry, vrIpBgpExportNetProvTable=vrIpBgpExportNetProvTable, vrIpBgpAggregateNetComponentName=vrIpBgpAggregateNetComponentName, vrIpBgpIndbPrefixIndex=vrIpBgpIndbPrefixIndex, vrIpBgpSnmpOperStatus=vrIpBgpSnmpOperStatus, vrIpBgpExportRipInterface=vrIpBgpExportRipInterface, vrIpBgpAsRowStatusEntry=vrIpBgpAsRowStatusEntry, bgpCapabilities=bgpCapabilities, bgpGroupBE01A=bgpGroupBE01A)
|
import pathlib
import setuptools
def read(*args: str) -> str:
    """Return the UTF-8 text of a file addressed relative to this module.

    Each positional argument is one path segment below the directory that
    contains this ``setup.py``.
    """
    target = pathlib.Path(__file__).parent
    for segment in args:
        target = target / segment
    return target.read_text("utf-8")
# Package metadata for the ``labels`` CLI; code lives under src/ (src-layout).
setuptools.setup(
    name="labels",
    version="0.3.0.dev0",
    author="Raphael Pierzina",
    author_email="raphael@hackebrot.de",
    maintainer="Raphael Pierzina",
    maintainer_email="raphael@hackebrot.de",
    license="MIT",
    url="https://github.com/hackebrot/labels",
    project_urls={
        "Repository": "https://github.com/hackebrot/labels",
        "Issues": "https://github.com/hackebrot/labels/issues",
    },
    description="CLI app for managing GitHub labels for Python 3.6 and newer. 📝",
    # PyPI long description is the README, rendered as Markdown.
    long_description=read("README.md"),
    long_description_content_type="text/markdown",
    packages=setuptools.find_packages("src"),
    package_dir={"": "src"},
    include_package_data=True,
    zip_safe=False,
    python_requires=">=3.6",
    install_requires=["click", "requests", "pytoml", "attrs"],
    # Installs the ``labels`` console command pointing at labels.cli:labels.
    entry_points={"console_scripts": ["labels = labels.cli:labels"]},
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: Implementation :: CPython",
        "Topic :: Utilities",
    ],
    keywords=["github", "command-line"],
)
|
import re
from django.contrib.auth import (
authenticate,
login as auth_login,
logout as auth_logout,
)
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
from django.db import transaction
from django.http import (
Http404,
JsonResponse,
)
from django.shortcuts import (
get_object_or_404,
redirect,
render,
)
from goodchat.decorators import public
from profiles.forms import (
AddUserForm,
EditUserForm,
LoginForm,
ProfileForm,
ProfileSearchForm,
UserSearchForm,
)
from profiles.models import (
Blocked,
Language,
LookingFor,
PersonalInterest,
ProfessionalInterest,
Profile,
Starred,
)
from profiles.utils import get_search_results
@login_required
def blocked_list(request):
    """Render the list of profiles the current user has blocked."""
    entries = Blocked.objects.filter(blocking_profile=request.user.profile)
    profiles = [entry.blocked_user.profile for entry in entries]
    return render(request, 'blocked_list.html', {'blocked_profiles': profiles})
@login_required
def profile_block(request, pk):
    """Toggle whether the user identified by ``pk`` is blocked by the requester.

    Unblocking simply removes the block; blocking also removes any star the
    requester had given that user.  Always redirects to the block list.
    """
    # The original issued three identical Profile lookups for the requester;
    # one is enough — the M2M querysets hang off the same profile.
    profile = Profile.objects.get(user=request.user)
    blocked_user = User.objects.get(pk=pk)
    if blocked_user in profile.blocked.all():
        profile.blocked.remove(blocked_user)
    else:
        # Block and unstar together so a failure cannot leave a user both
        # starred and blocked.
        with transaction.atomic():
            profile.blocked.add(blocked_user)
            if blocked_user in profile.starred.all():
                profile.starred.remove(blocked_user)
    return redirect('blocked-list')
@login_required
def starred_list(request):
    """Render a paginated (20/page) list of profiles the user has starred."""
    entries = Starred.objects.filter(starring_profile=request.user.profile)
    profiles = [entry.starred_user.profile for entry in entries]
    page_obj = None
    if profiles:
        paginator = Paginator(profiles, 20)
        page_obj = paginator.get_page(request.GET.get('page'))
    return render(request, 'starred_list.html', {'page_obj': page_obj})
@login_required
def profile_star(request, pk):
    """Toggle a star on the user identified by ``pk``.

    Returns JSON ``{'user': <pk>, 'action': 'starred'|'unstarred'}`` so the
    front end can update in place.
    """
    profile = Profile.objects.get(user=request.user)
    starred_user = User.objects.get(pk=pk)
    # The original re-fetched the requester's profile just to read .starred;
    # reuse the profile already in hand.
    if starred_user in profile.starred.all():
        profile.starred.remove(starred_user)
        action = 'unstarred'
    else:
        profile.starred.add(starred_user)
        action = 'starred'
    return JsonResponse({'user': starred_user.pk, 'action': action})
def _username_search_results(username, user, blocked_profiles, blocking_users):
    # Shared query for the POST and session-restored search paths (resolves
    # the in-code TODO about the duplicated queryset).
    # NOTE(review): ``blocked_profiles`` holds Profile objects yet is matched
    # with ``user__in`` — kept exactly as the original query; verify against
    # the model definitions.
    return Profile.objects.\
        filter(user__username__icontains=username).\
        exclude(user__in=blocked_profiles).\
        exclude(user__in=blocking_users).\
        exclude(user=user).\
        exclude(user__pk=1)


@login_required
def username_search(request):
    """Search profiles by (partial) username, excluding blocked relations.

    POST runs a new search and stores the expression in the session; GET
    re-runs the stored expression, if any.  Results are paginated 20/page.

    Bug fixed: an invalid POST form previously left ``search_results`` and
    ``username`` unbound, crashing with ``NameError``; it now re-renders the
    bound form with its errors and no results.
    """
    starred_users = Profile.objects.get(user=request.user).starred.all()
    blocking_users = request.user.blocking_profile.all()
    blocked_users = Profile.objects.get(user=request.user).blocked.all()
    blocked_profiles = [u.profile for u in blocked_users]
    username = None
    search_results = None
    if request.method == 'POST':
        form = UserSearchForm(request.POST)
        if form.is_valid():
            username = form.cleaned_data['username']
            search_results = _username_search_results(
                username, request.user, blocked_profiles, blocking_users)
            # Save the search expression to the session for reuse.
            request.session['user_search_username'] = username
    else:
        username = request.session.get('user_search_username')
        initial = None
        if username is not None:
            search_results = _username_search_results(
                username, request.user, blocked_profiles, blocking_users)
            initial = {"username": username}
        form = UserSearchForm(initial=initial)
    if search_results is not None:
        count = search_results.count()
        paginator = Paginator(search_results, 20)
        page_obj = paginator.get_page(request.GET.get('page'))
        search_data = {'username': username}
    else:
        count = None
        page_obj = None
        search_data = None
    return render(request, 'username_search.html', {
        'count': count,
        'form': form,
        'page_obj': page_obj,
        'search_data': search_data,
        'starred_users': starred_users,
    })
@login_required
def search(request):
    """Full profile search over languages, interests, stack and free text.

    POST runs a new search and stores the criteria dict in the session; GET
    re-runs stored criteria.  Foreign keys in the criteria are translated to
    their display strings for the template.

    Bug fixed: an invalid POST form previously left ``search_data`` and
    ``search_results`` unbound, crashing with ``NameError``; it now
    re-renders the bound form with its errors and no results.
    """
    user = request.user
    starred_users = Profile.objects.get(user=user).starred.all()
    search_data = None
    search_results = None
    if request.method == 'POST':
        profile_form = ProfileSearchForm(request.POST)
        if profile_form.is_valid():
            search_data = profile_form.cleaned_data
            search_results = get_search_results(user, search_data)
            request.session['search_data'] = search_data
    else:
        search_data = request.session.get('search_data')
        if search_data is not None:
            search_results = get_search_results(user, search_data)
        profile_form = ProfileSearchForm(initial=search_data)
    if search_results is not None:
        count = search_results.count()
        paginator = Paginator(search_results, 20)
        page_obj = paginator.get_page(request.GET.get('page'))
    else:
        count = None
        page_obj = None
        search_data = None
    natural_keyed_search_data = {}
    if search_results is not None:
        # Replace foreign keys in search data with the strings they represent.
        # NOTE(review): assumes session-restored criteria always carry these
        # keys, as a validated ProfileSearchForm would — confirm.
        natural_keyed_search_data['languages'] = list(
            Language.objects.
            filter(pk__in=search_data['languages']).
            values_list('language', flat=True)
        )
        natural_keyed_search_data['looking_for'] = list(
            LookingFor.objects.
            filter(pk__in=search_data['looking_for']).
            values_list('looking_for', flat=True)
        )
        natural_keyed_search_data['prof_interests'] = list(
            ProfessionalInterest.objects.
            filter(pk__in=search_data['prof_interests']).
            values_list('prof_interest', flat=True)
        )
        natural_keyed_search_data['personal_interests'] = list(
            PersonalInterest.objects.
            filter(pk__in=search_data['personal_interests']).
            values_list('personal_interest', flat=True)
        )
        # Free-text fields are reduced to their words for display.
        words_regex = re.compile(r'\w+')
        natural_keyed_search_data['stack'] = \
            words_regex.findall(search_data['stack'])
        natural_keyed_search_data['detail'] = \
            words_regex.findall(search_data['detail'])
    return render(request, 'search.html', {
        'count': count,
        'page_obj': page_obj,
        'profile_form': profile_form,
        'search_data': natural_keyed_search_data,
        'starred_users': starred_users,
    })
@login_required
def clear_search(request):
    """Forget any saved profile-search criteria; always reports success."""
    _ = request.session.pop('search_data', None)
    return JsonResponse({'result': True})
@login_required
def clear_username_search(request):
    """Forget any saved username-search expression; always reports success."""
    _ = request.session.pop('user_search_username', None)
    return JsonResponse({'result': True})
@public
def login(request):
    """Authenticate by username *or* email address and start a session.

    The single "username" field may hold either value; a case-insensitive
    username match is tried first, then a case-insensitive email match.
    Success redirects to the conversation list; failure re-renders the form
    with an error message.
    """
    message = None
    if request.method == 'POST':
        form = LoginForm(request.POST)
        if form.is_valid():
            credential = form.cleaned_data['username']
            password = form.cleaned_data['password']
            user = None
            # First interpretation: the value is a username.
            by_username = User.objects.\
                filter(username__iexact=credential).\
                first()
            if by_username:
                # Authenticate with the stored username so casing is correct.
                user = authenticate(
                    username=by_username.username, password=password)
            if not user:
                # Second interpretation: the value is an email address.
                by_email = User.objects.\
                    filter(email__iexact=credential).\
                    first()
                if by_email:
                    user = authenticate(
                        username=by_email.username, password=password)
            if user:
                auth_login(request, user)
                return redirect('conversations')
            message = 'Those credentials are incorrect.'
    else:
        form = LoginForm()
    return render(request, 'login.html', {
        'form': form,
        'message': message,
    })
@login_required
def logout(request):
    """End the current session and send the visitor to the login page."""
    auth_logout(request)
    return redirect('login')
@public
def signup(request):
    """Create a new account with an empty profile, then log the user in."""
    if request.method == 'POST':
        form = AddUserForm(request.POST)
        if form.is_valid():
            cleaned = form.cleaned_data
            # User and Profile must exist together, so create them atomically.
            with transaction.atomic():
                new_user = User.objects.create_user(
                    cleaned['username'],
                    cleaned['email'],
                    password=cleaned['password'],
                )
                Profile.objects.create(user=new_user)
            auth_login(request, new_user)
            return redirect('conversations')
    else:
        form = AddUserForm()
    return render(request, 'signup.html', {'form': form})
@login_required
def profile_edit(request):
    """Edit the current user's account and profile on a single page."""
    user = request.user
    profile = Profile.objects.get(user=user)
    if request.method == 'POST':
        user_form = EditUserForm(request.POST, instance=user)
        profile_form = ProfileForm(request.POST, instance=profile)
        if profile_form.is_valid() and user_form.is_valid():
            # Both records save together or not at all.
            with transaction.atomic():
                profile_form.save()
                user_form.save()
                return redirect('profile-detail', user.username)
    else:
        profile_form = ProfileForm(instance=profile)
        user_form = EditUserForm(instance=user)
    return render(request, 'profile_edit.html', {
        'profile_form': profile_form,
        'user': user,
        'user_form': user_form,
    })
@login_required
def profile_detail(request, username):
    """Display another user's profile page.

    Raises Http404 when no such user exists, or when the viewed user has
    blocked the requesting user (a blocked viewer sees a 404 rather than a
    "you are blocked" page).
    """
    user_to_view = get_object_or_404(User, username__iexact=username)
    profile_to_view = Profile.objects.get(user=user_to_view)
    # Reuse profile_to_view instead of re-querying Profile for the same user.
    if request.user in profile_to_view.blocked.all():
        raise Http404()
    # Fetch the requesting user's profile once instead of twice.
    request_user_profile = Profile.objects.get(user=request.user)
    request_user_stars = request_user_profile.starred.all()
    request_user_blocked = request_user_profile.blocked.all()
    # Used by the template to show a "nothing here yet" placeholder.
    profile_is_empty = not any([
        profile_to_view.languages.all().exists(),
        profile_to_view.looking_for.all().exists(),
        profile_to_view.personal_interests.all().exists(),
        profile_to_view.prof_interests.all().exists(),
        profile_to_view.detail,
        profile_to_view.stack,
        profile_to_view.twitter,
        profile_to_view.github,
    ])
    return render(request, 'profile_detail.html', {
        'profile_is_empty': profile_is_empty,
        'request_user_blocked': request_user_blocked,
        'request_user_stars': request_user_stars,
        'user_to_view': user_to_view,
        'profile_to_view': profile_to_view,
    })
|
from datetime import datetime
from django import forms
from django.forms import fields, widgets
from .models import Cliente, Pedido, DESCR_COBERTURA, DESCR_RECHEIO, DESCR_MASSA, DESCR_TAMANHO, TipoMassa
from django.utils.translation import ugettext_lazy as _
class PedidoForm(forms.ModelForm):
    """Form for creating/editing a Pedido (order).

    The two date fields use HTML5 ``<input type="date">`` widgets so the
    browser renders a native date picker.  (A previous experiment with a
    DateTimeField + datetimepicker widget was removed as dead code.)
    """
    data_pedido = forms.DateField(widget=forms.DateInput(attrs={'type': 'date'}))
    data_entrega = forms.DateField(widget=forms.DateInput(attrs={'type': 'date'}))

    class Meta:
        model = Pedido
        fields = (
            'tamanho',
            'massa',
            'recheio',
            'cobertura',
            'data_pedido',
            'data_entrega',
            'valor_total',
        )
class LoginForm(forms.ModelForm):
    """Login form backed by the Cliente model (email + password)."""

    class Meta:
        model = Cliente
        fields = ('email_cli', 'senha_cli')
        widgets = {
            'senha_cli': widgets.PasswordInput(render_value=True),
            'email_cli': widgets.EmailInput(),
        }

    def clean_email_cli(self):
        # Django invokes clean_<fieldname>() hooks; the previous names
        # (clean_email / clean_senha) never matched a declared field, and
        # clean_email also returned only the first character of the address.
        return self.cleaned_data['email_cli']

    def clean_senha_cli(self):
        return self.cleaned_data['senha_cli']
class ClienteForm(forms.ModelForm):
    """Registration/edit form for a Cliente, with Portuguese field labels."""
    class Meta:
        model = Cliente
        fields = ('nome_cli', 'end_cli', 'fone_cli', 'email_cli', 'senha_cli', 'cid_cli')
        widgets = {
            'senha_cli': widgets.PasswordInput(render_value=True),
            'email_cli': widgets.EmailInput(),
        }
        labels = {
            'nome_cli': _('Nome'),
            'end_cli': _('Endereço'),
            'fone_cli': _('Telefone'),
            'email_cli': _('Email'),
            'senha_cli': _('Senha'),
            'cid_cli': _('Cidade'),
        }
import glob
import logging
import os
import subprocess
from plugins import BaseAssessment
from yapsy.IPlugin import IPlugin
import asmtypes
class QuastAssessment(BaseAssessment, IPlugin):
    """Run the QUAST assembly-assessment tool over the job's contig sets."""
    new_version = True

    def run(self):
        """Build the QUAST command line, run it, and return its report path.

        Falls back to scaffold sets when no contig sets exist; returns a
        dict with a 'report' key when QUAST produced report.txt, otherwise
        an empty dict.
        """
        contigsets = self.data.contigsets
        # Tag each contig set so QUAST output can be traced back to it.
        for i, c in enumerate(contigsets):
            c['tags'].append('quast-{}'.format(i + 1))
        contigfiles = self.data.contigfiles
        if len(contigsets) == 0:  # No contigs -- check for scaffolds
            contigsets = self.data.scaffoldsets
            contigfiles = self.data.scaffoldfiles
            scaffolds = True
            assert len(contigsets) != 0
        else:
            scaffolds = False
        ref = self.initial_data.referencefiles or None
        cmd_args = [os.path.join(os.getcwd(), self.executable),
                    '--threads', self.process_threads_allowed,
                    '--min-contig', self.min_contig,
                    '-o', self.outpath,
                    '--gene-finding']
        if scaffolds:
            cmd_args.append('--scaffolds')
        #### Add Reference ####
        if ref:
            rfile = ref[0]
            cmd_args += ['-R', rfile, '--gage']
        #### Add Contig files ####
        cmd_args += contigfiles
        cmd_args += ['-l', '"{}"'.format(', '.join([cset.name for cset in contigsets]))]
        #### Run Quast ####
        self.arast_popen(cmd_args)
        output = {}
        report = os.path.join(self.outpath, 'report.txt')
        if not os.path.exists(report):
            # print() as a function call so the module also parses on Python 3
            # (the original used the Python-2-only statement form).
            print('No Quast Output')
            report = None
        else:
            output['report'] = report
        return output
|
'''OpenGL extension EXT.vertex_attrib_64bit
This module customises the behaviour of the
OpenGL.raw.GL.EXT.vertex_attrib_64bit to provide a more
Python-friendly API
Overview (from the spec)
This extension provides OpenGL shading language support for vertex shader
inputs with 64-bit floating-point components and OpenGL API support for
specifying the value of those inputs using vertex array or immediate mode
entry points. This builds on the support for general-purpose support for
64-bit floating-point values in the ARB_gpu_shader_fp64 extension.
This extension provides a new class of vertex attribute functions,
beginning with "VertexAttribL" ("L" for "long"), that can be used to
specify attributes with 64-bit floating-point components. This extension
provides no automatic type conversion between attribute and shader
variables; single-precision attributes are not automatically converted to
double-precision or vice versa. For shader variables with 64-bit
component types, the "VertexAttribL" functions must be used to specify
attribute values. For other shader variables, the "VertexAttribL"
functions must not be used. If a vertex attribute is specified using the
wrong attribute function, the values of the corresponding shader input are
undefined. This approach requiring matching types is identical to that
used for the "VertexAttribI" functions provided by OpenGL 3.0 and the
EXT_gpu_shader4 extension.
Additionally, some vertex shader inputs using the wider 64-bit components
may count double against the implementation-dependent limit on the number
of vertex shader attribute vectors. A 64-bit scalar or a two-component
vector consumes only a single generic vertex attribute; three- and
four-component "long" may count as two. This approach is similar to the
one used in the current GL where matrix attributes consume multiple
attributes.
Note that 64-bit generic vertex attributes were nominally supported
beginning with the introduction of vertex shaders in OpenGL 2.0. However,
the OpenGL Shading Language at the time had no support for 64-bit data
types, so any such values were automatically converted to 32-bit.
Support for 64-bit floating-point vertex attributes in this extension can
be combined with other extensions. In particular, this extension provides
an entry point that can be used with EXT_direct_state_access to directly
set state for any vertex array object. Also, the related
NV_vertex_attrib_integer_64bit extension provides an entry point to
specify bindless vertex attribute arrays with 64-bit components, integer
or floating-point.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/vertex_attrib_64bit.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.EXT.vertex_attrib_64bit import *
from OpenGL.raw.GL.EXT.vertex_attrib_64bit import _EXTENSION_NAME
def glInitVertexAttrib64BitEXT():
    '''Return boolean indicating whether this extension is available'''
    # Local import mirrors the autogenerated template; queries the current
    # GL context's extension string for EXT_vertex_attrib_64bit.
    from OpenGL import extensions
    return extensions.hasGLExtension( _EXTENSION_NAME )
# Wrap the raw autogenerated entry points with array-size metadata so
# PyOpenGL can validate/convert the ctypes pointer arguments.
glVertexAttribL1dvEXT=wrapper.wrapper(glVertexAttribL1dvEXT).setInputArraySize(
    'v', 1
)
glVertexAttribL2dvEXT=wrapper.wrapper(glVertexAttribL2dvEXT).setInputArraySize(
    'v', 2
)
glVertexAttribL3dvEXT=wrapper.wrapper(glVertexAttribL3dvEXT).setInputArraySize(
    'v', 3
)
glVertexAttribL4dvEXT=wrapper.wrapper(glVertexAttribL4dvEXT).setInputArraySize(
    'v', 4
)
# INPUT glVertexAttribLPointerEXT.pointer size not checked against size
glVertexAttribLPointerEXT=wrapper.wrapper(glVertexAttribLPointerEXT).setInputArraySize(
    'pointer', None
)
# 'params' is an output array whose size is looked up from 'pname'.
glGetVertexAttribLdvEXT=wrapper.wrapper(glGetVertexAttribLdvEXT).setOutput(
    'params',size=_glgets._glget_size_mapping,pnameArg='pname',orPassIn=True
)
### END AUTOGENERATED SECTION
#!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# biolatency Summarize block device I/O latency as a histogram.
# For Linux, uses BCC, eBPF.
#
# USAGE: biolatency [-h] [-T] [-Q] [-m] [-D] [interval] [count]
#
# Copyright (c) 2015 Brendan Gregg.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 20-Sep-2015 Brendan Gregg Created this.
from __future__ import print_function
from bcc import BPF
from time import sleep, strftime
import argparse
import ctypes as ct
# arguments
examples = """examples:
    ./biolatency            # summarize block I/O latency as a histogram
    ./biolatency 1 10       # print 1 second summaries, 10 times
    ./biolatency -mT 1      # 1s summaries, milliseconds, and timestamps
    ./biolatency -Q         # include OS queued time in I/O time
    ./biolatency -D         # show each disk device separately
    ./biolatency -F         # show I/O flags separately
    ./biolatency -j         # print a dictionary
"""
parser = argparse.ArgumentParser(
    description="Summarize block device I/O latency as a histogram",
    formatter_class=argparse.RawDescriptionHelpFormatter,
    epilog=examples)
parser.add_argument("-T", "--timestamp", action="store_true",
    help="include timestamp on output")
parser.add_argument("-Q", "--queued", action="store_true",
    help="include OS queued time in I/O time")
parser.add_argument("-m", "--milliseconds", action="store_true",
    help="millisecond histogram")
parser.add_argument("-D", "--disks", action="store_true",
    help="print a histogram per disk device")
parser.add_argument("-F", "--flags", action="store_true",
    help="print a histogram per set of I/O flags")
parser.add_argument("interval", nargs="?", default=99999999,
    help="output interval, in seconds")
parser.add_argument("count", nargs="?", default=99999999,
    help="number of outputs")
parser.add_argument("--ebpf", action="store_true",
    help=argparse.SUPPRESS)
parser.add_argument("-j", "--json", action="store_true",
    help="json output")
args = parser.parse_args()
countdown = int(args.count)
debug = 0
# -D and -F both change the histogram key type; they cannot be combined.
if args.flags and args.disks:
    print("ERROR: can only use -D or -F. Exiting.")
    exit()
# define BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <linux/blkdev.h>
typedef struct disk_key {
    char disk[DISK_NAME_LEN];
    u64 slot;
} disk_key_t;
typedef struct flag_key {
    u64 flags;
    u64 slot;
} flag_key_t;
BPF_HASH(start, struct request *);
STORAGE
// time block I/O
int trace_req_start(struct pt_regs *ctx, struct request *req)
{
    u64 ts = bpf_ktime_get_ns();
    start.update(&req, &ts);
    return 0;
}
// output
int trace_req_done(struct pt_regs *ctx, struct request *req)
{
    u64 *tsp, delta;
    // fetch timestamp and calculate delta
    tsp = start.lookup(&req);
    if (tsp == 0) {
        return 0; // missed issue
    }
    delta = bpf_ktime_get_ns() - *tsp;
    FACTOR
    // store as histogram
    STORE
    start.delete(&req);
    return 0;
}
"""
# code substitutions: FACTOR scales ns to the chosen unit; STORAGE/STORE
# pick the histogram layout (flat, per-disk, or per-flags).
if args.milliseconds:
    bpf_text = bpf_text.replace('FACTOR', 'delta /= 1000000;')
    label = "msecs"
else:
    bpf_text = bpf_text.replace('FACTOR', 'delta /= 1000;')
    label = "usecs"
if args.disks:
    bpf_text = bpf_text.replace('STORAGE',
        'BPF_HISTOGRAM(dist, disk_key_t);')
    bpf_text = bpf_text.replace('STORE',
        'disk_key_t key = {.slot = bpf_log2l(delta)}; ' +
        'void *__tmp = (void *)req->rq_disk->disk_name; ' +
        'bpf_probe_read_kernel(&key.disk, sizeof(key.disk), __tmp); ' +
        'dist.increment(key);')
elif args.flags:
    bpf_text = bpf_text.replace('STORAGE',
        'BPF_HISTOGRAM(dist, flag_key_t);')
    bpf_text = bpf_text.replace('STORE',
        'flag_key_t key = {.slot = bpf_log2l(delta)}; ' +
        'key.flags = req->cmd_flags; ' +
        'dist.increment(key);')
else:
    bpf_text = bpf_text.replace('STORAGE', 'BPF_HISTOGRAM(dist);')
    bpf_text = bpf_text.replace('STORE',
        'dist.increment(bpf_log2l(delta));')
if debug or args.ebpf:
    print(bpf_text)
    if args.ebpf:
        exit()
# load BPF program
b = BPF(text=bpf_text)
if args.queued:
    # Probe request creation so OS queued time is included in the latency.
    b.attach_kprobe(event="blk_account_io_start", fn_name="trace_req_start")
else:
    # blk_start_request was removed in newer kernels; attach when present.
    if BPF.get_kprobe_functions(b'blk_start_request'):
        b.attach_kprobe(event="blk_start_request", fn_name="trace_req_start")
    b.attach_kprobe(event="blk_mq_start_request", fn_name="trace_req_start")
b.attach_kprobe(event="blk_account_io_done",
    fn_name="trace_req_done")
if not args.json:
    print("Tracing block device I/O... Hit Ctrl-C to end.")
# Request-op numbers and flag bits -- see blk_fill_rwbs() in the kernel.
req_opf = {
    0: "Read",
    1: "Write",
    2: "Flush",
    3: "Discard",
    5: "SecureErase",
    6: "ZoneReset",
    7: "WriteSame",
    9: "WriteZeros"
}
REQ_OP_BITS = 8
REQ_OP_MASK = ((1 << REQ_OP_BITS) - 1)
REQ_SYNC = 1 << (REQ_OP_BITS + 3)
REQ_META = 1 << (REQ_OP_BITS + 4)
REQ_PRIO = 1 << (REQ_OP_BITS + 5)
REQ_NOMERGE = 1 << (REQ_OP_BITS + 6)
REQ_IDLE = 1 << (REQ_OP_BITS + 7)
REQ_FUA = 1 << (REQ_OP_BITS + 9)
REQ_RAHEAD = 1 << (REQ_OP_BITS + 11)
REQ_BACKGROUND = 1 << (REQ_OP_BITS + 12)
REQ_NOWAIT = 1 << (REQ_OP_BITS + 13)

def flags_print(flags):
    """Render a request cmd_flags bitmask as a human-readable label.

    The low REQ_OP bits select the operation name; each recognised
    modifier flag prepends its tag, so later entries end up outermost.
    """
    label = req_opf.get(flags & REQ_OP_MASK, "Unknown")
    modifiers = (
        (REQ_SYNC, "Sync"),
        (REQ_META, "Metadata"),
        (REQ_FUA, "ForcedUnitAccess"),
        (REQ_PRIO, "Priority"),
        (REQ_NOMERGE, "NoMerge"),
        (REQ_IDLE, "Idle"),
        (REQ_RAHEAD, "ReadAhead"),
        (REQ_BACKGROUND, "Background"),
        (REQ_NOWAIT, "NoWait"),
    )
    for bit, tag in modifiers:
        if flags & bit:
            label = tag + "-" + label
    return label
def _print_json_hist(vals, val_type, section_bucket=None):
    """Print one log2 histogram as a Python dict literal.

    Each emitted bucket i covers [2**(i-1), 2**i - 1]; slot 0 and every
    slot past the last non-zero one are omitted, matching the table's
    log2 layout.  An optional (key, value) section_bucket pair is added
    to the printed dict (e.g. the disk name or flags label).
    """
    last_nonzero = 0
    for idx, count in enumerate(vals):
        if count != 0:
            last_nonzero = idx
    data = []
    low, high = 0, 1
    for idx in range(1, last_nonzero + 1):
        high *= 2
        data.append({
            'interval-start': low,
            'interval-end': int(high) - 1,
            'count': int(vals[idx]),
        })
        low = high
    histogram = {"ts": strftime("%Y-%m-%d %H:%M:%S"), "val_type": val_type, "data": data}
    if section_bucket:
        histogram[section_bucket[0]] = section_bucket[1]
    print(histogram)
def print_json_hist(self, val_type="value", section_header="Bucket ptr",
        section_print_fn=None, bucket_fn=None, bucket_sort_fn=None):
    """Print a BCC log2 histogram table as dict literals.

    Mirrors bcc's Table.print_log2_hist() argument set, but routes each
    histogram through _print_json_hist().  `self` is the BPF table object,
    passed explicitly (this is a free function, not a bound method).
    """
    log2_index_max = 65 # this is a temporary workaround. Variable available in table.py
    if isinstance(self.Key(), ct.Structure):
        # Struct keys mean the histogram is sectioned (per disk / per flags):
        # field 0 is the bucket id, the slot field holds the log2 index.
        tmp = {}
        f1 = self.Key._fields_[0][0]
        f2 = self.Key._fields_[1][0]
        if f2 == '__pad_1' and len(self.Key._fields_) == 3:
            # Skip compiler-inserted padding between the two real fields.
            f2 = self.Key._fields_[2][0]
        for k, v in self.items():
            bucket = getattr(k, f1)
            if bucket_fn:
                bucket = bucket_fn(bucket)
            vals = tmp[bucket] = tmp.get(bucket, [0] * log2_index_max)
            slot = getattr(k, f2)
            vals[slot] = v.value
        buckets = list(tmp.keys())
        if bucket_sort_fn:
            buckets = bucket_sort_fn(buckets)
        for bucket in buckets:
            vals = tmp[bucket]
            if section_print_fn:
                section_bucket = (section_header, section_print_fn(bucket))
            else:
                section_bucket = (section_header, bucket)
            _print_json_hist(vals, val_type, section_bucket)
    else:
        # Plain integer keys: one flat histogram.
        vals = [0] * log2_index_max
        for k, v in self.items():
            vals[k.value] = v.value
        _print_json_hist(vals, val_type)
# output
# With no interval argument the default (99999999) means "run until Ctrl-C".
exiting = 0 if args.interval else 1
dist = b.get_table("dist")
while (1):
    try:
        sleep(int(args.interval))
    except KeyboardInterrupt:
        # Ctrl-C: print one final summary, then exit at the bottom.
        exiting = 1
        print()
    if args.json:
        if args.timestamp:
            print("%-8s\n" % strftime("%H:%M:%S"), end="")
        if args.flags:
            print_json_hist(dist, label, "flags", flags_print)
        else:
            print_json_hist(dist, label)
    else:
        if args.timestamp:
            print("%-8s\n" % strftime("%H:%M:%S"), end="")
        if args.flags:
            dist.print_log2_hist(label, "flags", flags_print)
        else:
            dist.print_log2_hist(label, "disk")
    # Histograms are cumulative in the kernel; clear between intervals.
    dist.clear()
    countdown -= 1
    if exiting or countdown == 0:
        exit()
|
#!/usr/bin/python
import getopt
import sys
import pandas as pd
import numpy as np
import os
import subprocess
from osgeo import gdal
import geopandas as gpd
from shapely.geometry import Point
import rasterio
import multiprocessing as mp
'''
Converts a large HDF or CSV file of pixel values to raster
Arguments:
in_csv (String):
column (String):
ref_file (String):
out_file (String):
x (String):
y (String)
'''
# Command-line defaults.
in_csv = ''
column = ''
ref_file = ''
out_file = ''
x = ''
y = ''
chunksize = 1000000
min_0 = False

# print() as a function call: the original "print 'ARGV :'" statement form
# is a syntax error on Python 3 (the rest of the script already uses print()).
print('ARGV :', sys.argv[1:])
options, remainder = getopt.getopt(sys.argv[1:], '', ['in_csv=',
                                                      'column=',
                                                      'ref_file=',
                                                      'out_file=',
                                                      'x_name=',
                                                      'y_name=',
                                                      'chunksize=',
                                                      'min_0'
                                                      ])
for opt, arg in options:
    # Exact comparison: the original "opt in ('--in_csv')" tested substring
    # membership in a plain string -- the parentheses did not make a tuple.
    if opt == '--in_csv':
        in_csv = arg
    elif opt == '--column':
        column = arg
    elif opt == '--ref_file':
        ref_file = arg
    elif opt == '--out_file':
        out_file = arg
    elif opt == '--x_name':
        x = arg
    elif opt == '--y_name':
        y = arg
    elif opt == '--chunksize':
        chunksize = int(arg)  # pandas expects an int, not the raw string
    elif opt == '--min_0':
        min_0 = True

xy = [x, y]
src = rasterio.open(ref_file)
# Shared output grid, initialised to NaN (which becomes the nodata value).
data = np.zeros((src.height, src.width))
data[:, :] = np.nan
result = np.ctypeslib.as_ctypes(data)
shared_array = mp.sharedctypes.RawArray(result._type_, result)

def fill_per_window(df):
    """Write one chunk of point values into the shared raster array."""
    for i, row in df.iterrows():
        tmp = np.ctypeslib.as_array(shared_array)
        index = src.index(row[xy[0]], row[xy[1]])
        # 'column' is the value column chosen on the command line; the
        # original referenced an undefined name ('name') here.
        if min_0:
            est = np.maximum(row[column], 0)
        else:
            est = row[column]
        tmp[index] = est

print('Entering pool')
p = mp.Pool()
if '.csv' in in_csv:
    chunks = pd.read_csv(in_csv, chunksize=chunksize)
else:
    chunks = pd.read_hdf(in_csv, chunksize=chunksize)
# imap_unordered is lazy in its results: consume the iterator so every
# chunk is fully processed before the pool is joined.  (The original also
# left both calls without a closing parenthesis -- a syntax error.)
for _ in p.imap_unordered(fill_per_window, chunks):
    pass
p.close()
p.join()

result = np.ctypeslib.as_array(shared_array)
profile = src.profile
profile.update(dtype=rasterio.float32, count=1, compress='lzw')
with rasterio.open(out_file, 'w', **profile) as dst:
    dst.write(result.astype(rasterio.float32), 1)
    dst.nodata = np.nan
|
#!/usr/bin/env python
import sys
from datetime import datetime
sys.path.append("../") # noqa
from aiohttp import web
import jinja2
import aiohttp_jinja2
import aiohttp_session
from aiohttp_tools import jsonify, redirect, get_client_ip, get_argument
from aiohttp_tools import flash
from aiohttp_tools.static_url import static_url
import aiohttp_tools.middlewares
import aiohttp_tools.context_processors
@aiohttp_jinja2.template("index.html")
async def index(request):
    """Demo page: POST flashes the submitted message, GET just renders."""
    if request.method == "POST":
        data = await request.post()
        message = get_argument(data, "message")
        flash.info(request, message)
        # Post/Redirect/Get: avoid re-submitting the form on refresh.
        return redirect(request, "index")
    return {}
@jsonify
async def api(request):
    """Tiny JSON API dispatcher supporting the "now" and "ip" methods."""
    handlers = {
        "now": lambda: {"now": datetime.now()},
        "ip": lambda: {"ip": get_client_ip(request)},
    }
    handler = handlers.get(request.match_info["method"])
    if handler is None:
        return {"error": "unknown api method"}
    return handler()
# Application wiring: middlewares, sessions, flash messages, Jinja2, routes.
app = web.Application(
    middlewares=[
        aiohttp_tools.middlewares.fix_host("0.0.0.0:8000"),
        aiohttp_tools.middlewares.add_trailing_slash,
    ]
)
# SimpleCookieStorage is unencrypted -- demo/development use only.
aiohttp_session.setup(app, aiohttp_session.SimpleCookieStorage())
app.middlewares.append(aiohttp_tools.flash.middleware)
env = aiohttp_jinja2.setup(
    app,
    loader=jinja2.FileSystemLoader("./"),
    context_processors=[
        aiohttp_tools.context_processors.url_for_processor,
        aiohttp_tools.flash.context_processor,
    ],
)
env.globals["static_url"] = static_url
app.router.add_get("/", index, name="index")
app.router.add_post("/", index)
app.router.add_get("/api/{method}", api, name="api")
# Serve everything in the working directory as static files as well.
app.router.add_static("/", "./")
if __name__ == "__main__":
    web.run_app(app, port=8000)
|
# WARNING: destructive maintenance script -- sets up the test-suite's
# Exchange account fixture and then deletes everything in that account.
from tests.common import EWSTest
t = EWSTest()
t.setUpClass()
t.wipe_test_account()
|
# The MIT License (MIT)
# Copyright (c) 2020. Ian Buttimer
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import os
from typing import Union
from PIL import Image, ImageOps
def resize_keep_aspect(path: str, desired_size: Union[int, tuple], out_folder: str):
    """
    Resize an image keeping the aspect ratio, centred on a black canvas.

    The result is exactly 'desired_size' pixels: a square of
    desired_size x desired_size when an int is given, or the given
    (width, height) when a tuple is given.  The output keeps the input's
    base filename and is written into 'out_folder'.

    :param path: Path to image
    :param desired_size: New size in pixels (int for a square, or a tuple)
    :param out_folder: folder to save new image
    """
    # Based on https://jdhao.github.io/2017/11/06/resize-image-to-square-with-padding/
    # Use a context manager so the source file handle is always released
    # (the original left the opened image unclosed).
    with Image.open(path) as im:
        old_size = im.size  # old_size is in (width, height) format
        if isinstance(desired_size, tuple):
            ratio = min(float(desired_size[i] / old_size[i]) for i in range(2))
        else:
            ratio = float(desired_size) / max(old_size)
            desired_size = tuple(desired_size for _ in range(2))
        new_size = tuple(int(x * ratio) for x in old_size)
        # Offsets that centre the resized image on the new canvas.
        coord = tuple((desired_size[i] - new_size[i]) // 2 for i in range(2))
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (it is an
        # alias of Image.LANCZOS) -- confirm the targeted Pillow version.
        resized = im.resize(new_size, Image.ANTIALIAS)
    # create a new image (black background) and paste the resized one on it
    new_im = Image.new("RGB", desired_size)
    new_im.paste(resized, coord)
    new_im.save(os.path.join(out_folder, os.path.basename(path)))
    new_im.close()
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-statements
# pylint: disable=too-many-locals
# pylint: disable=line-too-long
from azure.cli.core.commands import CliCommandType
def load_command_table(self, _):
    """Register the mixed-reality command groups on the CLI command table."""
    from azext_mixed_reality.generated._client_factory import cf_spatial_anchor_account
    mixed_reality_spatial_anchor_account = CliCommandType(
        operations_tmpl='azext_mixed_reality.vendored_sdks.mixedreality.operations._spatial_anchors_accounts_operations#SpatialAnchorsAccountsOperations.{}',
        client_factory=cf_spatial_anchor_account,
    )
    with self.command_group(
        'spatial-anchors-account', mixed_reality_spatial_anchor_account, client_factory=cf_spatial_anchor_account
    ) as g:
        g.custom_command('regenerate-key', 'spatial_anchors_account_regenerate_key')
    from azext_mixed_reality.generated._client_factory import cf_remote_rendering_account
    mixed_reality_remote_rendering_account = CliCommandType(
        operations_tmpl='azext_mixed_reality.vendored_sdks.mixedreality.operations._remote_rendering_accounts_operations#RemoteRenderingAccountsOperations.{}',
        client_factory=cf_remote_rendering_account,
    )
    with self.command_group(
        'remote-rendering-account', mixed_reality_remote_rendering_account, client_factory=cf_remote_rendering_account
    ) as g:
        g.custom_command('list', 'remote_rendering_account_list')
        g.custom_show_command('show', 'remote_rendering_account_show')
        g.custom_command('create', 'remote_rendering_account_create')
        g.custom_command('update', 'remote_rendering_account_update')
        g.custom_command('delete', 'remote_rendering_account_delete', confirmation=True)
        g.custom_command('list-key', 'remote_rendering_account_list_key')
        g.custom_command('regenerate-key', 'remote_rendering_account_regenerate_key')
    # Parent group exists only to carry the experimental flag.
    with self.command_group('mixed-reality', is_experimental=True):
        pass
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import numpy as np
import tensorflow as tf
from utils.common import *
class Vocabulary(object):
    """
    Token vocabulary: bidirectional mapping between tokens and integer ids.
    """
    PAD = "<PAD>"
    BOS = "<BOS>"
    EOS = "<EOS>"
    UNK = "<UNK>"
    RESERV_TOKEN = [PAD, BOS, EOS, UNK]

    def __init__(self, vocab_file, need_reserve=True, encoding='utf-8'):
        """
        Args:
            vocab_file: a flat text file with one (normalized) token per line.
            need_reserve: if `False`, the reserved tokens (PAD/BOS/EOS/UNK)
                are not prepended to the vocabulary.
            encoding: text encoding of `vocab_file`.
        """
        self._id_to_word = []
        self._word_to_id = {}
        if need_reserve:
            for token in Vocabulary.RESERV_TOKEN:
                self._register(token)
        with open(vocab_file, encoding=encoding) as fin:
            for line in fin:
                self._register(line.strip('\n'))
        self.size = len(self._id_to_word)

    def _register(self, token):
        # Assign the next free id to `token` and record both directions.
        self._word_to_id[token] = len(self._id_to_word)
        self._id_to_word.append(token)

    def word_to_id(self, word):
        """Return the id of `word`, falling back to the UNK id."""
        try:
            return self._word_to_id[word]
        except KeyError:
            return self._word_to_id[Vocabulary.UNK]

    def id_to_word(self, cur_id):
        """Return the token whose id is `cur_id`."""
        return self._id_to_word[cur_id]

    def encode(self, sentence, add_bos_eos=False, max_len=None, split=None):
        """
        Convert a sentence to a numpy array of token ids.

        Args:
            sentence: a list of tokens, or -- when `split` is given -- a
                single string with tokens separated by `split`.
            add_bos_eos: if `True`, wrap the sentence in BOS/EOS tokens.
            max_len: if not `None`, truncate the id sequence to `max_len`.
            split: optional separator used to tokenize a string sentence.
        Returns:
            A numpy array of token ids for the input sentence.
        """
        tokens = sentence.split(split) if split else sentence
        if add_bos_eos:
            tokens = [Vocabulary.BOS] + list(tokens) + [Vocabulary.EOS]
        ids = [self.word_to_id(token) for token in tokens]
        return np.array(ids[:max_len], dtype=NP_DTYPE_INT)
def read_wembed(wembed_file):
    """
    Read word embeddings from a file, dispatching on the file name.

    Paths containing ".hdf5" are treated as HDF5; anything else is read
    as plain text.

    Args:
        wembed_file: file path of word embedding.
    Returns:
        Numpy array of word embeddings.
    """
    reader = read_wembed_hdf5 if ".hdf5" in wembed_file else read_wembed_txt
    return reader(wembed_file)
def read_wembed_hdf5(wembed_file, name="word_embeddings"):
    """
    Read word embedding from HDF5 file.
    Args:
        wembed_file: path of the HDF5 embedding file.
        name: name of hdf5 dataset.
    Returns:
        Numpy array of word embeddings, shape [vocab_size, dim].
    """
    import h5py
    print("Reading word embeddings from hdf5 file: %s" % (wembed_file))
    with h5py.File(wembed_file, 'r') as fin:
        dataset = fin[name]
        embeddings = np.zeros([dataset.shape[0], dataset.shape[1]], dtype=NP_DTYPE)
        # Bulk-read replaces the zero array with the full dataset contents.
        embeddings = dataset[...]
    print("Read word embeddings finished, vocab_size=%d, dim=%d" % \
        (embeddings.shape[0], embeddings.shape[1]))
    return embeddings
def read_wembed_txt(wembed_file, sep=" "):
    """
    Read word embeddings from a text file.

    The first line is a header "<vocab_size><sep><dim>"; each following
    line is "<word><sep><v1><sep><v2>...".  Vectors are stored by line
    order; the leading word token itself is skipped.

    Args:
        wembed_file: path of the embedding file.
        sep: seperate character within a single line.
    Returns:
        Numpy array of shape [num, dim].
    Raises:
        ValueError: if the number of data lines does not match the header.
    """
    print("Reading word embeddings from txt file: %s" % (wembed_file))
    with open(wembed_file) as fin:
        header = fin.readline().strip('\n').split(sep)
        num = int(header[0])
        dim = int(header[1])
        embeddings = np.zeros(shape=[num, dim], dtype=NP_DTYPE)
        count = 0
        # Stream the file instead of materialising readlines().
        for idx, line in enumerate(fin):
            line = line.strip('\n')
            word_pos = line.find(sep)
            array = line[word_pos + 1:]
            # NOTE(review): np.fromstring(..., sep=...) is deprecated in
            # modern numpy; kept for behavioural parity.
            embeddings[idx] = np.fromstring(array, sep=sep)
            count = idx + 1
    # Validate after reading everything: the original raised inside the
    # loop, rejecting any file with more than one embedding line.
    if count != num:
        raise ValueError("Word embedding num[%d] not match with header[%d]" % (count, num))
    print("Read word embeddings finished, vocab_size=%d, dim=%d" % (num, dim))
    return embeddings
|
import traceback
from datetime import datetime
import json
from flask import (
Blueprint, Response,
current_app, request, render_template, abort
)
from sqlalchemy import desc
from ..models import ConversationHistory
bp = Blueprint("conversation_history", __name__, template_folder="../templates")
@bp.app_template_filter("timestamp_to_str")
def timestamp_to_str(timestamp):
    """Template filter: render a unix timestamp as 'YYYY/MM/DD HH:MM:SS' (UTC).

    Falsy values (None, 0) render as "-".
    """
    if not timestamp:
        return "-"
    return datetime.utcfromtimestamp(timestamp).strftime("%Y/%m/%d %H:%M:%S")
@bp.app_template_filter("format_json")
def format_json(val):
    """Template filter: pretty-print a dict or JSON string (None passes through)."""
    if val is None:
        return None
    parsed = val if isinstance(val, dict) else json.loads(val)
    return json.dumps(parsed, ensure_ascii=False, indent=2)
@bp.route("/admin/history")
def conversation_history_index():
    """List conversation histories, newest first, with limit/offset paging."""
    db = current_app.bot.db_session()
    try:
        # Clamp query params to non-negative ints; count=0 gives an empty page.
        count = int(request.args.get("count", 50))
        if count < 0:
            count = 0
        offset = int(request.args.get("offset", 0))
        if offset < 0:
            offset = 0
        conversation_histories = db.query(ConversationHistory).\
            order_by(desc(ConversationHistory.updated_at)).\
            limit(count).offset(offset).all()
        return render_template(
            "history.html",
            data={
                "conversation_histories": conversation_histories,
                "count": count,
                "offset": offset
            }
        )
    except Exception as ex:
        current_app.bot.logger.error(
            "Error in getting index of message log: "
            + f"{str(ex)}\n{traceback.format_exc()}"
        )
        abort(500)
    finally:
        db.close()
@bp.route("/admin/history/<history_id>")
def conversation_history_detail(history_id):
    """Show one conversation history record, or 404 if it does not exist."""
    db = current_app.bot.db_session()
    try:
        conversation_history = db.query(ConversationHistory).\
            filter(ConversationHistory.id == history_id).first()
        if conversation_history:
            return render_template(
                "history_detail.html",
                data={
                    "ml": conversation_history
                }
            )
        else:
            # Use Response to avoid raising error
            return Response("History not found", status=404)
    except Exception as ex:
        current_app.bot.logger.error(
            "Error in getting detail of message log: "
            + f"{str(ex)}\n{traceback.format_exc()}"
        )
        abort(500)
    finally:
        db.close()
|
#Loss functions
def gradient_penalty(samples, output, weights):
    """Weighted squared-gradient penalty of `output` w.r.t. `samples`.

    NOTE(review): relies on `K` (Keras backend) and `np` being imported by
    the surrounding module; neither import is visible in this fragment.
    Presumably used as a WGAN-GP style critic penalty -- confirm at call site.
    """
    # d(output)/d(samples), one gradient tensor per sample in the batch.
    gradients = K.gradients(output, samples)[0]
    gradients_sqr = K.square(gradients)
    # Sum over every non-batch axis -> ||grad||^2 per sample.
    gradient_penalty = K.sum(gradients_sqr,
                             axis=np.arange(1, len(gradients_sqr.shape)))
    # (weight / 2) * ||grad||^2
    # Penalize the gradient norm
    return K.mean(gradient_penalty * weights)
import numpy as np
def sigmoid(x):
    """Element-wise logistic function 1 / (1 + e^(-x))."""
    denom = 1 + np.exp(-x)
    return 1 / denom
# One hand-computed forward/backward pass of a tiny 3-2-1 network.
x = np.array([0.5, 0.1, -0.2])
target = 0.6
learnrate = 0.5
weights_input_hidden = np.array([[0.5, -0.6],
                                 [0.1, -0.2],
                                 [0.1, 0.7]])
weights_hidden_output = np.array([0.1, -0.3])
# Forward pass
hidden_layer_input = np.dot(x, weights_input_hidden)
hidden_layer_output = sigmoid(hidden_layer_input)
print('HO = ', hidden_layer_output)
output_layer_in = np.dot(hidden_layer_output, weights_hidden_output)
output = sigmoid(output_layer_in)
print('OO = ', output)
# Backwards pass
# TODO: Calculate output error
error = target - output
print('Output Error = ', error)
# TODO: Calculate error term for output layer
# delta_o = error * f'(h), where f'(h) = output * (1 - output) for sigmoid.
output_error_term = error * output * (1 - output)
print('dO = ', output_error_term)
# TODO: Calculate error term for hidden layer
# Backpropagate the output error term through the hidden-output weights.
hidden_error_term = np.dot(weights_hidden_output, output_error_term) * \
    hidden_layer_output * (1 - hidden_layer_output)
print('dH = ', hidden_error_term)
# TODO: Calculate change in weights for hidden layer to output layer
delta_w_h_o = learnrate * output_error_term * hidden_layer_output
print('delta w H->O = ', delta_w_h_o)
# TODO: Calculate change in weights for input layer to hidden layer
# x[:, None] makes x a column so the outer product has shape (3, 2).
delta_w_i_h = learnrate * hidden_error_term * x[:, None]
print('delta w I->H = ', delta_w_i_h)
print('Change in weights for hidden layer to output layer:')
print(delta_w_h_o)
print('Change in weights for input layer to hidden layer:')
print(delta_w_i_h)
|
# -*- coding: utf-8 -*-
"""
database decorators module.
"""
import pyrin.database.services as database_services
def session_factory(*args, **kwargs):
    """
    decorator to register a database session factory.

    :param object args: session factory class constructor arguments.
    :param object kwargs: session factory class constructor keyword arguments.

    :keyword bool replace: specifies that if there is another registered
                           session factory with the same name, replace it with
                           the new one, otherwise raise an error. defaults to False.

    :raises InvalidSessionFactoryTypeError: invalid session factory type error.
    :raises DuplicatedSessionFactoryError: duplicated session factory error.

    :returns: session factory class.
    :rtype: type
    """
    def decorator(cls):
        """
        registers an instance of the decorated class as a session factory.

        :param type cls: session factory class.

        :returns: session factory class.
        :rtype: type
        """
        # kwargs serve double duty: constructor options and register options
        # (notably `replace`), matching the documented contract above.
        database_services.register_session_factory(cls(*args, **kwargs), **kwargs)
        return cls

    return decorator
def database_hook():
    """
    decorator to register a database hook.

    :raises InvalidDatabaseHookTypeError: invalid database hook type error.

    :returns: database hook class.
    :rtype: type
    """
    def decorator(cls):
        """
        registers an instance of the decorated class as a database hook.

        :param type cls: database hook class.

        :returns: database hook class.
        :rtype: type
        """
        database_services.register_hook(cls())
        return cls

    return decorator
|
import cv2
import numpy as np
from keras import backend as K
from losses import ActivationMaximization
from optimizer import Optimizer
from regularizers import TotalVariation, LPNorm
from utils import utils
def _get_num_filters(layer):
    """
    Returns: Total number of filters (or output units) within this layer.
    """
    # Dense layers have 2D output: (samples, units).
    if K.ndim(layer.output) == 2:
        return layer.output.shape[1]

    # All other layers have 4D output; the channel axis position depends
    # on the backend image dimension ordering ('th' = channels first).
    channel_axis = 1 if K.image_dim_ordering() == 'th' else 3
    return layer.output._keras_shape[channel_axis]
def visualize_saliency(img, layer, filter_indices,
                       seed_img, overlay=True):
    """Generates a heatmap of the pixels that contributed the most towards
    maximizing the `filter_indices` output in the given `layer`.

    For example, to see which pixels made an image be classified as 'bird'
    (say output index 22 on the final `keras.layers.Dense` layer), use
    `filter_indices = [22]`, `layer = dense_layer`. Passing several indices,
    e.g. `[22, 23]`, highlights regions common to those output categories.

    Args:
        img: 4D input image tensor with shape: `(samples, channels, rows, cols)` if dim_ordering='th'
            or `(samples, rows, cols, channels)` if dim_ordering='tf'.
        layer: The `keras.Layer` layer whose filters needs to be visualized.
        filter_indices: filter indices within the layer to be maximized.
            For `keras.layers.Dense` layer, `filter_idx` is interpreted as the output index.
            If you are visualizing the final `keras.layers.Dense` layer, you tend to get
            better results with 'linear' activation as opposed to 'softmax', because
            'softmax' output can be maximized by minimizing scores for other classes.
        seed_img: The input image for which the activation map is visualized.
        overlay: If True, overlays the heatmap over the original image (Default value = True)

    Returns:
        The heatmap image indicating image regions that, when changed, would
        contribute the most towards maximizing the filter output.
    """
    loss_terms = [(ActivationMaximization(layer, filter_indices), 1)]
    _, grads = Optimizer(img, loss_terms).minimize(
        max_iter=1, verbose=True, jitter=0, seed_img=seed_img)

    # Collapse the gradient over the channel axis into a single magnitude map.
    _, channel_axis, _, _ = utils.get_img_indices()
    grads = np.max(np.abs(grads), axis=channel_axis, keepdims=True)

    # Smoothen activation map and normalise to [0, 1].
    grads = utils.deprocess_image(grads[0])
    grads /= np.max(grads)

    # Convert to heatmap and zero out low probabilities for a cleaner output.
    heatmap = cv2.applyColorMap(cv2.GaussianBlur(grads * 255, (3, 3), 0), cv2.COLORMAP_JET)
    heatmap[np.where(grads <= 0.2)] = 0

    if not overlay:
        return heatmap
    return cv2.addWeighted(seed_img, 1, heatmap, 0.5, 0)
def visualize_activation(img, layer, filter_indices=None,
                         seed_img=None, max_iter=200,
                         act_max_weight=1, lp_norm_weight=10, tv_weight=10, verbose=False,
                         show_filter_idx_text=True, idx_label_map=None, cols=5):
    """Generates stitched input image(s) over all `filter_indices` in the given `layer` that maximize
    the filter output activation.

    For example, if you wanted to visualize the input image that would maximize the output index 22, say on
    final `keras.layers.Dense` layer, then, `filter_indices = [22]`, `layer = dense_layer`.
    If `filter_indices = [22, 23]`, then a stitched image comprising of two images are generated, each
    corresponding to the entry in `filter_indices`.

    Args:
        img: 4D input image tensor with shape: `(samples, channels, rows, cols)` if dim_ordering='th'
            or `(samples, rows, cols, channels)` if dim_ordering='tf'.
        layer: The `keras.Layer` layer whose filters needs to be visualized.
        filter_indices: filter indices within the layer to be maximized.
            If None, all filters are visualized. (Default value = None)
            An input image is generated for each entry in `filter_indices`. The entry can also be an array.
            For example, `filter_indices = [[1, 2], 3, [4, 5, 6]]` would generate three input images. The first one
            would maximize output of filters 1, 2 jointly. A fun use of this might be to generate a dog-fish
            image by maximizing 'dog' and 'fish' output in final `Dense` layer.
            For `keras.layers.Dense` layers, `filter_idx` is interpreted as the output index.
            If you are visualizing final `keras.layers.Dense` layer, you tend to get
            better results with 'linear' activation as opposed to 'softmax'. This is because 'softmax'
            output can be maximized by minimizing scores for other classes.
        seed_img: Seeds the optimization with a starting image. Initialized with a random value when set to None.
            (Default value = None)
        max_iter: The maximum number of gradient descent iterations. (Default value = 200)
        act_max_weight: The weight param for `ActivationMaximization` loss. Not used if 0 or None. (Default value = 1)
        lp_norm_weight: The weight param for `LPNorm` regularization loss. Not used if 0 or None. (Default value = 10)
        tv_weight: The weight param for `TotalVariation` regularization loss. Not used if 0 or None. (Default value = 10)
        verbose: Shows verbose loss output for each filter; very useful to estimate
            loss weight factors. (Default value = False)
        show_filter_idx_text: Adds filter_idx text to the image if set to True. (Default value = True)
            If the entry in `filter_indices` is an array, then comma separated labels are generated.
        idx_label_map: Map of filter_idx to text label. If not None, this map is used to translate filter_idx
            to text value when show_filter_idx_text = True. (Default value = None)
        cols: Max number of image cols. New row is created when number of images exceed the column size.
            (Default value = 5)

    Returns:
        Stitched image output visualizing input images that maximize the filter output(s).
    """
    if filter_indices is None:
        filter_indices = np.arange(_get_num_filters(layer))
    imgs = []
    for i, idx in enumerate(filter_indices):
        # An entry may itself be a list of indices to maximize jointly.
        indices = idx if isinstance(idx, list) else [idx]
        # A weight of None disables the corresponding loss term (treated as 0).
        losses = [
            (ActivationMaximization(layer, indices), act_max_weight or 0),
            (LPNorm(), lp_norm_weight or 0),
            (TotalVariation(), tv_weight or 0)
        ]
        opt = Optimizer(img, losses)
        print('Working on filter {}/{}'.format(i + 1, len(filter_indices)))
        opt_img, g = opt.minimize(seed_img=seed_img, max_iter=max_iter, verbose=verbose)
        # Add filter text to image if applicable.
        if show_filter_idx_text:
            label = None
            if idx_label_map:
                # NOTE(review): `i` here is the comprehension variable over `indices`,
                # shadowing the outer loop counter; and .get(i) yields None for a
                # missing key, which would make join() raise — TODO confirm intended.
                label = ', '.join([idx_label_map.get(i) for i in indices])
            if label is None:
                label = "Filter {}".format(', '.join([str(i) for i in indices]))
            cv2.putText(opt_img, label, (10, 25), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 0), 2)
        imgs.append(opt_img)
    return utils.stitch_images(imgs, cols=cols)
|
# @date 2021-05-16
# @author Frederic Scherma, All rights reserved without prejudices.
# @license Copyright (c) 2021 Dream Overflow
# History tools
import json
from tools.tool import Tool
from config import utils
from terminal.terminal import Terminal
from watcher.service import WatcherService
import logging
# General tool logging and a dedicated channel for fetch errors.
logger = logging.getLogger('siis.tools.history')
error_logger = logging.getLogger('siis.tools.error.history')
# Debug switches: when non-zero, order fetches are replayed from / saved to
# local .tmp cache files ('c.tmp', 'o.tmp') instead of querying the broker.
READ_DEBUG_CACHE = 0
WRITE_DEBUG_CACHE = 0
class History(Tool):
    """
    Make a connection and take the history of orders or position.

    Fetches closed and open orders from a broker for a given period,
    optionally filters them by market, displays or exports them to
    tab-separated CSV files, then tries to pair each entry order with its
    exit order and reports the unpaired entries.
    """

    # Column order used for console display and CSV export.
    ORDERS_HEADER = (
        'id',
        'symbol',
        'market-id',
        'status',
        'ref-id',
        'direction',
        'type',
        'mode',
        'timestamp',
        'avg-price',
        'quantity',
        'cumulative-filled',
        'cumulative-commission-amount',
        'price',
        'stop-price',
        'time-in-force',
        'post-only',
        'close-only',
        'reduce-only',
        'stop-loss',
        'take-profit',
        'fully-filled',
        'trades'
    )

    @classmethod
    def alias(cls):
        return "hist"

    @classmethod
    def help(cls):
        return ("Process a checkout of the history of orders or position from a particular account and period.",
                "Specify --profile, --broker, --from, --to.",
                "Optional --market, --filename.")

    @classmethod
    def detailed_help(cls):
        return tuple()

    @classmethod
    def need_identity(cls):
        return True

    def __init__(self, options):
        super().__init__("history", options)

        self._watcher_service = None

        self._identity = options.get('identity')
        self._identities_config = utils.identities(options.get('config-path'))
        self._profile = options.get('profile', 'default')
        self._profile_config = utils.load_config(options, "profiles/%s" % self._profile)

    def check_options(self, options):
        """Validate the CLI options; return True when they are usable."""
        if not options.get('from') or not options.get('to'):
            logger.error("Options from, and to must be defined")
            return False

        if not options.get('broker'):
            logger.error("Options broker must be defined")
            return False

        if options.get('filename'):
            # a base name is expected: _closed/_open/_unpaired + .csv are appended
            if options['filename'][-4:] == '.csv':
                logger.error("Base filename only must be specified")
                return False

        return True

    def init(self, options):
        Terminal.inst().info("Starting watcher's service...")
        self._watcher_service = WatcherService(None, options)
        return True

    def run(self, options):
        """Fetch, filter, pair and output the order history; return True/False."""
        strategy = self._profile_config.get('strategy', {})
        if not strategy:
            logger.error("Missing strategy")
            return False

        watchers = self._profile_config.get('watchers', {})
        if not watchers or options['broker'] not in watchers:
            logger.error("Missing watcher")
            return False

        watcher_id = options['broker']
        if not watcher_id:
            logger.error("Missing watcher name")
            return False

        markets = options['market'].split(',') if options.get('market') else None

        fetcher = self._watcher_service.create_fetcher(options, options['broker'])
        if fetcher:
            fetcher.connect()

        closed_orders = []
        open_orders = []

        try:
            if READ_DEBUG_CACHE:
                # developer shortcut: replay the last fetch from a local cache
                with open('c.tmp', 'rt') as f:
                    closed_orders = json.loads(f.read())
            else:
                closed_orders = fetcher.fetch_closed_orders(options.get('from'), options.get('to'))
                if WRITE_DEBUG_CACHE:
                    with open('c.tmp', 'wt') as f:
                        f.write(json.dumps(closed_orders))
        except Exception as e:
            error_logger.error("fetch_closed_orders : " + str(e))

        try:
            if READ_DEBUG_CACHE:
                with open('o.tmp', 'rt') as f:
                    open_orders = json.loads(f.read())
            else:
                open_orders = fetcher.fetch_open_orders()
                if WRITE_DEBUG_CACHE:
                    with open('o.tmp', 'wt') as f:
                        f.write(json.dumps(open_orders))
        except Exception as e:
            # fixed: logged function name had a typo ("fetch_open_oders")
            error_logger.error("fetch_open_orders : " + str(e))

        # keep only orders of the requested markets
        if markets:
            tmp = closed_orders
            closed_orders = []

            for o in tmp:
                if o['market-id'] in markets:
                    closed_orders.append(o)

            tmp = open_orders
            open_orders = []

            for o in tmp:
                if o['market-id'] in markets:
                    open_orders.append(o)

        # keep only closed orders with cumulative filled greater than 0
        tmp = closed_orders
        closed_orders = []

        for o in tmp:
            if float(o['cumulative-filled']) > 0:
                closed_orders.append(o)

        # keep only open orders opened inside the [from, to] window; the rest
        # are kept apart because they may still contain exits of older entries
        tmp = open_orders
        open_orders = []
        filtered_open_orders = []

        from_date = options.get('from').strftime('%Y-%m-%dT%H:%M:%SZ')
        to_date = options.get('to').strftime('%Y-%m-%dT%H:%M:%SZ')

        for o in tmp:
            if o['timestamp'] >= from_date and o['timestamp'] <= to_date:
                open_orders.append(o)
            else:
                filtered_open_orders.append(o)

        if options.get('filename'):
            self.export_to_csv(closed_orders, open_orders, options['filename'])
        else:
            self.display(closed_orders, open_orders)

        # fixed: disconnect was called unconditionally and crashed with an
        # AttributeError when create_fetcher() returned None
        if fetcher:
            fetcher.disconnect()

        paired, unpaired = self.detect_unmanaged(closed_orders, open_orders, filtered_open_orders)
        # NOTE(review): debug summary output, kept as-is
        print(len(paired), len(unpaired), len(open_orders))

        if options.get('filename'):
            self.export_unpaired_orders_to_csv(unpaired, options['filename'])
        else:
            self.display_unpaired_orders(unpaired)

        return True

    def export_to_csv(self, closed_orders, open_orders, filename):
        """Write closed/open orders to '<filename>_closed.csv' and
        '<filename>_open.csv' (tab-separated); errors are logged, not raised."""
        # with-statements guarantee the files are closed even on a write error
        # (the previous code leaked the handle when an exception was raised)
        try:
            with open(filename + "_closed.csv", 'wt') as f:
                f.write('\t'.join(History.ORDERS_HEADER) + '\n')
                for o in closed_orders:
                    row = [str(o[n]) for n in History.ORDERS_HEADER]
                    f.write('\t'.join(row) + '\n')
        except Exception as e:
            logger.error(repr(e))

        try:
            with open(filename + "_open.csv", 'wt') as f:
                f.write('\t'.join(History.ORDERS_HEADER) + '\n')
                for o in open_orders:
                    row = [str(o[n]) for n in History.ORDERS_HEADER]
                    f.write('\t'.join(row) + '\n')
        except Exception as e:
            logger.error(repr(e))

    def display(self, closed_orders, open_orders):
        """Print closed and open orders as tab-separated rows."""
        print("> Closed orders :")
        print('\t'.join(History.ORDERS_HEADER) + '\n')

        for o in closed_orders:
            # format to display or to CSV
            row = [str(o[n]) for n in History.ORDERS_HEADER]
            print('\t'.join(row))

        print("> Open orders :")
        print('\t'.join(History.ORDERS_HEADER) + '\n')

        for o in open_orders:
            # format to display or to CSV
            row = [str(o[n]) for n in History.ORDERS_HEADER]
            print('\t'.join(row))

    def detect_unmanaged(self, closed_orders, open_orders, filtered_open_orders):
        """Pair each entry order with its exit order.

        Returns (paired, unpaired): paired maps an entry order id to its exit
        order; unpaired lists the entry orders for which no exit was found.
        """
        paired = {}
        unpaired = []

        # for each entry order try to find the corresponding exit:
        # make a first list with detected pairs of orders
        # and a second list with the remaining
        entries = []
        exits = []

        # distinct entry and exit order from closed orders
        for o in closed_orders:
            if o['mode'] == "spot":
                if o['direction'] == "buy":
                    entries.append(o)
                elif o['direction'] == "sell":
                    exits.append(o)
            elif o['mode'] == "margin":
                # @todo test
                # NOTE(review): comparing 'direction' against "true" looks like
                # it was meant to be 'reduce-only' — confirm before relying on it
                if o['reduce-only'] == "false" or o['close-only'] == "false":
                    entries.append(o)
                elif o['direction'] == "true" or o['close-only'] == "true":
                    exits.append(o)

        # and from open orders (only exits: an open order cannot be a
        # completed entry of a pair)
        for o in open_orders:
            if o['mode'] == "spot":
                # if o['direction'] == "buy":
                #     entries.append(o)
                if o['direction'] == "sell":
                    exits.append(o)
            elif o['mode'] == "margin":
                # @todo test
                # if o['reduce-only'] == "false" or o['close-only'] == "false":
                #     entries.append(o)
                if o['direction'] == "true" or o['close-only'] == "true":
                    exits.append(o)

        # and from filtered open orders only keep exits
        for o in filtered_open_orders:
            if o['mode'] == "spot":
                if o['direction'] == "sell":
                    exits.append(o)
            elif o['mode'] == "margin":
                # @todo test
                if o['direction'] == "true" or o['close-only'] == "true":
                    exits.append(o)

        # detection: match on market, relative filled-quantity within 0.1%
        for e in entries:
            rm = None

            for x in exits:
                if e['market-id'] != x['market-id']:
                    continue

                # NOTE(review): this keeps only exits strictly OLDER than the
                # entry — looks inverted; confirm against live data
                if e['timestamp'] <= x['timestamp']:
                    continue

                diff = ((float(e['cumulative-filled']) - float(x['cumulative-filled'])) / float(e['cumulative-filled']))
                if diff < 0.0 or diff > 0.001:
                    continue

                # NOTE(review): debug output, kept as-is
                print(e['cumulative-filled'], x['cumulative-filled'], x['market-id'])
                paired[e['id']] = x
                rm = x
                break

            if rm:
                exits.remove(rm)
            else:
                unpaired.append(e)

        return paired, unpaired

    def display_unpaired_orders(self, unpaired_orders):
        """Print the unpaired entry orders as tab-separated rows."""
        print("> Unpaired orders :")
        print('\t'.join(History.ORDERS_HEADER) + '\n')

        for o in unpaired_orders:
            # format to display or to CSV
            row = [str(o[n]) for n in History.ORDERS_HEADER]
            print('\t'.join(row))

    def export_unpaired_orders_to_csv(self, unpaired_orders, filename):
        """Write unpaired orders to '<filename>_unpaired.csv' (tab-separated)."""
        try:
            # with-statement guarantees the file is closed even on a write error
            with open(filename + "_unpaired.csv", 'wt') as f:
                f.write('\t'.join(History.ORDERS_HEADER) + '\n')
                for o in unpaired_orders:
                    row = [str(o[n]) for n in History.ORDERS_HEADER]
                    f.write('\t'.join(row) + '\n')
        except Exception as e:
            logger.error(repr(e))

    def terminate(self, options):
        self._watcher_service.terminate()
        return True

    def forced_interrupt(self, options):
        return True


# entry point class exposed to the tool loader
tool = History
|
# Posts a GitHub commit status ("success"/"failure") for a pull request,
# based on the outcome of the Jenkins build that ran it.
# NOTE(review): Python-2-only script (print statement, str.decode on a str);
# expects to run inside a Jenkins job with the env vars referenced below.
import os
import json
import requests
import jenkinsapi
from jenkinsapi.jenkins import Jenkins
from jenkinsapi.constants import STATUS_SUCCESS
# GitHub webhook payload is passed in via the 'payload' env var
payload_str = os.environ['payload']
payload_str = payload_str.decode('utf-8','ignore')
#parse to json obj
payload = json.loads(payload_str)
#pr = payload['pull_request']
url = payload['html_url']
print "build pr:" + url
#get statuses url
statuses_url = payload['statuses_url']
J = Jenkins(os.environ['JENKINS_URL'])
target_url = os.environ['BUILD_URL']
build_number = int(os.environ['BUILD_NUMBER'])
# status payload sent to GitHub; 'state' is filled in below
data = {"state":"pending", "target_url":target_url}
access_token = os.environ['GITHUB_ACCESS_TOKEN']
Headers = {"Authorization":"token " + access_token}
# look up the result of this very build on the Jenkins server
result = J[os.environ['JOB_NAME']].get_build(build_number).get_status()
if(result == STATUS_SUCCESS):
    data['state'] = "success"
else:
    data['state'] = "failure"
requests.post(statuses_url, data=json.dumps(data), headers=Headers)
|
#!/usr/bin/env python3
"""Count output digits that use a unique number of segments.

Digits 1, 4, 7 and 8 light up 2, 4, 3 and 7 segments respectively,
so they can be identified by token length alone.
"""
import sys

UNIQUE_SEGMENT_COUNTS = {2, 3, 4, 7}

count = 0
for entry in sys.stdin:
    output_tokens = entry.split(" | ")[1].split()
    count += sum(1 for token in output_tokens if len(token) in UNIQUE_SEGMENT_COUNTS)
print(count)
|
import cairo
import math
import random
import sys
import os
sys.path.append(os.path.abspath('..'))
from lib import palettes
# Final image dimensions
# Threadless wall art recommended: 12000 x 8400px JPG
# Blanket recommended: 12500 x 9375px JPG
IMG_HEIGHT = 2160
IMG_WIDTH = 3840
#IMG_HEIGHT = 6300 # 4200
#IMG_WIDTH = 7200 # 4800
# Circle parameters: radii scale with the canvas height
MIN_RADIUS = int(IMG_HEIGHT / 150)
MAX_RADIUS = int(IMG_HEIGHT / 7)
# Number of random placement attempts; failed attempts are simply discarded
TOTAL_CIRCLE_ATTEMPTS = 100000
# Empty distance (px) kept between neighbouring circles
SPACING = 2
class Circle:
    """Circle defined by the top-left corner of its bounding box and a radius."""

    def __init__(self, x, y, r):
        self.x = x
        self.y = y
        self.r = r

    def __str__(self):
        return "({}, {}, {})".format(self.x, self.y, self.r)

    def center(self):
        """Return the (cx, cy) coordinates of the circle centre."""
        return (self.x + self.r, self.y + self.r)

    def intersects(self, otherCircle):
        """True when the two circles are closer than SPACING allows."""
        cx1, cy1 = self.center()
        cx2, cy2 = otherCircle.center()
        dx = cx1 - cx2
        dy = cy1 - cy2
        return math.sqrt(dx * dx + dy * dy) <= (self.r + otherCircle.r + SPACING)

    def intersectsAnythingElse(self, circles):
        """True when this circle intersects any circle in `circles`."""
        if not circles:
            return False
        return any(self.intersects(existing) for existing in circles)

    def insideCanvas(self):
        """True when the bounding box lies fully inside the image bounds."""
        return ((self.x + 2 * self.r <= IMG_WIDTH)
                and (self.y + 2 * self.r) <= IMG_HEIGHT)

    def pack(self, circles):
        """Grow the radius 1px at a time until blocked or MAX_RADIUS is reached."""
        if self.r >= MAX_RADIUS:
            return self
        grown = Circle(self.x, self.y, self.r + 1)
        if grown.intersectsAnythingElse(circles) or not grown.insideCanvas():
            return self
        return grown.pack(circles)
def randomCircleWithRadius(radius):
    """Circle of the given radius, placed uniformly at random fully on the canvas."""
    max_x = IMG_WIDTH - 2 * radius
    max_y = IMG_HEIGHT - 2 * radius
    return Circle(random.randint(0, max_x), random.randint(0, max_y), radius)
def makeRandomCircle(circles):
    """Try to place one new circle and grow it to fill the free space.

    Returns the packed circle, or None when the random seed position
    already overlapped an existing circle.
    """
    seed_circle = maybeMakeRandomCircleSeed(circles)
    return seed_circle.pack(circles) if seed_circle else None
def maybeMakeRandomCircleSeed(circles, radius=MIN_RADIUS):
    """Random candidate circle, or None when it overlaps an existing one."""
    candidate = randomCircleWithRadius(radius)
    return None if candidate.intersectsAnythingElse(circles) else candidate
def hex_to_tuple(hex):
    """Convert an '#rrggbb' hex color string to an (r, g, b) tuple of floats in [0, 1]."""
    value = hex.lstrip('#')
    return tuple(int(value[i:i + 2], 16) / 255 for i in (0, 2, 4))
def concentric(ctx, circle, palette, step=1.5):
    """Fill `circle` with concentric rings, alternating a random palette
    color (outermost ring first) with the background color."""
    colors = palette['colors']
    background = palette['background']
    center_x, center_y = circle.center()
    ring_step = int(MIN_RADIUS * step)
    for ring, radius in enumerate(range(circle.r, MIN_RADIUS, -ring_step)):
        # even rings get a random palette color, odd rings the background
        fill_color = random.choice(colors) if ring % 2 == 0 else background
        ctx.arc(center_x, center_y, radius, 0, 2 * math.pi)
        ctx.set_source_rgb(*hex_to_tuple(fill_color))
        ctx.fill()
def main(palette=None, filename="output.png"):
    """Render one circle-packing image.

    Parameters:
        palette: palette dict with 'colors' and 'background' keys; when None,
            a random palette is picked per call. (The previous signature used
            `palette=random.choice(...)` as the default, which is evaluated
            once at import time — every call without an explicit palette
            silently reused the same one.)
        filename: output PNG path. (Default value = "output.png")
    """
    if palette is None:
        palette = random.choice(palettes.PALETTES)
    ims = cairo.ImageSurface(cairo.FORMAT_ARGB32, IMG_WIDTH, IMG_HEIGHT)
    ims.set_fallback_resolution(300.0, 300.0)
    ctx = cairo.Context(ims)

    # Background
    ctx.rectangle(0, 0, IMG_WIDTH, IMG_HEIGHT)
    ctx.set_source_rgb(*palettes.hex_to_tuple(palette['background']))
    ctx.fill()

    # Randomly place and pack as many circles as possible.
    circles = []
    for _ in range(TOTAL_CIRCLE_ATTEMPTS):
        c = makeRandomCircle(circles)
        if c:
            print("New circle added. Total circles: ", len(circles))
            circles.append(c)
        else:
            print(".")

    # Fill each packed circle with concentric rings of random density.
    for c in circles:
        step = random.uniform(1.1, 5)
        concentric(ctx, c, palette, step=step)

    ims.write_to_png(filename)
if __name__ == "__main__":
    # Render a single image; increase range() to produce multiple variants.
    for idx in range(1):
        main(palette=random.choice(palettes.PALETTES), filename="output-{}.png".format(idx))
|
# Copyright (c) 2019 Alibaba Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import tensorflow as tf
import numpy as np
import gym
from easy_rl.agents import agents
from easy_rl.models import EvolutionStrategy
from easy_rl.utils.window_stat import WindowStat
# Model hyper-parameters for the ES (Evolution Strategy) optimizer.
MODEL_CONFIG = dict(
    # specific
    type="ES",

    # common
    init_lr=0.01,  # initial learning rate
    lr_strategy_spec={
        'type': 'exponential_decay',
        'decay_steps': 50,
        'decay_rate': 0.9
    },
    global_norm_clip=40)  # clip gradients by global norm

AGENT_CONFIG = dict(
    type="Agent",

    # how many trials needed for one optimization
    sample_batch_size=100,
)

# fixed seed so perturbation seed draws are reproducible
np.random.seed(0)
class MyESmodel(EvolutionStrategy):
    """ES model whose policy is a 64-64 MLP producing 2 action logits."""

    def _encode_obs(self, input_obs, scope="encode_obs"):
        with tf.variable_scope(name_or_scope=scope):
            def _dense(inputs, units, activation):
                # a fresh, identically-seeded initializer per layer
                return tf.layers.dense(
                    inputs,
                    units=units,
                    activation=activation,
                    kernel_initializer=tf.random_normal_initializer(
                        mean=0.0, stddev=0.1, seed=0))

            hidden = _dense(input_obs, 64, tf.nn.relu)
            hidden = _dense(hidden, 64, tf.nn.relu)
            logits = _dense(hidden, 2, None)
            return logits
def main():
    """Train a CartPole-v0 policy with Evolution Strategies.

    Uses mirrored sampling: each random seed is applied twice, once as a
    positive and once as a negative parameter perturbation. Each perturbed
    policy is evaluated for `episode_per_perturbation` episodes; once
    2 * sample_batch_size returns are collected, one optimization step is
    performed and the policy is evaluated over 20 greedy episodes.
    """
    env = gym.make("CartPole-v0")
    env.seed(0)

    agent_class = agents[AGENT_CONFIG["type"]]
    agent = agent_class(
        env.observation_space,
        env.action_space,
        AGENT_CONFIG,
        MODEL_CONFIG,
        distributed_spec={},
        checkpoint_dir="ckpt_dir",
        custom_model=MyESmodel)

    reward_window = WindowStat("reward", 50)
    length_window = WindowStat("length", 50)
    init_perturbation_scale = 0.1
    seeds, rewards, perturbation_scales = list(), list(), list()
    is_positive_direction = list()

    # how many episodes needed for one trial
    episode_per_perturbation = 1
    returns = list()
    for i in range(4000):
        ob = env.reset()
        done = False
        episode_reward = .0
        episode_len = 0

        if i % episode_per_perturbation == 0:
            # perturb parameters every `episode_per_seed` episodes;
            # alternate +/- direction (mirrored sampling)
            is_positive = True if len(
                is_positive_direction
            ) == 0 else is_positive_direction[-1] != True

            # each seed twice: draw a fresh seed on the positive pass,
            # reuse the previous seed on the negative pass
            seed = np.random.randint(1000000) if is_positive else seeds[-1]
            # linearly anneal the perturbation scale, floored at 0.02
            perturbation_scale = max(
                init_perturbation_scale * (1 - i / 2000.0), 0.02)
            feed = agent.model.perturbation_feed
            fetch = [agent.model.reset_perturbation_op]
            agent.executor.run(
                fetches=fetch,
                feed_dict={
                    feed['perturbation_seeds']: [seed],
                    feed['perturbation_scales']: [perturbation_scale],
                    feed['positive_perturbation']: is_positive
                })
            if is_positive:
                seeds.append(seed)
                perturbation_scales.append(perturbation_scale)
            is_positive_direction.append(is_positive)

        # roll out one episode with the perturbed policy
        while not done:
            action, result = agent.act([ob], True, use_perturbed_action=True)
            next_ob, reward, done, info = env.step(action[0])

            ob = next_ob
            episode_reward += reward
            episode_len += 1

        rewards.append(episode_reward)
        reward_window.push(episode_reward)
        length_window.push(episode_len)

        if len(rewards) == episode_per_perturbation:
            # one return per perturbation: the mean over its episodes
            returns.append(np.mean(rewards))
            rewards = []

        if len(returns) == 2 * agent.config.get('sample_batch_size', 100):
            print(reward_window)
            # one seed/scale per mirrored pair of returns
            assert len(seeds) == (len(returns) / 2)
            assert len(perturbation_scales) == (len(returns) / 2)
            agent.learn(
                batch_data=dict(
                    perturbation_seeds=seeds,
                    perturbation_scales=perturbation_scales,
                    returns=np.reshape(returns, [-1, 2])))
            seeds = []
            perturbation_scales = []
            returns = []
            is_positive_direction = []

            # evaluation 20 episodes (unperturbed policy)
            test_rewards = list()
            for j in range(20):
                done = False
                ob = env.reset()
                episode_reward = 0
                episode_len = 0
                while not done:
                    action, result = agent.act(
                        [ob], True, use_perturbed_action=False)
                    next_ob, reward, done, info = env.step(action[0])
                    ob = next_ob
                    episode_reward += reward
                    episode_len += 1
                test_rewards.append(episode_reward)
            print("[evaluation] average reward of 20 episodes:",
                  np.mean(test_rewards))
            print('train at ', i)
    agent.export_saved_model(export_dir="dump_dir")
    print("Done.")


if __name__ == "__main__":
    main()
|
from app import db
from gr.dao.BaseDao import BaseDao
from gr.model.usuario.Usuario import Usuario
class UsuarioDao(BaseDao):
    """Data-access object for Usuario entities."""

    def get_by_username(self, username):
        """Return the user with the given username, or None if not found."""
        return self.model.query.filter(self.model.username == username).first()

    def get_by_empresa(self, empresaId):
        """Return all users belonging to the given company id."""
        return self.model.query.filter(self.model.empresaId == empresaId).all()

    def delete(self, usuario):
        """Soft-delete: flag the user as deleted and persist the change."""
        usuario.del_ = True
        # fixed: standard SQLAlchemy sessions expose add(), not upsert();
        # add() handles both new and already-persistent instances
        db.session.add(usuario)
        return db.session.commit()

    def purge(self, usuario):
        """Hard-delete: remove the user row from the database."""
        db.session.delete(usuario)
        return db.session.commit()
# module-level singleton DAO bound to the Usuario model
usuario_dao = UsuarioDao(Usuario)
|
# https://github.com/jrob93/grav_cloud/blob/master/python_stuff/py_func.py
'''
Useful functions!
Notes:
- Units, generally units are SI (meters, seconds...) and angles are in radians, unless otherwise stated
'''
import numpy as numpy #Imports Python mathematical functions library
import math
#import scipy
#from scipy.optimize import minimize
#try:
# import matplotlib.pyplot as pyplot
#except:
import matplotlib
# matplotlib.use('Agg')
import matplotlib.pyplot as pyplot
import matplotlib.gridspec as gridspec
from mpl_toolkits.mplot3d import Axes3D
import rebound
import subprocess
import datetime
import os
# pandas is optional: only N_largest_mlim needs it, so the rest of the
# module stays usable without it.
try:
    import pandas as pd
except ImportError:
    # fixed: py2-only `print` statement -> function call (valid in py2 and py3);
    # bare `except:` narrowed so unrelated errors are not silently swallowed
    print("proceed without pandas")
#-------------------------------------------------------------------------------
# Short aliases for the numpy maths functions used throughout this module.
cos = numpy.cos
arccos = numpy.arccos
sin = numpy.sin
arcsin = numpy.arcsin
tan = numpy.tan
arctan = numpy.arctan
pi = numpy.pi
#-------------------------------------------------------------------------------
'''
p=numpy.load('o_params.npy')
#assign the parameters
G=p[0]
AU=p[1]
M_sun=p[2]
mu=p[3]
rho_E=p[4]
d_E=p[5]
rho_S=p[6]
q=p[7]
m=p[8]
V_inf=p[9]
d_min=p[10]
d_max=p[11]
imp_param=p[12]
limit=p[13]
'''
# Physical constants, SI units.
G=6.67428e-11 # m3 kg-1 s-2, gravitational constant
M_sun=1.98855e30 # kg, solar mass
AU=1.496e11 # m, astronomical unit
R_sun=6.957e8 #m, solar radius
limit=1e-12 # fractional cut off for find_E function
#a_0=30*AU
day_s=24.0*60.0*60.0 # 1 day in seconds
year_s=365.25*day_s # 1 year in seconds
pyplot_colours=pyplot.rcParams['axes.prop_cycle'].by_key()['color'] # list of matplotlib default colours
#run_params
#cwd,rp->Ntot,rp->a,rp->R_eq,rp->rho,rp->M_tot,rp->R_c,rp->OMEGA,rp->X,rp->OM_circ,rp->f,rp->dt,run_time,np,ins
#-------------------------------------------------------------------------------
def r_elliptical(a,e,theta):
    '''
    Distance r from the focus at true anomaly theta on a Keplerian orbit.

    Parameters
    ----------
    a
        semi major axis (m)
    e
        orbital eccentricity
    theta
        true anomaly (rad)
    '''
    semi_latus_rectum = a*(1-e**2)
    return semi_latus_rectum/(1+e*cos(theta))
#-------------------------------------------------------------------------------
def find_e_p(OMEGA,omega,I):
    '''Normalised Lenz vector along the apsidal line (PSS eq 11.39a).'''
    cos_OM, sin_OM = cos(OMEGA), sin(OMEGA)
    cos_om, sin_om = cos(omega), sin(omega)
    cos_I = cos(I)
    return numpy.array([
        cos_OM*cos_om - cos_I*sin_OM*sin_om,  # x
        sin_OM*cos_om + cos_I*cos_OM*sin_om,  # y
        sin(I)*sin_om,                        # z
    ])
#-------------------------------------------------------------------------------
def find_e_Q(OMEGA,omega,I):
    '''Normalised e_Q vector, perpendicular to h and e_p and lying in the
    orbital plane (PSS eq 11.39b).'''
    return numpy.array([
        -(cos(OMEGA)*sin(omega)) - (cos(I)*sin(OMEGA)*cos(omega)),  # x
        -(sin(OMEGA)*sin(omega)) + (cos(I)*cos(OMEGA)*cos(omega)),  # y
        sin(I)*cos(omega),                                          # z
    ])
#-------------------------------------------------------------------------------
def planet_orbit(orb,n):
    '''xyz positions sampled around one full orbit, relative to the focus.

    orb : the 6 orbital elements [a, e, I, OMEGA, omega, f]
    n   : number of sample points along the orbit
    '''
    a=orb[0]
    e=orb[1]
    I=orb[2]
    OMEGA=orb[3]
    omega=orb[4]
    # sample the true anomaly over a full revolution
    theta = numpy.linspace(0,2*pi, n)
    r = numpy.zeros(len(theta))
    for k in range(len(theta)):
        r[k]=r_elliptical(a,e,theta[k])
    # orbital basis vectors
    e_p=find_e_p(OMEGA,omega,I)
    e_Q=find_e_Q(OMEGA,omega,I)
    # r(x,y,z) position array, PSS eq 11.36a
    pos=numpy.zeros((len(theta),3))
    for k in range(len(theta)):
        pos[k,:]=r[k]*((cos(theta[k])*e_p)+(sin(theta[k])*e_Q))
    return pos
#-------------------------------------------------------------------------------
def pos_vel_from_orbit(orb,mu):
    '''Position and velocity vectors from the orbital elements
    [a, e, I, OMEGA, omega, f_true], where mu = G*(M+m).'''
    a=orb[0]
    e=orb[1]
    I=orb[2]
    OMEGA=orb[3]
    omega=orb[4]
    f_true = orb[5]
    n=numpy.sqrt(mu/(a**3))        # mean motion
    eta=numpy.sqrt(1.0-(e**2))
    r=r_elliptical(a,e,f_true)
    # orbital basis vectors
    e_p=find_e_p(OMEGA,omega,I)
    e_Q=find_e_Q(OMEGA,omega,I)
    # PSS eq 11.36a and its time derivative
    pos=r*((cos(f_true)*e_p)+(sin(f_true)*e_Q))
    vel=(n*a/eta)*((-sin(f_true)*e_p)+((e+cos(f_true))*e_Q))
    return pos,vel
#-------------------------------------------------------------------------------
def find_E(M,e):
    '''Solve Kepler's equation: eccentric anomaly E from mean anomaly M.

    Fixed-point iteration to within the module-level `limit`;
    DIVERGES for e > 0.662743.
    See Murray and Dermott, Solar System Dynamics, p.35.'''
    E_prev=M
    while True:
        E_next=M+e*sin(E_prev)
        if numpy.absolute(E_next-E_prev)<=limit:
            return E_next
        E_prev=E_next
#-------------------------------------------------------------------------------
def find_f_true(E,e):
    '''True anomaly f from eccentric anomaly E (elliptic orbit, e < 1).'''
    aspect_ratio=numpy.sqrt((1+e)/(1-e))
    return 2*arctan(aspect_ratio*tan(E/2))
#-------------------------------------------------------------------------------
def orbit_by_time(orb,mu,t0,t):
    '''Position and velocity of a body on a heliocentric Keplerian orbit,
    as a function of time.

    Parameters
    ----------
    orb
        list of 5 orbital parameters [a,e,I,OMEGA,omega]; f_true is derived
        from the time t
    mu
        gravitational parameter = G * total mass
    t0
        reference time (time at which the mean anomaly is zero)
    t
        the time, from t0, at which to find the position and velocity
    '''
    a=orb[0]
    e=orb[1]
    I=orb[2]
    OMEGA=orb[3]
    omega=orb[4]
    eta=numpy.sqrt(1.0-(e**2))
    n=numpy.sqrt(mu/(a**3.0)) # mean motion
    M=n*(t-t0) # mean anomaly
    # mean -> eccentric -> true anomaly
    f_true=find_f_true(find_E(M,e),e)
    r=r_elliptical(a,e,f_true)
    # orbital basis vectors
    e_p=find_e_p(OMEGA,omega,I)
    e_Q=find_e_Q(OMEGA,omega,I)
    pos=r*((cos(f_true)*e_p)+(sin(f_true)*e_Q)) # PSS eq 11.36a
    vel=(n*a/eta)*((-sin(f_true)*e_p)+((e+cos(f_true))*e_Q))
    return pos,vel
#-------------------------------------------------------------------------------
def centre_of_mass(pos,m):
    '''Centre of mass of N particles from the position and mass arrays.
    Note: passing a velocity array as pos yields the centre-of-mass velocity.

    Parameters
    ----------
    pos
        2d numpy array, x,y,z position for each particle, shape (N,3)
    m
        1d numpy array of particle masses, length (N)
    '''
    total_mass=numpy.sum(m)
    CoM=numpy.zeros(3)
    for axis in range(3):
        CoM[axis]=numpy.sum(pos[:,axis]*m)/total_mass
    return CoM
#-------------------------------------------------------------------------------
def N_largest(N,i,fname):
    '''Function to find the N largest particles (by mass) from the data file.

    Returns (new_dat, t): new_dat is an (N,8) array with columns
    [m, r, x, y, z, vx, vy, vz] — note the reordering relative to the input
    file columns (x,y,z,vx,vy,vz,m,r) — and t is the snapshot time.
    '''
    t=numpy.loadtxt(fname,usecols=(0,)) # read only first column to get time
    dat=numpy.loadtxt(fname,skiprows=1) # read in the data skipping the first row
    t=t[0] # do some wizardry to extract the time value from the whole column
    #create structured array
    dat2 = numpy.core.records.fromarrays(dat.transpose(),names='x,y,z,vx,vy,vz,m,r',formats = 'f16,f16,f16,f16,f16,f16,f16,f16')
    dat2=numpy.sort(dat2, order='m') # ascending by mass...
    dat2=numpy.flipud(dat2)          # ...then flipped: descending by mass
    new_dat=numpy.zeros((N,8))
    # NOTE(review): the loop variable shadows the parameter i
    for i in range(N):
        new_dat[i,0]=dat2[i][6] #mass
        new_dat[i,1]=dat2[i][7] #radius
        for j in range(3):
            new_dat[i,j+2]=dat2[i][j] #positions
            new_dat[i,j+5]=dat2[i][j+3] #velocities
    return new_dat,t
#-------------------------------------------------------------------------------
def N_largest2(m_lim,i,fname):
    '''Return the largest particles from data file `fname`, down to particle mass limit `m_lim`.

    Parameters
    ----------
    m_lim : minimum particle mass (kg) for a particle to be kept
    i : frame number, used only in the progress/error messages
    fname : path to a data file (first line = time, then x,y,z,vx,vy,vz,m,r rows)

    Returns
    -------
    new_dat : (M,8) array with columns m,r,x,y,z,vx,vy,vz, or an empty array on failure
    t : time stamp from the file, or the string "skip" on failure (callers must check)
    '''
    t=numpy.loadtxt(fname,usecols=(0,)) # read only first column to get time
    if len(t)<1:
        print( "FILE {} IS EMPTY!".format(i))
        new_dat=numpy.zeros(0)
        t="skip" # sentinel telling the caller to skip this frame
        return new_dat,t
    dat=numpy.loadtxt(fname,skiprows=1) # read in the data skipping the first row
    t=t[0] # do some wizardry to extract the time value from the whole column
    #check that at least one particle reaches the mass limit m_lim
    if numpy.amax(dat[:,6])<m_lim:
        print( "frame: {}, particles are too small".format(i))
        new_dat=numpy.zeros(0)
        t="skip"
        return new_dat,t
    #create structured array so we can sort on the mass field
    dat2 = numpy.core.records.fromarrays(dat.transpose(),names='x,y,z,vx,vy,vz,m,r',formats = 'f16,f16,f16,f16,f16,f16,f16,f16')
    dat2=numpy.sort(dat2, order='m')
    dat2=numpy.flipud(dat2) # descending order of mass
    # the most massive particle is always kept, stored as m,r,x,y,z,vx,vy,vz
    new_dat=numpy.zeros(8)
    new_dat[0]=dat2[0][6] #mass
    new_dat[1]=dat2[0][7] #radius
    for j in range(3):
        new_dat[j+2]=dat2[0][j] #positions
        new_dat[j+5]=dat2[0][j+3] #velocities
    temp=numpy.zeros(8)
    # NOTE(review): this loop variable shadows the frame-number argument `i`
    for i in range(1,len(dat2)):
        if dat2[i][6]<m_lim:
            break # particles are mass-sorted, so all remaining ones are below the limit
        else:
            temp[0]=dat2[i][6] #mass
            temp[1]=dat2[i][7] #radius
            for j in range(3):
                temp[j+2]=dat2[i][j] #positions
                temp[j+5]=dat2[i][j+3] #velocities
            new_dat=numpy.vstack((new_dat,temp))
    # NOTE(review): presumably flags the case where only particle 0 passed the cut
    # (loop broke at i==1); this also triggers for a file with exactly two large
    # particles, where the loop finishes with i==1 — confirm intent
    if i==1:
        print( "only one large particle!")
        new_dat=numpy.zeros(0)
        t="skip"
        return new_dat,t
    return new_dat,t
#-------------------------------------------------------------------------------
def N_largest_mlim(m_ratio_lim,fname):
    '''Find the largest particles from data file `fname`, down to a particle mass limit.

    The mass limit is m_ratio_lim times the most massive particle present.

    Parameters
    ----------
    m_ratio_lim : keep particles with mass strictly greater than m_ratio_lim * max(mass)
    fname : path to a data file; the first line holds the time, the remaining
        lines are whitespace-separated x,y,z,vx,vy,vz,m,r records

    Returns
    -------
    df_dat : pandas DataFrame of the surviving particles, sorted by descending mass
    t : float time stamp read from the first line
    '''
    #load from data file, line by line
    # (list comprehension rather than map() so this also works under Python 3,
    #  where map() returns an iterator that numpy.array cannot consume;
    #  `with` guarantees the file handle is closed)
    with open(fname) as f:
        arrays = [numpy.array([float(x) for x in line.split()]) for line in f]
    t=float(arrays[0][0]) #first line is time (index the 1-element array explicitly
                          #to avoid numpy's array-to-scalar deprecation)
    dat=numpy.array(arrays[1:]) #rest is the 8xN particle data array
    #create dataframe and sort
    df_dat=pd.DataFrame(dat,columns=['x(m)','y(m)','z(m)','vx(ms^-1)','vy(ms^-1)','vz(ms^-1)','m(kg)','r(m)'])
    df_dat=df_dat.sort_values('m(kg)',axis=0,ascending=False) #sort in descending order of mass
    m_lim=m_ratio_lim*numpy.amax(df_dat['m(kg)'])
    df_dat=df_dat[df_dat['m(kg)']>m_lim] #keep only masses greater than the mass limit
    return df_dat,t
#-------------------------------------------------------------------------------
# These transformations are tested in "grav_cloud/python_stuff/coord_transform"
def rotating_to_heliocentric(r,v,a,t):
    '''Transform a single particle's position r and velocity v from the rotating frame
    (circular orbit of radius a, frame origin at R(t=0)=(a,0,0)) to the heliocentric frame.

    Parameters
    ----------
    r, v : length-3 arrays, rotating-frame position (m) and velocity (m/s)
    a : semimajor axis (m) of the circular orbit the rotating frame follows
    t : time (s) since the frame coincided with (a,0,0)

    Returns
    -------
    R, V : length-3 arrays, heliocentric position and velocity
    '''
    Om_k=numpy.sqrt(G*(M_sun)/(a**3.0)) #angular velocity at semimajor axis a_k
    a_vec=a*numpy.array([cos(Om_k*t),sin(Om_k*t),0.0]) #time gives location of frame origin
    Om_vec=numpy.array([0.0,0.0,Om_k]) # define angular velocity vector
    theta=Om_k*t # angle of rotation as a function of time
    rot_mat=numpy.array([cos(theta),-sin(theta),0,sin(theta),cos(theta),0,0,0,1]).reshape((3,3)) # z axis rotation matrix
    R=a_vec+numpy.dot(rot_mat,r) # transform position
    #R=r+a_vec
    Om_skew=numpy.array([0,-Om_k,0,Om_k,0,0,0,0,0]).reshape((3,3)) #define skew symmetric matrix for angular velocity (0,0,Om_k)
    rot_mat_dot=numpy.dot(Om_skew,rot_mat) #time derivative of the rotation matrix
    V=numpy.cross(Om_vec,a_vec)+numpy.dot(rot_mat_dot,r)+numpy.dot(rot_mat,v) # transform velocity
    # V=v+numpy.cross(Om_vec,a_vec)#+numpy.cross(Om_vec,r) #velocity in rot frame + circular orbital velocity of rot frame + rotational velocity of rot frame
    #dot_a_vec=a*Om_k*numpy.array([-sin(Om_k*t),cos(Om_k*t),0.0])
    #dot_rot_mat=Om_k*numpy.array([-sin(theta),cos(theta),0,-cos(theta),-sin(theta),0,0,0,0]).reshape((3,3))
    #V_check=dot_a_vec+(numpy.dot(dot_rot_mat,r))+(numpy.dot(rot_mat,v))
    #V_check=dot_a_vec+numpy.cross(Om_vec,numpy.dot(rot_mat,r))
    # V_check=numpy.cross(Om_vec,a_vec)+numpy.cross(Om_vec,numpy.dot(rot_mat,r))+numpy.dot(rot_mat,v)
    #print "check V : {}".format((V-V_check)/numpy.linalg.norm(V))
    return R,V#_check
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
def heliocentric_to_rotating(R,V,a,t):
    '''Transform a single particle's position R and velocity V from the heliocentric
    frame to the rotating frame (circular orbit of radius a, at R(t=0)=(a,0,0)).

    Parameters
    ----------
    R, V : length-3 arrays, heliocentric position (m) and velocity (m/s)
    a : semimajor axis (m) of the circular orbit the rotating frame follows
    t : time (s) since the frame coincided with (a,0,0)

    Returns
    -------
    r, v : length-3 arrays, rotating-frame position and velocity
    '''
    Om_k=numpy.sqrt(G*(M_sun)/(a**3.0)) #angular velocity at semimajor axis a_k
    a_vec=a*numpy.array([cos(Om_k*t),sin(Om_k*t),0.0])
    Om_vec=numpy.array([0.0,0.0,Om_k])
    theta=-Om_k*t #reverse angle: inverse of the forward rotation
    rot_mat=numpy.array([cos(theta),-sin(theta),0,sin(theta),cos(theta),0,0,0,1]).reshape((3,3))
    r=numpy.dot(rot_mat,R)-numpy.dot(rot_mat,a_vec)
    # r=numpy.dot(rot_mat,R)-a_vec
    # r=R-a_vec
    Om_skew=numpy.array([0,-Om_k,0,Om_k,0,0,0,0,0]).reshape((3,3)) #define skew symmetric matrix for angular velocity (0,0,Om_k)
    rot_mat_dot=-numpy.dot(Om_skew,rot_mat) #time derivative of the rotation matrix
    #v=V-numpy.cross(Om_vec,a_vec)#-numpy.cross(Om_vec,r)
    v=numpy.dot(rot_mat_dot,R)+numpy.dot(rot_mat,V)-numpy.dot(rot_mat_dot,a_vec)-numpy.dot(rot_mat,numpy.cross(Om_vec,a_vec))
    #dot_a_vec=a*Om_k*numpy.array([-sin(Om_k*t),cos(Om_k*t),0.0])
    #dot_rot_mat=Om_k*numpy.array([-sin(theta),cos(theta),0,-cos(theta),-sin(theta),0,0,0,0]).reshape((3,3))
    #v_check=numpy.dot(dot_rot_mat,(R-a_vec))+numpy.dot(rot_mat,(V-dot_a_vec))
    #v_check=(numpy.dot(rot_mat,V))-(numpy.dot(rot_mat,a*Om_k*numpy.array([-sin(-theta),cos(-theta),0])))-(numpy.dot(rot_mat,numpy.dot(dot_rot_mat,r)))
    # v_check=-numpy.cross(Om_vec,numpy.dot(rot_mat,(R-a_vec)))+numpy.dot(rot_mat,(V-numpy.cross(Om_vec,a_vec)))
    #v_check=numpy.cross(Om_vec,numpy.dot(rot_mat,(-R+a_vec)))-numpy.dot(rot_mat,(-V+numpy.cross(Om_vec,a_vec)))
    #print "check v : {}".format(v-v_check/numpy.linalg.norm(v))
    return r,v#_check
def rotating_to_heliocentric_array(r,v,a,t):
    '''Transform positions r and velocities v from the rotating frame
    (circular orbit of radius a, at R(t=0)=(a,0,0)) to the heliocentric frame.

    Parameters
    ----------
    r, v : (N,3) arrays (or length-3 arrays for a single particle), rotating-frame
        positions (m) and velocities (m/s)
    a : semimajor axis (m) of the circular orbit the rotating frame follows
    t : time (s) since the frame coincided with (a,0,0)

    Returns
    -------
    R, V : heliocentric positions and velocities, same shape as the inputs
    '''
    Om_k=numpy.sqrt(G*(M_sun)/(a**3.0)) #angular velocity at semimajor axis a_k
    a_vec=a*numpy.array([cos(Om_k*t),sin(Om_k*t),0.0]) #time gives location of frame origin
    Om_vec=numpy.array([0.0,0.0,Om_k]) # define angular velocity vector
    theta=Om_k*t # angle of rotation as a function of time
    rot_mat=numpy.array([cos(theta),-sin(theta),0,sin(theta),cos(theta),0,0,0,1]).reshape((3,3)) # z axis rotation matrix
    Om_skew=numpy.array([0,-Om_k,0,Om_k,0,0,0,0,0]).reshape((3,3)) #skew symmetric matrix for angular velocity (0,0,Om_k)
    rot_mat_dot=numpy.dot(Om_skew,rot_mat) #time derivative of the rotation matrix
    if len(r.shape)>1:
        # vectorised transform of all N particles in one shot.
        # BUG FIX: the old `for i in range(N)` loop recomputed these identical
        # whole-array expressions N times — the loop contributed nothing.
        R=a_vec+numpy.dot(rot_mat,r.T).T # transform positions
        V=numpy.cross(Om_vec,a_vec)+numpy.dot(rot_mat_dot,r.T).T+numpy.dot(rot_mat,v.T).T # transform velocities
    else:
        R=a_vec+numpy.dot(rot_mat,r) # transform position
        V=numpy.cross(Om_vec,a_vec)+numpy.dot(rot_mat_dot,r)+numpy.dot(rot_mat,v) # transform velocity
    return R,V
def heliocentric_to_rotating_array(R,V,a,t):
    '''Transform positions R and velocities V from the heliocentric frame to the
    rotating frame (circular orbit of radius a, at R(t=0)=(a,0,0)).

    Parameters
    ----------
    R, V : (N,3) arrays (or length-3 arrays for a single particle), heliocentric
        positions (m) and velocities (m/s)
    a : semimajor axis (m) of the circular orbit the rotating frame follows
    t : time (s) since the frame coincided with (a,0,0)

    Returns
    -------
    r, v : rotating-frame positions and velocities, same shape as the inputs
    '''
    Om_k=numpy.sqrt(G*(M_sun)/(a**3.0)) #angular velocity at semimajor axis a_k
    a_vec=a*numpy.array([cos(Om_k*t),sin(Om_k*t),0.0])
    Om_vec=numpy.array([0.0,0.0,Om_k])
    theta=-Om_k*t #reverse angle: inverse of the forward rotation
    rot_mat=numpy.array([cos(theta),-sin(theta),0,sin(theta),cos(theta),0,0,0,1]).reshape((3,3))
    Om_skew=numpy.array([0,-Om_k,0,Om_k,0,0,0,0,0]).reshape((3,3)) #skew symmetric matrix for angular velocity (0,0,Om_k)
    rot_mat_dot=-numpy.dot(Om_skew,rot_mat) #time derivative of the rotation matrix
    if len(R.shape)>1:
        # vectorised transform of all N particles in one shot.
        # BUG FIX: the old `for i in range(N)` loop recomputed these identical
        # whole-array expressions N times — the loop contributed nothing.
        r=numpy.dot(rot_mat,R.T).T-numpy.dot(rot_mat,a_vec)
        v=numpy.dot(rot_mat_dot,R.T).T+numpy.dot(rot_mat,V.T).T-numpy.dot(rot_mat_dot,a_vec)-numpy.dot(rot_mat,numpy.cross(Om_vec,a_vec))
    else:
        r=numpy.dot(rot_mat,R)-numpy.dot(rot_mat,a_vec)
        v=numpy.dot(rot_mat_dot,R)+numpy.dot(rot_mat,V)-numpy.dot(rot_mat_dot,a_vec)-numpy.dot(rot_mat,numpy.cross(Om_vec,a_vec))
    return r,v
#-------------------------------------------------------------------------------
# TODO: the *_precalc transform helpers below are untested — verify them against
# rotating_to_heliocentric / heliocentric_to_rotating before relying on them
def rotating_to_heliocentric_array_precalc(r,v,a,t):
    '''Function to transform from rotating frame (circular orbit of radius a, at R(t=0)=(a,0,0)) to the heliocentric frame.

    Here we precalculate all the vectors and matrices, then loop over all particles in the array.

    Parameters
    ----------
    r, v : (N,3) arrays (or length-3 arrays for a single particle), rotating-frame
        positions (m) and velocities (m/s)
    a : semimajor axis (m) of the circular orbit the rotating frame follows
    t : time (s) since the frame coincided with (a,0,0)

    Returns
    -------
    R, V : heliocentric positions and velocities, same shape as the inputs
    '''
    Om_k=numpy.sqrt(G*(M_sun)/(a**3.0)) #angular velocity at semimajor axis a_k
    a_vec=a*numpy.array([cos(Om_k*t),sin(Om_k*t),0.0]) #time gives location of frame
    Om_vec=numpy.array([0.0,0.0,Om_k]) # define angular velocity vector
    theta=Om_k*t # angle of rotation as a function of time
    rot_mat=numpy.array([cos(theta),-sin(theta),0,sin(theta),cos(theta),0,0,0,1]).reshape((3,3)) # z axis rotation matrix
    Om_skew=numpy.array([0,-Om_k,0,Om_k,0,0,0,0,0]).reshape((3,3)) #define skew symmetric matrix for angular velocity (0,0,Om_k)
    rot_mat_dot=numpy.dot(Om_skew,rot_mat) #time derivative of the rotation matrix
    # Several particle case
    if len(r.shape)>1:
        N=len(r[:,0])
        R=numpy.zeros((N,3))
        V=numpy.zeros((N,3))
        for i in range(N):
            #print i
            R[i,:]=a_vec+numpy.dot(rot_mat,r[i,:]) # transform position
            V[i,:]=numpy.cross(Om_vec,a_vec)+numpy.dot(rot_mat_dot,r[i,:])+numpy.dot(rot_mat,v[i,:]) # transform velocity
    # Single particle case
    else:
        R=a_vec+numpy.dot(rot_mat,r) # transform position
        V=numpy.cross(Om_vec,a_vec)+numpy.dot(rot_mat_dot,r)+numpy.dot(rot_mat,v) # transform velocity
    return R,V
def heliocentric_to_rotating_array_precalc(R,V,a,t):
    '''Function to transform from heliocentric frame to rotating frame (circular orbit of radius a, at R(t=0)=(a,0,0)).

    Here we precalculate all the vectors and matrices, then loop over all particles in the array.

    Parameters
    ----------
    R, V : (N,3) arrays (or length-3 arrays for a single particle), heliocentric
        positions (m) and velocities (m/s)
    a : semimajor axis (m) of the circular orbit the rotating frame follows
    t : time (s) since the frame coincided with (a,0,0)

    Returns
    -------
    r, v : rotating-frame positions and velocities, same shape as the inputs
    '''
    Om_k=numpy.sqrt(G*(M_sun)/(a**3.0)) #angular velocity at semimajor axis a_k
    a_vec=a*numpy.array([cos(Om_k*t),sin(Om_k*t),0.0])
    Om_vec=numpy.array([0.0,0.0,Om_k])
    theta=-Om_k*t #reverse angle
    rot_mat=numpy.array([cos(theta),-sin(theta),0,sin(theta),cos(theta),0,0,0,1]).reshape((3,3))
    Om_skew=numpy.array([0,-Om_k,0,Om_k,0,0,0,0,0]).reshape((3,3)) #define skew symmetric matrix for angular velocity (0,0,Om_k)
    rot_mat_dot=-numpy.dot(Om_skew,rot_mat) #time derivative of the rotation matrix
    # Several particle case
    # BUG FIX: this previously tested len(r.shape), but `r` is the *output* and is
    # not defined at this point (NameError); the input to inspect is R
    if len(R.shape)>1:
        N=len(R[:,0])
        r=numpy.zeros((N,3))
        v=numpy.zeros((N,3))
        for i in range(N):
            #print i
            r[i,:]=numpy.dot(rot_mat,R[i,:])-numpy.dot(rot_mat,a_vec)
            v[i,:]=numpy.dot(rot_mat_dot,R[i,:])+numpy.dot(rot_mat,V[i,:])-numpy.dot(rot_mat_dot,a_vec)-numpy.dot(rot_mat,numpy.cross(Om_vec,a_vec))
    # Single particle case
    else:
        r=numpy.dot(rot_mat,R)-numpy.dot(rot_mat,a_vec)
        v=numpy.dot(rot_mat_dot,R)+numpy.dot(rot_mat,V)-numpy.dot(rot_mat_dot,a_vec)-numpy.dot(rot_mat,numpy.cross(Om_vec,a_vec))
    return r,v
#-------------------------------------------------------------------------------
def find_orbits3(df_dat,i_o,inds,a_0,t,orb_fname,i,N):
    '''Function to find the orbits relative to a particle i_o. Orbit is an array: a, e, I, OMEGA, omega, f, E_J.

    Each accepted orbit record written to `orb_fname` is:
    t, i_o, j, i, then the pair orbit (a,e,I,OMEGA,omega,f,E_J,m1,m2),
    then the binary centre-of-mass heliocentric orbit elements.

    Parameters
    ----------
    df_dat : pandas DataFrame of particle data (columns x(m)..r(m))
    i_o : index of the primary (reference) particle
    inds : list of [primary, secondary] pairs found so far; mutated in place and
        used to reject duplicate (role-swapped) orbits
    a_0 : semimajor axis (m) of the rotating frame's circular orbit
    t : simulation time (s)
    orb_fname : path of the orbit output file (opened here in append mode)
    i : frame/file number, written alongside each record
    N : number of particles to consider
    '''
    orbf = open(orb_fname, 'a') #open file to store orbits
    print "open file {}".format(orb_fname)
    pos=numpy.array(df_dat.loc[:,['x(m)','y(m)','z(m)']])
    vel=numpy.array(df_dat.loc[:,['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']])
    m=numpy.array(df_dat.loc[:,'m(kg)'])
    r=numpy.array(df_dat.loc[:,'r(m)'])
    #N=len(m)
    #sim = add_to_sim(dat)
    #sim.G=G
    oc=0 # count of elliptical orbits found
    for j in range(N):
        duplicate=0 #DOUBLE CHECK WHERE THIS SHOULD BE TO CATCH ALL ORBITS?
        if j!=i_o:
            #RELATIVE COORDINATES
            R=pos[j,:]-pos[i_o,:]
            V=vel[j,:]-vel[i_o,:]
            #V=(vel[j,:]+(1.5*OMEGA*pos[j,0]))-(vel[i_o,:]+(1.5*OMEGA*pos[i_o,0])) #NEED TO REMOVE SHEAR??!?!?!
            mu=G*(m[j]+m[i_o])
            orbit,f_true=find_elements(R,V,mu)
            orbit=numpy.append(orbit,f_true)
            #FIND BINARY COM ORBIT FOR E_J
            #rotating frame CoM position and velocity
            r_com=((m[j]*pos[j,:])+(m[i_o]*pos[i_o,:]))/(m[j]+m[i_o])
            v_com=((m[j]*vel[j,:])+(m[i_o]*vel[i_o,:]))/(m[j]+m[i_o])
            #transform to heliocentric frame CoM position and velocity
            R_com,V_com=rotating_to_heliocentric(r_com,v_com,a_0,t)
            #get a from orbit, give to function below so that we use the correct heliocentric angular velocity
            mu_com=G*(M_sun+m[j]+m[i_o])
            orbit_com,f_true_com=find_elements(R_com,V_com,mu_com)
            orbit_com=numpy.append(orbit_com,f_true_com)
            #print "com orbit position, velocity and mu: ",R_com,V_com,mu_com
            a_c=orbit_com[0]
            E_J=jacobi_rotating(pos[i_o,:],pos[j,:],vel[i_o,:],vel[j,:],m[i_o],m[j],a_c) #IS THIS FUNCTION CORRECT?
            orbit=numpy.append(orbit,E_J)
            #also output m1 and m2
            orbit=numpy.append(orbit,m[i_o])
            orbit=numpy.append(orbit,m[j])
            if (orbit[1]<1.0) and (orbit[1]>=0.0): # 0<e<1, an elliptical orbit exists
                oc+=1
                inds.append([i_o,j]) #store indices of orbital particles
                # the pair is a duplicate if it was already recorded with roles swapped
                for k in range(len(inds)):
                    if inds[k][0]==inds[-1][1] and inds[k][1]==inds[-1][0]:
                        duplicate+=1
                        print "duplicate orbit"
                        break
                if duplicate<1:
                    #orbits.append(orbit) #Don't store duplicate orbits
                    '''
                    print "orbit: t={}\ti_o={}\ta={}".format(t,i_o,orbit[0])
                    orbf.write("{}\t{}\t".format(i_o,i)) #primary particle index and file number
                    for k in range(len(orbit)):
                        orbf.write("{}\t".format(orbit[k])) #orbits contains the 6 orbital elements, E_J and m1, m2
                    orbf.write("{}\t".format(t)) #record time
                    #also add the binary CoM orbit
                    for k in range(len(orbit_com)):
                        orbf.write("{}\t".format(orbit_com[k])) #orbits contains the 6 orbital elements, E_J and m1, m2
                    orbf.write("{}\t".format(f_true_com)) #record binary com heliocentric orbit
                    orbf.write("{}\t".format(j)) #record secondary particle index
                    orbf.write("\n")'''
                    orbf.write("{}\t".format(t)) #record time
                    orbf.write("{}\t{}\t{}\t".format(i_o,j,i)) #primary index, secondary index and file number
                    for k in range(len(orbit)):
                        orbf.write("{}\t".format(orbit[k])) #orbit contains the 6 orbital elements, E_J and m1, m2
                    #also add the binary CoM orbit
                    for k in range(len(orbit_com)):
                        orbf.write("{}\t".format(orbit_com[k])) #helio orbit contains the 6 orbital elements
                    orbf.write("\n")
                    print "suitable orbit found between {} and {}".format(i_o,j)
                    '''print "t = {}".format(t)
                    print "m1 = {}, m2 = {}".format(m[j],m[i_o])
                    print "pos_j = {}\npos_io = {}".format(pos[j,:],pos[i_o,:])
                    print "vel_j = {}\nvel_io = {}".format(vel[j,:],vel[i_o,:])
                    print "r_com = {}\nR_com = {}\nv_com = {}\nV_com = {}".format(r_com,R_com,v_com,V_com)
                    print "com orbit: {}, E_J={}\n".format(orbit_com,E_J)'''
            else:
                print "no suitable orbits for {} and {}".format(i_o,j)
    orbf.close()
    return
#-------------------------------------------------------------------------------
def orbit_search(m_ratio_lim,files,run_path,dirname,t_quit,N_lim):
    '''Function to search the data files for orbits, outputs to a file.

    Frames are processed newest first; the search stops once the frame time
    drops below t_quit or the frame no longer contains >1 large particle.

    Parameters
    ----------
    m_ratio_lim : minimum particle mass as a fraction of the largest particle mass
    files : list of data file names inside the run directory
    run_path : path containing the run directory
    dirname : name of the run directory (also used in the output file name)
    t_quit : simulation time (s) below which the search stops
    N_lim : maximum number of particles to consider per frame
    '''
    directory=run_path+"/"+dirname
    #Read in from run_params
    #run_params=numpy.genfromtxt(directory+"/"+"run_params_0.txt",skip_header=1,dtype=None) #genfromtxt to avoid problems with string 'new' in run_params
    run_params=numpy.genfromtxt(directory+"/"+"run_params_0.txt",dtype=None) #genfromtxt to avoid problems with string 'new' in run_params
    print "run parameters:\n {}".format(run_params)
    a_0=float(run_params[2])
    OMEGA=float(run_params[7])
    Req=float(run_params[3])
    rho=float(run_params[4])
    X=float(run_params[8])
    Mtot=(4.0/3.0)*pi*rho*(Req**3.0) # total cloud mass from the equivalent radius and density
    print "a0={},OM={},Req={},X={},Mtot={}".format(a_0,OMEGA,Req,X,Mtot)
    #DEFINE MLIM RELATIVE TO MTOT?
    print "analyse {}".format(directory)
    orb_fname = directory+"/"+dirname+'_orbits.txt'
    orbf1 = open(orb_fname, 'w') #truncate/create the orbit file; find_orbits3 re-opens it in append mode
    files=list(reversed(files)) #work backwards from the latest frame
    for fi in files:
        i=int(fi[3:10]) # frame number encoded in the file name
        fi2=directory+"/"+fi
        df_dat,t=N_largest_mlim(m_ratio_lim,fi2)
        if len(df_dat)<=1:
            print "the particles are too small or there is only one large particle"
            break
        if t<t_quit:
            print "{} analysed out of total time {}".format(t,t_quit)
            break
        N=len(df_dat)
        print "N={}".format(N)
        if N>N_lim:
            print "N = {} > N_lim = {}".format(N,N_lim)
            N=N_lim # cap the number of particles considered per frame
            print "N={}".format(N)
        i_M=0 #the central particle is taken to be the most massive particle, index 0 after sorting
        print("\nrun {}, frame: {}, time: {}, number of particles: {}\n".format(dirname,fi,t,N))
        inds=[]
        #go through the particle list to find all orbits relative to that particle
        for i_o in range(N):
            print("find orbit relative to particle: {} (out of {})".format(i_o,N))
            find_orbits3(df_dat,i_o,inds,a_0,t,orb_fname,i,N)
    #close files
    orbf1.close()
#-------------------------------------------------------------------------------
def angle_fixer180(x):
    '''Fold an angle (radians) onto the interval [0, pi].

    The angle is first wrapped onto [0, 2*pi]; any value above pi is then
    reflected back below pi (as appropriate for e.g. an inclination).
    '''
    wrapped = angle_fixer360(x)
    while wrapped > pi:
        print( 'too big')
        wrapped = (2 * pi) - wrapped
    return wrapped
#-------------------------------------------------------------------------------
def angle_fixer360(x):
    '''Wrap an angle (radians) onto the interval [0, 2*pi] by adding or
    subtracting whole turns as needed.'''
    full_turn = 2 * pi
    while x < 0:
        x += full_turn
    while x > full_turn:
        x -= full_turn
    return x
#-------------------------------------------------------------------------------
def find_elements(R,V,mu):
    '''Function to find orbital elements (a,e,I,OMEGA,omega,ftrue) from position and velocity, R in m, V in m/s, angles in radians.

    This is better implemented within rebound.
    Solution from Astromechanics.pdf, see also Murray and Dermott SSD sec.2.8

    Parameters
    ----------
    R : length-3 relative position vector (m)
    V : length-3 relative velocity vector (m/s)
    mu : gravitational parameter G*(m1+m2) of the pair (m^3 s^-2)

    Returns
    -------
    dat : array (a, e, I, OMEGA, omega) — semimajor axis (m), eccentricity,
        inclination, longitude of ascending node, argument of pericentre (rad)
    f_true : the true anomaly (rad)
    '''
    #print mu
    dat=numpy.zeros(5)
    #basis vectors
    ex=numpy.array([1.0,0,0])
    ey=numpy.array([0,1.0,0])
    ez=numpy.array([0,0,1.0])
    #position and velocity magnitude
    r=numpy.linalg.norm(R)
    v=numpy.linalg.norm(V)
    #Energy and semimajor axis
    e=(v**2/2)-(mu/r) #specific orbital energy; NOTE: this name is reused for eccentricity below
    a=-mu/(2*e)
    dat[0]=a
    #angular momentum
    H=numpy.cross(R,V)
    h=numpy.linalg.norm(H)
    #eccentricity vector and its magnitude
    E=(1/mu)*(numpy.cross(V,H)-(mu*(R/r)))
    e=numpy.linalg.norm(E)
    dat[1]=e
    #Inclination (arccos is quadrant-ambiguous, hence the corrections below)
    A=numpy.dot(H,ez)/h
    I=arccos(A)
    I=angle_fixer180(I)
    if A>0 and I>(pi/2):
        I=(2*pi)-I
    if A<0 and I<(pi/2):
        I=(2*pi)-I
    dat[2]=I #store as radians
    #nodal vector
    N=numpy.cross(ez,H)
    n=numpy.linalg.norm(N)
    #longitude of ascending node; quadrant resolved via the y component of N
    B=numpy.dot(N,ex)/n
    OMEGA=arccos(B)
    OMEGA=angle_fixer360(OMEGA)
    ny=numpy.dot(N,ey)
    if ny>0 and OMEGA>pi:
        OMEGA=(2*pi)-OMEGA
    if ny<0 and OMEGA<pi:
        OMEGA=(2*pi)-OMEGA
    dat[3]=OMEGA
    #argument of pericentre; quadrant resolved via the z component of the eccentricity vector
    C=numpy.dot(N,E)/(n*e)
    omega=arccos(C)
    omega=angle_fixer360(omega)
    e_z=numpy.dot(E,ez)
    if e_z>0 and omega>pi:
        omega=(2*pi)-omega
    if e_z<0 and omega<pi:
        omega=(2*pi)-omega
    dat[4]=omega
    #true anomaly; quadrant resolved via the sign of R.V (approaching vs receding)
    D=numpy.dot(R,E)/(r*e)
    f_true=arccos(D)
    f_true=angle_fixer360(f_true)
    RV=numpy.dot(R,V)
    if RV>0 and f_true>pi:
        f_true=(2*pi)-f_true
    if RV<0 and f_true<pi:
        f_true=(2*pi)-f_true
    return dat,f_true
#-------------------------------------------------------------------------------
def jacobi_rotating(r1,r2,v1,v2,m1,m2,a_c):
    '''Jacobi integral of the orbit of particle 2 with respect to particle 1
    (Hill approximation, rotating frame: Nakazawa 1988 15.6.1).
    CHECK THIS

    Parameters
    ----------
    r1, r2, v1, v2 : rotating-frame positions and velocities of the two particles
    m1, m2 : particle masses (kg)
    a_c : centre-of-mass semimajor axis (m), setting the frame's angular velocity

    Returns
    -------
    E_J : the Jacobi energy integral
    '''
    rel_pos = r2 - r1  # relative position
    rel_vel = v2 - v1  # relative velocity
    sep = numpy.linalg.norm(rel_pos)
    omega_c = numpy.sqrt(G * (M_sun) / (a_c**3.0))  # angular velocity at CoM semimajor axis a_c
    m_tot = m1 + m2
    r_hill = pow(m_tot / (3.0 * M_sun), 1.0 / 3.0) * (a_c)  # mutual Hill radius
    # kinetic term + tidal/vertical term + (gravity + constant offset) term
    kinetic = 0.5 * ((rel_vel[0]**2.0) + (rel_vel[1]**2.0) + (rel_vel[2]**2.0))
    tidal = -(1.5 * ((omega_c * rel_pos[0])**2.0)) + (0.5 * ((omega_c * rel_pos[2])**2.0))
    grav = -(G * (m_tot) / sep) + (4.5 * ((r_hill * omega_c)**2.0))
    #print "r_H = {}, sep = {}, sep/r_H = {}".format(r_hill,sep,sep/r_hill)
    return kinetic + tidal + grav  # Jacobi Energy Integral
#-------------------------------------------------------------------------------
def add_to_sim(dat):
    '''Create a rebound simulation and add every particle in `dat`.

    `dat` rows are (m, r, x, y, z, vx, vy, vz); returns the populated simulation.
    '''
    sim = rebound.Simulation()
    for row in dat:
        sim.add(m=row[0], r=row[1],
                x=row[2], y=row[3], z=row[4],
                vx=row[5], vy=row[6], vz=row[7])
    return sim
def add_file_to_sim(sim,fname):
    '''Load particles from file `fname` (via load_dat_file, as a pandas dataframe)
    and add them to an existing rebound sim. Modifies `sim` in place.'''
    t,df_dat=load_dat_file(fname)
    positions=numpy.array(df_dat.loc[:,['x(m)','y(m)','z(m)']])
    velocities=numpy.array(df_dat.loc[:,['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']])
    masses=numpy.array(df_dat.loc[:,'m(kg)'])
    radii=numpy.array(df_dat.loc[:,'r(m)'])
    for k in range(len(masses)):
        sim.add(m=masses[k],r=radii[k],
                x=positions[k,0],y=positions[k,1],z=positions[k,2],
                vx=velocities[k,0],vy=velocities[k,1],vz=velocities[k,2])
    #return sim
#-------------------------------------------------------------------------------
def find_orbits(N,i,fname2,a_0):
    '''Find orbits of the N largest particles in file `fname2` using rebound.

    Parameters
    ----------
    N : number of largest particles to load (NOTE: reassigned to sim.N below)
    i : frame number, used only in progress output (NOTE: shadowed by a loop below)
    fname2 : data file path
    a_0 : semimajor axis (m) of the rotating frame's circular orbit (for E_J)

    Returns
    -------
    orbits : list of [a,e,inc,Omega,omega,f] for each elliptical orbit found
    a, e, I : elements of the particle closest to the most massive particle
    time : time stamp of the frame
    '''
    orbits=[]
    dat,t=N_largest(N,i,fname2)
    i_M=0 #largest particle
    #find the closest particle to most massive particle 0
    dist=[]
    for l in range(N-1):
        n=l+1
        R=numpy.linalg.norm(dat[n,2:5]-dat[0,2:5])
        dist.append(R)
    i_R=numpy.argmin(dist)+1 # +1 because the distance list excludes particle 0 itself
    dat_keep=numpy.vstack((dat[0,:],dat[i_R,:]))
    #dat=dat_keep #use only these two particles
    pos=dat[:,2:5]
    vel=dat[:,5:]
    m=dat[:,0]
    r=dat[:,1]
    print("closest particle: ",i_R)
    print( "t: ",t)
    print( dat)
    print( 'plot ',i)
    print( 'time ',t)
    #find CoM (inline equivalent of centre_of_mass)
    xm=pos[:,0]*m
    ym=pos[:,1]*m
    zm=pos[:,2]*m
    M=numpy.sum(m)
    CoM=numpy.zeros(3)
    CoM[0]=numpy.sum(xm)/M
    CoM[1]=numpy.sum(ym)/M
    CoM[2]=numpy.sum(zm)/M
    print( "masses: ",m)
    print( "mass ratio: ",m[1]/m[0])
    print( "radii: ",r)
    print( "radii ratio: ",r[1]/r[0])
    print( "CoM: ",CoM)
    #find orbits using rebound
    sim = rebound.Simulation()
    '''
    numpy.savetxt("reb_orb_dat.txt",dat)
    with open('reb_orb_dat.txt', 'r') as op:
        s = op.read()
    sim.add_particles_ascii(s)
    '''
    # NOTE(review): this loop variable shadows the frame-number argument `i`
    for i in range(N):
        sim.add(m=m[i],r=r[i],x=pos[i,0],y=pos[i,1],z=pos[i,2],vx=vel[i,0],vy=vel[i,1],vz=vel[i,2])
        if i>0:
            E_J=jacobi_rotating(pos[i_M,:],pos[i,:],vel[i_M,:],vel[i,:],m[i_M],m[i],a_0)
            print( "E_J={}".format(E_J))
    N=sim.N
    sim.G=G
    #print "Gravitational constant, G = ",sim.G
    #print "\n"
    orbs = sim.calculate_orbits(heliocentric=True)
    for orbit in orbs:
        if orbit.e<1.0:
            print(orbit)
            orbits.append([orbit.a,orbit.e,orbit.inc,orbit.Omega,orbit.omega,orbit.f])
        else:
            print( "not elliptical (0<e<1)")
    #record orbital elements of closest particle
    i_R-=1 #reindex as orbital elements do not contain particle 0
    a=(orbits[i_R][0])
    e=(orbits[i_R][1])
    I=(orbits[i_R][2])
    time=(t)
    #print "elements: \n",a,time
    return orbits,a,e,I,time
#-------------------------------------------------------------------------------
def find_orbits2(dat,i_o,inds,a_0):
    '''Function to find the orbits relative to a particle i_o. Orbit is an array: a, e, I, OMEGA, omega, f, E_J (plus m1, m2).

    Parameters
    ----------
    dat : (N,8) array with columns m,r,x,y,z,vx,vy,vz (see N_largest)
    i_o : index of the primary (reference) particle
    inds : list of [primary, secondary] pairs found so far; mutated in place and
        used to reject duplicate (role-swapped) orbits
    a_0 : semimajor axis (m) used for the Jacobi energy

    Returns
    -------
    orbits : list of element arrays for the non-duplicate elliptical orbits found
    '''
    orbits=[]
    orb=numpy.zeros(6) # NOTE(review): unused
    N=len(dat[:,0])
    pos=dat[:,2:5]
    vel=dat[:,5:]
    m=dat[:,0]
    r=dat[:,1]
    sim = add_to_sim(dat)
    sim.G=G
    oc=0 # count of elliptical orbits found
    for j in range(N):
        duplicate=0 #DOUBLE CHECK WHERE THIS SHOULD BE TO CATCH ALL ORBITS?
        if j!=i_o:
            R=pos[j,:]-pos[i_o,:]
            V=vel[j,:]-vel[i_o,:]
            #V=(vel[j,:]+(1.5*OMEGA*pos[j,0]))-(vel[i_o,:]+(1.5*OMEGA*pos[i_o,0])) #NEED TO REMOVE SHEAR??!?!?!
            mu=G*(m[j]+m[i_o])
            orbit,f_true=find_elements(R,V,mu)
            orbit=numpy.append(orbit,f_true)
            E_J=jacobi_rotating(pos[i_o,:],pos[j,:],vel[i_o,:],vel[j,:],m[i_o],m[j],a_0)
            orbit=numpy.append(orbit,E_J)
            #also output m1 and m2
            orbit=numpy.append(orbit,m[i_o])
            orbit=numpy.append(orbit,m[j])
            if orbit[1]<1.0: #an elliptical orbit exists
                #print "particle: {}, {:.11e} {:.11e} {:.11e} E_J={}".format(j,orbit[0],orbit[1],orbit[2],E_J)
                oc+=1
                inds.append([i_o,j]) #store indices of orbital particles
                # the pair is a duplicate if it was already recorded with roles swapped
                for k in range(len(inds)):
                    if inds[k][0]==inds[-1][1] and inds[k][1]==inds[-1][0]:
                        duplicate+=1
                        #print "DUPLICATE ORBIT: {} {}".format(i_o,j)
                        break
                if duplicate<1:
                    #print "APPEND"
                    orbits.append(orbit) #Don't store duplicate orbits
            #else:
                #print "particle: {}, not elliptical".format(j)
    #print "number of orbits e<1: {}".format(oc)
    return orbits
#-------------------------------------------------------------------------------
def orbital_plotting(fig,runname,fname,dat_file,i,t,pos,CoM,lim,lim2,i_M,orbits):
    '''Function containing the plotting routines for orbits.py (figure already open).

    Parameters
    ----------
    fig : matplotlib figure to draw into
    runname : run label shown in the title
    fname : base name used to build the output png path
    dat_file : data file name shown in the title
    i : frame number, used in the output png name
    t : simulation time (s), shown in the title
    pos : (N,3) array of particle positions
    CoM : length-3 centre-of-mass position
    lim, lim2 : axis limits (m) for the wide/zoomed panels (used by option 0;
        options 2 and 3 override these with hard-coded values)
    i_M : index of the central particle that positions are plotted relative to
    orbits : orbital-element rows for the orbits to overplot

    The hard-coded plot_option below selects which layout is drawn; the figure
    is saved to "./<fname><option>/<fname><option>min_<i>.png".
    '''
    pyplot.rc('text', usetex=True)
    pyplot.rc('font', family='serif')
    fig.suptitle('run: {}, frame: {}, particle: {}, time: {:.2e} s'.format(runname,dat_file,i_M,t))
    plot_option=3 # hard-coded layout selector
    if plot_option==0: # xy plane
        #plot only the N largest particles as simple markers
        print( "plot option: ",plot_option)
        fig.set_size_inches(15.5, 10.5)
        gs = gridspec.GridSpec(1, 2,width_ratios=[1,1],height_ratios=[1,1])
        ax1 = pyplot.subplot(gs[0,0])
        ax2 = pyplot.subplot(gs[0,1])
        ax1.set_aspect("equal")
        ax1.set_xlabel('x /m')
        ax1.set_ylabel('y /m')
        ax1.ticklabel_format(style='sci', axis='x', scilimits=(0,0))
        ax1.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
        ax1.set_xlim(-lim,lim)
        ax1.set_ylim(-lim,lim)
        ax2.set_aspect("equal")
        ax2.set_xlabel('x /m')
        ax2.set_ylabel('y /m')
        ax2.ticklabel_format(style='sci', axis='x', scilimits=(0,0))
        ax2.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
        ax2.set_xlim(-lim2,lim2)
        ax2.set_ylim(-lim2,lim2)
        ax1.scatter(pos[:,0],pos[:,1])
        ax1.scatter(pos[i_M,0],pos[i_M,1],marker='x',c='r') # mark the central particle
        ax1.scatter(CoM[0],CoM[1],marker='x',c='k') # mark the centre of mass
        ax2.scatter(pos[:,0]-pos[i_M,0],pos[:,1]-pos[i_M,1])
    if plot_option==2: # xy plane
        #simple N particles and orbit plot. Single Panel
        print( "plot option: ",plot_option)
        ax1 = fig.add_subplot(111)
        ax1.set_aspect("equal")
        lim2=1e9 # hard-coded zoom, overrides the lim2 argument
        ax1.set_xlim(-lim2,lim2)
        ax1.set_ylim(-lim2,lim2)
        ax1.scatter(pos[:,0]-pos[i_M,0],pos[:,1]-pos[i_M,1])
        ax1.scatter(CoM[0]-pos[i_M,0],CoM[1]-pos[i_M,1],marker='x',c='k')
        for l in range(len(orbits[:,0])):
            pos_orb=planet_orbit(orbits[l][:],1000)
            ax1.plot(pos_orb[:,0],pos_orb[:,1])
    if plot_option==3: # xy plane
        #simple N particles and orbit plot. Double panel
        print( "plot option: ",plot_option)
        fig.set_size_inches(15.5, 10.5)
        gs = gridspec.GridSpec(1, 2,width_ratios=[1,1],height_ratios=[1,1])
        ax1 = pyplot.subplot(gs[0,0])
        ax2 = pyplot.subplot(gs[0,1])
        ax1.set_aspect("equal")
        ax2.set_aspect("equal")
        ax1.set_xlabel('x /m')
        ax1.set_ylabel('y /m')
        ax1.ticklabel_format(style='sci', axis='x', scilimits=(0,0))
        ax1.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
        ax2.set_xlabel('x /m')
        ax2.set_ylabel('y /m')
        ax2.ticklabel_format(style='sci', axis='x', scilimits=(0,0))
        ax2.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
        lim1=1e9 # wide view
        ax1.set_xlim(-lim1,lim1)
        ax1.set_ylim(-lim1,lim1)
        lim2=1e7 # zoomed view, overrides the lim2 argument
        ax2.set_xlim(-lim2,lim2)
        ax2.set_ylim(-lim2,lim2)
        ax1.scatter(pos[:,0]-pos[i_M,0],pos[:,1]-pos[i_M,1])
        ax2.scatter(pos[:,0]-pos[i_M,0],pos[:,1]-pos[i_M,1])
        for l in range(len(orbits[:])):
            pos_orb=planet_orbit(orbits[l],1000)
            ax1.plot(pos_orb[:,0],pos_orb[:,1])
            ax2.plot(pos_orb[:,0],pos_orb[:,1])
    #save the figure
    picname="./"+fname+str(plot_option)+"/"+fname+str(plot_option)+"min_%07d.png"%i
    pyplot.savefig(picname, bbox_inches='tight')
#-------------------------------------------------------------------------------
def plot_elements(a,e,I,time):
    '''Plot the orbital elements a, e and I against time in three side-by-side panels
    and show the resulting figure.'''
    fig = pyplot.figure()
    grid = gridspec.GridSpec(1, 3)
    panels = [pyplot.subplot(grid[0, k]) for k in range(3)]
    y_labels = ['a /m', 'e', 'I /rad']
    series = [a, e, I]
    # one panel per element, all sharing the same time axis label
    for panel, label, values in zip(panels, y_labels, series):
        panel.set_xlabel('t /s')
        panel.set_ylabel(label)
        panel.plot(time, values)
    pyplot.show()
#-------------------------------------------------------------------------------
def orbit_plot3d(fig,pos,CoM,lim,orbits,i_M=0):
    '''Plot particle positions and orbits in 3d, relative to particle i_M.

    Parameters
    ----------
    fig : matplotlib figure to draw into
    pos : (N,3) array of particle positions
    CoM : length-3 centre-of-mass position
    lim : requested axis limit (currently overridden below, see note)
    orbits : 2d array of orbital elements, one orbit per row
    i_M : index of the reference (central) particle. BUG FIX: this was
        previously read as an undefined name, raising a NameError; it is now
        a parameter defaulting to the most massive particle (index 0).
    '''
    ax1 = fig.add_subplot(111,projection='3d')
    ax1.scatter(0,0,0)
    ax1.axis('equal') # set aspect ratio to equal
    # NOTE(review): this overrides the caller's `lim`; kept for output
    # compatibility, but it looks like a leftover hard-coded zoom level
    lim=1e7
    ax1.set_xlim(-lim,lim)
    ax1.set_ylim(-lim,lim)
    ax1.set_zlim(-lim,lim)
    ax1.scatter(pos[:,0]-pos[i_M,0],pos[:,1]-pos[i_M,1],pos[:,2]-pos[i_M,2])
    ax1.scatter(CoM[0]-pos[i_M,0],CoM[1]-pos[i_M,1],CoM[2]-pos[i_M,2],marker='x',c='k')
    ax1.plot(pos[:,0]-pos[i_M,0],pos[:,1]-pos[i_M,1],pos[:,2]-pos[i_M,2])
    for l in range(len(orbits[:,0])):
        pos_orb=planet_orbit(orbits[l][:],1000)
        ax1.plot(pos_orb[:,0],pos_orb[:,1]) # NOTE(review): orbits drawn in xy only, no z component
#-------------------------------------------------------------------------------
def orbit_plot2d(fig,ax1,pos,orbits,i_M,i_o,lim):
    '''simple N particles and orbit plot. Single Panel. i_M is the central particle, i_o is the orbit reference particle.

    Positions are plotted relative to particle i_M. Orbits referenced to the
    central particle (i_o==i_M) are drawn red, otherwise blue; orbits with
    negative Jacobi energy (orbit[6]<0, i.e. bound) are solid, unbound dotted.
    '''
    #print "i_M={}, i_o={}".format(i_M,i_o)
    #ax1 = fig.add_subplot(111)
    ax1.set_aspect("equal")
    ax1.set_xlim(-lim,lim)
    ax1.set_ylim(-lim,lim)
    ax1.set_xlabel('x /m')
    ax1.set_ylabel('y /m')
    ax1.ticklabel_format(style='sci', axis='x', scilimits=(0,0))
    ax1.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
    pos_rel=pos-pos[i_M,:]
    ax1.scatter(pos_rel[:,0],pos_rel[:,1]) #plot positions relative to central particle
    for l in range(len(orbits)):
        pos_orb=planet_orbit(orbits[l][:],1000)
        pos_orb=pos_orb+pos_rel[i_o,:] #move orbits to be relative to particle i_o
        if i_o==i_M:
            col="r"
        else:
            col="b"
        if orbits[l][6]<0.0: #if E_J is negative the pair is bound
            ls="-"
        else:
            ls=":"
        ax1.plot(pos_orb[:,0],pos_orb[:,1],linestyle=ls,c=col)
#-------------------------------------------------------------------------------
def all_dat_sort():
    '''Read the summary files for all completed runs and merge them into one sortable record array.

    Returns
    -------
    all_runs : raw rows of all_runs_essential.txt (run label converted to an int run number)
    all_orbits : raw rows of all_orbits.txt
    dat_sort : structured record array with fields run,N,X,f,dt,m1,m2m1,a,e,I
        plus an appended 'int' field holding each run's initial-condition label
    '''
    path='../completed_runs/'
    all_runs=numpy.genfromtxt(path+"all_runs_essential.txt",dtype=None,skip_header=1)
    all_orbits=numpy.genfromtxt(path+"all_orbits.txt",dtype=None,skip_header=1)
    # keep only the leading 3-digit run number from the run label
    for i in range(len(all_runs)):
        run=all_runs[i][0]
        run=int(run[:3])
        all_runs[i][0]=run
    inds1=[int(item[0]) for item in all_runs] # NOTE(review): unused
    inds2=[int(item[0]) for item in all_orbits]
    dat=numpy.zeros((len(all_orbits),10))
    initial=[]
    # join the two tables on run number
    for i in range(len(all_runs)):
        if int(all_runs[i][0]) in inds2:
            j=inds2.index(int(all_runs[i][0]))
            dat[j,0]=int(all_runs[i][0]) #run number
            dat[j,1]=float(all_runs[i][1]) #N
            dat[j,2]=all_runs[i][2] #X
            dat[j,3]=all_runs[i][3] #f
            dat[j,4]=all_runs[i][4] #dt
            dat[j,5]=all_orbits[j][1] #m1
            dat[j,6]=all_orbits[j][2] #m2m1
            dat[j,7]=all_orbits[j][3] #a
            dat[j,8]=all_orbits[j][4] #e
            dat[j,9]=all_orbits[j][5] #I
            initial.append(all_runs[i][7]) #initial-conditions label
    #create structured data array for sorting
    dat_sort = numpy.core.records.fromarrays(dat.transpose(),names='run,N,X,f,dt,m1,m2m1,a,e,I',formats = 'i8,f8,f8,f8,f8,f8,f8,f8,f8,f8')
    #print( "dat_sort:\n",dat_sort)
    #print 'initial conditions',initial
    dat_sort=numpy.lib.recfunctions.append_fields(dat_sort,'int',data=initial)
    return all_runs,all_orbits,dat_sort
#-------------------------------------------------------------------------------
def all_dat_sort2():
    '''Same as all_dat_sort, but reads the summary files from the current directory.

    Returns
    -------
    all_runs : raw rows of all_runs_essential.txt (run label converted to an int run number)
    all_orbits : raw rows of all_orbits.txt
    dat_sort : structured record array with fields run,N,X,f,dt,m1,m2m1,a,e,I
        plus an appended 'int' field holding each run's initial-condition label
    '''
    #no path
    all_runs=numpy.genfromtxt("all_runs_essential.txt",dtype=None,skip_header=1)
    all_orbits=numpy.genfromtxt("all_orbits.txt",dtype=None,skip_header=1)
    # keep only the leading 3-digit run number from the run label
    for i in range(len(all_runs)):
        run=all_runs[i][0]
        run=int(run[:3])
        all_runs[i][0]=run
    inds1=[int(item[0]) for item in all_runs] # NOTE(review): unused
    inds2=[int(item[0]) for item in all_orbits]
    dat=numpy.zeros((len(all_orbits),10))
    initial=[]
    # join the two tables on run number
    for i in range(len(all_runs)):
        if int(all_runs[i][0]) in inds2:
            j=inds2.index(int(all_runs[i][0]))
            dat[j,0]=int(all_runs[i][0]) #run number
            dat[j,1]=float(all_runs[i][1]) #N
            dat[j,2]=all_runs[i][2] #X
            dat[j,3]=all_runs[i][3] #f
            dat[j,4]=all_runs[i][4] #dt
            dat[j,5]=all_orbits[j][1] #m1
            dat[j,6]=all_orbits[j][2] #m2m1
            dat[j,7]=all_orbits[j][3] #a
            dat[j,8]=all_orbits[j][4] #e
            dat[j,9]=all_orbits[j][5] #I
            initial.append(all_runs[i][7]) #initial-conditions label
    #create structured data array for sorting
    dat_sort = numpy.core.records.fromarrays(dat.transpose(),names='run,N,X,f,dt,m1,m2m1,a,e,I',formats = 'i8,f8,f8,f8,f8,f8,f8,f8,f8,f8')
    #print( "dat_sort:\n",dat_sort)
    #print 'initial conditions',initial
    dat_sort=numpy.lib.recfunctions.append_fields(dat_sort,'int',data=initial)
    return all_runs,all_orbits,dat_sort
#-------------------------------------------------------------------------------
def hist_dist(x,n_bins):
    '''Histogram helper: bin the data and return bin centres, counts and widths.

    Parameters
    ----------
    x
        1d array of the data set
    n_bins
        number of bins

    Returns
    -------
    center
        array of the centre positions of the bins, length n_bins
    hist
        the number of points in each bin, length n_bins
    width
        array of the bin widths, length n_bins
    '''
    counts, edges = numpy.histogram(x, bins=n_bins)
    centres = 0.5 * (edges[:-1] + edges[1:])  # midpoint of each pair of adjacent edges
    widths = numpy.diff(edges)
    return centres, counts, widths
#-------------------------------------------------------------------------------
def find_time_from_file(dirpath):
    '''Find the most recent time stamp in a run directory.

    Lists the *.bin files with ``ls -alT`` (BSD/macOS long format with full
    time), takes the last listed file, and reads the simulation time from the
    matching .txt file.

    Returns
    -------
    ftime : datetime of the .bin file's modification time
    t : simulation time (first column, first row) of the matching .txt file
    '''
    file_times=subprocess.check_output(['ls -alT {}/*bin'.format(dirpath)], shell=True)
    file_lines=file_times.split("\n")
    file_lines=file_lines[:-1]  # drop the trailing empty entry after the final newline
    line=file_lines[-1]
    line2=line.split(" ")
    fname=(line2[-1][:-3])+"txt"  # swap the .bin suffix for .txt
    day=line2[-5]
    month=line2[-4]
    time=line2[-3]
    year=line2[-2]
    ftime=datetime.datetime.strptime("{}/{}/{} {}".format(month,day,year,time),"%b/%d/%Y %H:%M:%S")
    #get time from file (read only the first column; the full-file load of the
    #old version was unused and has been removed)
    t=numpy.loadtxt(fname,usecols=(0,))
    t=t[0]
    return ftime,t  # BUG FIX: previously returned undefined name t_progress (NameError)
#-------------------------------------------------------------------------------
def find_time_from_file_year(dirpath,year):
    '''Find the most recent time stamp in a run directory.

    Lists the *.bin files with ``ls -alt`` (newest first, year omitted for
    recent files), takes the newest file, and reads the simulation time from
    the matching .txt file.

    Parameters
    ----------
    dirpath : run directory containing dat*.bin / dat*.txt pairs
    year : year to substitute into the timestamp (``ls -alt`` omits it)

    Returns
    -------
    ftime : datetime of the .bin file's modification time
    t : simulation time (first column, first row) of the matching .txt file
    '''
    file_times=subprocess.check_output(['ls -alt {}/*bin'.format(dirpath)], shell=True)
    file_lines=file_times.split("\n")
    file_lines=file_lines[:-1]  # drop the trailing empty entry
    line=file_lines[0]          # newest file is listed first with 'ls -alt'
    print(line)
    print(line.split(" "))
    line2=line.split(" ")
    fname=(line2[-1][:-3])+"txt"  # swap the .bin suffix for .txt
    day=line2[-4]
    month=line2[-3]
    time=line2[-2]
    print("{} {} {}".format(day,month,time))
    # locale/format of 'ls' may give 'Mon DD' or 'DD Mon'; try both orderings.
    # Narrowed from a bare except: only the parse itself may legitimately fail.
    try:
        ftime=datetime.datetime.strptime("{}/{}/{} {}".format(month,day,year,time),"%b/%d/%Y %H:%M")
    except ValueError:
        ftime=datetime.datetime.strptime("{}/{}/{} {}".format(month,day,year,time),"%d/%b/%Y %H:%M")
    #get time from file (the unused full-file load of the old version removed)
    t=numpy.loadtxt(fname,usecols=(0,)) # read only first column to get time
    t=t[0]
    return ftime,t
#-------------------------------------------------------------------------------
def load_collision_file(f):
    '''Load a collisions_N.txt file as a dataframe.

    Each line is one collision event. Two on-disk layouts exist; the column
    count decides which is used:
    - 28 columns: written by reb_funcs.h reb_collision_resolve_merge_out_damp_cut
    - 18 columns: written by reb_funcs.h reb_collision_resolve_merge_out

    Raises ValueError if the file matches neither layout.
    '''
    # context manager fixes the file-handle leak of 'for line in open(f)';
    # the comprehension replaces the Python-2-only list-returning map()
    with open(f) as fh:
        arrays = [numpy.array([float(v) for v in line.split()]) for line in fh]
    coll_dat=numpy.array(arrays) #the 8xN particle data array
    try: #this will load for reb_funcs.h: reb_collision_resolve_merge_out_damp_cut
        df_coll=pd.DataFrame(coll_dat,columns=['t(s)','i','j','mi(kg)','mj(kg)','ri(m)','rj(m)',
        'xi(m)','yi(m)','zi(m)','xj(m)','yj(m)','zj(m)','vxi(ms^-1)','vyi(ms^-1)',
        'vzi(ms^-1)','vxj(ms^-1)','vyj(ms^-1)','vzj(ms^-1)','m(kg)','r(m)','x(m)','y(m)',
        'z(m)','vx(ms^-1)','vy(ms^-1)','vz(ms^-1)','N_boulder'])
    except ValueError: #column-count mismatch: reb_collision_resolve_merge_out layout
        df_coll=pd.DataFrame(coll_dat,columns=['t(s)','i','j','mi(kg)','mj(kg)',
        'xi(m)','yi(m)','zi(m)','xj(m)','yj(m)','zj(m)','vxi(ms^-1)','vyi(ms^-1)',
        'vzi(ms^-1)','vxj(ms^-1)','vyj(ms^-1)','vzj(ms^-1)','v_rel(ms^-1)'])
    return df_coll
def load_all_collision_file(d_path):
    '''
    This function loads all collision files in a directory, and drops the duplicates.
    This is required in restarted runs where there may be duplicate entries in the collision log.
    We load all collision files in the directory "d_path", assuming that they are ordered as:
    collisions_0.txt, collisions_1.txt, ...
    THIS WORKS, ASSUMING THAT THE OVERLAPPING COLLISIONS ARE IDENTICAL!
    Should rewrite, but with the time overlap being used instead

    Returns a dataframe of all deduplicated collisions, or an empty list []
    if the very first collision file cannot be loaded.
    '''
    # find all files
    files=next(os.walk(d_path))[2] #retrieve the files in the run directory
    files_coll = [ fi for fi in files if 'collision' in fi ]
    files_coll.sort() #ensure that the files are always sorted the same?
    # find all collision files and combine
    # Load the all collisions, accounting for duplicates
    j=0  # j counts successfully loaded files; j==0 means df_coll not yet created
    for c_file in files_coll:
        cf="{}/{}".format(d_path,c_file)
        # print cf
        if j==0:
            try:
                df_coll=load_collision_file(cf)
                # print len(df_coll)
            except:
                # print 'no collisions'
                # NOTE(review): first file unreadable -> give up entirely and
                # return a plain list, not a DataFrame; callers must handle both
                df_coll=[]
                break
        else:
            try:
                _df_coll=load_collision_file(cf)
                # print len(_df_coll)
                df_coll=df_coll.append(_df_coll)  # NOTE: DataFrame.append is deprecated in pandas >= 1.4
            except:
                # print 'empty collision file'
                continue
        # dedupe after every file so overlapping restart entries collapse
        df_coll=df_coll.drop_duplicates()
        j+=1
    return df_coll
#-------------------------------------------------------------------------------
def load_unusual_collision_files(f):
    '''Load collision file *f*, dropping any malformed lines that would prevent
    the data array being correctly made into a dataframe.
    e.g. this is required for runs cloud_runs_slurm_189 and cloud_runs_slurm_194

    A line is dropped when it has the wrong number of fields or when any field
    cannot be parsed as a float (e.g. two numbers mashed together).
    '''
    columns=['t(s)','i','j','mi(kg)','mj(kg)',
    'xi(m)','yi(m)','zi(m)','xj(m)','yj(m)','zj(m)','vxi(ms^-1)','vyi(ms^-1)',
    'vzi(ms^-1)','vxj(ms^-1)','vyj(ms^-1)','vzj(ms^-1)','v_rel(ms^-1)']
    error_lines=[] # indices of dropped lines (kept for debugging)
    # context manager fixes the leaked handle of 'for line in open(f)'; keeping
    # a list of token lists also avoids building a ragged numpy object array,
    # which newer numpy versions refuse to create implicitly
    with open(f) as fh:
        token_lines=[line.split() for line in fh]
    good_rows=[] # rows that parse cleanly
    for i in range(len(token_lines)):
        if len(token_lines[i])!=len(columns): # skip lines of incorrect length
            error_lines.append(i)
        else:
            try: # keep lines whose every field converts to float
                good_rows.append(numpy.array(token_lines[i]).astype(float))
            except ValueError: # skip lines with weirdly formatted numbers
                error_lines.append(i)
    coll_dat=numpy.array(good_rows)
    df_coll=pd.DataFrame(coll_dat,columns=columns)
    return df_coll
#-------------------------------------------------------------------------------
def create_file_list(d):
    '''Return the sorted list of dat*.txt output files in directory *d*.'''
    entries = next(os.walk(d))[2]  # plain files in the run directory
    dat_files = [name for name in entries
                 if name.startswith("dat") and name.endswith(".txt")]
    return sorted(dat_files)  # deterministic ordering across platforms
#-------------------------------------------------------------------------------
def file_list_no_restart(files):
    '''Take a sorted list of files dat0000000_0.txt, dat0000001_0.txt,
    dat0000001_1.txt, ... and account for any restarts: when several files
    share the same 7-digit snapshot number, keep only the last (most recent
    restart suffix).

    Parameters
    ----------
    files
        sorted list of dat file names; characters [3:10] must be the snapshot number

    Returns
    -------
    list with one entry per snapshot number; [] for empty input
    (previously an empty list raised IndexError on files[0])
    '''
    if not files:
        return []
    final_files=[files[0]]
    for j in range(1,len(files)):
        fnum=int(files[j][3:10]) # number of current file
        fnum2=int(files[j-1][3:10]) # number of previous file
        if fnum!=fnum2: # a new snapshot number: safe to append
            final_files.append(files[j])
        else: # same snapshot, later restart: replace the last entry
            final_files[-1]=files[j]
    return final_files
#-------------------------------------------------------------------------------
def create_dir_list(d):
    '''Return the sorted list of immediate sub-directories at location *d*.'''
    _, subdirs, _ = next(os.walk(d))
    return sorted(subdirs)  # deterministic ordering across platforms
#-------------------------------------------------------------------------------
def load_dat_file(f):
    '''Load a dat.txt file and return the time stamp plus a dataframe of all
    particle data.

    Parameters
    ----------
    f
        path to data file: a time stamp on the first line, then one particle
        per line with columns x,y,z,vx,vy,vz,m,r

    Returns
    -------
    t : float simulation time from the first line
    df_dat : dataframe with columns x(m)...r(m), one row per particle
    '''
    # context manager fixes the leaked handle of 'for line in open(f)'; the
    # comprehension replaces the Python-2-only list-returning map(); the dead
    # commented-out block of the old version has been removed
    with open(f) as fh:
        arrays = [numpy.array([float(v) for v in line.split()]) for line in fh]
    t=float(arrays[0][0]) #first line is the time stamp
    dat=numpy.array(arrays[1:])
    df_dat=pd.DataFrame(dat,columns=['x(m)','y(m)','z(m)','vx(ms^-1)','vy(ms^-1)','vz(ms^-1)','m(kg)','r(m)'])
    return t,df_dat
#-------------------------------------------------------------------------------
def load_orb_file(f):
    '''Load an orbits file as a dataframe.

    Two on-disk layouts exist; the column count decides which is used
    (19 columns with heliocentric elements, or the shorter 12-column form).
    Raises ValueError if the file matches neither layout.
    '''
    # context manager fixes the file-handle leak of 'for line in open(f)';
    # the comprehension replaces the Python-2-only list-returning map()
    with open(f) as fh:
        arrays = [numpy.array([float(v) for v in line.split()]) for line in fh]
    orb_dat=numpy.array(arrays)
    try:
        df_orb=pd.DataFrame(orb_dat,columns=['t(s)','i_o','j','file','a(m)','e','I(rad)',
        'omega(rad)','OMEGA(rad)','f_true(rad)','E_J(J)','m1(kg)','m2(kg)','a_hel(m)',
        'e_hel','I_hel(rad)','omega_hel(rad)','OMEGA_hel(rad)','f_true_hel(rad)'])
    except ValueError: #column-count mismatch: fall back to the short layout
        df_orb=pd.DataFrame(orb_dat,columns=['t(s)','file','i','j','a(m)','e','I(rad)',
        'omega(rad)','OMEGA(rad)','f_true(rad)','m1(kg)','m2(kg)'])
    return df_orb
#-------------------------------------------------------------------------------
def load_run_params(file_path):
    '''
    Load the run_params file as a pandas dataframe
    ----------
    file_path
        path to run_params txt file

    Returns a single-row dataframe with one column per entry of
    params_columns; the 'run' column is overwritten with the integer run
    number parsed from the run directory name on the first line.
    '''
    params_columns=['run','run_dir','N_tot','a_orb(m)','R_eq(m)','rho(kgm-3)',
    'M_tot(kg)','R_c(m)','OM_orb(s-1)','X','OM_circ(s-1)','f','dt(s)','t_run(s)',
    'n_cores','initial']
    df_params= pd.DataFrame([],columns=params_columns)
    # params=numpy.genfromtxt(file_path,dtype=None) # this lines causes warnings
    with open(file_path) as f:
        content = f.readlines()
    params = [x.strip() for x in content]  # one file line per parameter value
    try:
        d=int(params[0].split('/')[-1].split('_')[0]) #load a run with run number at front of dirname
    except:
        d=int(params[0].split('/')[-1].split('_')[-1]) #load a run with run number at end of dirname
        # NOTE(review): the placeholder is only inserted in this branch -
        # presumably these older files have one fewer field; confirm both file
        # layouts really end up with len(params) == len(params_columns)
        params=numpy.insert(params,0,0) #insert placeholder for run number
    df_params_add=pd.DataFrame([params],columns=params_columns)
    df_params_add['run']=d
    # NOTE: DataFrame.append is deprecated in pandas >= 1.4 (use pd.concat)
    df_params=df_params.append(df_params_add,ignore_index=True)
    return df_params
#-------------------------------------------------------------------------------
def file_stats(d,files,n_samp):
    '''
    Function to find the file stats of a particular run. Returns a dataframe
    containing file number, number of particles, file size (MB) and simulation time (s)

    Parameters
    ----------
    d
        path to run directory where files are kept, str
    files
        list of files to analyse in directory d, str
    n_samp
        Sampling interval to use when loading files, int
    '''
    f_num=[]   # snapshot number parsed from the file name
    f_N=[]     # particle count (line count minus the time-stamp header)
    f_s_MB=[]  # file size in MiB
    f_t=[]     # simulation time from the first line
    for i in range(0,len(files),n_samp):
        f=files[i]
        f_path='{}/{}'.format(d,f)
        num=int(f.split('_')[0][3:])  # 'datXXXXXXX_R.txt' -> XXXXXXX
        s_b=float(os.path.getsize(f_path))
        s_MB=s_b/(1024.0**2.0)
        # shell out to wc rather than reading the (possibly huge) file in Python;
        # subtract 1 to deal with the header line
        N=int(subprocess.check_output(['wc -l {}'.format(f_path)], shell=True).split()[0])-1
        f_num.append(num)
        f_N.append(N)
        f_s_MB.append(s_MB)
        # only the first line (the time stamp) is needed here
        with open(f_path, 'r') as _f:
            first_line = _f.readline().strip()
        t=(float(first_line))
        f_t.append(t)
        print '{} {} {} {}'.format(num,N,s_MB,t)
    fstats=numpy.stack((numpy.array(f_num),numpy.array(f_N),numpy.array(f_s_MB),numpy.array(f_t)),axis=1)
    print fstats.shape
    cols=['num','N','s_MB','t(s)']
    df_stats=pd.DataFrame(fstats,columns=cols)
    return df_stats
#-------------------------------------------------------------------------------
def read_dir_sizes(f_size):
    '''
    Read run directory sizes, created in the main directory with:
    $ du -sk ./*/ > run_dir_sizes_K.txt
    Returns a dataframe of size in bytes (binary 1024 scale) and the run number

    Parameters
    ----------
    f_size
        The path to the file run_dir_sizes.txt
    '''
    df_sizes = pd.read_csv(f_size, sep='\t', header=None, names=['size(B)', 'run'])
    for row in range(len(df_sizes)):
        kib = df_sizes.iloc[row, 0]                            # du -sk reports KiB
        dirname = df_sizes.iloc[row, 1]                        # e.g. './003_run/'
        run_number = dirname.split('/')[1].split('_')[0]
        # overwrite the raw entries with the cleaned values
        df_sizes.iloc[row, 0] = kib * 1024                     # KiB -> bytes
        df_sizes.iloc[row, 1] = int(run_number)
    return df_sizes
#-------------------------------------------------------------------------------
def read_problem_file(run_path):
    '''
    create a dataframe of the run details by reading problem.c files. Extracts:
    run, seed, R_eq(m), X, f, R_b(m)
    If the problem.c file does not contain the 'reb_collision_resolve_merge_out_damp_cut' routine,
    the value of R_b(m) will be NaN

    Parameters
    ----------
    run_path
        The path to the directory containing each run as a sub directory
    '''
    jobs=next(os.walk(run_path))[1] #retrieve the files in the run directory
    jobs.sort() #ensure that the files are always sorted the same?
    dat,_num,_seed,_R_eq,_X,_f,_R_b,_N_tot,_rho,_t_max,_a_orb=[],[],[],[],[],[],[],[],[],[],[]
    for j in jobs:
        num=j[:3]  # run number is the first 3 characters of the directory name
        damp_check=0
        # NOTE(review): X, f, R_eq, seed, ... are NOT reset per job; if a
        # pattern is missing from one problem.c the value from the previous
        # job silently carries over (and the very first job would NameError).
        # The open() handle here is also never explicitly closed.
        for line in open('{}/{}/problem.c'.format(run_path,j)):
            #test for certain strings in line, then use this to read value
            #([:-2] strips the trailing ';\n' from 'name=value;\n' lines)
            if 'double X=' in line:
                X=line.split('=')[-1][:-2]
            if 'double _f=' in line:
                f=line.split('=')[-1][:-2]
            if 'rp->R_eq=' in line:
                R_eq=line.split('=')[-1][:-2]
            if 'char ins[64]=' in line:
                seed=line.split('=')[-1][:-2][1:-1] #cut the quotation marks
            if 'double r_boulder=' in line:
                R_b=line.split('=')[-1][:-2]
            if 'reb_collision_resolve_merge_out_damp_cut' in line: #check which collision routine is used
                damp_check=1
            if 'double Ntot=' in line:
                N_tot=line.split('=')[-1].split(';')[0]
            if 'rp->rho=' in line:
                rho=line.split('=')[-1].split(';')[0]
            if 'static double t_max=' in line:
                t_max=line.split('=')[-1].split(';')[0]
            if 'rp->a=' in line:
                a_orb=line.split('=')[-1].split(';')[0]
        if damp_check==0: #if the damped routine is not used we return a value of NaN for R_b
            R_b='NaN'
        _num.append(num)
        _seed.append(seed)
        _R_eq.append(float(R_eq))
        _X.append(float(X))
        _f.append(float(f))
        _R_b.append(float(R_b))  # float('NaN') gives a real NaN
        _N_tot.append(float(N_tot))
        _rho.append(float(rho))
        _t_max.append(t_max)
        _a_orb.append(a_orb)
        # dat.append([num,seed,R_eq,X,f])
    n_jobs=len(_t_max)
    _M_tot=numpy.zeros(n_jobs)
    for k in range(n_jobs):
        # find t_max: evaluate C expressions like '1.0*365.25*24*3600' by
        # multiplying the '*'-separated factors together
        t=_t_max[k].split('*')
        tm=float(t[0])
        for t in t[1:]:  # NOTE: rebinds the loop variable 't' (works, but shadows the list)
            tm*=float(t)
        _t_max[k]=tm
        # find a_orb: value is written as '<number>*AU' in problem.c
        _a_orb[k]=float(_a_orb[k].split('*')[0])*AU
        # find M_tot from the equivalent-sphere radius and material density
        _M_tot[k]=(4.0/3.0)*pi*_rho[k]*(_R_eq[k]**3.0)
    #create dataframe
    df_deets=pd.DataFrame({'run':pd.Series(_num, dtype='int'),
    'initial':pd.Series(_seed, dtype='str'),
    'R_eq(m)':pd.Series(_R_eq, dtype='float'),
    'X':pd.Series(_X, dtype='float'),
    'f':pd.Series(_f, dtype='float'),
    'N_tot':pd.Series(_N_tot, dtype='float'),
    'rho(kgm-3)':pd.Series(_rho, dtype='float'),
    't_run(s)':pd.Series(_t_max, dtype='float'),
    'a_orb(m)':pd.Series(_a_orb, dtype='float'),
    'M_tot(kg)':pd.Series(_M_tot, dtype='float'),
    'R_b(m)':pd.Series(_R_b, dtype='float')})
    # df_deets=df_deets[['run','initial','R_eq(m)','X','f','N_tot','R_b(m)']] #set order of dataframe
    return df_deets
#-------------------------------------------------------------------------------
def read_problem_file_cores(prob_path):
    '''
    Return the first line of the problem.c file at *prob_path* that mentions
    omp_set_num_threads (i.e. the number-of-cores setting), or None if the
    file contains no such line.
    '''
    # context manager fixes the file-handle leak of 'for line in open(...)'
    with open(prob_path) as fh:
        for line in fh:
            #test for certain strings in line, then use this to read value
            if 'omp_set_num_threads' in line:
                return line
    return None  # explicit: no matching line found
#-------------------------------------------------------------------------------
def ang_mom_com(pos,vel,m):
    '''
    Per-particle angular momentum L_i = m_i (r_i' x v_i') of N particles,
    where primes denote position/velocity relative to the centre of mass.

    Parameters
    ----------
    pos
        x, y, z positions
    vel
        vx, vy, vz velocities
    m
        N particle masses
    '''
    com_pos = centre_of_mass(pos, m)
    com_vel = centre_of_mass(vel, m)
    mass_3col = numpy.column_stack((m, m, m))  # broadcast mass onto each component
    return mass_3col * numpy.cross(pos - com_pos, vel - com_vel)
#-------------------------------------------------------------------------------
def ang_mom(pos,vel,m):
    '''
    Per-particle angular momentum L_i = m_i (r_i x v_i) of N particles,
    relative to the origin.

    Parameters
    ----------
    pos
        x, y, z positions
    vel
        vx, vy, vz velocities
    m
        N particle masses
    '''
    mass_3col = numpy.column_stack((m, m, m))  # broadcast mass onto each component
    return mass_3col * numpy.cross(pos, vel)
#-------------------------------------------------------------------------------
def kinetic_energy(vel,m):
    '''
    Kinetic energy 0.5 m |v|^2 of a group of N particles (2d input), or of a
    single particle (1d input).

    Parameters
    ----------
    vel
        vx, vy, vz velocities
    m
        N particle masses
    '''
    if vel.ndim > 1:
        speed_sq = numpy.linalg.norm(vel, axis=1) ** 2.0  # one speed per row
    else:
        speed_sq = numpy.linalg.norm(vel) ** 2.0
    return 0.5 * m * speed_sq
#-------------------------------------------------------------------------------
def GPE(pos1,pos2,m1,m2):
    '''Gravitational potential energy -G m1 m2 / |r2 - r1| of a particle pair.'''
    separation = numpy.linalg.norm(pos2 - pos1)
    return -G * m1 * m2 / separation
def grav_pot_energy_direct1(pos,m):
    '''
    Gravitational potential-energy sum for each particle in a group of N,
    by direct double loop over all pairs - so it is sloooooowwwww.

    NOTE(review): the sign is positive (G m_i m_j / r), unlike GPE() -
    presumably a magnitude is intended; confirm before comparing the two.

    Parameters
    ----------
    pos
        x, y, z positions
    m
        N particle masses

    Returns a list of per-particle sums.
    '''
    n_particles = len(m)
    totals = []
    for i in range(n_particles):
        acc = 0
        for j in range(n_particles):
            if j == i:
                continue  # no self-interaction
            acc += G * m[i] * m[j] / numpy.linalg.norm(pos[j, :] - pos[i, :])
        totals.append(acc)
    return totals
#-------------------------------------------------------------------------------
def grav_pot_energy_direct2(pos,m):
    '''
    Calculates the gravitational potential energy of each particle, in a group
    of N particles, relative to the origin.
    Uses an (N,N) matrix of separations for speed; works for up to ~1e4
    particles (memory grows as N^2).

    NOTE(review): the sign is positive (G m_i m_j / r), unlike GPE() -
    presumably a magnitude is intended; confirm before comparing the two.

    Parameters
    ----------
    pos
        x, y, z positions
    m
        N particle masses
    '''
    # Changes from the original: the large commented-out dead-code block was
    # removed and the Python-2-only print statements were parenthesised
    # (identical output under Python 2, and valid Python 3).
    N=len(m)
    print("define sep matrix")
    rj_mat=numpy.tile(pos,(N,1,1))
    sep=numpy.linalg.norm(rj_mat-numpy.rot90(rj_mat,3,axes=(0,1)),axis=2) #separations of particle j to particle i (matrix: symmetric, diagonal = 0)
    print("define mass matrix")
    mj=numpy.tile(m,(N,1))
    mimj=mj*mj.T #matrix of all values m_j*m_i (symmetric, with diagonal values = m_j*m_j)
    #create matrix of potential energy due to each particle, sum to get total PE on each particle
    print("calculate PE")
    PE=numpy.sum(numpy.divide(G*mimj,sep,where=sep!=0),axis=1) #note that we ignore/set equal to zero cases of division by zero, this drops diagonal terms
    return PE
#-------------------------------------------------------------------------------
def grav_pot_energy_direct_matrix(pos,m):
    '''
    Calculates the gravitational potential energy CONTRIBUTION MATRIX of each
    particle pair, in a group of N particles, relative to the origin.
    Uses an (N,N) matrix of separations for speed; works for up to ~1e4
    particles. Unlike grav_pot_energy_direct2 this returns the full (N,N)
    matrix, not the per-particle sums.

    Parameters
    ----------
    pos
        x, y, z positions
    m
        N particle masses
    '''
    # Changes from the original: the commented-out summed-PE line was removed
    # and the Python-2-only print statements were parenthesised (identical
    # output under Python 2, and valid Python 3).
    N=len(m)
    print("define sep matrix")
    rj_mat=numpy.tile(pos,(N,1,1))
    sep=numpy.linalg.norm(rj_mat-numpy.rot90(rj_mat,3,axes=(0,1)),axis=2) #separations of particle j to particle i (matrix: symmetric, diagonal = 0)
    print("define mass matrix")
    mj=numpy.tile(m,(N,1))
    mimj=mj*mj.T #matrix of all values m_j*m_i (symmetric, with diagonal values = m_j*m_j)
    #create matrix of potential energy due to each particle
    print("calculate PE")
    PE_matrix=numpy.divide(G*mimj,sep,where=sep!=0) #note that we ignore/set equal to zero cases of division by zero, this drops diagonal terms
    return PE_matrix
#-------------------------------------------------------------------------------
def grav_force_direct_matrix(pos,m):
    '''
    Calculates the gravitational force of each particle, in a group of N particles, due to each other particle
    Uses a matrix of separations for speed, works for up to 1e4 particles

    Parameters
    ----------
    pos
        x, y, z positions
    m
        N particle masses

    Returns
    -------
    Force_matrix, mimj, pos_ij, sep_ij (the intermediate matrices are returned
    for inspection/debugging alongside the force result)
    '''
    N=len(m)
    # print "define mass matrix"
    mj=numpy.tile(m,(N,1))
    mimj=mj*mj.T #matrix of all values m_j*m_i (symmetric, with diagonal values = m_j*m_j)
    # print "define sep matrix"
    rj_mat=numpy.tile(pos,(N,1,1))
    pos_ij=rj_mat-numpy.rot90(rj_mat,3,axes=(0,1)) #vectors of particle j to particle i: 3d matrix
    sep_ij=numpy.linalg.norm(pos_ij,axis=2) #separations of particle j to particle i (matrix: symmetric, diagonal = 0)
    # NOTE(review): numpy.dot of an (N,N) matrix with an (N,N,3) array sums over
    # the SECOND axis of pos_ij, which does not obviously match the usual
    # pairwise force accumulation sum_j(-G m_i m_j r_ij / |r_ij|^3) - verify
    # against a direct double-loop computation before trusting Force_matrix.
    Force_matrix=numpy.dot(numpy.divide(-G*mimj,sep_ij**3,where=sep_ij!=0),pos_ij)
    return Force_matrix,mimj,pos_ij,sep_ij
#-------------------------------------------------------------------------------
def N_body_sphere(N,M,R,rho,X,fname):
    '''
    Creates a file containing time on the first line, then particle properties
    (x,y,z,vx,vy,vz,m,r) for N equal-mass particles in a spherical distribution,
    with solid-body rotation about the z axis.

    NOTE(review): z/x/y below place every particle at radius R, i.e. on the
    sphere SURFACE, not uniformly through the volume - confirm this is intended.

    Parameters
    ----------
    N
        number of particles
    M
        total cloud mass
    R
        cloud radius
    rho
        particle material density
    X
        factor of circular rotation of cloud about the z axis
    fname
        name of file to save properties in
    '''
    t=0.0
    m = M/N  # equal-mass particles (NOTE: integer M,N would truncate under Python 2 division)
    r = (3.0*m/(4.0*pi*rho))**(1.0/3.0)  # particle radius from material density
    OM_circ=X*numpy.sqrt(G*M/(R**3))     # solid-body rotation rate (X=1 -> circular)
    # context manager: the file is now closed even if an exception interrupts the loop
    with open(fname,'w') as out:
        out.write('{:.18e}\n'.format(t))
        for i in range(N):
            #evenly distribute the particles in the rotating frame
            x1 = numpy.random.rand()  # unused draw, kept so the RNG stream matches the original
            x2 = numpy.random.rand()
            x3 = numpy.random.rand()
            z = R*(1.0-(2.0*x2))
            x = numpy.sqrt((R*R)-(z*z))*cos(2.0*pi*x3)
            y = numpy.sqrt((R*R)-(z*z))*sin(2*pi*x3)
            pos=numpy.array([x,y,z])
            Om_circ=numpy.array([0,0,OM_circ]) #solid body rotation
            vel=numpy.cross(Om_circ,pos)
            vx = vel[0]
            vy = vel[1]
            vz = vel[2]
            out.write("{:.18e}\t{:.18e}\t{:.18e}\t{:.18e}\t{:.18e}\t{:.18e}\t{:.18e}\t{:.18e}\n".format(x,y,z,vx,vy,vz,m,r))
    return
#-------------------------------------------------------------------------------
def calc_particle_energies(fname):
    '''
    Total energy (kinetic + potential) of each particle in the data file
    *fname*; E < 0 marks a bound particle.

    Parameters
    ----------
    fname
        txt file containing particle data to load

    Side effect: a REBOUND simulation is built and 'energy.txt' is written to
    the current directory, then read back.
    '''
    sim = rebound.Simulation()          # fresh rebound sim
    sim.G = G                           # use this module's gravitational constant
    add_file_to_sim(sim, fname)         # load particles from file into the simulation
    sim.calculate_energy_out()          # writes 'energy.txt' with per-particle KE, PE columns
    energies = numpy.genfromtxt('energy.txt')
    kinetic = energies[:, 0]
    potential = energies[:, 1]
    return kinetic + potential
def bound_ang_mom(fname):
    '''
    Angular momentum vector of the cloud relative to the origin, summed over
    the BOUND particles only (total energy < 0).

    Parameters
    ----------
    fname
        txt file containing particle data to load

    Returns
    -------
    L : summed angular momentum vector of the bound particles
    N : number of bound particles
    t : simulation timestamp of the data file
    '''
    # identify bound particles via per-particle total (KE + PE) energy
    t, df_dat = load_dat_file(fname)
    df_dat['E(J)'] = calc_particle_energies(fname)
    bound = df_dat[df_dat['E(J)'] < 0]
    # angular momentum of the bound subset only
    pos = numpy.array(bound[['x(m)', 'y(m)', 'z(m)']])
    vel = numpy.array(bound[['vx(ms^-1)', 'vy(ms^-1)', 'vz(ms^-1)']])
    m = numpy.array(bound['m(kg)'])
    N = len(bound)
    L = numpy.sum(ang_mom(pos, vel, m), axis=0)
    return L, N, t
def bound_ang_mom2(fname,a):
    '''Angular momentum of the bound particles in dat file *fname*, after
    transforming from the rotating frame (orbital radius *a*) to the fixed
    heliocentric frame.

    Returns (L, N, t): summed angular momentum vector, bound-particle count,
    and the file's simulation timestamp.
    '''
    # find bound particles by calculating the sum of KE and PE for each particle
    t,df_dat=load_dat_file(fname)
    df_dat['E(J)']=numpy.nan
    df_dat['E(J)']=calc_particle_energies(fname)
    df_dat=df_dat[df_dat['E(J)']<0] #only bound particles
    # Transform to fixed reference frame
    pos=numpy.array(df_dat[['x(m)','y(m)','z(m)']])
    vel=numpy.array(df_dat[['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']])
    m=numpy.array(df_dat['m(kg)'])
    N=len(df_dat)
    pos,vel=rotating_to_heliocentric_array_precalc(pos,vel,a,t)
    # Transform to CoM
    # NOTE(review): these lines REPLACE pos/vel with the centre-of-mass vector
    # itself rather than subtracting it from each particle; ang_mom is then
    # evaluated on the CoM alone - confirm this is the intended quantity
    # (bound_ang_mom3 does the same).
    pos=centre_of_mass(pos,m)
    vel=centre_of_mass(vel,m)
    # find angular momentum
    L=numpy.sum(ang_mom(pos,vel,m),axis=0)
    return L,N,t
def bound_ang_mom3(df_dat,a,t):
    '''Angular momentum of the particles in dataframe *df_dat* (assumed already
    filtered to bound particles by the caller), after transforming from the
    rotating frame (orbital radius *a*, time *t*) to the heliocentric frame.

    Returns (L, N, t): summed angular momentum vector, particle count, and the
    time passed in.
    '''
    # find bound particles by calculating the sum of KE and PE for each particle
    # t,df_dat=load_dat_file(fname)
    # df_dat['E(J)']=numpy.nan
    # df_dat['E(J)']=calc_particle_energies(fname)
    # df_dat=df_dat[df_dat['E(J)']<0] #only bound particles
    # Transform to fixed reference frame
    pos=numpy.array(df_dat[['x(m)','y(m)','z(m)']])
    vel=numpy.array(df_dat[['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']])
    m=numpy.array(df_dat['m(kg)'])
    N=len(df_dat)
    pos,vel=rotating_to_heliocentric_array_precalc(pos,vel,a,t)
    # Transform to CoM
    # NOTE(review): as in bound_ang_mom2, pos/vel are REPLACED by the
    # centre-of-mass vector rather than being shifted by it - confirm intended.
    pos=centre_of_mass(pos,m)
    vel=centre_of_mass(vel,m)
    # find angular momentum
    L=numpy.sum(ang_mom(pos,vel,m),axis=0)
    return L,N,t
def line_fit(x, m, c):
    '''Straight line y = m*x + c (model function for curve fitting).'''
    return m * x + c
def find_multiple_system_mass(df_orb):
    '''
    Returns the total mass of all components that make up a binary/multiple system.

    The first orbit seeds the system with m1 + m2; each subsequent orbit adds
    its secondary mass m2 if its primary already appears in the system (as a
    previous primary OR secondary); unconnected orbits are skipped.

    Parameters
    ----------
    df_orb
        dataframe containing orbital data, with columns 'i', 'j',
        'm1(kg)', 'm2(kg)' (at a single timestep?)

    Returns
    -------
    M_tot : total system mass in kg; 0.0 for an empty dataframe
        (previously an empty dataframe raised NameError on the return)
    '''
    M_tot=0.0
    primary_list=[]
    secondary_list=[]
    for k in range(len(df_orb)):
        orb=df_orb.iloc[k]
        primary_int=int(orb['i'])
        secondary_int=int(orb['j'])
        m2=float(orb['m2(kg)'])
        m1=float(orb['m1(kg)'])
        # Always add the first orbit, and initialise the membership lists
        if k==0:
            primary_list=[primary_int]
            secondary_list=[secondary_int]
            M_tot=m1+m2
            continue
        # Add this orbit only if its primary is already part of the system
        # (the two original branches were identical and have been merged)
        if (primary_int in primary_list) or (primary_int in secondary_list):
            primary_list.append(primary_int)
            secondary_list.append(secondary_int)
            M_tot+=(m2)
        # else: this binary is unconnected to the system, skip it
    return M_tot
#-------------------------------------------------------------------------------
def find_bin_num(x,bins):
    '''
    This function finds what bin number (described by array bins) a value, from the array x, resides in

    Bin k (1-based) spans [bins[k-1], bins[k]) for all but the last bin; the
    last bin is closed on both sides, [bins[-2], bins[-1]].

    Parameters
    ----------
    x
        an array of values to be tested
    bins
        the bins to sort values of x into (must be an ordered array, whose bounds contain all x values)

    Returns
    -------
    x_bin_num
        an array of ints for the bin number for each value of x: first bin = 1, second bin = 2...
        (the string 'NaN' is appended, and ERROR printed, for out-of-range values)
    '''
    x_bin_num=[]
    for j in range(len(x)):
        _x=x[j]
        bin_num=-1 # variable to detect problem values
        for k in range(1,len(bins)):
            if k==len(bins)-1: # special case for the point that defines the last bin
                # BUG FIX: the lower test here was previously strict (>), so a
                # value exactly equal to bins[-2] fell into NO bin and was
                # reported as 'NaN'/ERROR; >= closes that gap.
                if _x>=bins[k-1] and _x<=bins[k]:
                    x_bin_num.append(k)
                    bin_num=k
                    break
            else:
                if _x>=bins[k-1] and _x<bins[k]:
                    x_bin_num.append(k)
                    bin_num=k
                    break
        if bin_num==-1:
            x_bin_num.append('NaN')
            print('ERROR')
    return x_bin_num
#-------------------------------------------------------------------------------
def plot_binary_system(df_orb):
    '''
    takes the orbits dataframe with all the data on run, orbits, and particles
    and draws each orbit in a 3d figure: primary (coloured dot + black x),
    secondary (coloured dot) and the sampled orbit curve. Positions are
    transformed from the rotating frame to the heliocentric frame and plotted
    relative to the transformed frame origin. Calls pyplot.show(); returns None.
    '''
    fig = pyplot.figure()
    fig.set_size_inches(10, 10)
    ax1 = fig.add_subplot(111,projection='3d')
    ax1.set_aspect("equal")
    ax1.set_xlabel('x(m)')
    ax1.set_ylabel('y(m)')
    ax1.set_zlabel('z(m)')
    colours=pyplot.rcParams['axes.prop_cycle'].by_key()['color']
    for k in range(len(df_orb)):
        orb=df_orb.iloc[k]
        primary_int=int(orb['i'])
        secondary_int=int(orb['j'])
        # M_tot=float(df_rp['M_tot(kg)'].iloc[0])
        m2=float(orb['m2(kg)'])
        m1=float(orb['m1(kg)'])
        # particle positions
        pos=numpy.array(orb[['x1(m)','y1(m)','z1(m)']])
        pos2=numpy.array(orb[['x2(m)','y2(m)','z2(m)']])
        # Helio and relative (to rotating frame origin) transform!
        t=numpy.unique(numpy.array(df_orb['t(s)']).astype(float))[0] #simualtion time
        a_orb=numpy.unique(numpy.array(df_orb['a_orb(m)']).astype(float))[0] #rotating frame radial distance
        vel=numpy.zeros(3)
        pos0=numpy.zeros(3)
        # print pos0,pos,numpy.linalg.norm(pos),pos2,numpy.linalg.norm(pos2)
        pos0,vel=rotating_to_heliocentric_array(pos0,vel,a_orb,t)
        pos,vel=rotating_to_heliocentric_array(pos,vel,a_orb,t)
        pos2,vel=rotating_to_heliocentric_array(pos2,vel,a_orb,t)
        # print pos0,pos,numpy.linalg.norm(pos),pos2,numpy.linalg.norm(pos2)
        # plot relative to the transformed frame origin
        pos=pos-pos0
        pos2=pos2-pos0
        # print pos0,pos,numpy.linalg.norm(pos),pos2,numpy.linalg.norm(pos2)
        # Find orbit: sample the orbit curve from the orbital elements,
        # centred on the primary
        _orb=numpy.array(orb[['a(m)','e','I(rad)','OMEGA(rad)','omega(rad)']])
        pos_orb=planet_orbit(_orb,1000)
        pos_orb=pos_orb+pos
        # if k==0:
        #     ax1.scatter(pos[0],pos[1],pos[2],color='k',label='m1={:.2e}kg'.format(m1))
        # else:
        #     ax1.scatter(pos[0],pos[1],pos[2],color=colours[k],label='m1={:.2e}kg'.format(m1))
        # wrap the colour index back into the (9-colour) cycle
        colour_k=k
        while colour_k>8:
            colour_k-=8
        print "colour k = {}".format(colour_k)
        ax1.scatter(pos[0],pos[1],pos[2],color=colours[colour_k],label='m1={:.2e}kg'.format(m1))
        ax1.scatter(pos[0],pos[1],pos[2],color='k',marker='x',label='m1={:.2e}kg'.format(m1))
        ax1.plot(pos_orb[:,0],pos_orb[:,1],pos_orb[:,2],color=colours[colour_k+1])
        ax1.scatter(pos2[0],pos2[1],pos2[2],color=colours[colour_k+1],label='m2={:.2e}kg'.format(m2))
    pyplot.show()
#-------------------------------------------------------------------------------
def plot_binary_system_2d(df_orb):
    '''
    takes the orbits dataframe with all the data on run, orbits, and particles
    NOW IN 2D!!!
    Draws two panels - (x,y) and (x,z) projections - of each orbit: primary
    (coloured dot + black x), secondary (coloured dot) and the sampled orbit
    curve, after the rotating-frame -> heliocentric transform.
    Calls pyplot.show(); returns None.
    '''
    fig = pyplot.figure()
    gs = gridspec.GridSpec(1,2)
    ax1 = pyplot.subplot(gs[0,0])
    ax2 = pyplot.subplot(gs[0,1])
    ax1.set_aspect("equal")
    ax2.set_aspect("equal")
    ax1.set_xlabel('x(m)')
    ax1.set_ylabel('y(m)')
    ax2.set_xlabel('x(m)')
    ax2.set_ylabel('z(m)')
    colours=pyplot.rcParams['axes.prop_cycle'].by_key()['color']
    for k in range(len(df_orb)):
        orb=df_orb.iloc[k]
        primary_int=int(orb['i'])
        secondary_int=int(orb['j'])
        # M_tot=float(df_rp['M_tot(kg)'].iloc[0])
        m2=float(orb['m2(kg)'])
        m1=float(orb['m1(kg)'])
        # particle positions
        pos=numpy.array(orb[['x1(m)','y1(m)','z1(m)']])
        pos2=numpy.array(orb[['x2(m)','y2(m)','z2(m)']])
        # Helio and relative (to rotating frame origin) transform!
        t=numpy.unique(numpy.array(df_orb['t(s)']).astype(float))[0] #simualtion time
        a_orb=numpy.unique(numpy.array(df_orb['a_orb(m)']).astype(float))[0] #rotating frame radial distance
        vel=numpy.zeros(3)
        pos0=numpy.zeros(3)
        # print pos0,pos,numpy.linalg.norm(pos),pos2,numpy.linalg.norm(pos2)
        pos0,vel=rotating_to_heliocentric_array(pos0,vel,a_orb,t)
        pos,vel=rotating_to_heliocentric_array(pos,vel,a_orb,t)
        pos2,vel=rotating_to_heliocentric_array(pos2,vel,a_orb,t)
        # print pos0,pos,numpy.linalg.norm(pos),pos2,numpy.linalg.norm(pos2)
        # plot relative to the transformed frame origin
        pos=pos-pos0
        pos2=pos2-pos0
        # print pos0,pos,numpy.linalg.norm(pos),pos2,numpy.linalg.norm(pos2)
        # Find orbit: sample the orbit curve from the orbital elements,
        # centred on the primary
        _orb=numpy.array(orb[['a(m)','e','I(rad)','OMEGA(rad)','omega(rad)','f_true(rad)']])
        pos_orb=planet_orbit(_orb,1000)
        pos_orb=pos_orb+pos
        # if k==0:
        #     ax1.scatter(pos[0],pos[1],pos[2],color='k',label='m1={:.2e}kg'.format(m1))
        # else:
        #     ax1.scatter(pos[0],pos[1],pos[2],color=colours[k],label='m1={:.2e}kg'.format(m1))
        # wrap the colour index back into the (9-colour) cycle
        colour_k=k
        while colour_k>8:
            colour_k-=8
        print "colour k = {}".format(colour_k)
        ax1.scatter(pos[0],pos[1],color=colours[colour_k],label='m1={:.2e}kg'.format(m1))
        ax1.scatter(pos[0],pos[1],color='k',marker='x',label='m1={:.2e}kg'.format(m1))
        ax1.plot(pos_orb[:,0],pos_orb[:,1],color=colours[colour_k+1])
        ax1.scatter(pos2[0],pos2[1],color=colours[colour_k+1],label='m2={:.2e}kg'.format(m2))
        ax2.scatter(pos[0],pos[2],color=colours[colour_k],label='m1={:.2e}kg'.format(m1))
        ax2.scatter(pos[0],pos[2],color='k',marker='x',label='m1={:.2e}kg'.format(m1))
        ax2.plot(pos_orb[:,0],pos_orb[:,2],color=colours[colour_k+1])
        ax2.scatter(pos2[0],pos2[2],color=colours[colour_k+1],label='m2={:.2e}kg'.format(m2))
        # # add the particle according to the orbit
        # pos_f,vel_f=pos_vel_from_orbit(_orb,G*M_sun)
        # pos_f=pos_f+pos
        # ax1.scatter(pos_f[0],pos_f[1],color=colours[colour_k+1],marker='x',s=50)
        # ax2.scatter(pos_f[0],pos_f[2],color=colours[colour_k+1],marker='x',s=50)
        # print "distance between points = {}".format(numpy.absolute(numpy.linalg.norm(pos2-pos_f)))
        # highlight a particular colour
        # if colour_k==2:
        #     ax1.scatter(pos_f[0],pos_f[1],color='k',marker='+',s=500,zorder=3)
        #     ax2.scatter(pos_f[0],pos_f[2],color='k',marker='+',s=500,zorder=3)
    pyplot.show()
#-------------------------------------------------------------------------------
def find_nearest_value_sorted(array,value):
    '''Return the element of *array* nearest to *value*.
    Only works on ascending, sorted arrays; on a tie the RIGHT neighbour wins
    (the left one is chosen only when strictly closer).'''
    i = numpy.searchsorted(array, value, side="left")
    if i <= 0:
        return array[i]  # value is at or below the first element
    if i == len(array) or math.fabs(value - array[i-1]) < math.fabs(value - array[i]):
        return array[i-1]
    return array[i]
#-------------------------------------------------------------------------------
def find_nearest_index_sorted(array,value):
    '''Return the index of the element of *array* nearest to *value*.
    Only works on ascending, sorted arrays; on a tie the RIGHT neighbour wins
    (the left one is chosen only when strictly closer).'''
    i = numpy.searchsorted(array, value, side="left")
    if i <= 0:
        return i  # value is at or below the first element
    if i == len(array) or math.fabs(value - array[i-1]) < math.fabs(value - array[i]):
        return i - 1
    return i
def find_nearest_index(array, value):
    '''Index of the element of *array* closest to *value* (no sorting assumed;
    the first such index wins on ties).'''
    distances = numpy.abs(numpy.asarray(array) - value)
    return distances.argmin()
#-------------------------------------------------------------------------------
def read_dat_file_time(dat_file):
    '''Read only the first line of a dat file - the time stamp - as a float.'''
    with open(dat_file) as fh:
        return float(fh.readline().strip())
def find_dat_file_time(dpath,time_check):
    '''Go backward through the dat files in the directory until we find the file with time closest to time_check.

    dpath      -- directory containing the dat files
    time_check -- target simulation time, same units as the file time stamps
    Returns the file name (not the full path) of the closest-in-time file,
    or None (bare `return`) if the newest file cannot be read.
    '''
    dat_files=create_file_list(dpath) #get dat file list
    # dat_files=file_list_no_restart(dat_files)
    t_check=0
    dat_i=-1  # negative index: walk backwards from the newest file
    times=[]
    inds=[]
    # load files until we reach the correct time
    while t_check==0:
        try:
            #t,df_dat=load_dat_file("{}/{}".format(dpath,dat_files[dat_i]))
            t=read_dat_file_time("{}/{}".format(dpath,dat_files[dat_i])) # faster function
        except:
            # NOTE(review): bare except hides the real error (missing file,
            # malformed time stamp, index underrun) -- consider narrowing.
            print "something wrong with last data file"
            return
        if t<time_check:
            # first file older than the target: record it and stop scanning
            times.append(t)
            inds.append(dat_i)
            t_check=1
        else:
            times.append(t)
            inds.append(dat_i)
            dat_i-=1
    times=numpy.array(times)
    inds=numpy.array(inds)
    # find index closest to time_check
    nearest_i=find_nearest_index(times,time_check)
    # print nearest_i,times[nearest_i]/time_check
    return dat_files[inds[nearest_i]]
def find_dat_bin_file_time(dpath,time_check):
    '''
    Go backward through the dat files in the directory until we find the file with time closest to time_check.
    Finds the last bin file specifically.

    Returns (txt_file_name, bin_file_name) on success, or (0, 0) when no
    files are present or the newest txt file cannot be read.
    '''
    dat_files=next(os.walk(dpath))[2] #retrieve the files in the run directory
    dat_files_bin = [ fi for fi in dat_files if fi.endswith(".bin") and fi.startswith("dat") ] #keep only dat*.bin files
    dat_files_bin.sort() #ensure that the files are always sorted the same?
    dat_files_txt = [ fi for fi in dat_files if fi.endswith(".txt") and fi.startswith("dat") ] #keep only dat*.txt files
    dat_files_txt.sort() #ensure that the files are always sorted the same?
    if (len(dat_files_txt)==0) or (len(dat_files_bin)==0):
        print "no files"
        return 0,0
    # dat_files_txt=file_list_no_restart(dat_files_txt)
    # dat_files_bin=file_list_no_restart(dat_files_bin)
    t_check=0
    dat_i=-1  # negative index: walk backwards from the newest txt file
    times=[]
    inds=[]
    # load files until we reach the correct time
    while t_check==0:
        try:
            # t,df_dat=load_dat_file("{}/{}".format(dpath,dat_files_txt[dat_i]))
            t=read_dat_file_time("{}/{}".format(dpath,dat_files_txt[dat_i]))
        except:
            # NOTE(review): bare except -- any failure (bad time stamp, index
            # underrun) ends the search with the (0, 0) sentinel.
            print "{}/{}".format(dpath,dat_files_txt[dat_i])
            print "something wrong with last data file"
            return 0,0
        if t<time_check:
            # first file older than the target: record it and stop scanning
            times.append(t)
            inds.append(dat_i)
            t_check=1
        else:
            times.append(t)
            inds.append(dat_i)
            dat_i-=1
    times=numpy.array(times)
    inds=numpy.array(inds)
    # find index closest to time_check
    nearest_i=find_nearest_index(times,time_check)
    # print nearest_i,times[nearest_i]/time_check
    # find the corresponding bin file
    f_txt=dat_files_txt[inds[nearest_i]]
    print f_txt
    # file number is parsed from names like datNNNNNNN.* (7 digits after "dat")
    # -- TODO confirm the naming scheme against the writer of these files.
    fnum=int(f_txt[3:10])
    fnum_bin=[int(f_bin[3:10]) for f_bin in dat_files_bin]
    nearest_i=find_nearest_index(fnum_bin,fnum)
    f_bin=dat_files_bin[nearest_i] # find the bin file with the nearest file number
    #f_txt="{}.txt".format(f_bin.split('.')[0]) # find the txt file that corresponds to the bin file
    return f_txt,f_bin
#-------------------------------------------------------------------------------
def orbit_func_faster(f,df_params,m_lim_1=0,m_lim_2=2,N_lim=100,coord_trans=1):
    '''
    Search a file for orbits, returns a list of orbits (if any)
    f - file name including path
    df_params - pandas dataframe containing the run parameters
    m_lim_1 - minimum mass ratio of interest. NB that this is the mass ratio to the largest body, could smaller body binaries be hidden?
    m_lim_2 - minimum factor that particle mass must increase from the minimum
    N_lim - max number of particles to search for orbits
    coord_trans=1 #set to 1 if we want to transform from rotating to helio

    Returns a list of rows shaped like:
    [t, fnum, i, j, a, e, inc, omega, Omega, f_true, m1, m2]
    where i, j are the particle indices (pandas row names) in the file.
    '''
    print "search {}".format(f)
    #create rebound sim once at start
    sim = rebound.Simulation()
    sim.G=G
    orb_list=[] # list will remain empty if no orbits are found
    # file number parsed from names like datNNNNNNN.* -- TODO confirm naming
    fnum=int(f.split("/")[-1][3:10])
    # retrieve from run params: initial per-particle mass
    m_min=float(df_params.iloc[0]['M_tot(kg)'])/float(df_params.iloc[0]['N_tot'])
    # load the data file
    t,df_dat=load_dat_file(f)
    # Sort from highest to lowest mass and keep only the first N_lim particles
    df_dat=df_dat.sort_values(by=['m(kg)'],ascending=False)
    m_cut_1=numpy.amax(df_dat['m(kg)'])*m_lim_1 # Only consider pairs of particles above a certain mass ratio
    m_cut_2=m_min*m_lim_2 # only consider particles that have accreted to a mass m_cut_2
    df_dat=df_dat[(df_dat['m(kg)']>=m_cut_1) & (df_dat['m(kg)']>m_cut_2)]
    N=len(df_dat)
    if N<=1: # deal with case of no viable particles, and therefore no orbits
        return []
    if N>N_lim: # Have an additional cut, never search between > N_lim particles
        df_dat=df_dat.iloc[:N_lim]
        N=len(df_dat)
    df_dat['i']=range(len(df_dat)) # add an index to rank by mass, 0 is most massive
    # print f,N
    #Do coordinate transform from the simulation rotating frame, to the heliocentric frame
    if coord_trans==1:
        print "coord transform"
        r=numpy.array(df_dat.loc[:,['x(m)','y(m)','z(m)']])
        v=numpy.array(df_dat.loc[:,['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']])
        R,V=rotating_to_heliocentric_array(r,v,float(df_params.iloc[0]['a_orb(m)']),t)
        # # OPTIONAL: transform to relative frame
        # R=R-R[0,:]
        # V=V-V[0,:]
        df_dat.loc[:,['x(m)','y(m)','z(m)']]=R
        df_dat.loc[:,['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']]=V
    else:
        print "no coord transform"
    # Add particles to rebound sim, in rank order (0 = most massive)
    for i in df_dat['i']:
        pi=df_dat.iloc[i] #primary properties
        sim.add(x=pi['x(m)'],y=pi['y(m)'],z=pi['z(m)'],
        vx=pi['vx(ms^-1)'],vy=pi['vy(ms^-1)'],vz=pi['vz(ms^-1)'],
        m=pi['m(kg)'])
    # Search for orbits over every (more massive, less massive) pair
    for i in df_dat['i'][:-1]: # the index means we don't search the last particle, it will have already been searched (symmetry!)
        #print "search particle {} out of {}".format(i,N-1)
        pi=df_dat.iloc[i] #primary properties
        for j in df_dat['i']:
            if i==j or j<i: # Do not search for orbit with self, or with a particle of higher mass (it's already been checked)
                continue
            pj=df_dat.iloc[j] # secondary properties
            # use rebound to calculate the orbit, better handling for certain cases, e.g. circular orbits
            orbit = sim.particles[j].calculate_orbit(sim.particles[i])
            # print "number of rebound particles = {}".format(len(sim.particles))
            # Only save closed (bound, e < 1) orbits
            if orbit.e>=0 and orbit.e<1.0:
                # Save the orbit as: ['t(s)','file','i','j','a(m)','e','I(rad)','omega(rad)','OMEGA(rad)','f_true(rad)','m1(kg)','m2(kg)'] where i,j is now the actual particle index in the file
                # add to array
                orb_list.append([t,fnum,int(pi.name),int(pj.name),orbit.a,orbit.e,orbit.inc,orbit.omega,orbit.Omega,orbit.f,pi['m(kg)'],pj['m(kg)']])
    return orb_list # returns a list of orbits
#-------------------------------------------------------------------------------
def plot_pos(f,df_rp,lim=1,size=0.1,r='.',s='.'):
    '''
    Function to plot the xy positions of a data file, f, given a run parameters dataframe
    Coordinates are in the rotating reference frame

    f     -- dat file name (expected like datNNNNNNN.*; digits 3:10 are used in the output name)
    df_rp -- run-parameters dataframe (single row used)
    lim   -- axis half-width multiplier (axis limit is lim*10*R_c)
    size  -- marker size
    r     -- directory to load f from
    s     -- directory to save the png into
    Saves plot_pos_<N>.png; returns nothing.
    '''
    #df_rp=load_run_params("run_params_0.txt")
    Ntot=float(df_rp['N_tot'].iloc[0])
    rho=float(df_rp['rho(kgm-3)'].iloc[0])
    Req=float(df_rp['R_eq(m)'].iloc[0])
    M_tot=float(df_rp['M_tot(kg)'].iloc[0])
    R_c=float(df_rp['R_c(m)'].iloc[0])
    lim=lim*10*R_c
    mp=M_tot/Ntot #kg, initial per-particle mass
    # print df_rp
    fig = pyplot.figure() #open figure once
    gs = gridspec.GridSpec(1, 1)
    ax1 = pyplot.subplot(gs[0,0])
    ax1.set_aspect("equal")
    ax1.set_xlim(-lim,lim)
    ax1.set_ylim(-lim,lim)
    ax1.set_xlabel("x(m)")
    ax1.set_ylabel("y(m)")
    t,df_dat=load_dat_file("{}/{}".format(r,f))
    df_dat=df_dat.sort_values('m(kg)')
    # colour particles by log-mass, normalised between mp (0) and M_tot (1)
    color=(numpy.log10(df_dat['m(kg)'])-numpy.log10(mp))/(numpy.log10(M_tot)-numpy.log10(mp))
    ax1.scatter(df_dat['x(m)'],df_dat['y(m)'],c=color,vmin=0,vmax=1,s=size)
    fig.suptitle('time: {:.2e} s'.format(t))
    #save the figure
    picname="{}/plot_pos_{}.png".format(s,int(f[3:10]))
    print "save {}".format(picname)
    pyplot.savefig(picname)
    # pyplot.show()
    pyplot.close()
#-------------------------------------------------------------------------------
def plot_pos_com(f,df_rp,lim=1,size=0.1,r='.',s='.'):
    '''
    Function to plot the xy positions of a data file, f, given a run parameters dataframe.
    Plot is centred on the centre of mass
    Coordinates are in the rotating reference frame

    Same parameters as plot_pos; the only difference is that the axis window
    is centred on the particle centre of mass instead of the origin.
    Saves plot_pos_com_<N>.png; returns nothing.
    '''
    #df_rp=load_run_params("run_params_0.txt")
    Ntot=float(df_rp['N_tot'].iloc[0])
    rho=float(df_rp['rho(kgm-3)'].iloc[0])
    Req=float(df_rp['R_eq(m)'].iloc[0])
    M_tot=float(df_rp['M_tot(kg)'].iloc[0])
    R_c=float(df_rp['R_c(m)'].iloc[0])
    lim=lim*10*R_c
    mp=M_tot/Ntot #kg, initial per-particle mass
    fig = pyplot.figure() #open figure once
    gs = gridspec.GridSpec(1, 1)
    ax1 = pyplot.subplot(gs[0,0])
    ax1.set_aspect("equal")
    ax1.set_xlabel("x(m)")
    ax1.set_ylabel("y(m)")
    t,df_dat=load_dat_file("{}/{}".format(r,f))
    df_dat=df_dat.sort_values('m(kg)')
    # colour particles by log-mass, normalised between mp (0) and M_tot (1)
    color=(numpy.log10(df_dat['m(kg)'])-numpy.log10(mp))/(numpy.log10(M_tot)-numpy.log10(mp))
    pos=numpy.array(df_dat[['x(m)','y(m)','z(m)']])
    pos_com=centre_of_mass(pos,numpy.array(df_dat['m(kg)']))
    # window the axes around the centre of mass
    ax1.set_xlim(pos_com[0]-lim,pos_com[0]+lim)
    ax1.set_ylim(pos_com[1]-lim,pos_com[1]+lim)
    ax1.scatter(pos[:,0],pos[:,1],c=color,vmin=0,vmax=1,s=size)
    fig.suptitle('time: {:.2e} s'.format(t))
    #save the figure
    picname="{}/plot_pos_com_{}.png".format(s,int(f[3:10]))
    print "save {}".format(picname)
    pyplot.savefig(picname)
    # pyplot.show()
    pyplot.close()
#-------------------------------------------------------------------------------
def create_df_tot(dir,df_orb,df_params,dat_fname):
    '''
    joins together dataframes
    - dir: original name of the run (without the path)
    - df_orb: the orbit dataframe, should probably be a single timestep
    - df_params: the run parameter dataframe for that run
    - dat_fname: the path and file name of the data file
    We return a subset of df_orb, that contains the particle data for only that timestamp
    Should contain the following columns from the merger:
    ['t(s)', 'file', 'i', 'j', 'a(m)', 'e', 'I(rad)', 'omega(rad)', 'OMEGA(rad)',
    'f_true(rad)', 'm1(kg)', 'm2(kg)', 'run_name', 'run', 'run_dir', 'N_tot',
    'a_orb(m)', 'R_eq(m)', 'rho(kgm-3)', 'M_tot(kg)', 'R_c(m)', 'OM_orb(s-1)',
    'X', 'OM_circ(s-1)', 'f', 'dt(s)', 'initial', 'x1(m)', 'y1(m)', 'z1(m)',
    'vx1(ms^-1)', 'vy1(ms^-1)', 'vz1(ms^-1)', 'r1(m)', 'x2(m)', 'y2(m)',
    'z2(m)', 'vx2(ms^-1)', 'vy2(ms^-1)', 'vz2(ms^-1)', 'r2(m)']
    And we calculate the additional columns, that contain particle data at that timestamp:
    ['R_hill(m)','m2/m1','i_largest','m_largest(kg)','m_tot(kg)','n_tot']
    ['t_largest(s)', 'file_largest']
    49 columns total.
    Does not contain:
    ['t_run(s)','n_cores']
    ['n_cores_x' 't_run(s)_x' 't_run(s)_y' 'n_cores_y']
    NB, you may need to use the binary_selector function after creating this df
    ADD A NAN ENTRY FOR EMPTY ORBIT DATAFRAME
    Returns the merged dataframe, or 0 on any consistency error
    (mixed timestamps, orbit/particle mismatch, mass mismatch).
    '''
    # print df_orb
    # print len(df_orb)
    # print dir
    # merge the orb and rp files on a new column run_name
    if len(df_orb)==1:
        #df_orb.loc[0]['run_name']=dir
        df_orb.at[0,'run_name']=dir #use at when setting single values
    else:
        df_orb['run_name']=[dir]*len(df_orb)
    # print df_orb
    # print len(df_orb)
    df_params['run_name']=dir #str(df_params.iloc[0]['run_dir']).split('/')[-1] #add a run_name column for merge
    df_tot=pd.merge(df_orb,df_params,on='run_name') #merge the params and orbit data frames
    df_tot=df_tot.drop(labels=['t_run(s)','n_cores'],axis=1) #drop theses columns as they cause problems
    # load the data file for a particular timestamp
    t,df_dat=load_dat_file(dat_fname)
    # file number parsed from names like datNNNNNNN.* -- TODO confirm naming
    fnum=int(dat_fname.split("/")[-1][3:10])
    fnum_orb=numpy.unique(numpy.array(df_orb['file']))#.astype(int))
    if len(fnum_orb)>1:
        print "ERROR too many orbit timestamps"
        return 0
    if fnum_orb[0]!=fnum:
        print "ERROR orbit and particle data do not match"
        return 0
    df_tot=df_tot[df_tot['file']==fnum].copy()
    # add the particle data
    if df_orb.isnull().values.any(): # there is a nan orbit, add nan particle data
        # print "no orbits, nan entry"
        if len(df_orb)>1:
            print "multiple nan orbits?"
            return 0
        df_tot['x1(m)']=numpy.nan
        df_tot['y1(m)']=numpy.nan
        df_tot['z1(m)']=numpy.nan
        df_tot['vx1(ms^-1)']=numpy.nan
        df_tot['vy1(ms^-1)']=numpy.nan
        df_tot['vz1(ms^-1)']=numpy.nan
        df_tot['r1(m)']=numpy.nan
        df_tot['x2(m)']=numpy.nan
        df_tot['y2(m)']=numpy.nan
        df_tot['z2(m)']=numpy.nan
        df_tot['vx2(ms^-1)']=numpy.nan
        df_tot['vy2(ms^-1)']=numpy.nan
        df_tot['vz2(ms^-1)']=numpy.nan
        df_tot['r2(m)']=numpy.nan
    else: # there are orbits, add particle data
        # primary/secondary particle indices from the orbit rows
        pri=numpy.array(df_tot['i']).astype(int)
        sec=numpy.array(df_tot['j']).astype(int)
        if len(pri)!=len(sec):
            print "pair error!"
            return 0
        df_dat_pri=df_dat.iloc[pri]
        df_dat_sec=df_dat.iloc[sec]
        # Check that the masses match, error if not true
        # if numpy.array_equal(numpy.array(df_tot['m1(kg)']).astype(float),numpy.array(df_dat_pri['m(kg)']).astype(float)): # not always exactly equal
        if not numpy.allclose(numpy.array(df_tot['m1(kg)']).astype(float),numpy.array(df_dat_pri['m(kg)']).astype(float)):
            print "particle error"
            print len(numpy.array(df_tot['m1(kg)']).astype(float)),len(numpy.array(df_dat_pri['m(kg)']).astype(float))
            print numpy.array(df_tot['m1(kg)']).astype(float)-numpy.array(df_dat_pri['m(kg)']).astype(float)
            return 0
        df_tot['x1(m)']=numpy.array(df_dat_pri['x(m)']).astype(float)
        df_tot['y1(m)']=numpy.array(df_dat_pri['y(m)']).astype(float)
        df_tot['z1(m)']=numpy.array(df_dat_pri['z(m)']).astype(float)
        df_tot['vx1(ms^-1)']=numpy.array(df_dat_pri['vx(ms^-1)']).astype(float)
        df_tot['vy1(ms^-1)']=numpy.array(df_dat_pri['vy(ms^-1)']).astype(float)
        df_tot['vz1(ms^-1)']=numpy.array(df_dat_pri['vz(ms^-1)']).astype(float)
        df_tot['r1(m)']=numpy.array(df_dat_pri['r(m)']).astype(float)
        df_tot['x2(m)']=numpy.array(df_dat_sec['x(m)']).astype(float)
        df_tot['y2(m)']=numpy.array(df_dat_sec['y(m)']).astype(float)
        df_tot['z2(m)']=numpy.array(df_dat_sec['z(m)']).astype(float)
        df_tot['vx2(ms^-1)']=numpy.array(df_dat_sec['vx(ms^-1)']).astype(float)
        df_tot['vy2(ms^-1)']=numpy.array(df_dat_sec['vy(ms^-1)']).astype(float)
        df_tot['vz2(ms^-1)']=numpy.array(df_dat_sec['vz(ms^-1)']).astype(float)
        df_tot['r2(m)']=numpy.array(df_dat_sec['r(m)']).astype(float)
    # print df_tot
    # print list(df_tot)
    # add extra variables, m2/m1 and R_hill
    df_tot['m2/m1']=numpy.array(df_tot['m2(kg)']).astype(float)/numpy.array(df_tot['m1(kg)']).astype(float)
    a_orb=numpy.unique(numpy.array(df_params['a_orb(m)']).astype(float))[0] #rotating frame radial distance
    pos_pri=df_tot[['x1(m)','y1(m)','z1(m)']]
    vel_pri=df_tot[['vx1(ms^-1)','vy1(ms^-1)','vz1(ms^-1)']]
    pos_pri,vel_pri=rotating_to_heliocentric_array(pos_pri,vel_pri,a_orb,t) #find heliocentric position of primary
    df_tot['R_hill(m)']= numpy.linalg.norm(pos_pri,axis=1)*((numpy.array(df_tot['m1(kg)']).astype(float)/(3.0*M_sun))**(1.0/3.0))# calculate hill radius of primary
    # add extra particle data
    mass=numpy.array(df_dat['m(kg)']).astype(float)
    M_tot=float(df_params.iloc[0]['M_tot(kg)']) # initial cloud mass
    N_tot=float(df_params.iloc[0]['N_tot']) # initial particle number
    # get the particle mass details
    df_dat=df_dat.sort_values(by='m(kg)',ascending=False) # sort from largest to smallest
    n_tot=int(len(df_dat)) # total number of particles
    part_ind=df_dat.index.values.astype(int)[0] # index of most massive particle
    mass_max=numpy.amax(mass) # most massive particle
    mass_tot=numpy.sum(mass) # total mass of particles in file index
    df_tot['i_largest']=[part_ind]*len(df_tot)
    df_tot['m_largest(kg)']=[mass_max]*len(df_tot)
    df_tot['m_tot(kg)']=[mass_tot]*len(df_tot)
    df_tot['n_tot']=[n_tot]*len(df_tot)
    df_tot['t_largest(s)']=[t]*len(df_tot)
    df_tot['file_largest']=[fnum]*len(df_tot)
    return df_tot
#-------------------------------------------------------------------------------
def binary_selector(df,m_ratio_cut=0.001,R_hill_cut=0.5):
    '''
    Function to cut an orbits dataframe based on mass ratio and separation/R_hill.

    df          -- orbits dataframe with columns 'm2/m1', 'a(m)', 'R_hill(m)'
    m_ratio_cut -- keep rows with secondary/primary mass ratio above this value
    R_hill_cut  -- keep rows whose semi-major axis is below this fraction of the
                   primary's Hill radius
    Returns the filtered dataframe (original is not modified in place).
    '''
    df = df[df['m2/m1'] > m_ratio_cut]
    # Fix: the original also computed an `a/R_hill` array here but never used
    # it; the comparison below applies the same cut directly.
    df = df[df['a(m)'] < (R_hill_cut * df['R_hill(m)'])]
    return df
def create_nan_df_orb(dir,dat_file):
    '''
    makes an orbit dataframe with nan entries
    Requires the run directory name (without the preceding path) and the t=100yr dat file

    The single NaN row is then stamped with the run name, the file's time
    stamp, and its file number (digits 3:10 of the file name).
    '''
    columns = ['t(s)','file','i','j','a(m)','e','I(rad)',
        'omega(rad)','OMEGA(rad)','f_true(rad)','m1(kg)','m2(kg)']
    df_orb = pd.DataFrame(numpy.nan, index=[0], columns=columns)
    df_orb['run_name'] = dir
    df_orb['t(s)'] = read_dat_file_time(dat_file)
    df_orb['file'] = int(dat_file.split("/")[-1][3:10])
    return df_orb
#-------------------------------------------------------------------------------
def dat_file_rot_to_helio(dat_file,df_rp,keep_list=[],load_path=".",save_path=".",update_radius=1,write_to_file=1):
    ''' function to convert a rotating frame dat file to a helio one

    dat_file      -- dat file name (loaded from load_path)
    df_rp         -- run-parameters dataframe (single row used)
    keep_list     -- if non-empty, keep only these dataframe index labels
                     (NOTE(review): mutable default, but it is never mutated here)
    load_path     -- directory to read from
    save_path     -- directory to write the *_hel.txt file into
    update_radius -- if 1, recompute particle radii from mass and density
    write_to_file -- if 1, write the converted file to disk
    Returns (t, df_dat): the time stamp and the converted dataframe
    (heliocentric, Sun particle added, shifted to the COM frame).
    '''
    # get variables
    a_orb=float(df_rp.iloc[0]['a_orb(m)']) # orbital distance
    rho=float(df_rp.iloc[0]['rho(kgm-3)']) # material density
    # load data file
    print "load {}/{}".format(load_path,dat_file)
    t,df_dat=load_dat_file("{}/{}".format(load_path,dat_file))
    if len(keep_list)>0:
        # filter the data file to the requested particles only
        df_dat=df_dat.loc[keep_list]
    #do coordinate transform
    pos_rot=numpy.array(df_dat.loc[:,['x(m)','y(m)','z(m)']])
    vel_rot=numpy.array(df_dat.loc[:,['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']])
    pos_hel,vel_hel=rotating_to_heliocentric_array(pos_rot,vel_rot,a_orb,t)
    df_dat.loc[:,['x(m)','y(m)','z(m)']]=pos_hel
    df_dat.loc[:,['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']]=vel_hel
    if update_radius==1:
        # Recalculate radius from mass, radius is the size of a uniform density sphere, of mass and rho
        print "update radius"
        df_dat.loc[:,'r(m)']=((3.0*numpy.array(df_dat.loc[:,'m(kg)']))/(4.0*numpy.pi*rho))**(1.0/3.0)
    # add the sun particle at the heliocentric origin
    df_sun=pd.Series(data=numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,M_sun,R_sun]),index=list(df_dat))
    # NOTE(review): DataFrame.append is removed in pandas >= 2.0; this file
    # presumably pins an older pandas -- confirm before upgrading.
    df_dat=df_dat.append(df_sun,ignore_index=True)
    df_dat=df_dat.sort_values('m(kg)',ascending=False)
    # move to COM frame
    m=numpy.array(df_dat.loc[:,'m(kg)'])
    pos=numpy.array(df_dat.loc[:,['x(m)','y(m)','z(m)']])
    vel=numpy.array(df_dat.loc[:,['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']])
    pos_com=centre_of_mass(pos,m)
    vel_com=centre_of_mass(vel,m)
    df_dat.loc[:,['x(m)','y(m)','z(m)']]=pos-pos_com
    df_dat.loc[:,['vx(ms^-1)','vy(ms^-1)','vz(ms^-1)']]=vel-vel_com
    # print numpy.amin(numpy.array(df_dat['r(m)']))
    # print t
    # print df_dat
    if write_to_file==1:
        # write to file
        dat_file_hel="{}_hel.txt".format(dat_file.split(".")[0])
        print "save {}/{}_hel.txt".format(save_path,dat_file.split(".")[0])
        with open("{}/{}_hel.txt".format(save_path,dat_file.split(".")[0]), 'w') as f:
            # f.write("{:.18e}\n".format(t))
            # df_dat.to_csv(f,header=False,index=False,sep="\t",float_format="%.18e")
            # Save to a ludicrous precision
            f.write("{:.64e}\n".format(t))
            df_dat.to_csv(f,header=False,index=False,sep="\t",float_format="%.64e")
    return t,df_dat
|
import requests
import hashlib
import random
import os
import json
import getpass
import io
import argparse
import traceback
import datetime
import typing as T
SERVER_URL = "54.85.95.128"  # miniplaces server address (plain HTTP)
SERVER_PORT = 5000  # server port appended to every endpoint URL
VERSION = "0.1"  # client version; main() refuses to run if the server reports a different one
class MPResponse:
    """Wrapper for a server reply: success flag, human-readable message, optional JSON payload."""

    def __init__(self, ok: bool, message: str, payload: T.Optional[T.Any]):
        self.ok = ok
        self.message = message
        self.payload = payload

    def __str__(self):
        buf = io.StringIO()
        status = "SUCCESS." if self.ok else "FAILURE."
        print(status, file=buf)
        print("Message: ", self.message, file=buf)
        # Payload is optional; pretty-print it only when present.
        if self.payload is not None:
            print("Payload: ", json.dumps(self.payload, indent=4), file=buf)
        buf.seek(0)
        return buf.read()
def plaintext_to_password(password:str) -> str:
    """Return the hex digest the server expects for *password*.

    NOTE(review): MD5 is cryptographically broken; it is kept here only
    because the server stores and compares these exact digests.
    """
    return hashlib.md5(password.encode()).hexdigest()
def login(prompt_if_needed = True):
    """Return cached credentials, prompting for and caching them if absent.

    Credentials live in credentials.json next to this script. When no cache
    exists and prompt_if_needed is False, returns None.
    """
    cache_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "credentials.json")
    if os.path.isfile(cache_path):
        with open(cache_path) as fh:
            return json.load(fh)
    if not prompt_if_needed:
        return None
    kerb = input("Kerberos? - ")
    hashed = plaintext_to_password(getpass.getpass("Miniplaces Password [Hidden]? - "))
    creds = {"kerberos": kerb, "password": hashed}
    # Cache for future invocations so the user is only prompted once.
    with open(cache_path, "w") as fh:
        json.dump(creds, fh, indent=4)
    return creds
def get_url(endpoint):
    """Build the full plain-HTTP URL for *endpoint* on the configured server."""
    return "http://{}:{}/{}".format(SERVER_URL, SERVER_PORT, endpoint)
def server_get(endpoint, payload=None, attach_login=False):
    """Call *endpoint* and wrap the reply in an MPResponse.

    A bare call (no payload, no login) is a GET; anything else is a POST
    whose JSON body is the payload merged with the cached credentials.
    Never raises: transport and parse failures come back as failed responses.
    """
    url = get_url(endpoint)
    if payload is None and not attach_login:
        resp = requests.get(url)
    else:
        body = dict(payload or {})
        if attach_login:
            body.update(login())
        resp = requests.post(url, json=body)
    try:
        data = resp.json()
        return MPResponse(data['success'], data['message'], data['payload'])
    except json.decoder.JSONDecodeError:
        return MPResponse(False, f"Server Failure - Expected JSON, got: {resp.content.decode()}", None)
    except Exception as err:
        traceback.print_exc()
        return MPResponse(False, f"Client Failure - {err}", None)
def create_user():
    """Register a new account, prompting for kerb and a confirmed password.

    Refuses to run when credentials are already cached on disk.
    """
    assert login(prompt_if_needed=False) is None, "Already found credentials. Delete credentials.json to log in again."
    print("Creating a user. Please enter your kerb create a password.")
    creds = login()
    # Ask for the password a second time and compare hashes.
    confirmation = plaintext_to_password(getpass.getpass())
    assert confirmation == creds['password'], "Mismatch password. Terminating."
    return server_get("create_user", attach_login=True)
def create_team():
    """Create a new team owned by the logged-in user.

    Returns the server's MPResponse.
    """
    team_name = input("Please enter a team name:\n")
    # Ensure credentials are cached (prompting here if needed) before the
    # request; the original bound the result to an unused local, so only the
    # side effect is kept.
    login()
    return server_get("create_team", {"team_name": team_name}, attach_login=True)
def join_team():
    """Join a team by numeric ID (entered as '#<id>') or by name, plus a join code."""
    raw = input("Please enter a team ID [start with #] or name:")
    if raw.startswith("#"):
        payload = {"team_id": int(raw[1:])}
    else:
        payload = {"team_name": raw}
    payload.update({"join_code": int(input("Enter the join code: "))})
    return server_get("join_team", payload, attach_login=True)
def my_team():
    """Fetch the logged-in user's team record from the server."""
    return server_get("my_team", payload=None, attach_login=True)
def submit():
    """Validate a local answers JSON file and upload it to the server.

    The file must be a JSON object mapping image filenames (str) to lists of
    string guesses. Raises AssertionError on a malformed file.
    """
    input_file = input("Please enter the path to the submission:\n")
    assert os.path.isfile(input_file), f"No such file: {input_file}"
    with open(input_file, "r") as f:
        answers = json.load(f)
    # isinstance instead of `type(x) == T`: accepts subclasses and reads better.
    assert isinstance(answers, dict), "JSON should map filenames to lists of string guesses"
    assert all(isinstance(k, str) for k in answers.keys()), "JSON should map filenames to lists of string guesses"
    assert all(isinstance(v, list) for v in answers.values()), "JSON should map filenames to lists of string guesses"
    assert all(isinstance(guess, str) for v in answers.values() for guess in v), "Found a non-string in guess list"
    return server_get("submit", {"answers": answers}, attach_login=True)
def tableify(data, headers, formatters=None) -> str:
    """Render *data* as a right-aligned plain-text table.

    data       -- list of rows; each row is a list with one cell per header.
    headers    -- column titles, printed as the first row.
    formatters -- optional {header_name: callable} used to stringify cells of
                  that column; all other cells fall back to str().
    Returns the table as one string; every row ends with a newline and each
    cell is right-aligned in (column_width + 3) characters.
    Raises AssertionError when data is not a list of header-length lists.
    """
    assert isinstance(data, list), "Expected data to be a list"
    assert all(isinstance(v, list) for v in data), "Expected data to be a list of lists"
    assert all(len(v) == len(headers) for v in data), "Expected data to be same length as headers"
    def fmt(ix, val):
        # Stringify *val* for column *ix*. BUG FIX: the original returned the
        # raw value when formatters was None, so len() below crashed on any
        # non-string cell; now every path returns str.
        if formatters is not None:
            formatter = formatters.get(headers[ix], None)
            if formatter is not None:
                return str(formatter(val))
        return str(val)
    max_lens = [len(hdr) for hdr in headers]
    stringified = [headers[:]]
    for each_row in data:
        stringified.append([])
        for (ix, each_column) in enumerate(each_row):
            pretty = fmt(ix, each_column)
            stringified[-1].append(pretty)
            # track the widest cell per column for alignment
            max_lens[ix] = max(max_lens[ix], len(pretty))
    s = io.StringIO()
    for each_row in stringified:
        for (ix, each_column) in enumerate(each_row):
            field_len = max_lens[ix] + 3
            print(f"%{field_len}s" % each_column, end="", file=s)
        print("", file=s)
    s.seek(0)
    return s.read()
def time_formatter(timestamp):
dt = datetime.datetime.fromtimestamp(timestamp)
return dt.strftime("%m-%d %H:%M")
def view_leaderboard():
    # Public endpoint: no login attached.
    return server_get("leaderboard")
def view_my_recent_submissions():
    # Most recent submissions for the logged-in user's team.
    return server_get("my_submissions", {"kind": "recent"}, attach_login=True)
def view_my_best_submissions():
    # Best-scoring submissions for the logged-in user's team.
    return server_get("my_submissions", {"kind": "best"}, attach_login=True)
def get_server_version():
    # The server reports its version string in the response message (see main()).
    return server_get("version")
def main():
    """Entry point: version-check against the server, then dispatch one CLI command."""
    resp = get_server_version()
    if resp.ok:
        # Server puts its version in the message field; refuse to run on mismatch.
        server_version = resp.message
        if server_version != VERSION:
            print(f"SERVER VERSION is {server_version}. CLIENT VERSION is {VERSION}. Please update.")
            exit()
    # CLI command name -> handler; the name is the handler's __name__.
    commands = {
        fn.__name__.split(".")[-1]: fn for fn in
        [create_user, create_team, join_team, my_team, submit, view_leaderboard, view_my_recent_submissions, view_my_best_submissions, get_server_version]
    }
    parser = argparse.ArgumentParser(description="Connects to Miniplaces Server")
    parser.add_argument("command", choices=commands.keys(), help="Action to perform")
    args = parser.parse_args()
    fn_to_exec = commands[args.command]
    server_return = fn_to_exec()
    # Tabular commands get a pretty table; everything else uses MPResponse.__str__.
    if server_return.ok and args.command in ['view_leaderboard', 'view_my_recent_submissions', 'view_my_best_submissions']:
        to_print = tableify(
            server_return.payload,
            ["Team Name", "Score", "Submission ID", "Timestamp", "Period"],
            formatters={"Timestamp": time_formatter}
        )
    else:
        to_print = str(server_return)
    print(to_print)
if __name__ == "__main__":
    main()
|
from collections import Counter
import algebra
def majority_vote(labels):
    """Return the most common label; assumes labels are ordered nearest-first.

    Ties are broken by dropping the farthest label and revoting recursively.
    """
    counts = Counter(labels)
    winner, winner_count = counts.most_common(1)[0]
    tied = sum(1 for c in counts.values() if c == winner_count)
    if tied == 1:
        return winner
    # Tie: retry without the farthest neighbour.
    return majority_vote(labels[:-1])
def knn_classify(k, labeled_points, new_point):
    """Classify *new_point* by majority vote among its k nearest labeled points.

    each labeled point should be a pair (point, label)
    """
    # Fix: the original used a Python-2 tuple-parameter lambda
    # (`lambda (point, _): ...`), which is a SyntaxError on Python 3.
    # Indexing the pair works on both interpreters.
    by_distance = sorted(labeled_points,
                         key=lambda pair: algebra.distance(pair[0], new_point))
    k_nearest_labels = [label for _, label in by_distance[:k]]
    return majority_vote(k_nearest_labels)
|
from flaskr.blog import blog
# URL routing table for the blog blueprint: maps each path to its view
# function in flaskr.blog.blog. Views that accept form input also take POST.
blog.blog_bp.add_url_rule('/', view_func=blog.home, methods=('GET',))
blog.blog_bp.add_url_rule('/contact/', view_func=blog.contact, methods=('GET', 'POST',))
blog.blog_bp.add_url_rule('/search/', view_func=blog.search_post, methods=('GET',))
blog.blog_bp.add_url_rule('/author/', view_func=blog.author_details, methods=('GET',)) # GET param - author_id
blog.blog_bp.add_url_rule('/post/create/', view_func=blog.post_create, methods=('GET', 'POST'))
blog.blog_bp.add_url_rule('/post/view/', view_func=blog.post_details, methods=('GET',))
blog.blog_bp.add_url_rule('/post/update/', view_func=blog.post_update, methods=('GET', 'POST'))
# NOTE(review): destructive action exposed over GET -- confirm CSRF handling.
blog.blog_bp.add_url_rule('/post/delete/', view_func=blog.post_delete, methods=('GET',))
blog.blog_bp.add_url_rule('/category/', view_func=blog.post_by_categories, methods=('GET',))
blog.blog_bp.add_url_rule('/me/', view_func=blog.profile, methods=('GET',))
blog.blog_bp.add_url_rule('/add-comment/', view_func=blog.add_comment, methods=('POST',))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.