blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a17bf64bdf3d746da913a8f85bd3814b5586680b | 2bc60297e1dd6ae3b80db049832c388361a30fef | /garmin_connect_login.py | 0e7fdb37a75c2ed6c96375cac2e40cf1c15d7b9c | [] | no_license | archester/garmin_connect_scraper | 69385cc10f21e88a0fdf1952f9b87cb0e39b0cf1 | 0fc61b415eca26f7abfcd330eda12e6bc36cb4d1 | refs/heads/master | 2020-05-24T08:15:04.475097 | 2017-03-14T22:17:19 | 2017-03-14T22:17:19 | 84,839,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,241 | py | """
The MIT License (MIT)
Copyright (c) 2015 Kyle Krafka
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
"""
The code in this file is a great effort from Kyle Krafka's project:
https://github.com/kjkjava/garmin-connect-export
I copied his code and slightly modified to fit my needs.
"""
from urllib import urlencode
import urllib, urllib2, cookielib, json
# Module-level session state (Python 2 code): a single cookie jar shared by
# every request so Garmin's SSO cookies (e.g. CASTGC) persist between calls.
cookie_jar = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
def http_req(url, post=None):
    """GET `url` (or POST when a `post` dict is given) and return the body.

    Uses the module-level opener so cookies persist across calls.  May
    raise urllib2.HTTPError, or Exception on a non-200 response code.
    """
    request = urllib2.Request(url)
    request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2816.0 Safari/537.36') # Tell Garmin we're some supported browser.
    payload = post
    if payload:
        payload = urlencode(payload)  # dict -> POST parameter string
    response = opener.open(request, data=payload)
    status = response.getcode()
    if status != 200:
        raise Exception('Bad return code (' + str(status) + ') for: ' + url)
    return response.read()
def log_in(args):
    """Authenticate `args.user` / `args.password` against Garmin Connect SSO.

    Three-step flow: fetch the gauth hostname, pull the login page (to get a
    session cookie), POST the credentials, then exchange the resulting CASTGC
    ticket cookie at the post-auth endpoint.

    Raises Exception when login fails (no ticket cookie was set).
    """
    REDIRECT = "https://connect.garmin.com/post-auth/login"
    BASE_URL = "http://connect.garmin.com/en-US/signin"
    GAUTH = "http://connect.garmin.com/gauth/hostname"
    SSO = "https://sso.garmin.com/sso"
    CSS = "https://static.garmincdn.com/com.garmin.connect/ui/css/gauth-custom-v1.1-min.css"
    hostname_url = http_req(GAUTH)
    hostname = json.loads(hostname_url)['host']
    data = {'service': REDIRECT,
        'webhost': hostname,
        'source': BASE_URL,
        'redirectAfterAccountLoginUrl': REDIRECT,
        'redirectAfterAccountCreationUrl': REDIRECT,
        'gauthHost': SSO,
        'locale': 'en_US',
        'id': 'gauth-widget',
        'cssUrl': CSS,
        'clientId': 'GarminConnect',
        'rememberMeShown': 'true',
        'rememberMeChecked': 'false',
        'createAccountShown': 'true',
        'openCreateAccount': 'false',
        'usernameShown': 'false',
        'displayNameShown': 'false',
        'consumeServiceTicket': 'false',
        'initialFocus': 'true',
        'embedWidget': 'false',
        'generateExtraServiceTicket': 'false'}
    # URLs for various services.
    url_gc_login = 'https://sso.garmin.com/sso/login?' + urllib.urlencode(data)
    url_gc_post_auth = 'https://connect.garmin.com/post-auth/login?'
    print("Authenticating...")
    # Initially, we need to get a valid session cookie, so we pull the login page.
    http_req(url_gc_login)
    # Now we'll actually login.
    post_data = {'username': args.user, 'password': args.password, 'embed': 'true', 'lt': 'e1s1', '_eventId': 'submit', 'displayNameRequired': 'false'} # Fields that are passed in a typical Garmin login.
    http_req(url_gc_login, post_data)
    # BUG FIX: the original wrapped `[...][0]` in `except ValueError`, but
    # indexing an empty list raises IndexError, so a rejected login crashed
    # with a bare IndexError instead of the friendly message below.
    login_ticket = next(
        (cookie.value for cookie in cookie_jar if cookie.name == "CASTGC"), None
    )
    if login_ticket is None:
        raise Exception("Did not get a ticket cookie. Cannot log in. Did you enter the correct username and password?")
    # Chop of 'TGT-' off the beginning, prepend 'ST-0'.
    login_ticket = 'ST-0' + login_ticket[4:]
    http_req(url_gc_post_auth + 'ticket=' + login_ticket)
    print("Success")
| [
"areliga@o2.pl"
] | areliga@o2.pl |
1004d99114b248ed344c94db0103a2832b8dcf5b | 3a4ef39655fbea6d5a59f630fd4777c8eab2a17a | /staticfiles/staticfiles/pan_kanapka/wsgi.py | 95d14b8e2a49a6d4de7764edeba712f9eb41f960 | [] | no_license | jundymek/pan-kanapka-api | 8a88c3ae8ff5be295ab4c102a2db32780f14726f | df734bef6295d3869455939382849449d589984f | refs/heads/master | 2022-12-17T06:26:42.409765 | 2020-02-12T17:11:03 | 2020-02-12T17:11:03 | 231,992,946 | 1 | 0 | null | 2022-12-08T04:26:18 | 2020-01-06T00:06:27 | JavaScript | UTF-8 | Python | false | false | 400 | py | """
WSGI config for pan_kanapka project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings before building the application.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pan_kanapka.settings')
# WSGI entry point used by application servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| [
"jundymek@gmail.com"
] | jundymek@gmail.com |
3b19190eaa60a2fbc86aa9054c4b80341945f24f | ef14c3b4634a0bdf86a93997f3f7ddd370cf9474 | /analysis.py | 8258f09cd05213d84386493d20cce63cdde6531e | [
"MIT"
] | permissive | hacongdinh/FlapPyBird-Reinforcement-Learning | c31ac5b819ead2b59c9e236b3977fea4bf48de4d | 33948adb8a7af407c6cfd03eb2a57b327195a4fb | refs/heads/master | 2023-05-03T19:53:18.471485 | 2021-05-19T17:26:52 | 2021-05-19T17:26:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,723 | py | import json
import numpy as np
from typing import Dict, List
import matplotlib.pyplot as plt
def load_data(filename: str) -> dict:
    """Load training results from ``data/<filename>.json`` and add max scores.

    Returns the parsed dict with an extra ``'max_scores'`` list holding, for
    each episode, the best score reached up to and including that episode
    (floored at 0, matching the original running-max seed).
    """
    # BUG FIX: the original opened a hard-coded bogus path and ignored the
    # `filename` parameter entirely.
    with open(f"data/{filename}.json", "r") as f:
        training_state = json.load(f)
    max_reached = 0
    training_state['max_scores'] = []
    for score in training_state['scores']:
        max_reached = max(score, max_reached)
        training_state['max_scores'].append(max_reached)
    return training_state
def plot_performance(agent_states: Dict[str, List], window=50, xlim=None, ylim=None, logy=False) -> None:
    """Plot per-episode scores plus running-max and rolling-mean curves."""
    episodes = agent_states['episodes']
    scores = agent_states['scores']
    max_scores = agent_states['max_scores']
    fig, ax = plt.subplots()
    plt.ylabel('Score', fontsize=16)
    plt.xlabel('Episode', fontsize=16)
    if logy:
        ax.set_yscale('log')
        plt.ylabel('log(Score)', fontsize=14)
        # Shift everything by one so zero scores survive the log scale.
        scores = [value + 1 for value in scores]
        max_scores = [value + 1 for value in max_scores]
    plt.scatter(episodes, scores, label='scores', color='b', s=3)
    plt.plot(episodes, max_scores, label='max_score', color='g')
    rolling_mean = np.convolve(scores, np.ones((window,)) / window, mode='same')
    plt.plot(episodes, rolling_mean, label='rolling_mean_score', color='orange')
    if xlim:
        plt.xlim(xlim)
    if ylim:
        plt.ylim(ylim)
    ax.tick_params(axis="x", labelsize=12)
    ax.tick_params(axis="y", labelsize=12)
    plt.legend(loc='upper left', fontsize=14)
    fig.tight_layout()
    plt.show()
if __name__ == '__main__':
    # Plot the saved training run stored at data/validation_resume.json.
    filename = 'validation_resume'
    agent_performance = load_data(filename)
    plot_performance(agent_performance, window=3, logy=True)
| [
"anthonyli358@gmail.com"
] | anthonyli358@gmail.com |
28fe05503890e1d58e8f3360c2c2d65753534bd2 | 8d6ae21b78b3b40382e21198c571a7957e055be5 | /July20/Statements/factors.py | 0711fc04d9ad0d4b19c9061f222235e998ee6070 | [] | no_license | vj-reddy/PythonBatch1 | 6c1a429e0ac57ea1db7b04af18187e84cd52f2d5 | b86a5a16b1004d1e4f855a57b019704c71425bbf | refs/heads/master | 2023-03-16T06:05:48.104363 | 2020-10-16T13:55:03 | 2020-10-16T13:55:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | number = int(input("Enter the number: "))
# Print every proper factor of `number` (read from stdin above),
# testing 2 .. number // 2 inclusive by trial division.
index = 2
while index <= number//2:
    if number%index == 0:
        print(index)
index = index+1 | [
"qtdevops@gmail.com"
] | qtdevops@gmail.com |
79482782fcb8b6667337beddd4268b5a30566b9b | d268e0c28cc213b65285d6a8dc5997495c672ab1 | /gcode/mandelbrot.py | 9eb6d5d987d53f78ccf7f08707832f20277ce319 | [] | no_license | theojulienne/compclub-stuff | cb716aa71ae69baab4357dc8abacc3e7228d4ed5 | 0d7a800107126adb3f13fa2be7c50bde11cee896 | refs/heads/master | 2016-09-06T15:30:29.874062 | 2010-01-08T16:42:25 | 2010-01-08T16:42:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,392 | py | import gcode
IMG_WIDTH = 1000
IMG_HEIGHT = 1000
IMG_DEPTH = 5.0
BOTTOM_LEFT = (-2, -1.0)
TOP_RIGHT = ( 0.5, 1.0)
MAX_ITERATIONS = 256

def mandelbrot(px, py):
    """Map pixel (px, py) to an escape-time depth in [0, 5].

    The pixel is projected into the complex-plane window spanned by
    BOTTOM_LEFT/TOP_RIGHT; the escape iteration count sets the cut depth.
    """
    # Translate pixel coordinates into the complex-plane window.
    cx = BOTTOM_LEFT[0] + (px / float(IMG_WIDTH)) * (TOP_RIGHT[0] - BOTTOM_LEFT[0])
    cy = BOTTOM_LEFT[1] + (py / float(IMG_HEIGHT)) * (TOP_RIGHT[1] - BOTTOM_LEFT[1])
    x = 0
    y = 0
    iterations = 0
    while (x**2 + y**2)**0.5 < 2 and iterations < MAX_ITERATIONS:
        x, y = (x**2 - y**2 + cx), (2 * x * y + cy)
        iterations += 1
    # 1 iteration -> 0.0 depth; MAX_ITERATIONS (256) -> 5.0 depth.
    return ((iterations - 1) / 255.0) * 5
def createMandelbrotGCode(gcode):
    """Emit g-code that carves the Mandelbrot depth map, row by row."""
    prev_depth = IMG_DEPTH + 1  # sentinel deeper than any real depth
    for row in range(IMG_HEIGHT):
        for col in range(IMG_WIDTH):
            depth = mandelbrot(col, row)
            # Rising: lift the tool before travelling to the next point.
            if depth > prev_depth:
                gcode.cutAbsolute(z=depth)
            gcode.cutAbsolute(x=float(col), y=float(row))
            # Sinking: travel first, then plunge at the new point.
            if depth < prev_depth:
                gcode.cutAbsolute(z=depth)
            prev_depth = depth
        gcode.moveAbsolute(x=0, y=row)
def main():
    # Stream the generated g-code straight into output.txt.
    # NOTE(review): the file handle is never explicitly closed.
    o = gcode.simpleGenerator(open('output.txt', 'w'))
    createMandelbrotGCode(o)
main() | [
"goldy@goldy"
] | goldy@goldy |
92ab4157ebd650e6d13fedc3a76a0f3f01379d4c | 813be230bf3d9aa532c23c8250e32262323e4e80 | /arpreq.py | f217512974f254a2e7cd9ed5162041e14f2475bd | [] | no_license | Odin-SG/ArpFake | b4cc7767fec04bb684ae6fb3a8d66e65a39de83f | bb6d41d0793675b2f78cd5d76a43b7b18b6a9776 | refs/heads/master | 2023-05-08T20:02:38.342511 | 2021-06-04T13:13:27 | 2021-06-04T13:13:27 | 347,054,301 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | import socket
from struct import pack
from uuid import getnode as get_mac
def main():
    """Broadcast a single ARP who-has request for 10.7.31.99 from eth0.

    Requires Linux and root privileges (raw socket bound to an interface).
    """
    dest_ip = [10, 7, 31, 99]
    # BUG FIX: uuid.getnode() returns the MAC as an int whose '%x' form can be
    # shorter than 12 digits (leading zero byte), which mis-aligned the byte
    # pairs in the original split.  Zero-pad to exactly 12 hex digits.
    mac_hex = '%012x' % get_mac()
    local_mac = [int(mac_hex[i:i + 2], 16) for i in range(0, 12, 2)]
    local_ip = [int(x) for x in socket.gethostbyname(socket.gethostname()).split('.')]
    sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.htons(0x0800))
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    sock.bind(("eth0",socket.htons(0x0800)))
    ARP_FRAME = [
        pack('!H', 0x0001), # HRD: hardware type, Ethernet
        pack('!H', 0x0800), # PRO: protocol type, IPv4
        pack('!B', 0x06), # HLN: MAC address length
        pack('!B', 0x04), # PLN: IPv4 address length
        pack('!H', 0x0001), # OP: ARP request
        pack('!6B', *local_mac), # SHA: sender hardware address
        pack('!4B', *local_ip), # SPA: sender protocol address
        pack('!6B', *(0x00,)*6), # THA: target MAC unknown (zeroed)
        pack('!4B', *dest_ip), # TPA: target protocol address
    ]
    print(ARP_FRAME)
    try:
        sock.sendto(b''.join(ARP_FRAME), ('255.255.255.255', 0))
    finally:
        # Close the raw socket even if the send fails.
        sock.close()
if __name__ == "__main__":  # run only when executed directly, not on import
main() | [
"gongn640@gmail.com"
] | gongn640@gmail.com |
9ba8b9092d007d443d6e996caf75582f8692ddf8 | 61ad9e5de786b1665d0781210ab0d30d25dc23fd | /hacker-rank/algorithm/warmup_time_conversion.py | 60d63411788e45c3142bb18e389c469f57c37934 | [] | no_license | sydul-fahim-pantha/python-practice | ed63e66a7e7ea4ad6c8055c7ab73463933f4ba89 | 185d4f9acce1922f16ac9c1377965318a7c7898b | refs/heads/master | 2021-06-03T02:30:39.743659 | 2020-06-25T19:31:05 | 2020-06-25T19:31:05 | 144,102,218 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 547 | py | #!/bin/python3
import os
import sys
def timeConversion(s):
    """Convert a 12-hour 'hh:mm:ssAM'/'hh:mm:ssPM' string to 24-hour form."""
    meridiem = s[-2:]
    hour = int(s[:2])
    s = s[:-2]  # drop the AM/PM suffix
    print(s)    # debug output retained from the original
    if meridiem == 'AM' and hour == 12:
        # Midnight hour becomes 00.
        s = '00' + s[2:]
    elif meridiem == 'PM' and hour != 12:
        # Afternoon/evening hours shift by 12; noon stays 12.
        s = str(12 + hour) + s[2:]
    print(s)
    return s
if __name__ == '__main__':
    # NOTE(review): output path is hard-coded to one machine — parameterize.
    f = open('/home/sydul/Work/all_git_repos/personal_sydul_fahim_pantha/python-practice/hacker-rank/out.txt', 'w')
    s = input()
    result = timeConversion(s)
    f.write(result + '\n')
    f.close()
| [
"fahim.pantha@gmail.com"
] | fahim.pantha@gmail.com |
88f092084337bcd4d9073c16381898f674a18ef3 | 81bad22641705683c68ff89f19362ba202891652 | /napari/plugins/exceptions.py | e9979de0d9e5c07e6d09e0f1592bcca062d4cf1c | [
"BSD-3-Clause"
] | permissive | sofroniewn/napari | ee2a39a1a1132910db6f2a47994671e8138edb51 | beaa98efe5cf04ba659086e7a514b2ade05277af | refs/heads/main | 2023-07-12T02:46:41.185932 | 2022-09-14T21:57:15 | 2022-09-14T21:57:15 | 154,751,137 | 2 | 3 | BSD-3-Clause | 2023-07-01T10:26:45 | 2018-10-25T23:43:01 | Python | UTF-8 | Python | false | false | 1,970 | py | from napari_plugin_engine import PluginError, standard_metadata
from ..utils.translations import trans
def format_exceptions(
    plugin_name: str, as_html: bool = False, color: str = "Neutral"
) -> str:
    """Return formatted tracebacks for all exceptions raised by plugin.

    Parameters
    ----------
    plugin_name : str
        The name of a plugin for which to retrieve tracebacks.
    as_html : bool
        Whether to return the exception string as formatted html,
        defaults to False.
    color : str
        Color scheme name forwarded to the traceback formatter,
        defaults to "Neutral".

    Returns
    -------
    str
        A formatted string with traceback information for every exception
        raised by ``plugin_name`` during this session; an empty string when
        the plugin raised no errors.
    """
    _plugin_errors = PluginError.get(plugin_name=plugin_name)
    if not _plugin_errors:
        return ''
    # Imported lazily to avoid import cycles at module load time.
    from napari import __version__
    from ..utils._tracebacks import get_tb_formatter
    format_exc_info = get_tb_formatter()
    _linewidth = 80
    # Center the "Errors for plugin ..." banner within the line width.
    _pad = (_linewidth - len(plugin_name) - 18) // 2
    msg = [
        trans._(
            "{pad} Errors for plugin '{plugin_name}' {pad}",
            deferred=True,
            pad='=' * _pad,
            plugin_name=plugin_name,
        ),
        '',
        f'{"napari version": >16}: {__version__}',
    ]
    # Add package metadata from the first error's plugin module, if known.
    err0 = _plugin_errors[0]
    if err0.plugin:
        package_meta = standard_metadata(err0.plugin)
        if package_meta:
            msg.extend(
                [
                    f'{"plugin package": >16}: {package_meta["package"]}',
                    f'{"version": >16}: {package_meta["version"]}',
                    f'{"module": >16}: {err0.plugin}',
                ]
            )
    msg.append('')
    # One numbered section per recorded error, each with its traceback.
    for n, err in enumerate(_plugin_errors):
        _pad = _linewidth - len(str(err)) - 10
        msg += ['', f'ERROR #{n + 1}: {str(err)} {"-" * _pad}', '']
        msg.append(format_exc_info(err.info(), as_html, color))
    msg.append('=' * _linewidth)
    return ("<br>" if as_html else "\n").join(msg)
| [
"noreply@github.com"
] | noreply@github.com |
3ddd3c7a6ea93275b1daa3b4ed761c163f77e27e | 04afedaa658c61f463d7f79cad2376d1b1652115 | /corehq/apps/hqcase/management/commands/ptop_fast_reindex_domains.py | 7d01394fe9ba4a08bc7bcffe20e54cd822dd7e93 | [] | no_license | comm-scriptek/commcare-hq | 1897c86a8cce7422018b8be9bdcae76a6403a28a | a818a704e2439f7c0d66b432d052db909a97064d | refs/heads/master | 2020-12-24T09:44:46.036215 | 2013-07-02T07:52:37 | 2013-07-02T07:52:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | from corehq.apps.domain.models import Domain
from corehq.apps.hqcase.management.commands.ptop_fast_reindexer import PtopReindexer
from corehq.pillows.domain import DomainPillow
# Batch sizes for the reindex run.
# NOTE(review): neither constant is referenced in this module; presumably
# they are read by PtopReindexer machinery — confirm before removing.
CHUNK_SIZE = 500
POOL_SIZE = 15
class Command(PtopReindexer):
    help = "Fast reindex of domain elastic index by using the domain view and reindexing domains"
    # Couch document class to read, the view listing the docs to reindex,
    # and the pillow that writes each doc into the Elasticsearch index.
    doc_class = Domain
    view_name = 'domain/not_snapshots'
    pillow_class = DomainPillow
| [
"yedispaghetti@gmail.com"
] | yedispaghetti@gmail.com |
5f7296a2c63f51459b3ce77f09584dbef613d994 | 76b064a76ffd23b0d0dff57d266abd6a111e9247 | /Ch04 Counting Elements/MissingIntegers.py | 819800f4b1f6de69459764150ccb264152e3f8ce | [] | no_license | startFromBottom/codility_problems | 78e0e0fcd914730e0dd8f725dde3dc96be83a255 | c8e128b5768e8140e658274e7cc8fee95c1bce9a | refs/heads/master | 2022-12-05T12:38:01.595250 | 2020-08-25T11:49:44 | 2020-08-25T11:49:44 | 289,836,615 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 649 | py | """
problem link : https://app.codility.com/programmers/lessons/4-counting_elements/missing_integer/
result : https://app.codility.com/demo/results/trainingETTG9E-G32/
"""
def solution(A):
    """Return the smallest positive integer (>= 1) missing from A.

    Improvements over the original: O(n) time / O(n) space via a membership
    set instead of sorting, and an empty list no longer crashes (the original
    indexed A[0] and raised IndexError on []); it now returns 1.
    """
    present = set(A)
    # The answer is at most len(A) + 1, so this loop is O(n) overall.
    candidate = 1
    while candidate in present:
        candidate += 1
    return candidate
| [
"uhh0701@gmail.com"
] | uhh0701@gmail.com |
98c54c85fd55e667a6486116bfe6a05dc5331860 | 258f1de0a4f5aff1c266a6028bbfc9a51413e715 | /legislation/models.py | 630fd40a8ba4e656385493be383d00b95fc8478d | [] | no_license | katrinamariehh/dj_moneypolitics | f7605a111f1fc487b580a4572869c781f98dea5d | 35552f7e545cc1226b0deda0dc422da5ad8fee45 | refs/heads/main | 2022-02-18T07:09:57.025773 | 2017-01-13T20:39:10 | 2017-01-13T20:39:10 | 54,608,202 | 0 | 0 | null | 2022-01-21T19:20:19 | 2016-03-24T02:22:50 | Python | UTF-8 | Python | false | false | 2,394 | py | from django.db import models, DataError
import csv
# Create your models here.
class LegislatorManager(models.Manager):
    """Manager with helpers for bulk-loading legislator records."""

    def load_current_legislators(self, path):
        """Load legislators from the CSV at `path`; the first row is a header.

        Only the identity and cross-reference ID columns are persisted; the
        remaining CSV columns are parsed but ignored.
        """
        with open(path) as f:
            reader = csv.reader(f, delimiter=',')
            # FIX: skip the header through the csv reader itself instead of
            # mixing f.readline() with an active reader, which breaks if the
            # header ever contains quoted newlines.
            next(reader, None)
            for row in reader:
                # (the local formerly named `type` shadowed the builtin)
                (last_name, first_name, birthday, gender, leg_type,
                 state, district, party, url, address, phone,
                 contact_form, rss_url, twitter, facebook,
                 facebook_id, youtube, youtube_id, bioguide_id,
                 thomas_id, opensecrets_id, lis_id, cspan_id,
                 govtrack_id, votesmart_id, ballotpedia_id,
                 washington_post_id, icpsr_id, wikipedia_id) = row
                legislator = Legislator(
                    last_name=last_name,
                    first_name=first_name,
                    leg_type=leg_type,
                    state=state,
                    district=district,
                    party=party,
                    bioguide_id=bioguide_id,
                    thomas_id=thomas_id,
                    opensecrets_id=opensecrets_id,
                    lis_id=lis_id,
                    govtrack_id=govtrack_id
                )
                legislator.save()
class Legislator(models.Model):
    """A member of Congress, with cross-reference IDs into external datasets."""
    last_name = models.CharField(max_length=30)
    first_name = models.CharField(max_length=30)
    # Presumably the "type" column of the source CSV (chamber marker) —
    # confirm against LegislatorManager.load_current_legislators.
    leg_type = models.CharField(max_length=5)
    state = models.CharField(max_length=2)
    district = models.CharField(max_length=3)
    party = models.CharField(max_length=30)
    # IDs in external datasets (Bioguide, THOMAS, OpenSecrets, LIS, GovTrack).
    bioguide_id = models.CharField(max_length=15)
    thomas_id = models.CharField(max_length=15)
    opensecrets_id = models.CharField(max_length=15)
    lis_id = models.CharField(max_length=10)
    govtrack_id = models.CharField(max_length=15)
class Bill(models.Model):
    """A bill in a given Congress."""
    congress = models.IntegerField()
    bill_type = models.CharField(max_length=5)
    title = models.CharField(max_length=50, null=True)
class Vote(models.Model):
    """A single legislator's vote on a bill."""
    bill_id = models.CharField(max_length=10)
    vote_id = models.CharField(max_length=30)
    legislator_id = models.CharField(max_length=15)
    # Presumably names which ID namespace `legislator_id` uses — confirm.
    legislator_id_type = models.CharField(max_length=15)
    vote_value = models.CharField(max_length=15)
    vote_type = models.CharField(max_length=15)
    # NOTE(review): ForeignKey without on_delete — required from Django 2.0.
    bill_object = models.ForeignKey(Bill)
| [
"katrina@Katrinas-MBP.home"
] | katrina@Katrinas-MBP.home |
e3aa0c3fafee820ac2eef394c6d8e4dea8bbd241 | 83b88fd648f034780b3f2af5730035660e9866cf | /accounts/views.py | 57bed70c4ef753a892c0fd8e5a9a0bcbd070e9d4 | [] | no_license | pukovnikkostadinovic/djangoApp | c1d7aa2997ca63e5185ad0bb00aedf1e738fd772 | 4c02c6466a7953432dc5d0ee758f5b32d75494e7 | refs/heads/master | 2020-03-19T06:26:13.374884 | 2018-06-05T12:48:52 | 2018-06-05T12:48:52 | 136,019,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | from django.http import HttpResponse
from django.shortcuts import render
# Create your views here.
def index(request):
    """Render the accounts home page with demo context data."""
    args = {'name': 'Hasan Hasanovic', 'numbers': [1, 2, 3, 4, 5]}
return render(request, 'accounts/home.html',args) | [
"pukovnikkostadinovic@gmail.com"
] | pukovnikkostadinovic@gmail.com |
6d9c457530aa6d98022e26c72bdaf60d244eb673 | fc0d9b3107d7afeeed8d9df733219aa77c4a0d10 | /models/pumped_hydro/ev.py | c54c9197a686d518259c6e02dae6314b793bd62f | [
"Apache-2.0"
] | permissive | switch-model/DEPRECATED-switch-hawaii-studies | 007931f8bb58480db81b8c59031e8d911a14567e | 685085c7e3734db75b6bf9744aa05024389e1bc1 | refs/heads/master | 2021-06-01T00:16:50.557334 | 2016-06-29T09:42:11 | 2016-06-29T09:42:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,053 | py | import os
from pyomo.environ import *
from switch_mod import timescales
def define_components(m):
    """Add EV adoption and charging components to the Pyomo model `m`.

    Defines fleet-cost parameters (extra EV ownership cost plus the fuel
    cost of the remaining ICE vehicle-miles), a ChargeEVs decision variable,
    and a constraint forcing each timeseries to deliver the full EV energy.
    Also registers the new cost/load components with the model's lists.
    """
    m.ev_gwh_annual = Param(m.LOAD_ZONES, m.PERIODS, default=0.0)
    # TODO: calculate these data better and get them from a database
    # total miles traveled by vehicle fleet (assuming constant at Oahu's 2007 level from http://honolulucleancities.org/vmt-reduction/ )
    total_vmt = 13142000*365
    # annual vehicle miles per vehicle (HI avg from http://www.fhwa.dot.gov/ohim/onh00/onh2p11.htm)
    vmt_per_vehicle = 11583
    ev_vmt_per_kwh = 4.0 # from MF's LEAF experience
    ice_vmt_per_mmbtu = (40.0 / 114000.0) * 1e6 # assuming 40 mpg @ 114000 Btu/gal gasoline
    ice_fuel = 'ULSD' # we assume gasoline for the ICE vehicles costs the same as ULSD
    # note: this is the utility price, which is actually lower than retail gasoline
    # extra (non-fuel) annual cost of owning an EV vs. conventional vehicle (mostly for batteries)
    ev_extra_vehicle_cost_per_year = 1000.0
    # Derived fleet sizes: EV miles follow directly from the energy budget.
    m.ev_vmt_annual = Param(m.LOAD_ZONES, m.PERIODS, initialize=lambda m, z, p:
        m.ev_gwh_annual[z, p] * 1e6 * ev_vmt_per_kwh
    )
    m.ev_count = Param(m.LOAD_ZONES, m.PERIODS, initialize=lambda m, z, p:
        m.ev_vmt_annual[z, p] / vmt_per_vehicle
    )
    # calculate the extra annual cost (non-fuel) of all EVs, relative to ICEs
    m.ev_extra_annual_cost = Param(m.PERIODS, initialize=lambda m, p:
        sum(ev_extra_vehicle_cost_per_year * m.ev_count[z, p] for z in m.LOAD_ZONES)
    )
    # calculate total fuel cost for ICE (non-EV) VMTs
    m.ice_fuel_cost = Param(m.PERIODS, initialize=lambda m, p:
        sum(
            (total_vmt - m.ev_vmt_annual[z, p]) / ice_vmt_per_mmbtu * m.fuel_cost[z, ice_fuel, p]
            for z in m.LOAD_ZONES
        )
    )
    # add cost components to account for the vehicle miles traveled via EV or ICE
    m.cost_components_annual.append('ev_extra_annual_cost')
    m.cost_components_annual.append('ice_fuel_cost')
    # calculate the amount of EV energy to provide during each timeseries
    # (assuming that total EV energy requirements are the same every day)
    m.ev_mwh_ts = Param(m.LOAD_ZONES, m.TIMESERIES, initialize=lambda m, z, ts:
        m.ev_gwh_annual[z, m.ts_period[ts]] * 1000.0 * m.ts_duration_hrs[ts] / timescales.hours_per_year
    )
    # decide when to provide the EV energy
    m.ChargeEVs = Var(m.LOAD_ZONES, m.TIMEPOINTS, within=NonNegativeReals)
    # make sure to charge all EVs
    m.ChargeEVs_min = Constraint(m.LOAD_ZONES, m.TIMESERIES, rule=lambda m, z, ts:
        sum(m.ChargeEVs[z, tp] for tp in m.TS_TPS[ts]) == m.ev_mwh_ts[z, ts]
    )
    # add the EV load to the model's energy balance
    m.LZ_Energy_Components_Consume.append('ChargeEVs')
def load_inputs(m, switch_data, inputs_dir):
    """Read the per-zone annual EV energy parameter from ev_energy.tab."""
    ev_file = os.path.join(inputs_dir, 'ev_energy.tab')
    switch_data.load_aug(
        filename=ev_file,
        auto_select=True,
        param=m.ev_gwh_annual,
    )
| [
"mfripp@hawaii.edu"
] | mfripp@hawaii.edu |
ec4ab8529e4b4dd8832fa8a89bc0dd1e5ae514e9 | a3120b88d457e917d647e871d5e1e26bda69b2bc | /삼성SDS기본/8979_올림픽.py | 45c900389bfefc6c2522470317ba4ac1201943d7 | [] | no_license | MMyungji/algorithm2 | 91d5c6860f38096e9ed8bfde897760da8295e4d7 | d8c4b6dd318a8ac1b25c7ff6e24f1c5181b3a3eb | refs/heads/main | 2023-08-15T22:05:43.570267 | 2021-10-22T11:12:31 | 2021-10-22T11:12:31 | 374,083,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 531 | py | num, who = map(int,input().split())
# Read each country's (id, gold, silver, bronze) row; `num` and `who` were
# read from stdin earlier in the script.
medals = [list(map(int,input().split())) for _ in range(num)]
# Rank best-first: more golds, then silvers, then bronzes.
medals.sort(key=lambda x:(x[1],x[2],x[3]), reverse=True)
grade=0
# Medal triple of the current tie group (seeded from first place).
scores = [medals[0][1], medals[0][2], medals[0][3]]
# Number of additional entries tied with the current group.
same_grade=0
#print(medals)
for n,g,s,b in medals:
    #print(scores, grade, same_grade)
    if scores == [g,s,b]:
        # Same medal counts: shares the current rank.
        same_grade+=1
    else:
        # New medal combination: rank drops past everyone tied above.
        grade+=1+same_grade
        same_grade = 0
        scores = [g,s,b]
    if n == who:
        # Target country found; a still-zero grade means it tied for first.
        if grade==0:
            grade=1
        print(grade)
        break
| [
"hallomj1@gmail.com"
] | hallomj1@gmail.com |
3a7a563b2888ad99f23bf322475f3a934141adf9 | ce53a777068906866cd97d211f51c986492f4e4f | /blog/views.py | f0fcb461ebcb44709629880bae642816a2c159f8 | [] | no_license | LittelOrange/my-first-blog | f1744b8174d76cfa9b28721d17f12555bc041392 | 59851fe653e3a1f2ba063a9da088ef243ec77018 | refs/heads/master | 2020-03-31T08:22:46.517470 | 2018-10-10T06:57:41 | 2018-10-10T06:57:41 | 152,054,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,454 | py | from django.shortcuts import render, get_object_or_404,redirect
from .models import Post
from django.utils import timezone
from .forms import PostForm
# Create your views here.
def post_list(request):
    """List posts whose publish date has passed, oldest first."""
    published = Post.objects.filter(
        published_date__lte=timezone.now()
    ).order_by('published_date')
    return render(request, 'blog/post_list.html', {'posts': published})
def post_detail(request, pk):
    """Show a single post, or 404 when the pk is unknown."""
    return render(
        request,
        'blog/post_detail.html',
        {'post': get_object_or_404(Post, pk=pk)},
    )
def post_new(request):
    """Create a post: empty form on GET, save and redirect on valid POST."""
    if request.method != "POST":
        form = PostForm()
        return render(request, 'blog/post_edit.html', {'form': form})
    form = PostForm(request.POST)
    if form.is_valid():
        post = form.save(commit=False)
        post.author = request.user
        post.published_date = timezone.now()
        post.save()
        return redirect('post_detail', pk=post.pk)
    # Invalid submission: re-render with the bound form so errors display.
    return render(request, 'blog/post_edit.html', {'form': form})
def post_edit(request, pk):
    """Edit an existing post (404 on unknown pk); save on valid POST."""
    post = get_object_or_404(Post, pk=pk)
    if request.method == "POST":
        form = PostForm(request.POST, instance=post)
        if form.is_valid():
            updated = form.save(commit=False)
            updated.author = request.user
            updated.published_date = timezone.now()
            updated.save()
            return redirect('post_detail', pk=updated.pk)
    else:
        # GET: pre-fill the form from the stored post.
        form = PostForm(instance=post)
return render(request, 'blog/post_edit.html', {'form': form}) | [
"1024351971@qq.com"
] | 1024351971@qq.com |
3c6a227d5f61bb7232ecf80295a3a3341a7bcd48 | b22931f39920db60c705ce18f8e3f32ca2e5aec8 | /naive_k2onnx/utils.py | 098a620c76cd39c6d5e9c4b752279a05e1a01980 | [
"Unlicense"
] | permissive | liob/naive_k2onnx | d939e9e5efcbafda119d8e2d9f302bc6f5766833 | ac03d5d4f6bf89d9a1939af8d30ffbd48efeeb6d | refs/heads/master | 2023-02-10T21:12:17.973208 | 2021-01-11T09:59:05 | 2021-01-11T09:59:05 | 284,772,904 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 996 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import onnx
def tf_name2canonical(tf_name):
    """Strip the trailing ':<output-index>' from a TF tensor name, if any."""
    canonical, sep, _index = tf_name.rpartition(':')
    return canonical if sep else tf_name
def to_onnx_shapes(shapes):
    """Convert shapes from channels-last (b, h, w, ..., c) to channels-first."""
    def _channels_first(dims):
        reordered = np.roll(np.array(dims), 1)  # c, b, h, w, ...
        reordered[0:2] = reordered[0:2][::-1]   # b, c, h, w, ...
        return reordered.tolist()
    return [_channels_first(dims) for dims in shapes]
def to_onnx_axis(axis):
    """Translate a channels-last axis index to channels-first ordering."""
    if axis == 0:
        return 0  # batch axis stays first
    if axis == -1:
        return 1  # channel axis moves right after batch
    return axis + 1
def np2tensor(data, name):
    """Wrap a numpy array as an onnx TensorProto named `name`."""
    onnx_dtype = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[data.dtype]
    return onnx.helper.make_tensor(
        name=name,
        data_type=onnx_dtype,
        dims=data.shape,
        vals=data.flatten())
def np2constant(data, name):
    """Build an onnx Constant node (plus its value tensor) holding `data`."""
    tensor = np2tensor(data, f'{name}/value')
    node = onnx.helper.make_node(
        'Constant',
        inputs=[],
        outputs=[name],
        value=tensor)
return node, tensor | [
"hbwinther@metalabs.de"
] | hbwinther@metalabs.de |
e9bd9123e21c45297fb5c25a99af80f250bdb59d | 352b7ddc1ae628b746fb863714071286e99abb9a | /drsa/_nbdev.py | daac99fab25b9afe006d0e6c35e6f47134535916 | [
"Apache-2.0"
] | permissive | kiminh/drsa | db4cbd5f3ffafc90603f3ae73fe16e512c8f3944 | 071148fa81188dd02793ccd90c7812a3f53bbf8b | refs/heads/master | 2022-10-20T21:29:35.778880 | 2020-06-15T22:18:33 | 2020-06-15T22:18:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 861 | py | # AUTOGENERATED BY NBDEV! DO NOT EDIT!
# Autogenerated nbdev metadata: maps exported symbols to their notebooks.
# NOTE(review): `doc_url` is defined below but deliberately absent from
# __all__ — confirm against the nbdev template before changing.
__all__ = ["index", "modules", "custom_doc_links", "git_url"]
index = {"assert_correct_input_shape": "00_functions.ipynb",
         "assert_correct_output_shape": "00_functions.ipynb",
         "survival_rate": "00_functions.ipynb",
         "event_rate": "00_functions.ipynb",
         "event_time": "00_functions.ipynb",
         "log_survival_rate": "00_functions.ipynb",
         "log_event_rate": "00_functions.ipynb",
         "log_event_time": "00_functions.ipynb",
         "event_time_loss": "00_functions.ipynb",
         "event_rate_loss": "00_functions.ipynb",
         "DRSA": "01_model.ipynb"}
# Library modules generated from the notebooks.
modules = ["functions.py",
           "model.py"]
doc_url = "https://collinprather.github.io/drsa/drsa/"
git_url = "https://github.com/collinprather/drsa/tree/master/"
def custom_doc_links(name):
    """nbdev hook: this project defines no custom documentation links."""
    return None
| [
"collin.prather742@gmail.com"
] | collin.prather742@gmail.com |
7a8717cdd5a7690f45841111b5d029110b87fda7 | 22562fc3aafb40dbd2a0f26a3001e3f494c57643 | /employee_register/views.py | 0e2e40849fd34fc4e037998fcbcf2c2a57651f5b | [] | no_license | Geeky-har/Employee-Management-System | bd7e8fa695feaba12b162c2e3d59c482110c9cd0 | efd4a42c4fa28df67c6473bc3437c3c4e9799adc | refs/heads/main | 2023-03-05T15:55:40.053493 | 2021-02-15T11:59:53 | 2021-02-15T11:59:53 | 338,606,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,181 | py | from django.shortcuts import render, HttpResponse, redirect
from .forms import EmployeeForm
from .models import Employee
from django.contrib import messages
# Create your views here.
def employee_form(request, id=0):
    """Render the employee form (GET) or create/update a record (POST).

    id == 0 means a fresh registration; any other id edits that record.
    """
    if request.method == 'GET':
        if id == 0:
            # Fresh registration: blank form.
            form = EmployeeForm()
        else:
            # Update: pre-fill the form with the stored record.
            form = EmployeeForm(instance=Employee.objects.get(pk=id))
        return render(request, 'employee_form.html', {'form': form})
    # POST: bind the submitted data, attaching the existing row on update.
    is_update = id != 0
    if is_update:
        form = EmployeeForm(request.POST, instance=Employee.objects.get(pk=id))
    else:
        form = EmployeeForm(request.POST)
    if form.is_valid():
        form.save()
        # Flash an alert matching the operation that just happened.
        if is_update:
            messages.success(request, 'Record is Successfully updated')
        else:
            messages.success(request, 'New record is successfully added')
    return redirect('/employee/list')
def employee_list(request):
    """Show every employee currently stored in the database."""
    return render(
        request,
        'employee_list.html',
        {'employee_list': Employee.objects.all()},
    )
def employee_delete(request, id):
    """Remove the employee with the given pk, then fall through to redirect."""
    # NOTE(review): deletion over GET carries no CSRF protection — consider POST.
    employee = Employee.objects.get(pk=id)
    employee.delete()
employee.delete()
return redirect('/employee/list') | [
"harshnegi6477@gmail.com"
] | harshnegi6477@gmail.com |
126d3821725ea2fb5c962f763d7c95c961fecd3b | 9660f1ac71cefdedc6764037b2810177a82435f8 | /api/urls.py | 89703d3a0e1fa189aa180c4de87453349ca77457 | [] | no_license | elinahovakimyan/hotelify-backend | 3b8d8c4aea91970c77fcf14e0f969c52a81b646e | c055bafb888923e83d958ab7d8900c2fc52559a9 | refs/heads/master | 2022-12-09T20:19:30.827806 | 2020-03-10T09:29:51 | 2020-03-10T09:29:51 | 246,209,380 | 0 | 0 | null | 2022-12-08T03:46:21 | 2020-03-10T04:38:05 | JavaScript | UTF-8 | Python | false | false | 443 | py | from django.urls import path, include
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register('signup', views.SignupViewSet, basename='signup')
router.register('login', views.LoginViewSet, basename='login')
router.register('profile', views.ProfileViewSet, basename='profile')
router.register('hotel', views.HotelViewset, basename='hotel')
urlpatterns = [
path('', include(router.urls)),
]
| [
"elinahovakimyan@gmail.com"
] | elinahovakimyan@gmail.com |
4b80f8f908a20e3b7fc3b6b9b66760d52059ec6f | 1c58faeadc7c3b3c51beb3367e3e3d88c8892362 | /SAE_resnext_legacy.py | b06b3f47aff955f266a59254301864f63f249e88 | [] | no_license | TaoKai/TSAE_resnext | a71a9c91740c7a2791f84e0774f3c31478ded967 | 51856d96f178b92aa5632e2a081c47c93959f135 | refs/heads/master | 2023-02-22T22:50:23.269091 | 2021-01-20T10:05:54 | 2021-01-20T10:05:54 | 317,475,131 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,686 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models as models
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class SAE_RESNEXT_ENCODER(nn.Module):
def __init__(self):
super(SAE_RESNEXT_ENCODER, self).__init__()
self.resnext50 = models.resnext50_32x4d(pretrained=True)
self.conv1 = self.resnext50.conv1
self.bn1 = self.resnext50.bn1
self.relu = self.resnext50.relu
self.maxpool = self.resnext50.maxpool
self.L1 = self.resnext50.layer1
self.L2 = self.resnext50.layer2
self.L3 = self.resnext50.layer3
self.L4 = self.resnext50.layer4
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
l1_out = self.L1(x)
l2_out = self.L2(l1_out)
l3_out = self.L3(l2_out)
l4_out = self.L4(l3_out)
return l4_out, l3_out, l2_out, l1_out
class UPSAMPLE_LAYER(nn.Module):
def __init__(self, feature_in, feature_out):
super(UPSAMPLE_LAYER, self).__init__()
self.feature_in = feature_in
self.feature_out = feature_out
self.conv = nn.Conv2d(feature_in, feature_out, (1, 1))
self.bn = nn.BatchNorm2d(feature_out)
self.activ = nn.ReLU()
def forward(self, layer_d, layer_u):
layer_d = self.conv(layer_d)
layer_u = F.upsample(layer_d, size=layer_u.shape[2:], mode='bilinear')+layer_u
layer_u = self.bn(layer_u)
layer_u = self.activ(layer_u)
return layer_u
class SAE_DECODER(nn.Module):
def __init__(self, encoder_grad=True):
super(SAE_DECODER, self).__init__()
self.encoder_grad = encoder_grad
self.encoder = SAE_RESNEXT_ENCODER()
self.up_layer43 = UPSAMPLE_LAYER(2048, 1024)
self.up_layer32 = UPSAMPLE_LAYER(1024, 512)
self.up_layer21 = UPSAMPLE_LAYER(512, 256)
self.conv_u0 = nn.Conv2d(256, 32, (1, 1))
self.conv_u0_3x3 = nn.Conv2d(256, 32, (3, 3), padding=1, padding_mode='reflect')
self.bn_u0 = nn.BatchNorm2d(32)
self.conv_u1 = nn.Conv2d(32, 3, (1, 1))
self.conv_u1_3x3 = nn.Conv2d(32, 3, (3, 3), padding=1, padding_mode='reflect')
self.bn_u1 = nn.BatchNorm2d(3)
self.activ = nn.ReLU()
self.loss_func = nn.MSELoss()
def forward(self, x):
if self.encoder_grad:
l4_out, l3_out, l2_out, l1_out = self.encoder(x)
else:
with torch.no_grad():
l4_out, l3_out, l2_out, l1_out = self.encoder(x)
l3_out = self.up_layer43(l4_out, l3_out)
l2_out = self.up_layer32(l3_out, l2_out)
l1_out = self.up_layer21(l2_out, l1_out)
u0_layer = F.upsample(l1_out, size=(int(x.shape[2]/2), int(x.shape[3]/2)), mode='bilinear')
u03_layer = self.conv_u0_3x3(u0_layer)
u0_layer = self.activ(self.bn_u0(self.conv_u0(u0_layer)+u03_layer))
u1_layer = F.upsample(u0_layer, size=x.shape[2:], mode='bilinear')
u13_layer = self.conv_u1_3x3(u1_layer)
u1_layer = self.bn_u1(self.conv_u1(u1_layer)+u13_layer)
return u1_layer
def loss(self, x, u1_layer):
cost = self.loss_func(u1_layer, x)
return cost
if __name__ == "__main__":
mean = torch.tensor([0.485, 0.456, 0.406])
std = torch.tensor([0.229, 0.224, 0.225])
x = torch.randint(0, 256, (5, 112, 96, 3)).float()/255
x = (x-mean)/std
x = x.permute(0, 3, 1, 2)
sae = SAE_DECODER(encoder_grad=True)
out = sae(x)
cost = sae.loss(x, out)
print(cost, cost.shape)
torch.save(sae.state_dict(), 'tmp.pth')
| [
"kai.tao@inveno.com"
] | kai.tao@inveno.com |
2b5db389de18f58a080bc46fd39a2f1c1e09f874 | ac478d935566ae1f977d5312ad8d5215088554d3 | /models.py | e331ba8641b60bb03ad283146341f25c7b4fa384 | [] | no_license | calarts/othertownsend | e2f8cab1f10e8dd04756284a31818ea4bd1f79c0 | 50641399804881b759721294a897a4d221ed7821 | refs/heads/master | 2022-12-09T20:35:08.491380 | 2019-06-23T14:55:03 | 2019-06-23T14:55:03 | 189,501,370 | 0 | 0 | null | 2022-12-08T05:11:51 | 2019-05-31T00:39:22 | Python | UTF-8 | Python | false | false | 5,664 | py | from random import choice
from datetime import time, datetime
from peewee import *
from shapely.wkt import dumps, loads
from _config import DEBUG
if DEBUG:
mydb = SqliteDatabase(':memory:')
else:
mydb = SqliteDatabase("other.db")
def gimmecurseconds():
now = datetime.now() # should be local time!
secs_since_midnight = (now - now.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()
return int(secs_since_midnight)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# MODELS
# from models import Person, Heart, Brain, Place, Step, Look, Conversation
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
class BaseModel(Model):
class Meta:
database = mydb
class Person(BaseModel):
name = CharField()
telegram_id = BigIntegerField()
created_at = DateTimeField()
chat_name = CharField()
first_name = CharField()
last_name = CharField()
login = CharField()
language_code = CharField()
def get_mytimepoints(self):
return self.timepoints
def get_myheartbeats(self):
return self.heartbeats
def get_myconversations(self):
return self.conversations
def get_personalreply(self,update,themeat):
pleasentries = ['Hi',
'yawn',
'are you still here?'
'I was just getting back to it.',
'could you ask me in a few minutes?',
"I'll sleep when I'm dead."]
personalreply = "Hi again " + str(update.message.from_user.name) + "!\n"
personalreply = personalreply + choice(pleasentries)
personalreply = personalreply + themeat
return personalreply
def get_mymood(self,myday=int(datetime.today().day)):
feels = ["💛","💜","💜","💛","💜","💜","💛",
"💛","💛","💛","💛","💛","💜","💜",
"💛","💜","💜","💛","💛","💜","💛",
"💜","💛","💛","💜","💜","💛","💛",
"💜","💛","💛","💛","💜","💛","💜"]
self.feels = feels[myday]
if self.feels == "💜": self.mood = 0
if self.feels == "💛": self.mood = 1
return self.feels, self.mood
def get_mysleep(self,myday=int(datetime.today().day)):
sleeps = ["--","~","--","--","--","--","~",
"--","~","--","~","--","--","--",
"--","--","--","~","--","--","--",
"--","--","--","--","~","--","~",
"--","~","--","--","--","--","--"]
self.sleep = sleeps[myday]
return self.sleep
def gimmebeats(self,mykeys):
# mykeys = set().union(*(d.keys() for d in alistofdicts))
mykey = min(mykeys, key=lambda x:abs(x - gimmecurseconds() ))
q = Heart.select().where(Heart.timestamp == int(mykey))
for entry in q:
self.mybpm = entry.bpm
return self.mybpm
def gimmecurrsteps(self,mykeys):
mykey = min(mykeys, key=lambda x:abs(x - gimmecurseconds() ))
q = Step.select().where(Step.timestamp == int(mykey))
for entry in q:
self.mysteps = entry.steps
return self.mysteps
def gimmecurrlooks(self):
looklist = []
for l in Look.select():
mystr = "<a href='%s'>%s</a>" %(l.link,l.look)
looklist.append(mystr)
self.looklist = looklist
return self.looklist
def gimmeclosestpoint(self):
# mykeys = set().union(*(d.keys() for d in alistofdicts))
# get the keys by querying the places
mykeys = []
q = Place.select()
for entry in q:
mykeys.append(int(entry.timestamp))
mykey = min(mykeys, key=lambda x:abs(x - gimmecurseconds() ))
q = Place.select().where(Place.timestamp == int(mykey))
for entry in q:
self.myplce = entry.point
geom = loads(self.myplce)
return geom.x, geom.y # (37.9609969049851, -122.404216421264)
def gimmeclosestplace(self):
# mykeys = set().union(*(d.keys() for d in alistofdicts))
# get the keys by querying the places
mykeys = []
q = Place.select()
for entry in q:
mykeys.append(int(entry.timestamp))
mykey = min(mykeys, key=lambda x:abs(x - gimmecurseconds() ))
q = Place.select().where(Place.timestamp == int(mykey))
for entry in q:
self.myplce = entry.point
return self.myplce
class Conversation(BaseModel):
# record conversations with users
actor = ForeignKeyField(Person, backref='conversations')
message = TextField()
timestamp = DateTimeField(default=datetime.now)
class Heart(BaseModel):
actor = ForeignKeyField(Person, backref='heartbeats')
timestamp = IntegerField()
bpm = IntegerField()
class Place(BaseModel):
actor = ForeignKeyField(Person, backref='timepoints')
timestamp = IntegerField()
point = CharField()
mode = CharField()
def __repr__(self):
return self.timestamp, self.mode, loads(self.point)
class Step(BaseModel):
# do we count steps individually
# or count them in a 24 hour period?
actor = ForeignKeyField(Person, backref='steps')
steps = IntegerField()
timestamp = IntegerField()
class Look(BaseModel):
# do we count steps individually
# or count them in a 24 hour period?
actor = ForeignKeyField(Person, backref='looks')
look = CharField()
link = CharField()
# timestamp = IntegerField()
| [
"goodwind@metro.net"
] | goodwind@metro.net |
aa6025ca3f596c50a066dfe23bd9e32f3de84ba2 | ebe422519443dbe9c4acd3c7fd527d05cf444c59 | /modular_equation.py | 117dc8a748507e9c28c073df70bd420e73642f56 | [] | no_license | SaiSudhaV/coding_platforms | 2eba22d72fdc490a65e71daca41bb3d71b5d0a7b | 44d0f80104d0ab04ef93716f058b4b567759a699 | refs/heads/master | 2023-06-19T18:05:37.876791 | 2021-07-15T18:02:19 | 2021-07-15T18:02:19 | 355,178,342 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | # cook your dish here
def modular_equation(n, m):
res, tem = [], [1] * (n + 1)
for i in range(2, n + 1):
res.append(tem[m % i])
j = m % i
while j < n + 1:
tem[j] += 1
j += i
return sum(res)
if __name__ == "__main__":
t = int(input())
for i in range(t):
n, m = map(int, input().split())
print(modular_equation(n, m)) | [
"saisudhavadisina@gmail.com"
] | saisudhavadisina@gmail.com |
6abbabf714d5860cf6102168545f6d8a0a791408 | f9c068e230cee2bedfe6b8ee0d1b57b291aa8faa | /test/MainNet.py | d020c1cf678a824353d385994055da18d7737c20 | [] | no_license | MaryamHamad/DI4SLF | fb8bb8b48aee40c74a2edc11cc417fdbb26d7fbd | 38afff3061d869f48af121c8e006acd86286781e | refs/heads/main | 2023-09-05T16:34:04.817648 | 2021-11-22T02:51:20 | 2021-11-22T02:51:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,132 | py | import torch
import matplotlib.pyplot as plt
import warnings
import numpy as np
from RefNet import FlowRefNet, ViewRefNet
from MlpNet import MlpNet
warnings.filterwarnings("ignore")
plt.ion()
# Main Network construction
class MainNet(torch.nn.Module):
def __init__(self,opt):
super(MainNet,self).__init__()
self.mlpNet = MlpNet(opt)
self.flowRefNet = FlowRefNet(opt)
self.viewRefNet = ViewRefNet(opt)
self.range_disp = opt.range_disp
self.r_patch_size = opt.refined_patch_size
self.ind_input_view = opt.ind_input_view
def forward(self, lf_sparse, flow_sparse, warped_lf_sparse, patch_left, patch_right):
b,an_sparse,r_patch_size,patch_size = lf_sparse.shape
_,an_novel,_,_ = patch_left.shape
N = r_patch_size * r_patch_size
ind_input_view = np.array(self.ind_input_view)-1
ind_novel_view = np.delete(np.arange(an_sparse+an_novel),ind_input_view)
lf_sparse = lf_sparse.reshape(-1,1,r_patch_size,patch_size)
flow_sparse = flow_sparse.reshape(-1,1,r_patch_size,patch_size)
warped_lf_sparse = warped_lf_sparse.reshape(-1,1,r_patch_size,patch_size)
# content embeddings
feat_flow_sparse = self.flowRefNet(flow_sparse,lf_sparse,warped_lf_sparse)
# concate spatial and angular code to the content embeddings
spatialCode = (torch.arange(self.range_disp)-self.range_disp//2).type_as(flow_sparse).reshape(1,1,self.range_disp,1).expand(b*an_novel*an_sparse,-1,-1,N)
ang_code = torch.tensor([ind_input_view - ind_novel_view[i] for i in range(len(ind_novel_view))]).type_as(flow_sparse)
ang_code = ang_code.reshape(1,an_novel,an_sparse,1,1,1).expand(b,-1,-1,-1,self.range_disp,N).reshape(b*an_novel*an_sparse,1,self.range_disp,N)
flow_sparse = torch.nn.functional.unfold(flow_sparse,kernel_size = (1,self.range_disp)).reshape(b,1,an_sparse,1,self.range_disp,N).expand(-1,an_novel,-1,-1,-1,-1).reshape(b*an_novel*an_sparse,1,self.range_disp,N)
feat_flow_sparse = torch.nn.functional.unfold(feat_flow_sparse,kernel_size = (1,self.range_disp)).reshape(b,1,an_sparse,64,self.range_disp,N).expand(-1,an_novel,-1,-1,-1,-1).reshape(b*an_novel*an_sparse,64,self.range_disp,N)
features = torch.cat([flow_sparse,feat_flow_sparse,spatialCode,ang_code],1)
# predict dynamic weights and confidences
weight, confs = self.mlpNet(features.permute(0,3,2,1).reshape(b*an_novel*an_sparse*N,self.range_disp,67))
weight = weight.reshape(b*an_novel,an_sparse,N,self.range_disp)
weight_left = torch.nn.functional.softmax(weight[:,0], dim = 2)
weight_right = torch.nn.functional.softmax(weight[:,1], dim = 2)
weight = torch.cat([weight_left.unsqueeze(1),weight_right.unsqueeze(1)],1)
# reconstruct novel views
sparseEPIs = torch.nn.functional.unfold(lf_sparse,kernel_size = (1,self.range_disp))
sparseEPIs = sparseEPIs.reshape(b,1,an_sparse,self.range_disp,N).expand(b,an_novel,an_sparse,self.range_disp,N).reshape(b*an_novel,an_sparse,self.range_disp,N)
leftNovelView = torch.bmm( sparseEPIs.permute(0,3,1,2).reshape(b*an_novel*N,an_sparse,self.range_disp)[:,0:1,:], weight_left.reshape(b*an_novel*N,self.range_disp,1))
rightNovelView = torch.bmm( sparseEPIs.permute(0,3,1,2).reshape(b*an_novel*N,an_sparse,self.range_disp)[:,1:2,:], weight_right.reshape(b*an_novel*N,self.range_disp,1))
novelView = leftNovelView.reshape(b*an_novel,N) * confs[:,0,:] + rightNovelView.reshape(b*an_novel,N) * confs[:,1,:]
# geometry-based refinement
patch_novel = novelView.reshape(b*an_novel,1,r_patch_size,r_patch_size)
patch_left = patch_left.reshape(b*an_novel,1,r_patch_size,r_patch_size)
patch_right = patch_right.reshape(b*an_novel,1,r_patch_size,r_patch_size)
ref_patch_novel = self.viewRefNet(patch_novel,patch_left,patch_right)
return ref_patch_novel.reshape(b,an_novel,r_patch_size,r_patch_size)
| [
"noreply@github.com"
] | noreply@github.com |
aa489f1fe7cdd8c01481666d4bcbffd497c31c99 | 47040e0580b8fc79d9ec9d7aafdd6bebb89e7938 | /rx_modeling.py | 8e25bf74730770e486652acd53c9a91ca9dbed73 | [] | no_license | yesuuu/rx_tools | 97b817460632f0f7de8200685ecd10a7e0a2ce25 | 4fc3e7a03ad89b9d213b65b26de64785a5830452 | refs/heads/master | 2021-01-10T01:14:55.294869 | 2018-09-14T13:25:35 | 2018-09-14T13:25:35 | 54,356,666 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60,905 | py | import sys
import os
import time
import datetime
import datetime as dt
import re
# import random
import subprocess
import numpy as np
import pandas as pd
import statsmodels.api as sm
import statsmodels.formula.api as smf
import scipy.stats as stats
import matplotlib
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from sklearn.linear_model import LassoLars
from sklearn.linear_model import LinearRegression
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
import seaborn
matplotlib.use("Qt4Agg")
pd.set_option('expand_frame_repr', False)
pd.set_option('display.max_columns', 15)
class RxModeling(object):
class SysRelated(object):
@staticmethod
def createFolder(folderPath):
if not os.path.isdir(folderPath):
os.makedirs(folderPath)
return folderPath
@staticmethod
def batchRename(tarFolderPath, files, tarFiles):
for i, j in zip(files, tarFiles):
os.system('mv %s %s' % (os.path.join(tarFolderPath, i), os.path.join(tarFolderPath, j)))
@staticmethod
def batchRemove(folderPath, files):
for f in files:
os.system('rm %s' % (os.path.join(folderPath, f), ))
@staticmethod
def batch_run(run_list, max_batch=1, wait_time=0, is_print=True, omp_num_threads=1):
"""
input:
max_batch: batches run at same time
wait_time: when one run,
"""
run_list = run_list[:]
runnings = {}
while run_list or runnings:
for f in runnings.keys():
if runnings[f][0].poll() is not None:
time_diff = datetime.datetime.now() - runnings[f][1]
if is_print:
print '\n[BatchRun process end] %s' \
'\n[BatchRun process end] use_time: %s' % (f, time_diff)
if len(run_list) == 0:
print '[BatchRun] %d left' % (len(run_list) + len(runnings) - 1,)
runnings.pop(f)
if (len(runnings) < max_batch) and run_list:
run_now = run_list.pop(0)
f = subprocess.Popen("OMP_NUM_THREADS=%d %s" % (omp_num_threads, run_now), shell=True)
now = datetime.datetime.now()
if is_print:
print ('\n[BatchRun %d] OMP_NUM_THREADS=%d %s' % (f.pid, omp_num_threads, run_now))
# print '[BatchRun] time:', now
runnings[run_now] = [f, now]
time.sleep(wait_time)
@staticmethod
def batchFunc(func, kwargsList, pNum, lowPriority=False):
pool = mp.Pool(pNum, maxtasksperchild=1)
if lowPriority:
parent = psutil.Process()
for child in parent.children():
child.nice(1)
mpResults = [pool.apply_async(func, kwds=kwd) for kwd in kwargsList]
pool.close()
pool.join()
returnValues = [r.get() for r in mpResults]
return returnValues
class Log(object):
def __init__(self, file_name=None, is_to_memory=True, is_to_console=True):
self.log_obj = None
self.file_name = self.reformat_file_name(file_name)
self.is_to_memory = is_to_memory
self.is_to_console = is_to_console
@staticmethod
def reformat_file_name(file_name):
if file_name is None:
return None
if isinstance(file_name, str):
time_str = dt.datetime.now().strftime('%Y-%m-%dT%H-%M-%S')
if '%T' in file_name:
file_name = file_name.replace('%T', time_str)
if '%D' in file_name:
file_name = file_name.replace('%D', time_str.split('T')[0])
return file_name
def start(self, is_print=False):
self.log_obj = self.SavePrint(self.file_name, self.is_to_memory, self.is_to_console)
self.log_obj.start()
if is_print:
print '[log] log starts, to file %s' % (self.file_name,)
def close(self):
self.log_obj.close()
def save(self, target, is_print=False):
if self.is_to_memory:
self.log_obj.memoryToFile(target)
else:
os.system('cp %s %s' % (self.file_name, target))
if is_print:
print '[log] log save to %s' % (target,)
class SavePrint(object):
def __init__(self, files, is_to_memory=True, is_to_console=True):
self._memory = ''
self.is_to_memory = is_to_memory
self._console = sys.__stdout__
self.is_to_console = is_to_console
if files is None:
files = []
if isinstance(files, str):
files = [files]
self.files = files
self._file_objects = [open(file_, 'w') for file_ in files]
def write(self, message):
for file_object in self._file_objects:
file_object.write(message)
if self.is_to_memory:
self._memory += message
if self.is_to_console:
self._console.write(message)
def flush(self):
pass
def start(self):
sys.stdout = self
def close(self):
for file_object in self._file_objects:
file_object.close()
sys.stdout = self._console
def memoryToFile(self, file_path):
with open(file_path, 'w') as f:
f.write(self._memory)
class LogAnalysis(object):
@staticmethod
def single_re(log_str, re_expression, keys, functions=None):
if isinstance(keys, str):
keys = [keys]
if functions is not None:
assert len(keys) == len(functions)
mappings = re.findall(re_expression, log_str)
if not mappings:
print 'Warning: no matches in log_str'
return []
elif len(mappings) >= 1:
keys_dict_list = []
for mapping in mappings:
if functions:
if len(keys) == 1:
keys_dict_list.append({keys[0]: functions[0](mapping)})
else:
assert len(keys) == len(mapping) == len(functions)
keys_dict_list.append({keys[i]: functions[i](mapping[i]) for i in range(len(keys))})
else:
if len(keys) == 1:
keys_dict_list.append({keys[0]: mapping})
else:
assert len(keys) == len(mapping)
keys_dict_list.append({keys[i]: mapping[i] for i in range(len(keys))})
return keys_dict_list
class Basic(object):
@staticmethod
def logWrapper(func, logFile, isToConsole=True):
def funcWithLog(*args, **kwargs):
logObject = RxModeling.Log(logFile, is_to_console=isToConsole)
logObject.start()
returnValue = func(*args, **kwargs)
logObject.close()
return returnValue
return funcWithLog
@staticmethod
def timeWrapper(func):
def funcWithTime(*args, **kwargs):
timeObject = RxModeling.Time(is_margin=True)
timeObject.show()
returnValue = func(*args, **kwargs)
timeObject.show()
return returnValue
return funcWithTime
@staticmethod
def getBound(breakPoints=(), lowBound=-np.inf, highBound=np.inf):
bps = [lowBound] + list(breakPoints) + [highBound]
return [(bps[i], bps[i + 1]) for i in range(len(breakPoints) + 1)]
@staticmethod
def floatToEvent(floatDf, breakPoints):
markDf = pd.DataFrame(np.zeros_like(floatDf), index=floatDf.index, columns=floatDf.columns)
breakBound = RxModeling.Basic.getBound(breakPoints, )
for i in range(len(breakBound)):
bound = breakBound[i]
markDf[(floatDf > bound[0]) & (floatDf <= bound[1])] = i + 1
return markDf.astype(int)
@staticmethod
def getValid(*arrays):
valid = ~np.isnan(arrays[0])
for a in arrays[1:]:
valid = valid & (~np.isnan(a))
return tuple(a[valid] for a in arrays)
@staticmethod
def iterUpdateDict(dictRaw, dictAdd):
for k, v in dictAdd.iteritems():
if k in dictRaw and isinstance(v, dict) and isinstance(dictRaw[k], dict):
dictRaw[k] = RxModeling.Basic.iterUpdateDict(dictRaw[k], dictAdd[k])
else:
dictRaw[k] = v
return dictRaw
@staticmethod
def spreadChoice(*xLists):
choiceNum = len(xLists)
choices = [tuple()]
for i in range(choiceNum):
addXList = xLists[i]
choices = [choice+(x, ) for choice in choices for x in addXList]
return choices
class VariableSelection(object):
"""
cache keys:
conflicts: dict
for MarginF
{'x1': ['x2', 'x3', ...]
'x4': ['x5']
'x10': ['x1]
...
}
remove_x_path: list
['x1', 'x2', 'x3', ...]
"""
class AbstractSelection(object):
@staticmethod
def _check_data(x, y):
if isinstance(x, pd.DataFrame):
pass
elif isinstance(x, np.ndarray):
x = pd.DataFrame(x)
else:
raise TypeError('Unknown type of x')
assert len(x.shape) == 2
y = np.array(y).ravel()
assert x.shape[0] == len(y)
if len(y) < 100:
print 'Warning: data length %d too small ' % (len(y),)
return x, y
def select(self, x, y, x_columns=None, cache={}):
x, y = self._check_data(x, y)
if x_columns is None:
x_columns = list(x.columns)
else:
x_columns = list(x_columns)
for x_column in x_columns:
assert x_column in x.columns
return self._select(x, y, x_columns, cache=cache)
def _select(self, x, y, x_columns, cache={}):
raise NotImplementedError
class RemoveAllConst(AbstractSelection):
def __init__(self, is_print=False):
self.is_print = is_print
def _select(self, x, y, x_columns, cache={}):
if self.is_print:
print '[Remove All Const] selecting ...'
for x_column in x_columns:
x_single = x[x_column].values
if len(sm.add_constant(x_single).shape) == 1:
x_columns.remove(x_column)
if self.is_print:
print '[Remove All Const] %d remain, remove %s, all constant' \
% (len(x_columns), x_column,)
return x_columns
class BackwardSingleP(AbstractSelection):
def __init__(self, p_threshold=0.05, is_print=False):
self.p_threshold = p_threshold
self.is_print = is_print
def _select(self, x, y, x_columns, cache={}):
if self.is_print:
print '[Select Single P] selecting ...'
for x_column in x_columns:
x_single = x[x_column].values
x_reg = sm.add_constant(x_single)
model = sm.OLS(y, x_reg).fit()
p_value = model.pvalues[-1]
if p_value > self.p_threshold:
x_columns.remove(x_column)
if self.is_print:
print '[Select Single P] %d remain, remove %s, single p value %.4f' \
% (len(x_columns), x_column, p_value)
return x_columns
class BackwardMarginR2(AbstractSelection):
def __init__(self, r2_diff_threshold=-np.infty, n_min=1, is_print=False):
self.r2_diff_threshold = r2_diff_threshold
self.n_min = n_min
self.is_print = is_print
def _select(self, x, y, x_columns, cache={}):
if self.is_print:
print '[Select Margin R2] selecting ...'
if len(x_columns) <= self.n_min:
return x_columns
while len(x_columns) > self.n_min:
bench_r2 = sm.OLS(y, x[x_columns]).fit().rsquared_adj
best_r2_diff, best_x_column = -np.inf, None
for x_column in x_columns:
x_columns_tmp = x_columns[:]
x_columns_tmp.remove(x_column)
tmp_r2_diff = sm.OLS(y, x[x_columns_tmp]).fit().rsquared_adj - bench_r2
if tmp_r2_diff > best_r2_diff:
best_r2_diff, best_x_column = tmp_r2_diff, x_column
if best_r2_diff > self.r2_diff_threshold:
x_columns.remove(best_x_column)
if self.is_print:
print '[Select Margin R2] %d remain, remove %s, %.6f r2 diff' \
% (len(x_columns), best_r2_diff, best_x_column)
else:
if self.is_print:
print '[Select Margin R2] %d remain, stops, %.6f r2 diff' \
% (len(x_columns), best_r2_diff)
break
return x_columns
class BackwardMarginT(AbstractSelection):
def __init__(self, t_threshold=np.infty, n_min=1, is_print=False):
self.t_threshold = t_threshold
self.n_min = n_min
self.is_print = is_print
def _select(self, x, y, x_columns, cache={}):
if self.is_print:
print '[Select Margin T] selecting ... %d remain' % (len(x_columns),)
print '[Select Margin T] T threshold: %.4f, min num of var: %d' % (self.t_threshold, self.n_min)
while len(x_columns) > self.n_min:
t_values = sm.OLS(y, x[x_columns]).fit().tvalues.abs().sort_values()
x_column, min_t_value = t_values.index[0], t_values[0]
if min_t_value < self.t_threshold:
x_columns.remove(x_column)
if self.is_print:
print '[Select Margin T] %d remain, remove %s, t value: %.4f' \
% (len(x_columns), x_column, min_t_value)
else:
if self.is_print:
print '[Select Margin T] %d remain, stops, t value: %.4f' \
% (len(x_columns), min_t_value)
break
return x_columns
class BackwardMarginF(AbstractSelection):
def __init__(self, group_size=5, f_p_value=0.0, n_min=1, is_print=False):
self.group_size = group_size
self.f_p_value = f_p_value
self.n_min = n_min
self.is_print = is_print
def _select(self, x, y, x_columns, cache={}):
if self.is_print:
print '[Select Margin F] selecting ... %d remain' % (len(x_columns),)
print '[Select Margin F] group size: %d' % (self.group_size,)
print '[Select Margin F] F P-value: %.4f, min num of var: %d' % (self.f_p_value, self.n_min)
while len(x_columns) > self.n_min:
bench = sm.OLS(y, x[x_columns]).fit()
p_values_sorted = bench.pvalues.sort_values(ascending=False)
for count_i in range(self.group_size):
x_name = p_values_sorted.index[count_i]
conflicts = cache.get('conflicts', {})
for x_other in conflicts.get(x_name, []):
try:
p_values_sorted.drop(x_other, inplace=True)
except:
pass
group = p_values_sorted[:self.group_size]
print '[Select Margin F]', list(group.index)
print '[Select Margin F]', list(group.values)
if self.f_p_value == 0.0:
for x_column in list(group.index):
x_columns.remove(x_column)
if self.is_print:
print '[Select Margin F] %d remain, remove %s, f p-value: %.4f' \
% (len(x_columns), x_column, np.nan)
else:
restricted_model = sm.OLS(y, x[x_columns].drop(group.index, axis=1)).fit()
f_test_res = bench.compare_f_test(restricted_model)
print f_test_res
f_value = f_test_res[1]
if f_value > self.f_p_value:
for x_column in list(reversed(list(group.index))):
x_columns.remove(x_column)
if self.is_print:
print '[Select Margin F] %d remain, remove %s, f p-value: %.4f' \
% (len(x_columns), x_column, f_value)
else:
if self.is_print:
print '[Select Margin F] %d remain, stops, f value: %.4f' \
% (len(x_columns), f_value)
break
return x_columns
class VariableCluster(object):
@staticmethod
def basic(X):
from scipy.cluster.hierarchy import dendrogram, linkage
Z = linkage(X, "single", "correlation")
dendrogram(Z, labels=X.index, color_threshold=0)
plt.show()
return Z
class Fitting(object):
@staticmethod
def _testFit(fitObj=None, n=1000):
if fitObj is None:
fitObj = RxModeling.Fitting.PiecewiseRegContinous()
x = np.random.rand(n) - 0.5
epsilon = np.random.randn(n) * 0.1
y = (x + 1) * (x > 0) + ((-1) * x + 1) * (x < 0) + epsilon
x = np.array(list(x) + [np.nan])
y = np.array(list(y) + [np.nan])
fitObj.fit(x, y)
yHat = fitObj.predict(x)
plt.scatter(x, y)
ss = np.argsort(x)
plt.plot(x[ss], yHat[ss])
plt.show()
return {'fitObj': fitObj, 'yHat': yHat, 'x': x, 'y': y}
class DecisionTree(DecisionTreeRegressor):
def fit(self, X, y, sample_weight=None, check_input=True, X_idx_sorted=None):
if X.ndim == 1:
X = X.reshape((-1, 1))
valid = np.all(~np.isnan(X), axis=1) & (~np.isnan(y))
X, y = X[valid, :], y[valid]
super(RxModeling.Fitting.DecisionTree, self).fit(X, y, sample_weight, check_input, X_idx_sorted)
def predict(self, X, check_input=True):
if X.ndim == 1:
X = X.reshape((-1, 1))
y = np.full(X.shape[0], np.nan)
valid = np.all(~np.isnan(X), axis=1)
X = X[valid, :]
yValid = super(RxModeling.Fitting.DecisionTree, self).predict(X, check_input)
y[valid] = yValid
return y
class RandomForest(RandomForestRegressor):
def fit(self, X, y, sample_weight=None):
if X.ndim == 1:
X = X.reshape((-1, 1))
valid = np.all(~np.isnan(X), axis=1) & (~np.isnan(y))
X, y = X[valid, :], y[valid]
super(RxModeling.Fitting.RandomForest, self).fit(X, y, sample_weight=sample_weight)
def predict(self, X):
if X.ndim == 1:
X = X.reshape((-1, 1))
y = np.full(X.shape[0], np.nan)
valid = np.all(~np.isnan(X), axis=1)
X = X[valid, :]
yValid = super(RxModeling.Fitting.RandomForest, self).predict(X)
y[valid] = yValid
return y
class PloyReg(object):
def __init__(self, degree):
self.degree = degree
self.coef = None
self.polyFunc = None
def fit(self, xTrain, yTrain):
xTrain, yTrain = RxModeling.Basic.getValid(xTrain, yTrain)
polyArgs = np.polyfit(xTrain, yTrain, self.degree)
self.coef = polyArgs
self.polyFunc = lambda xReg: np.sum(
[self.coef[i] * xReg ** (self.degree - i) for i in range(self.degree + 1)])
return self
def predict(self, xTest):
return np.array([self.polyFunc(x) for x in xTest])
class PiecewiseRegContinous(object):
def __init__(self, breakPoints=(), setBreakPointsInFit=False, setQuantileNum=5):
self.breakPoints = breakPoints
self.breakBound = RxModeling.Basic.getBound(breakPoints)
self.setBreakPointsInFit = setBreakPointsInFit
self.setQuantileNum = setQuantileNum
self.model = None
def setBreakByQuantile(self, xTrain, fracNum=5):
xTrain = RxModeling.Basic.getValid(xTrain)
self.breakPoints = [np.percentile(xTrain, int(i * 100. / fracNum)) for i in range(1, fracNum)]
self.breakBound = RxModeling.Basic.getBound(self.breakPoints)
def fit(self, xTrain, yTrain):
xTrain, yTrain = RxModeling.Basic.getValid(xTrain, yTrain)
if self.setBreakPointsInFit:
self.setBreakByQuantile(xTrain, self.setQuantileNum)
dataDf = pd.DataFrame({'x': xTrain, 'y': yTrain})
for i, bp in enumerate(self.breakPoints):
dataDf['x_' + str(i)] = np.where(xTrain > bp, xTrain - bp, 0)
formula = 'y ~ ' + '+'.join(['x'] + ['x_' + str(i) for i in range(len(self.breakPoints))])
model = smf.ols(formula=formula, data=dataDf).fit()
self.model = model
return self
def predict(self, xTest):
xTest = np.array(xTest)
yHat = np.full(xTest.shape, np.nan)
valid = ~np.isnan(xTest)
xTest = xTest[valid]
dataDf = pd.DataFrame({'x': xTest})
for i, bp in enumerate(self.breakPoints):
dataDf['x_' + str(i)] = np.where(xTest > bp, xTest - bp, 0)
yHat[valid] = self.model.predict(dataDf)
return yHat
class PiecewiseReg(object):
def __init__(self, breakPoints=(), addConstant=True, setBreakPointsInFit=False, setQuantileNum=5):
self.breakPoints = breakPoints
self.breakBound = RxModeling.Basic.getBound(breakPoints)
self.addConstant = addConstant
self.setBreakPointsInFit = setBreakPointsInFit
self.setQuantileNum = setQuantileNum
self.models = None
def setBreakByQuantile(self, xTrain, fracNum=5):
xTrain = RxModeling.Basic.getValid(xTrain)
self.breakPoints = [np.percentile(xTrain, int(i * 100. / fracNum)) for i in range(1, fracNum)]
self.breakBound = RxModeling.Basic.getBound(self.breakPoints)
def fit(self, xTrain, yTrain):
xTrain, yTrain = RxModeling.Basic.getValid(xTrain, yTrain)
data = pd.DataFrame({'x': xTrain, 'y': yTrain})
datas = [data[(data['x'] > low) & (data['x'] < upper)] for low, upper in self.breakBound]
self.models = [sm.OLS(d['y'].values,
d['x'].values if not self.addConstant else sm.add_constant(d['x'].values)).fit()
for d in datas]
return self
def predict(self, xTest):
xTest = np.array(xTest)
yHat = np.full(xTest.shape, np.nan)
for i, (low, upper) in enumerate(self.breakBound):
con = (xTest > low) & (xTest <= upper)
xCon = xTest[con]
if len(xCon) == 0:
continue
if self.addConstant:
xCon = sm.add_constant(xCon) if len(xCon) != 1 else np.array([1, xCon])
yHat[con] = self.models[i].predict(xCon if not self.addConstant else sm.add_constant(xCon))
return yHat
class LinearRegression(LinearRegression):
def fit(self, X, y, sample_weight=None):
if X.ndim == 1:
X = X.reshape((-1, 1))
sample_weight = np.ones(y.shape) if sample_weight is None else sample_weight
valid = np.all(~np.isnan(X), axis=1) & (~np.isnan(y))
X, y, sample_weight = X[valid, :], y[valid], sample_weight[valid]
super(RxModeling.Fitting.LinearRegression, self).fit(X, y, sample_weight)
def predict(self, X):
if X.ndim == 1:
X = X.reshape((-1, 1))
y = np.full(X.shape[0], np.nan)
valid = np.all(~np.isnan(X), axis=1)
X = X[valid, :]
yValid = super(RxModeling.Fitting.LinearRegression, self).predict(X)
y[valid] = yValid
return y
class marginRegression(object):
def __init__(self, fit_intercept=True):
self.fit_intercept = fit_intercept
self.intercept_ = None
self.coef_ = None
def fit(self, xTrain, yTrain, sample_weight=None):
if xTrain.ndim == 1:
xTrain = xTrain.reshape((-1, 1))
sample_weight = np.ones(yTrain.shape) if sample_weight is None else sample_weight
valid = np.all(~np.isnan(xTrain), axis=1) & (~np.isnan(yTrain))
xTrain, yTrain, sample_weight = xTrain[valid, :], yTrain[valid], sample_weight[valid]
coef = np.full(xTrain.shape[1], np.nan)
intercept = 0.
yBench = yTrain
for i in range(xTrain.shape[1]):
xTmp = xTrain[:, i]
lr = RxModeling.Fitting.LinearRegression(fit_intercept=self.fit_intercept)
lr.fit(xTmp, yBench, sample_weight)
if self.fit_intercept:
intercept += lr.intercept_
coef[i] = lr.coef_
else:
coef[i] = lr.coef_
yBench = yBench - lr.predict(xTmp)
self.intercept_ = intercept
self.coef_ = coef
def predict(self, xTest):
if xTest.ndim == 1:
xTest = xTest.reshape((-1, 1))
return np.dot(xTest, self.coef_) + self.intercept_
class marginRegression2(object):
def __init__(self, fit_intercept=True):
self.fit_intercept = fit_intercept
self.intercept_ = None
self.coef_ = None
def fit(self, xTrain, yTrain, sample_weight=None):
if xTrain.ndim == 1:
xTrain = xTrain.reshape((-1, 1))
sample_weight = np.ones(yTrain.shape) if sample_weight is None else sample_weight
valid = np.all(~np.isnan(xTrain), axis=1) & (~np.isnan(yTrain))
xTrain, yTrain, sample_weight = xTrain[valid, :], yTrain[valid], sample_weight[valid]
coef = np.full(xTrain.shape[1], np.nan)
intercept = 0.
yBench = yTrain
for i in range(xTrain.shape[1]):
xTmp = xTrain[:, i]
if i == 0:
xResidual = xTmp
xIntercept = 0.
xCoef = np.array([])
else:
xLr = xTrain[:, :i]
xModel = RxModeling.Fitting.LinearRegression(fit_intercept=True)
xModel.fit(xLr, xTmp, sample_weight)
xResidual = xTmp - xModel.predict(xLr)
xIntercept = xModel.intercept_
xCoef = xModel.coef_
# xBench.append(xTmp)
lr = RxModeling.Fitting.LinearRegression(fit_intercept=self.fit_intercept)
lr.fit(xResidual, yBench, sample_weight)
resIntercept, resCoef = (lr.intercept_, lr.coef_) if self.fit_intercept else (0., lr.coef_)
intercept += (resIntercept - resCoef * xIntercept)
coef[:i] += (- resCoef * xCoef)
coef[i] = resCoef
print coef
yBench = yBench - lr.predict(xResidual)
self.intercept_ = intercept
self.coef_ = coef
def predict(self, xTest):
if xTest.ndim == 1:
xTest = xTest.reshape((-1, 1))
return np.dot(xTest, self.coef_) + self.intercept_
@staticmethod
def normalizeByVectors(rawDf, vectorDfs, addConstant=True, minObs=100):
vectors = [vdf.loc[rawDf.index, rawDf.columns].values for vdf in vectorDfs]
if addConstant:
vectors.insert(0, np.ones(rawDf.shape))
vectorPanel = pd.Panel(vectors, major_axis=rawDf.index, minor_axis=rawDf.columns)
coefDict = {}
resDf = pd.DataFrame(index=rawDf.index, columns=rawDf.columns)
for idx, rawLine in rawDf.iterrows():
y = rawLine.values
x = vectorPanel.loc[:, idx, :].values
valid = np.all(~np.isnan(x), axis=1) & (~np.isnan(y))
if np.sum(valid) <= minObs:
continue
xReg, yReg = x[valid], y[valid]
regModel = sm.OLS(yReg, xReg).fit()
coefDict[idx] = regModel.params
res = np.full(y.shape, np.nan, )
res[valid] = regModel.resid
resDf.loc[idx] = res
return {'residual': resDf, 'coefDict': coefDict}
@staticmethod
def normalizeByVectorPoly(rawDf, vectorDf, degree=3, minObs=100):
coefDict = {}
resDf = pd.DataFrame(index=rawDf.index, columns=rawDf.columns)
for idx, rawLine in rawDf.iterrows():
y = rawLine.values
x = vectorDf.loc[idx].values
valid = (~np.isnan(x)) & (~np.isnan(y))
if np.sum(valid) <= minObs:
continue
xValid, yValid = x[valid], y[valid]
polyArgs = np.polyfit(xValid, yValid, degree)
coefDict[idx] = polyArgs
polyFunc = lambda x: np.sum([polyArgs[i] * x ** (degree - i) for i in range(degree + 1)])
res = np.full(y.shape, np.nan, )
res[valid] = yValid - np.array([polyFunc(xi) for xi in xValid])
resDf.loc[idx] = res
return {'residual': resDf, 'coefDict': coefDict}
class X(object):
@staticmethod
def calc_outr2(y, y_hat):
y, y_hat = np.array(y), np.array(y_hat)
return 1 - np.nansum((y - y_hat) ** 2) / np.nansum((y - np.nanmean(y)) ** 2)
@staticmethod
def calc_basic_statistics(x, info=None):
info = ['mean', 'std', 'skew', 'kurt', 'num', 'nanNum', 'max', 'min', 'num_out2std', 'num_out3std',
'num_out5std', 'num_out10std'] \
if info is None else info
x = np.array(x).ravel()
nanNum = np.sum(np.isnan(x))
x = x[~np.isnan(x)]
if len(x) == 0:
return pd.Series({i: np.nan for i in info}, index=info)
func_map = {'mean': np.mean,
'std': np.std,
'skew': stats.skew,
'kurt': stats.kurtosis,
'num': len,
'nanNum': lambda _: nanNum,
'max': np.max,
'min': np.min,
'num_out2std': lambda x_func: np.sum((x_func - np.mean(x_func)) > 2 * np.std(x_func)) +
np.sum((x_func - np.mean(x_func)) < -2 * np.std(x_func)),
'num_out3std': lambda x_func: np.sum((x_func - np.mean(x_func)) > 3 * np.std(x_func)) +
np.sum((x_func - np.mean(x_func)) < -3 * np.std(x_func)),
'num_out5std': lambda x_func: np.sum((x_func - np.mean(x_func)) > 5 * np.std(x_func)) +
np.sum((x_func - np.mean(x_func)) < -5 * np.std(x_func)),
'num_out10std': lambda x_func: np.sum((x_func - np.mean(x_func)) > 10 * np.std(x_func)) +
np.sum((x_func - np.mean(x_func)) < -10 * np.std(x_func)),
}
basic_statistic_dict = {key: func_map[key](x) for key in info if key in func_map}
basic_statistic_series = pd.Series(basic_statistic_dict, index=info)
return basic_statistic_series
@staticmethod
def getConDfs(dfDict, condition, fillNaValue=-1):
if isinstance(condition, np.ndarray):
tmpdf = dfDict.values()[0]
condition = pd.DataFrame(condition, tmpdf.index, tmpdf.columns)
return pd.DataFrame({k: dfDict[k].fillna(fillNaValue)[condition == True].stack() for k in dfDict})
class XY(object):
@staticmethod
def getEventDecay(xEvent, yPanel, validDf=None, includeZero=False):
validDf = yPanel[1].notnull() if validDf is None else validDf
days = yPanel.items
events = pd.melt(xEvent).value.dropna().unique()
events = sorted(events if includeZero else events[events != 0])
eventDecay = pd.DataFrame(index=events, columns=days)
spotNum = pd.Series(index=events)
for event in events:
print event,
isEvent = pd.DataFrame(xEvent == event) & validDf
spotNum[event] = isEvent.sum().sum()
for fday in days:
print fday,
tmpReturn = yPanel[fday].loc[xEvent.index, xEvent.columns]
tmpReturn[~isEvent] = np.nan
eventDecay.loc[event, fday] = tmpReturn.fillna(0).sum().sum() / spotNum[event]
print
return eventDecay, spotNum
@staticmethod
def plotEventDecay(eventDecay, spotNum, eventNameMap={}, **plotkwargs):
events = list(eventDecay.index)
maxY, minY = 0., 0.
for event in events:
label = '%s, num%d' % (eventNameMap[event], spotNum[event]) if event in eventNameMap else \
'event%d, num%d' % (event, spotNum[event])
eventDecay.loc[event].plot(label=label, **plotkwargs)
maxY = max(maxY, eventDecay.loc[event].max())
minY = min(minY, eventDecay.loc[event].min())
ax = plt.gca()
ax.legend(loc='best')
ax.plot([0, 0], [minY, maxY], )
ax.set_title('decay')
@staticmethod
def corrCurve(yDict, xSeries, lookBackLength):
dataIdx = xSeries.index
xDf = pd.DataFrame({i: xSeries.shift(i) for i in range(lookBackLength)}, index=dataIdx)
yDf = pd.DataFrame(yDict)
corrDf = pd.DataFrame(index=range(lookBackLength), columns=yDf.columns)
for y in yDf:
corrDf[y] = xDf.corrwith(yDf[y])
corrSum = corrDf.cumsum()
corrSum.plot()
return {'xDf': xDf, 'yDf': yDf, 'corrDf': corrDf}
class NpTools(object):
@staticmethod
def rankNan(x):
y = np.argsort(np.where(np.isnan(x), np.inf, x), axis=0)
y2 = np.full(x.shape, np.nan, )
rankArray = np.arange(1, y.shape[0] + 1)
for i, j in enumerate(y.T):
y2[:, i][j] = rankArray
y2[np.isnan(x)] = np.nan
return y2
@staticmethod
def getPolyFunc(polyArgs):
degree = len(polyArgs) - 1
polyFunc = lambda x: np.sum([polyArgs[i] * x ** (degree - i) for i in range(degree + 1)])
return polyFunc
@staticmethod
def divide_into_group(arr, group_num=None, group_size=None):
if group_num is not None:
group_num = int(group_num)
assert group_size is None
group_size_small = len(arr) / group_num
group_num_big = (len(arr) % group_num)
nums = [(group_size_small + 1 if i < group_num_big else group_size_small)
for i in range(group_num)]
nums.insert(0, 0)
elif group_size is not None:
group_size = int(group_size)
group_num = int(np.ceil(len(arr) * 1.0 / group_size))
nums = [group_size] * (len(arr) / group_size) + [(len(arr) % group_size)]
nums.insert(0, 0)
else:
raise Exception
indexs = np.cumsum(np.array(nums))
new_arr = []
for i in range(group_num):
new_arr.append(arr[indexs[i]:indexs[i + 1]])
return new_arr
@staticmethod
def checkSame(matrix1, matrix2, maxDiff=1e-8, isNan=True, isPrint=True, ):
matrix1, matrix2 = np.array(matrix1, dtype=float), np.array(matrix2, dtype=float)
assert matrix1.shape == matrix2.shape
res = {}
if isNan:
nan1 = np.isnan(matrix1) & (~np.isnan(matrix2))
nan2 = (~np.isnan(matrix1)) & np.isnan(matrix2)
res['nan1'] = nan1
res['nan2'] = nan2
if isPrint:
print 'matrix1 nan alone:', np.sum(nan1)
print 'matrix2 nan alone:', np.sum(nan2)
diff = (np.abs(matrix1 - matrix2) >= maxDiff)
res['diff'] = diff
if isPrint:
print 'different values:', np.sum(diff)
return res
@staticmethod
def countChangePoints(series, isPrint=True):
"""
:return:{'changeNum': len(changePoints),
'changePoints': changePoints}
"""
array = np.array(series)
changePoints = []
lastState = np.isnan(array[0])
for i in range(1, len(array)):
newState = np.isnan(array[i])
if newState ^ lastState:
changePoints.append(i)
lastState = newState
if isPrint:
print 'change points:', len(changePoints)
return {'changeNum': len(changePoints),
'changePoints': changePoints}
@staticmethod
def D3ToD2(x):
s1 = x.shape[0]
return x.reshape(s1, -1)
@staticmethod
def dropna(x, axis=0, how='any'):
if how == 'any':
func = np.any
elif how == 'all':
func = np.all
else:
raise Exception
if axis == 0:
xNew = x[~func(np.isnan(x), axis=1)]
elif axis == 1:
xNew = x[:, ~func(np.isnan(x), axis=0)]
else:
raise Exception
return xNew
class PdTools(object):
@staticmethod
def getZscore(df, axis=1, level=2):
if level == 1:
return df.sub(df.mean(axis=axis), axis=1 - axis).div(df.abs().sum(axis=axis), axis=1 - axis)
if level == 2:
return df.sub(df.mean(axis=axis), axis=1 - axis).div(df.std(axis=axis), axis=1 - axis)
@staticmethod
def maskNotEnough(df, num, axis=0, ):
dfReturn = df.copy()
notValid = df.notnull().sum(axis=axis) < num
if axis == 0:
dfReturn.loc[:, notValid] = np.nan
elif axis == 1:
dfReturn[notValid] = np.nan
return dfReturn
@staticmethod
def ffillDecay(df, decayRate=1., limit=None):
dfFillNa = df.ffill(limit=limit)
if np.isclose(decayRate, 1., 1.e-8, 1.e-8, ):
return dfFillNa
else:
dfNanSum = df.isnull().astype(int).cumsum()
dfNanLength = dfNanSum - dfNanSum[df.notnull()].ffill().fillna(0)
ratio = decayRate ** dfNanLength
return dfFillNa.mul(ratio)
@staticmethod
def ffillDecayMulti(df, decayRate=1., limit=None):
if np.isclose(decayRate, 1., 1.e-8, 1.e-8, ):
return df.ffill(limit=limit)
else:
isNa = df.ffill(limit=limit).isnull()
dfFill0 = df.fillna(0.)
dfFill0ewma = dfFill0.ewm(alpha=1 - decayRate, adjust=False).mean()
dfFill0ewma[isNa] = np.nan
return dfFill0ewma
@staticmethod
def countNan(df):
dfNanSum = df.isnull().astype(int).cumsum()
dfNanLength = dfNanSum - dfNanSum[df.notnull()].ffill().fillna(0)
return dfNanLength
@staticmethod
def qcut(df, qNum, labels=None, returnBins=False):
labels = range(1, qNum + 1) if labels is None else labels
qcutDf = pd.DataFrame(np.nan, df.index, df.columns)
if returnBins:
binsDf = pd.DataFrame(np.nan, df.index, range(qNum + 1))
for idx, line in df.iterrows():
lineNa = line.dropna()
if len(lineNa) == 0:
continue
try:
res = pd.qcut(lineNa, qNum, labels, returnBins, )
if returnBins:
qcutDf.loc[idx][res[0].index] = res[0]
binsDf.loc[idx] = res[1]
else:
qcutDf.loc[idx][res.index] = res
except:
columnsGroup = RxModeling.NpTools.divide_into_group(list(lineNa.sort_values().index), qNum)
res = pd.Series(index=lineNa.index)
for label, gcolumns in zip(labels, columnsGroup):
res[gcolumns] = label
qcutDf.loc[idx][res.index] = res
if returnBins:
binsDf.loc[idx] = [lineNa[gcolumns[0]] for gcolumns in columnsGroup] + [
lineNa[columnsGroup[-1][-1]]]
return {'qcutDf': qcutDf, 'binsDf': binsDf} if returnBins else qcutDf
@staticmethod
def showNear(dfs, dfNames, recordIndex, backNum=0, forwardNum=0, column=None, ):
recordIdx = dfs[0].index.get_loc(recordIndex)
startIdx = max(0, recordIdx - backNum)
endIdx = min(len(dfs[0]), recordIdx + forwardNum)
return pd.DataFrame({dfName: df[column].iloc[startIdx:endIdx] for df, dfName in zip(dfs, dfNames)})
@staticmethod
def winsorize(df, limits=(0.05, 0.05), winType='percent', axis=1):
"""
winType: 'percent'('p') or 'std'('s')
"""
if winType in ('std', 's'):
mean = df.mean(axis=axis)
std = df.std(axis=axis)
lowerLimit = (mean - limits[0] * std).fillna(-np.inf)
upperLimit = (mean + limits[1] * std).fillna(np.inf)
elif winType in ('percent', 'p'):
dfQuantile = pd.DataFrame(index=[limits[0], 1 - limits[1]], columns=df.index)
for idx in df.index:
dfQuantile[idx] = df.loc[idx].dropna().quantile([limits[0], 1 - limits[1]])
lowerLimit = dfQuantile.iloc[0].fillna(-np.inf)
upperLimit = dfQuantile.iloc[1].fillna(np.inf)
else:
raise Exception('Unknown winType %s' % (winType,))
return df.clip(lowerLimit, upperLimit, axis=1 - axis)
@staticmethod
def getNearPanel(df, backNum=1, forwardNum=1):
idxNum = range(-backNum, forwardNum + 1)
return pd.Panel({idx: df.shift(-idx) for idx in idxNum})
@staticmethod
def getValueByGroup(groupValue, groupDf, ):
"""
:param groupValue: dates x groups, values: value
:param groupDf: dates x symbols, values: group
:return:
"""
recordsGroup = groupDf.stack().reset_index()
recordsGroup.columns = ['dates', 'symbols', 'group']
recordsValue = groupValue.stack().reset_index()
recordsValue.columns = ['dates', 'group', 'value']
recordsAll = pd.merge(recordsGroup, recordsValue, how='left', on=['dates', 'group'])
return recordsAll.pivot('dates', 'symbols', 'value').loc[groupDf.index, groupDf.columns]
@staticmethod
def calGroupInfo(dataDf, groupDf, funcs):
if isinstance(groupDf, pd.Series):
groupDf = pd.DataFrame(np.repeat(groupDf.loc[dataDf.columns].values.reshape(1, -1),
len(dataDf.index), axis=0),
index=dataDf.index, columns=dataDf.columns)
groups = sorted(list(groupDf.stack().unique()))
result = pd.Panel(items=funcs.keys(), major_axis=dataDf.index, minor_axis=groups)
for group in groups:
dataDfGroup = dataDf[groupDf == group]
for funcName in funcs:
result.loc[funcName, :, group] = funcs[funcName](dataDfGroup)
return result
@staticmethod
def toPickle(obj, saveFilePath, name='default'):
with pd.HDFStore(saveFilePath) as store:
store.put(name, obj)
@staticmethod
def readPickle(saveFilePath, name='default'):
with pd.HDFStore(saveFilePath) as store:
return store.get(name)
@staticmethod
def toFrame(xPanel, dropna=None):
arr = RxModeling.NpTools.D3ToD2(xPanel.values).T
if dropna:
arr = RxModeling.NpTools.dropna(arr, axis=0, how=dropna)
return pd.DataFrame(arr, columns=xPanel.items)
class Time(object):
def __init__(self, is_now=False, is_all=False, is_margin=False):
self.start_time = None
self.last_time = None
self.is_now = is_now
self.is_all = is_all
self.is_margin = is_margin
def show(self):
now = dt.datetime.now()
if self.start_time is None:
self.start_time = now
print '[Time] Start at:', now
if self.is_margin:
self.last_time = now
else:
if self.is_now:
print '[Time] now:', now
if self.is_all:
print '[Time] Since start:', now - self.start_time
if self.is_margin:
print '[Time] Since last call:', now - self.last_time
self.last_time = now
class Plot(object):
@staticmethod
def pie(series, names=None, num=None, is_sorted=True, figKwargs=None, pieKwargs=None):
"""
:param series: pandas.series
:param names: None, list, func
if None:
series.index
if func:
func(i) for i in series.index
:param num: None or int
"""
if callable(names):
names = [names(i) for i in series.index]
elif names is None:
names = series.index
series = series.copy()
series.index = names
if num is not None:
series = series.sort_values(ascending=False)
if num < len(series):
othersNum = np.sum(series[num - 1:])
series = series[:num - 1]
series['OTHERS'] = othersNum
if is_sorted:
series.sort_values(ascending=False, inplace=True)
plt.figure(**({} if figKwargs is None else figKwargs))
plt.pie(series.values, labels=series.index, **({} if pieKwargs is None else pieKwargs))
@staticmethod
def plotQuantile(x, y, plotNum=20, isReg=True, isStd=False, isShowCorr=False, **plotKwargs):
x, y = np.array(x).ravel(), np.array(y).ravel()
valid = (~np.isnan(x)) & (~np.isnan(y))
x, y = x[valid], y[valid]
xArg = np.argsort(x)
x, y = x[xArg], y[xArg]
xMean = np.array([np.mean(x[i * (len(x) / plotNum):(i + 1) * (len(x) / plotNum)]) for i in range(plotNum)])
yMean = np.array([np.mean(y[i * (len(x) / plotNum):(i + 1) * (len(x) / plotNum)]) for i in range(plotNum)])
df = pd.DataFrame({'x': xMean, 'y': yMean})
df.plot.scatter('x', 'y', **plotKwargs)
plt.title('quantile plot')
if isStd:
yStd = np.array(
[np.std(y[i * (len(x) / plotNum):(i + 1) * (len(x) / plotNum)], ddof=1) for i in range(plotNum)])
plt.fill_between(xMean, yMean + yStd, yMean - yStd, alpha=0.3)
if isReg:
model = sm.OLS(yMean, sm.add_constant(xMean)).fit()
yHat = xMean * model.params[1] + model.params[0]
plt.plot(xMean, yHat)
if isShowCorr:
ax = plt.gca()
ax.text(0.01, 0.99, 'corr: %s' % (np.corrcoef(x, y)[0, 1],),
horizontalalignment='left',
verticalalignment='top',
transform=ax.transAxes, color='red', size=16)
return df
@staticmethod
def polyfit(x, y, degree, plotNum=100, **plotKwargs):
x, y = RxModeling.NpTools.getValid((x, y))
xArg = np.argsort(x)
x, y = x[xArg], y[xArg]
polyArgs = np.polyfit(x, y, deg=degree)
polyFunc = RxModeling.NpTools.getPolyFunc(polyArgs)
xMean = np.array([np.mean(x[i * (len(x) / plotNum):(i + 1) * (len(x) / plotNum)]) for i in range(plotNum)])
yMean = np.array([np.mean(y[i * (len(x) / plotNum):(i + 1) * (len(x) / plotNum)]) for i in range(plotNum)])
df = pd.DataFrame({'x': xMean, 'y': yMean})
df.plot.scatter('x', 'y', **plotKwargs)
plt.title('quantile plot')
x = list()
xMin, xMax = np.nanmin(xMean), np.nanmax(xMean)
xList = np.arange(xMin, xMax, step=(xMax - xMin) * 1. / 300)
yHatList = [polyFunc(x) for x in xList]
plt.plot(xList, yHatList)
return {'polyArgs': polyArgs,
'polyFunc': polyFunc,
'quantileDf': df,}
@staticmethod
def plot3D(df, **plotKwargs):
x = range(df.shape[0])
y = range(df.shape[1])
z = df.values.T
x1, y1 = np.meshgrid(x, y)
fig = plt.figure()
ax = fig.gca(projection='3d')
from matplotlib import cm
surf = ax.plot_surface(x1, y1, z, rstride=1, cstride=1, cmap=cm.coolwarm, antialiased=False,
**plotKwargs)
plt.xticks(x, df.index)
plt.yticks(y, df.columns)
fig.colorbar(surf, shrink=0.5, aspect=5)
@staticmethod
def plotBox(dataDf, axis=1, **figKwargs):
if axis == 0:
dataDf = dataDf.T
dataList = [line.values.ravel() for num, line in dataDf.iterrows()]
dataList2 = [d[~np.isnan(d)] for d in dataList]
plt.figure(**figKwargs)
sns.boxplot(data=dataList2, )
@staticmethod
def plotFunc(func, start=0, end=1, step=0.01):
x = np.arange(start, end, step)
y = np.array([func(i) for i in x])
plt.plot(x, y)
class Test(object):
@staticmethod
def pair_test(series1, series2, series1_name='series1', series2_name='series2',
level=0.05, is_plot=True, is_print=True):
assert len(series1) == len(series2)
if len(series1) <= 100:
print 'Warning: length of data is %d, smaller than 100' % (len(series1),)
dif = np.array(series1) - np.array(series2)
dif_cum = np.cumsum(dif)
corr1 = np.corrcoef(series1, series2)[0, 1]
t_value = np.float(np.mean(dif) / np.sqrt(np.var(dif) / len(dif)))
p_value = 2 * (1 - stats.t.cdf(np.abs(t_value), len(dif)))
if is_plot:
fig = plt.figure(figsize=(20, 15))
fig.suptitle('Pair Test')
ax = fig.add_subplot(211)
plt.plot(np.cumsum(series1), 'b')
plt.plot(np.cumsum(series2), 'g')
plt.title('Cum Return')
plt.legend([series1_name, series2_name], loc='best')
ax.text(0.01, 0.99, 'data length: %d' % (len(series1)),
horizontalalignment='left',
verticalalignment='top',
transform=ax.transAxes, color='red', size=16)
ax = fig.add_subplot(212)
plt.plot(dif_cum)
plt.title('Diff Cum Return')
ax.text(0.01, 0.99, 't_value: %0.4f\np_value: %0.4f\ncorr: %0.4f' % (t_value, p_value, corr1),
horizontalalignment='left',
verticalalignment='top',
transform=ax.transAxes, color='red', size=16)
plt.show()
cv = stats.norm.ppf(1 - level / 2)
is_h0_true = False if p_value < level else True
if is_print:
print ''
print '******* Pair T TEST *******'
if is_h0_true:
print 'h0 is True: diff is not significant'
else:
print 'h0 is False: diff is significant'
print 'p value: %f' % (p_value,)
print 't stat: %f' % (t_value,)
print 'critical value: %f' % (cv,)
return is_h0_true, p_value, t_value, cv
@staticmethod
def jb_test(series, level=0.05, is_print=True):
"""
output: (is_h0_true, p_value, jb_stat, critical value)
"""
series = series[~np.isnan(series)]
if len(series) < 100:
print 'Warning(in JB test): data length: %d' % (len(series),)
skew = stats.skew(series)
kurt = stats.kurtosis(series)
n = len(series)
jb = (n - 1) * (skew ** 2 + kurt ** 2 / 4) / 6
p_value = 1 - stats.chi2.cdf(jb, 2)
cv = stats.chi2.ppf(1 - level, 2)
is_h0_true = False if p_value < level else True
if is_print:
print ''
print '******* JB TEST *******'
print 'skew: %.4f' % (skew,)
print 'kurt: %.4f' % (kurt,)
if is_h0_true:
print 'h0 is True: data is normal'
else:
print 'h0 is False: data is not normal'
print 'p value: %f' % (p_value,)
print 'jb stat: %f' % (jb,)
print 'critical value: %f' % (cv,)
return is_h0_true, p_value, jb, cv
@staticmethod
def box_test(series, lag=10, type_='ljungbox',
level=0.05, is_plot=True, is_print=True):
"""
output: (is_h0_true, p_value, q_stat, critical value)
"""
series = series[~np.isnan(series)]
acf = sm.tsa.acf(series, nlags=lag)
if is_plot:
sm.graphics.tsa.plot_acf(series, lags=lag)
plt.show()
q_stat = sm.tsa.q_stat(acf[1:], len(series), type=type_)[0][-1]
p_value = stats.chi2.sf(q_stat, lag)
cv = stats.chi2.ppf(1 - level, lag)
is_h0_true = False if p_value < level else True
if is_print:
print ''
print '******* Ljung Box TEST *******'
if is_h0_true:
print 'h0 is True: data is independent'
else:
print 'h0 is False: data is not independent'
print 'p value: %f' % (p_value,)
print 'q stat: %f' % (q_stat,)
print 'critical value: %f' % (cv,)
return is_h0_true, p_value, q_stat, cv
class StatisticTools(object):
"""
normality test: JB test
auto-correlation test: Box test
"""
@staticmethod
def find_pca_order(x, thresholds=None, is_plot=True):
"""
input:
thresholds: must has attr '__len__'
default [0.5, 0.8, 0.9, 0.95, 0.99, 0.999]
"""
if thresholds is None:
thresholds = [0.5, 0.8, 0.9, 0.95, 0.99, 0.999, ]
assert hasattr(thresholds, '__len__')
pca = PCA()
pca.fit(x)
ratio_cumsum = np.cumsum(pca.explained_variance_ratio_)
print '-' * 50
i, j = 0, 0
nums = []
while i < len(thresholds) and j < len(ratio_cumsum):
if ratio_cumsum[j] < thresholds[i]:
j += 1
else:
print 'thres:', thresholds[i], '\t\tnums:', j
i += 1
nums.append(j)
print '-' * 50
if is_plot:
plt.plot(pca.explained_variance_ratio_, label='ratio')
plt.plot(ratio_cumsum, label='ratio_cumsum')
plt.legend(loc='best')
plt.show()
return pca
@staticmethod
def find_lasso_para(x, y, paras=None, start_exp=-10, end_exp=-1, ):
"""
Output:
test_paras, variable_num, coefs
"""
x = np.array(x)
y = np.array(y)
x = (x - np.mean(x, axis=0)) / np.std(x, axis=0)
if paras is None:
assert isinstance(start_exp, int)
assert isinstance(end_exp, int)
assert end_exp >= start_exp
paras = [10 ** i for i in range(start_exp, end_exp)]
variable_num = []
params = []
for para in paras:
tmp_model = LassoLars(alpha=para)
tmp_model.fit(sm.add_constant(x), y)
tmp_coef = tmp_model.coef_
variable_num.append(np.sum(tmp_coef != 0))
params.append(tmp_coef)
return paras, variable_num, params
| [
"rxfan@wizardquant.com"
] | rxfan@wizardquant.com |
0a6171fc4cb104471e8b97fb4390f74325a93efd | f25905a321a8ff8d5f75b5bc9bc05167a3a98a8b | /game.py | b5ba5b7ee4e7ff50d1a025fd2b9e19900af2a0e6 | [] | no_license | unknwn-dev/PythonStrategyGame | 3a75bc29a4a2960a1eeaac707015f022547a2acc | 9424f37dfee2d294f7a21ee85b64abb1b0a5843f | refs/heads/master | 2023-08-04T18:09:07.616560 | 2021-09-10T18:41:52 | 2021-09-10T18:41:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,824 | py | from random import randint
import cell
from cell import Cell
from player import Player
from settings import Settings
from units import Units
from tkinter import *
import math
Players = [ Player('tes', "red", 99999, None),
Player("gig", "blue", 000, None),
Player("Olg", "green", 000, None)]
Cells = []
root = Tk()
c = Canvas(root, width=Settings.ScreenRes[0], height=Settings.ScreenRes[1], bg='white')
c.pack()
cell.cellCanv = c
PlayersTurn = 0
UnitsSendNum = 10 #how many units has been sended
SelectedCell = None
def UpUnSnN(*args): #Up UnitSendNum
global UnitsSendNum
UnitsSendNum+=10
if UnitsSendNum > 100:
UnitsSendNum = 10
UpdateGui()
def DwnUnSnN(*args): #Down UnitSendNum
global UnitsSendNum
UnitsSendNum-=10
if UnitsSendNum < 10:
UnitsSendNum = 100
UpdateGui()
def OnClick(cl):
global PlayersTurn
global SelectedCell
print("click" + str(cl.ArrayPos))
checkUnits = not cl.Units == None and cl.Units.Num > 0 and cl.Units.Owner.Name == Players[PlayersTurn].Name
if SelectedCell == None and checkUnits:
SelectedCell = cl
elif not SelectedCell == None:
deltaPos = (SelectedCell.ArrayPos[0] - cl.ArrayPos[0], SelectedCell.ArrayPos[1] - cl.ArrayPos[1])
checkNearOdd = not SelectedCell.ArrayPos[1]%2 == 0 and deltaPos[0] <= 0 and deltaPos[0] >=-1 and deltaPos[1] <= 1 and deltaPos[1] >=-1
checkNearEven = SelectedCell.ArrayPos[1]%2 == 0 and deltaPos[0] <= 1 and deltaPos[0] >= 0 and deltaPos[1] <= 1 and deltaPos[1] >=-1
if checkNearEven or checkNearOdd:
cl.RecUnits(SelectedCell.SendUnits(UnitsSendNum))
SelectedCell = None
PlayersTurn += 1
if PlayersTurn > len(Players)-1:
PlayersTurn = 0
UpdateGui()
cell.OnClickFunc = OnClick
def UpdateGui():
c.itemconfig(gui, text="Turn:"+Players[PlayersTurn].Name+" SelectedUnits:"+str(UnitsSendNum))
if len(Cells) == 0 :
ypos=Settings.CellSize/2
y=0
while ypos <= Settings.NumCells[1] * Settings.CellSize:
XCells = []
x=0
while x < math.ceil(Settings.NumCells[0]):
if y%2 == 0:
xpos = Settings.CellSize * x + Settings.CellSize/2
else:
xpos = Settings.CellSize * (x + 1)
XCells.append(Cell(None, xpos, ypos, Settings.CellSize, None, (x,y)))
x+=1
Cells.append(XCells)
ypos+=Settings.CellSize/1.25
y+=1
gui = c.create_text(20, Settings.ScreenRes[1]-20, anchor="w")
UpdateGui()
root.bind("e",UpUnSnN)
root.bind("q",DwnUnSnN)
for i in range(len(Players)):
Cells[randint(0,Settings.NumCells[0])][randint(0,Settings.NumCells[1])].RecUnits(Units(randint(100,150),Players[i]))
root.mainloop()
| [
"oleg.mulya30@gmail.com"
] | oleg.mulya30@gmail.com |
bce17d2e70d1e8404ce944308c1bee408dd30116 | 6afee49388da5503cbbe96e9965bdf24517f09dd | /IBL.py | 823fe30658e9f38f0449010e28034992ed23ba94 | [] | no_license | sterlingsomers/generic-gridworld | 5df570372295eeefb0e1e3d3ca9a2ad91eef68b3 | d7181b0348e5cf36fe9f53969adcf9773983414e | refs/heads/main | 2022-02-10T15:58:33.875918 | 2021-01-11T20:05:21 | 2021-01-11T20:05:21 | 225,962,617 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,898 | py | import numpy as np
import random
# from autograd import numpy as np
# from autograd import elementwise_grad as egrad
# from autograd import grad
from sklearn.neighbors import KDTree
#TODO: instead of blending as weighted average use tensorflow for ONLY blending. So you assign a cross_entropy function
#TODO: have your data in memory ready for a placeholder and train! The problem will be the recall probabilities as these won't be of any use.
class IBL:
def __init__(self, capacity, num_feats, num_actions, neighbors, temp):
self.capacity = capacity
self.num_feats = num_feats
self.neighbors = neighbors
self.num_actions = num_actions
self.curr_capacity = 0
self.curr_act_capacity = 0
# self.memory = np.array([]) # First row is the timestep = 0. Going down we get more recent instances
self.memory = np.empty((0,self.num_feats))#-np.ones([1,self.num_feats])
#self.memo_actions = -np.ones([1,self.num_actions])
self.memo_actions = np.empty([0,self.num_actions])
self.activations = np.zeros([self.capacity]) # You need it a column vector as the indexing below doesnt work for a row vector
self.tree = None
self.similarity_f = self.relu
self.timestep = 0
self.temp = temp
self.tm = 0#np.zeros([self.capacity]) # General time counter
# NOTES: (below) least recently updated
self.lru = np.zeros([self.capacity])# np.zeros([capacity, num_actions]) # it stores the tm that the
# specific
# instance, decision was
# used. If it hasnt been used it remains 0 so it will be the least recently used.
self.rng = np.random.RandomState(123456)
def softmax(self, x): # Should be applied on a vector and NOT a matrix!
"""Compute softmax values for each sets of matching scores in x."""
e_x = np.exp(x - np.max(x)) # You need a scaling param so 0s wont contribute that much and the small probas become bigger
return e_x / e_x.sum(1).reshape(x.shape[0],1)
def relu(self,x):
x[x < 0] = 0
return x
def tanh(self,x):
return np.tanh(x)
# def add_instance_(self, instance):
# ''' Add an instance to the memory. If it is full remove the oldest one (top vector as we add below --most recent is the bottom).
# TODO: If it is a batch decision input we remove the NUMBER of entries from the top of the list.
# '''
# # Always it will get below for a vector (assuming that you use tables only at the beginning!)
# print('capacity',self.curr_capacity,'/', self.capacity)
# if instance.shape[0] - (self.capacity - self.curr_capacity) > 0:
# indx = instance.shape[0] - (self.capacity - self.curr_capacity)
# print('indx=',indx)
# self.memory[:-indx] = self.memory[indx:] # We shift all instances up one timestep
# self.memory[-indx:] = instance
# # elif ((self.curr_capacity >= self.capacity )):
# # indx = instance.shape[0]
# # print('indx2=', indx)
# # self.memory[:-indx] = self.memory[indx:] # We shift all instances up one timestep
# # self.memory[-indx:] = instance
# # if ((self.curr_capacity >= self.capacity )):
# # self.memory[:-indx] = self.memory[indx:] # We shift all instances up one timestep
# # self.memory[-indx:] = instance
# # self.memory[:-1] = self.memory[1:] # We shift all instances up one timestep
# # self.memory[-1] = instance # we assign the last slot to the new instance
# #self.memory = np.vstack((self.memory, instance))
#
# else:
# #self.memory = np.append(self.memory, instance, axis=0) # Appends horizontal
# self.memory = np.vstack((self.memory, instance))
# self.curr_capacity = self.curr_capacity + instance.shape[0]
#
# # self.tree = KDTree(self.states[:self.curr_capacity]) # till the current capacity (for pre-set static memory)
# self.tree = KDTree(self.memory, metric='manhattan') # choose num of trees!!!
# # if len(instance.shape)>1: # array ONLY IF WE ADD TO THE MEMORY WE INCREASE the capacity
# # self.curr_capacity = self.curr_capacity + instance.shape[0]
# #rows = instance.shape[0]
# # else: # one column vector (n,) # might not need this if input is [array] and not array cauz from (n,)-->[n,1]
# # self.curr_capacity = self.curr_capacity + 1
# #rows = 1
def add_instance(self, instance):
    """Append a batch of state rows to the episodic memory.

    Newest rows live at the bottom of `self.memory`; when the buffer grows
    past `self.capacity` the oldest (top) rows are dropped. The KD-tree
    index is rebuilt after every insertion so lookups see the new rows.

    :param instance: 2-D array of state vectors, shape [batch, dim]
    """
    self.memory = np.vstack((self.memory, instance))
    self.curr_capacity = self.curr_capacity + instance.shape[0]
    overflow = self.curr_capacity - self.capacity
    if overflow > 0:
        # Drop the `overflow` oldest rows with a single slice. This is
        # equivalent to the previous shift-then-np.delete dance but does
        # one copy instead of two O(n) passes.
        self.memory = self.memory[overflow:]
        self.curr_capacity = self.memory.shape[0]
    self.tm += 0.01  # advance the memory's global usage clock
    self.tree = KDTree(self.memory)  # rebuild the nearest-neighbour index
def add(self, instances, values): # FINAL ONE # values are actions or expected return or decisions
    """Insert a batch of (state, value-row) pairs with LRU replacement.

    If the buffer has room, rows are appended at the bottom. Otherwise the
    least-recently-used rows (smallest entries of `self.lru`) are
    overwritten in place first, and only the remainder (if any) is
    appended, so the buffer never exceeds `self.capacity`. Timestamps of
    all touched rows are set to the current clock `self.tm`, and the
    KD-tree index is rebuilt at the end.

    :param instances: state rows, shape [batch, dim]
    :param values: per-action value rows, shape [batch, num_actions]
    """
    num_instances = instances.shape[0] # = how_many_to_add if capacity is not full!
    # self.curr_capacity = self.curr_capacity + num_instances
    if self.curr_capacity + num_instances > self.capacity: # If memo is full find the least recently used
        # instance and substitute its values with the new one. If you use >= then if max capacity is 10 and you
        # are at 8 and you need to add just 2 then old_index will be empty as how_many_to_delete=0. THERE WAS NO
        # ISSUE WITH EMPTY INDEX THOUGH!
        # find the LRU entry (key is the state projection to lower dims)
        how_many_to_delete = (self.curr_capacity + num_instances) - self.capacity # always > 0
        how_many_to_add = num_instances - how_many_to_delete # always >=0 # how many instances to add at the
        # bottom of the memory
        # Below: find the how_many_to_delete instances that minimize the lru (we do not check the whole lru
        # only the correct part of it.
        lru_min_index = np.argpartition(self.lru[:self.curr_capacity], how_many_to_delete)[:how_many_to_delete]
        # old_index = np.argmin(self.lru)
        self.memory[lru_min_index] = instances[:how_many_to_delete] # can we do this? indexed array assigning an array
        self.memo_actions[lru_min_index] = values[:how_many_to_delete]
        # Update timing of instances that just inserted in the position of others
        tms = self.tm * np.ones(how_many_to_delete)
        self.lru[lru_min_index] = tms
        if how_many_to_add > 0:
            # NOTE(review): curr_capacity + how_many_to_add == capacity here,
            # so this slice assignment presumably stays within a pre-allocated
            # lru array of length `capacity` — confirm lru's allocation.
            tms_ = self.tm * np.ones(how_many_to_add)
            self.memory = np.vstack((self.memory, instances[-how_many_to_add:]))
            self.memo_actions = np.vstack((self.memo_actions, values[-how_many_to_add:]))
            self.lru[self.curr_capacity : self.curr_capacity + how_many_to_add] = tms_
            self.curr_capacity += how_many_to_add #num_instances
    else: # (MINE) Update and expand memory
        self.memory = np.vstack((self.memory, instances))
        self.memo_actions = np.vstack((self.memo_actions, values))
        tms = self.tm * np.ones(num_instances)
        self.lru[self.curr_capacity : self.curr_capacity + num_instances] = tms
        self.curr_capacity = self.memory.shape[0]
    # self.tm += 0.01 # update general timer of the memory (independent of which instances are getting updated)
    self.tree = KDTree(self.memory)#, metric='manhattan') # choose num of trees!!! Rebuild the tree!
def add_action(self, decision):
    """Append a batch of decision/action rows to the action memory.

    Mirrors `add_instance`: newest rows at the bottom; once the buffer
    exceeds `self.capacity` the oldest (top) rows are dropped.

    :param decision: 2-D array, one row per decision (e.g. a one-hot
        action vector or a row of per-action values)
    """
    self.memo_actions = np.vstack((self.memo_actions, decision))
    self.curr_act_capacity = self.curr_act_capacity + decision.shape[0]
    overflow = self.curr_act_capacity - self.capacity
    if overflow > 0:
        # Drop the `overflow` oldest rows with a single slice instead of
        # the previous shift-then-np.delete two-pass trim.
        self.memo_actions = self.memo_actions[overflow:]
        self.curr_act_capacity = self.memo_actions.shape[0]
# def add_action_(self, decision):
# '''Add a decision/action to the memory. If it is full remove the oldest one (first element of the array). Instance should be a list.
# TODO: If it is a batch decision input we remove the number of entries from the top of the list.
# :param decision: a binary vector indicating which action was chosen
# '''
# if decision.shape[0] - (self.capacity - self.curr_act_capacity) > 0:
# indx = decision.shape[0] - (self.capacity - self.curr_act_capacity)
# print('indx=',indx)
# self.memo_actions[:-indx] = self.memo_actions[indx:] # We shift all instances up one timestep
# self.memo_actions[-indx:] = decision
# # if self.curr_capacity + decision.shape[0] > self.capacity:
# # indx = self.curr_capacity + decision.shape[0] - self.capacity
# # else:
# # indx = decision.shape[0]
# # if ( (self.curr_act_capacity >= self.capacity) ): # or (self.curr_act_capacity == 0) ):
# # # indx = decision.shape[0]
# # self.memo_actions[:-indx] = self.memo_actions[indx:] # We shift all instances up one timestep
# # self.memo_actions[-indx:] = decision
# #
# # # self.memo_actions[:-1] = self.memo_actions[1:]
# # # self.memo_actions[-1] = decision
# # #self.memo_actions = np.vstack((self.memo_actions, decision))
#
# else:
# self.memo_actions = np.vstack((self.memo_actions, decision))
# self.curr_act_capacity = self.curr_act_capacity + decision.shape[0]
#
# # if len(decision.shape)>1: # array
# # self.curr_act_capacity = self.curr_act_capacity + decision.shape[0]
# # else: # one column vector (n,)
# # self.curr_act_capacity = self.curr_act_capacity + 1
def update_(self, s, a, r):
    """Write the Monte-Carlo return `r` back into the memory for state `s`
    and action `a`.

    Thin wrapper: `peek` with modify=True performs the lookup and the
    update/insertion itself. Note the argument order swap — peek expects
    (state, return, action).
    """
    self.peek(s, r, a, modify=True)
def peek_(self, instance, k):
    """Look up the k nearest stored instances for every probe row.

    Caches the neighbour slices on the object (`sub_memory`,
    `sub_memo_actions`) for the subsequent probability/blending steps.
    NOTE: the neighbours are not assumed to be sorted by distance — the
    first column is just *a* member of the k-NN set, not necessarily the
    closest one.

    :param instance: probe rows, shape [n_queries, dim]
    :param k: number of neighbours to fetch
    :return: (distances, indices), each of shape [n_queries, k]
    """
    distances, indices = self.tree.query(instance, k=k)
    self.sub_memory = self.memory[indices]
    self.sub_memo_actions = self.memo_actions[indices]
    return distances, indices
def update(self, instance, a, value):
    """Write Monte-Carlo returns back into the episodic memory.

    For each row of `instance` (one state per environment):
      * if an identical state is already stored (zero distance to its
        nearest neighbour), the stored Q of the chosen action becomes
        max(old Q, new return) and its LRU timestamp is refreshed;
      * otherwise the state is inserted as a new row whose action values
        default to -10 everywhere except the chosen action, which gets
        the observed return.

    :param instance: batch of (projected) state vectors, shape [batch, dim]
    :param a: chosen action index per batch entry, shape [batch]
    :param value: Monte-Carlo return per batch entry, shape [batch]
    """
    NACTIONS = self.num_actions
    actions = a
    returns = value
    # Nearest stored neighbour of every query; dist == 0 marks an exact match.
    dist, ind = self.tree.query(instance, k=1)
    idxnon0 = np.where(dist != 0)  # batch positions with NO exact match
    # --- Existing entries: keep the larger of stored Q and new return. ---
    if 0 in dist:
        idx0 = np.where(dist == 0)  # batch positions of exact matches
        real_ind = ind[idx0]        # their row indices inside the memory
        a = a.reshape(a.size, 1)    # reshape so fancy indexing lines up
        value = value.reshape(value.size, 1)
        self.lru[ind] = self.tm  # refresh usage time of the touched rows
        self.memo_actions[real_ind, a[idx0]] = np.maximum(self.memo_actions[real_ind, a[idx0]],
                                                          value[idx0])
    # --- New entries: insert unmatched states with their return. ---
    # FIX: np.where returns a tuple, which is ALWAYS truthy, so the old
    # `if idxnon0:` ran even when every query matched; test the index
    # array's size instead.
    if idxnon0[0].size:
        value = returns.reshape(returns.size, 1)
        actions = actions.reshape(actions.size, 1)
        indx_batch = tuple([idxnon0[0]])  # tuple form avoids non-tuple-indexing warnings
        instances = instance[indx_batch]
        # Build the [n_new x NACTIONS] value rows: default -10 everywhere,
        # observed return in the chosen-action column.
        batch_indices = np.arange(0, indx_batch[0].shape[0])
        indx_batch_cols = tuple([batch_indices, actions[idxnon0]])
        decisions = -10 * np.ones([indx_batch[0].shape[0], NACTIONS])
        decisions[indx_batch_cols] = value[idxnon0]
        self.add(instances, decisions)
def estimate(self, instance, knn):
    """Return a Q-value row per query state for the EXPLOITATION phase.

    1. Exact matches: stored action-value rows are retrieved verbatim and
       the matched rows' LRU timestamps refreshed.
    2. Missing entries — including the -10 "empty" slots of retrieved
       rows — are approximated by a probability-weighted blend over the
       k nearest stored neighbours (`peek_` + `probabilities` + `blending`).

    :param instance: batch of state vectors, one per environment
    :param knn: number of neighbours used for the blend
    :return: array of shape [batch, num_actions]
    """
    NACTIONS = self.num_actions
    batch_size = instance.shape[0]
    if self.curr_capacity < knn:
        # Memory too small to query k neighbours: return small random
        # values (removes the need to pre-fill the memory).
        return np.random.normal(0, 0.08, (batch_size, NACTIONS))  # mu, sigma
    decisions = -10 * np.ones([batch_size, NACTIONS])  # -10 marks "unknown"
    dist, ind = self.tree.query(instance, k=1)
    if 0 in dist:  # some queries already have an exact entry in memory
        idx0 = np.where(dist == 0)   # which incoming instances match
        real_ind = ind[idx0]         # their rows in the main memory
        decisions[idx0[0]] = self.memo_actions[real_ind].copy()
        self.lru[real_ind] = self.tm  # refresh usage time
    # FIX: np.where returns a tuple, which is ALWAYS truthy, so the old
    # `if dec_ind:` ran the knn query/blend even when every slot was
    # already filled; test the index array's size instead.
    dec_ind = np.where(decisions == -10)
    if dec_ind[0].size:
        dist, ind = self.peek_(instance, knn)
        probs = self.probabilities(-dist)
        Q = self.blending(probs)  # shape [batch, NACTIONS]
        decisions[dec_ind] = Q[dec_ind]
    return decisions
def matching_score(self, instance):
    """Similarity of `instance` to the cached k-NN rows (`sub_memory`),
    computed as a plain dot product per stored row (linear kernel; no
    tanh/bias applied here).

    :param instance: a single probe vector
    :return: one score per cached neighbour row
    """
    return self.sub_memory.dot(instance)
def probabilities(self, match_score):
    """Map matching scores (e.g. negative distances) to retrieval
    probabilities via a temperature-scaled softmax. (Per the original
    notes, temperatures around 5 gave the best accuracy; 2 for the
    larger dataset.)"""
    return self.softmax(self.temp * match_score)
def blending(self, probabilities):
    """Blend (probability-weighted average) the cached neighbours' value
    rows into one value row per query.

    :param probabilities: retrieval probabilities, shape [n_queries, k]
    :return: blended values, shape [n_queries, num_actions]
    """
    # einsum contracts the neighbour axis:
    #   out[i, c] = sum_j sub_memo_actions[i, j, c] * probabilities[i, j]
    return np.einsum('ijk,ij->ik', self.sub_memo_actions, probabilities)
def choose_action(self, obs, epsilon, knn, nenvs):
    """Epsilon-greedy action selection for a batch of environments.

    With probability `epsilon` a uniformly random action is drawn per env;
    otherwise actions maximise the Q-values produced by `estimate`
    (stored values when available, knn blending otherwise). Ties between
    equal maximal Q-values are broken uniformly at random.

    :param obs: batch of observations/state vectors, one per environment
    :param epsilon: exploration probability in [0, 1]
    :param knn: number of neighbours forwarded to `estimate`
    :param nenvs: number of parallel environments (batch size)
    :return: int array of chosen action indices, shape [nenvs]
    """
    self.tm += 0.01  # advance the memory's usage clock once per decision step
    # EXPLORE
    if self.rng.random_sample() < epsilon:
        return self.rng.choice(range(self.num_actions), nenvs)
    # EXPLOIT: retrieve stored Q-values or blend them from neighbours.
    Q = self.estimate(obs, knn)  # shape [nenvs, num_actions]
    a_max = []
    for q_row in Q:
        # Random tie-breaking among equal maxima. FIX: draw from self.rng
        # (as the explore branch does) instead of the global np.random, so
        # seeding self.rng makes the whole policy reproducible.
        a_max.append(self.rng.choice(np.flatnonzero(q_row == q_row.max())))
    return np.array(a_max)
def saliency(self, point):
    """Saliency calculation: derivative of the blending output with
    respect to an input feature, evaluated at `point`.

    Not implemented yet — placeholder so callers can be wired up first.

    :param point: the input point at which the derivative would be computed
    """
    pass
"kmitsopou@gmail.com"
] | kmitsopou@gmail.com |
ab75c6540bfc599e6d2614a1e00831798e9b1afe | a1234d571331681fb837664aa73725f523ff2da0 | /gym/setup.py | 24b639850692a74e8d9db75fbc73963fe55d9f6f | [
"MIT"
] | permissive | zhxx1987/openaitest | 8034661086e4c190294fd9c5a6cc7f785a9ac0b6 | 16f974815922d67754791628aaa5d32ad5439889 | refs/heads/master | 2020-03-26T22:04:16.821845 | 2018-08-23T04:34:03 | 2018-08-23T04:34:03 | 145,429,316 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,684 | py | from setuptools import setup, find_packages
import sys, os.path
# Don't import gym module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'gym'))
from version import VERSION
# Environment-specific dependencies.
extras = {
'atari': ['atari_py>=0.1.1', 'Pillow', 'PyOpenGL'],
'box2d': ['box2d-py>=2.3.4'],
'classic_control': ['PyOpenGL'],
'mujoco': ['mujoco_py>=1.50', 'imageio'],
'pybullet_envs': ['PyOpenGL'],
'robotics': ['mujoco_py>=1.50', 'imageio'],
}
# Meta dependency groups.
all_deps = []
for group_name in extras:
all_deps += extras[group_name]
extras['all'] = all_deps
setup(name='gym',
version=VERSION,
description='The OpenAI Gym: A toolkit for developing and comparing your reinforcement learning agents.',
url='https://github.com/openai/gym',
author='OpenAI',
author_email='gym@openai.com',
license='',
packages=[package for package in find_packages()
if package.startswith('gym')],
zip_safe=False,
install_requires=[
'scipy', 'numpy>=1.10.4', 'requests>=2.0', 'six', 'pyglet>=1.2.0',
],
extras_require=extras,
package_data={'gym': [
'envs/mujoco/assets/*.xml',
'envs/pybullet_envs/*',
'envs/pybullet_data/*',
'envs/classic_control/assets/*.png',
'envs/robotics/assets/LICENSE.md',
'envs/robotics/assets/fetch/*.xml',
'envs/robotics/assets/hand/*.xml',
'envs/robotics/assets/stls/fetch/*.stl',
'envs/robotics/assets/stls/hand/*.stl',
'envs/robotics/assets/textures/*.png']
},
tests_require=['pytest', 'mock'],
)
| [
"zhangshinshin@gmail.com"
] | zhangshinshin@gmail.com |
a96e8f00e432e1acd61b735d0a17ab0638f63465 | da94217a918b8c3717049e5079875b5e75dee8c7 | /Pwnerrank/Python Decompile/uncompyle6-2.10.1/uncompyle6/parsers/astnode.py | 9aeb3f59282925067a1fae273eea23948d36831e | [
"MIT"
] | permissive | SouthCoded/Cyber-Security | 117e343bef263ec53175370ada4dbc30130c405f | dd85c2a91aadf9d69d9ebf19ff08c5de826fc97a | refs/heads/master | 2023-06-25T14:26:42.272072 | 2021-07-13T12:30:54 | 2021-07-13T12:30:54 | 107,547,220 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,501 | py | import sys
from uncompyle6 import PYTHON3
from uncompyle6.scanners.tok import NoneToken
from spark_parser.ast import AST as spark_AST
# Python 3 moved the builtin `intern` into the sys module; alias it here so
# the rest of the module can call `intern` unconditionally on either version.
if PYTHON3:
    intern = sys.intern
class AST(spark_AST):
    """Abstract-syntax-tree node used by the decompiler.

    Thin subclass of the SPARK parser's AST that adds a None-token test
    and an indented multi-line repr for inspecting grammar reductions.
    """
    def isNone(self):
        """An AST None token. We can't use regular list comparisons
        because AST token offsets might be different"""
        return len(self.data) == 1 and NoneToken == self.data[0]
    def __repr__(self):
        # Render the whole tree from the root: no indent, no sibling number.
        return self.__repr1__('', None)
    def __repr1__(self, indent, sibNum=None):
        """Recursive pretty-printer: one line per node, children indented
        one level deeper; children are numbered when a node has more than
        one of them.

        :param indent: string prepended to this node's line
        :param sibNum: this node's index among its siblings, or None
        """
        rv = str(self.type)
        if sibNum is not None:
            rv = "%2d. %s" % (sibNum, rv)
        enumerate_children = False
        if len(self) > 1:
            # Multiple children: show the count and number each child.
            rv += " (%d)" % (len(self))
            enumerate_children = True
        rv = indent + rv
        indent += ' '
        i = 0
        for node in self:
            if hasattr(node, '__repr1__'):
                # Child is itself an AST node: recurse.
                if enumerate_children:
                    child = node.__repr1__(indent, i)
                else:
                    child = node.__repr1__(indent, None)
            else:
                # Leaf token: delegate to its own formatter.
                inst = node.format(line_prefix='L.')
                if inst.startswith("\n"):
                    # Nuke leading \n
                    inst = inst[1:]
                if enumerate_children:
                    child = indent + "%2d. %s" % (i, inst)
                else:
                    child = indent + inst
                pass
            rv += "\n" + child
            i += 1
        return rv
| [
"duncan.pf2@gmail.com"
] | duncan.pf2@gmail.com |
3fe2c84bde72e2715727d3d95441bd71841b53b0 | f5a4f340da539520c60c4bce08356c6f5c171c54 | /tests/integration/reqs/test_tx.py | e8551a73473e6e6ef046ce1ffa96278212f25855 | [
"ISC",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | yyolk/xrpl-py | e3935c0a0f488793153ca29e9d71c197cf88f857 | e5bbdf458ad83e6670a4ebf3df63e17fed8b099f | refs/heads/master | 2023-07-17T03:19:29.239838 | 2021-07-03T01:24:57 | 2021-07-03T01:24:57 | 355,299,041 | 1 | 0 | ISC | 2021-04-08T05:29:43 | 2021-04-06T18:57:06 | null | UTF-8 | Python | false | false | 528 | py | from tests.integration.integration_test_case import IntegrationTestCase
from tests.integration.it_utils import test_async_and_sync
from tests.integration.reusable_values import OFFER
from xrpl.models.requests import Tx
class TestTx(IntegrationTestCase):
    @test_async_and_sync(globals())
    async def test_basic_functionality(self, client):
        """Fetch the previously-submitted offer transaction by its hash and
        verify the server reports a successful lookup."""
        result = await client.request(Tx(transaction=OFFER.result["hash"]))
        self.assertTrue(result.is_successful())
| [
"noreply@github.com"
] | noreply@github.com |
bc43583f980dc0e9c3943616e02cb5acb73ba03c | 2695d586778c3a19cad843f14f505f3e534f470d | /practice/Dynamic_Programming/Sticker_9465.py | cac0d3e543a6e3506d7453dc877d19c9cfa72409 | [] | no_license | kjh03160/Algorithm_Basic | efdb2473186d0aff983a8c0f961d6b86ce66b0d1 | 24842569237db95629cec38ca9ea8e459857c77e | refs/heads/master | 2023-07-14T21:34:29.074373 | 2021-09-11T10:13:00 | 2021-09-11T10:13:00 | 276,395,958 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 724 | py | # https://www.acmicpc.net/problem/9465
def answer(L):
DP = [[0, 0, 0] for _ in range(len(L[0]))]
DP[0] = [L[0][0], L[1][0], 0]
for i in range(1, len(L[0])):
DP[i][0] = max(DP[i - 1][1], DP[i - 1][2]) + L[0][i] # 현재 위를 선택할 때
DP[i][1] = max(DP[i - 1][0], DP[i - 1][2]) + L[1][i] # 현재 아래를 선택할 때
DP[i][2] = max(DP[i - 1]) # 아무것도 선택 안할 때
return max(DP[-1])
import sys
input = sys.stdin.readline  # fast line reader; shadows the builtin on purpose

# Read every test case up front, then answer them in order.
case_count = int(input())
cases = []
for _ in range(case_count):
    n = int(input())  # number of columns; the rows are parsed whole anyway
    rows = [list(map(int, input().split())) for _ in range(2)]
    cases.append(rows)
for case in cases:
    print(answer(case))
"kis03160@likelion.org"
] | kis03160@likelion.org |
8e869543ad0440d33a31eb49e8ae70c926b3def4 | 5e81643567dea09a331a5b155a9e365b30aea276 | /events/migrations/0001_initial.py | f22cb6c7a4940cb9222a3be2676c297d003271c9 | [] | no_license | vitaliyharchenko/sportcourts | 1b260bc1bdda5e904fea18f1c137d7926d52978b | 05d1306c1df5bba73bda5b7097368340fd1b6a9d | refs/heads/master | 2021-01-19T10:46:17.367784 | 2015-09-28T09:14:47 | 2015-09-28T09:14:47 | 37,144,572 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,761 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Auto-generated initial schema for the events app: Amplua, Event,
    GameType, SportType, UserGameAction, the Game subclass of Event, and
    their foreign-key relations.

    NOTE(review): produced by `makemigrations`; don't hand-edit the
    operations below — create a follow-up migration instead.
    """
    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('courts', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Amplua',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name=b'\xd0\x9d\xd0\xb0\xd0\xb7\xd0\xb2\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5')),
            ],
            options={
                'verbose_name': '\u0430\u043c\u043f\u043b\u0443\u0430',
                'verbose_name_plural': '\u0430\u043c\u043f\u043b\u0443\u0430',
            },
        ),
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name=b'\xd0\x9d\xd0\xb0\xd0\xb7\xd0\xb2\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5')),
                ('description', models.CharField(max_length=300, verbose_name=b'\xd0\x9e\xd0\xbf\xd0\xb8\xd1\x81\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5')),
                ('is_public', models.BooleanField(default=True, help_text=b'\xd0\x94\xd0\xb5\xd0\xbb\xd0\xb0\xd0\xb5\xd1\x82 \xd0\xb2\xd0\xb8\xd0\xb4\xd0\xb8\xd0\xbc\xd1\x8b\xd0\xbc \xd0\xb2 \xd0\xbf\xd0\xbe\xd1\x82\xd0\xbe\xd0\xba\xd0\xb5', verbose_name=b'\xd0\x9f\xd1\x83\xd0\xb1\xd0\xbb\xd0\xb8\xd1\x87\xd0\xbd\xd1\x8b\xd0\xb9 \xd1\x81\xd1\x82\xd0\xb0\xd1\x82\xd1\x83\xd1\x81')),
                ('capacity', models.IntegerField(verbose_name=b'\xd0\x92\xd0\xbc\xd0\xb5\xd1\x81\xd1\x82\xd0\xb8\xd0\xbc\xd0\xbe\xd1\x81\xd1\x82\xd1\x8c')),
                ('cost', models.PositiveIntegerField(verbose_name=b'\xd0\xa6\xd0\xb5\xd0\xbd\xd0\xb0')),
                ('datetime', models.DateTimeField(verbose_name=b'\xd0\x94\xd0\xb0\xd1\x82\xd0\xb0 \xd0\xbf\xd1\x80\xd0\xbe\xd0\xb2\xd0\xb5\xd0\xb4\xd0\xb5\xd0\xbd\xd0\xb8\xd1\x8f')),
                ('datetime_to', models.DateTimeField(verbose_name=b'\xd0\x94\xd0\xb0\xd1\x82\xd0\xb0 \xd0\xbe\xd0\xba\xd0\xbe\xd0\xbd\xd1\x87\xd0\xb0\xd0\xbd\xd0\xb8\xd1\x8f', blank=True)),
            ],
            options={
                'ordering': ['-datetime'],
                'get_latest_by': 'datetime',
                'verbose_name': '\u0421\u043e\u0431\u044b\u0442\u0438\u0435',
                'verbose_name_plural': '\u0421\u043e\u0431\u044b\u0442\u0438\u044f',
            },
        ),
        migrations.CreateModel(
            name='GameType',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name=b'\xd0\x9d\xd0\xb0\xd0\xb7\xd0\xb2\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5 \xd1\x82\xd0\xb8\xd0\xbf\xd0\xb0 \xd0\xb8\xd0\xb3\xd1\x80\xd1\x8b')),
            ],
            options={
                'verbose_name': '\u0422\u0438\u043f \u0438\u0433\u0440\u044b',
                'verbose_name_plural': '\u0422\u0438\u043f\u044b \u0438\u0433\u0440\u044b',
            },
        ),
        migrations.CreateModel(
            name='SportType',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(unique=True, max_length=50, verbose_name=b'\xd0\x9d\xd0\xb0\xd0\xb7\xd0\xb2\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5 \xd0\xb2\xd0\xb8\xd0\xb4\xd0\xb0 \xd1\x81\xd0\xbf\xd0\xbe\xd1\x80\xd1\x82\xd0\xb0')),
            ],
            options={
                'verbose_name': '\u0412\u0438\u0434 \u0441\u043f\u043e\u0440\u0442\u0430',
                'verbose_name_plural': '\u0412\u0438\u0434\u044b \u0441\u043f\u043e\u0440\u0442\u0430',
            },
        ),
        migrations.CreateModel(
            name='UserGameAction',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('datetime', models.DateTimeField(auto_now=True, verbose_name=b'\xd0\x94\xd0\xb0\xd1\x82\xd0\xb0 \xd0\xb4\xd0\xb5\xd0\xb9\xd1\x81\xd1\x82\xd0\xb2\xd0\xb8\xd1\x8f')),
                ('action', models.PositiveSmallIntegerField(verbose_name=b'\xd0\x94\xd0\xb5\xd0\xb9\xd1\x81\xd1\x82\xd0\xb2\xd0\xb8\xd0\xb5', choices=[(1, b'\xd0\x97\xd0\xb0\xd0\xbf\xd0\xb8\xd1\x81\xd0\xb0\xd0\xbb\xd1\x81\xd1\x8f'), (2, b'\xd0\x9e\xd1\x82\xd0\xbf\xd0\xb8\xd1\x81\xd0\xb0\xd0\xbb\xd1\x81\xd1\x8f'), (3, b'\xd0\x92 \xd1\x80\xd0\xb5\xd0\xb7\xd0\xb5\xd1\x80\xd0\xb2\xd0\xb5'), (4, b'\xd0\x92\xd1\x8b\xd1\x88\xd0\xb5\xd0\xbb \xd0\xb8\xd0\xb7 \xd1\x80\xd0\xb5\xd0\xb7\xd0\xb5\xd1\x80\xd0\xb2\xd0\xb0'), (5, b'\xd0\x9f\xd0\xbe\xd1\x81\xd0\xb5\xd1\x82\xd0\xb8\xd0\xbb'), (6, b'\xd0\x9d\xd0\xb5 \xd0\xbf\xd1\x80\xd0\xb8\xd1\x88\xd0\xb5\xd0\xbb'), (7, b'\xd0\x9d\xd0\xb5 \xd0\xb7\xd0\xb0\xd0\xbf\xd0\xbb\xd0\xb0\xd1\x82\xd0\xb8\xd0\xbb')])),
                ('user', models.ForeignKey(verbose_name=b'\xd0\x9f\xd0\xbe\xd0\xbb\xd1\x8c\xd0\xb7\xd0\xbe\xd0\xb2\xd0\xb0\xd1\x82\xd0\xb5\xd0\xbb\xd1\x8c', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': '\u0437\u0430\u043f\u0438\u0441\u044c \u043d\u0430 \u0438\u0433\u0440\u0443',
                'verbose_name_plural': '\u0437\u0430\u043f\u0438\u0441\u0438 \u043d\u0430 \u0438\u0433\u0440\u0443',
            },
        ),
        migrations.CreateModel(
            name='Game',
            fields=[
                ('event_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='events.Event')),
                ('reserved_count', models.PositiveIntegerField(default=0, verbose_name=b'\xd0\xa0\xd0\xb5\xd0\xb7\xd0\xb5\xd1\x80\xd0\xb2\xd0\xbd\xd1\x8b\xd1\x85 \xd0\xbc\xd0\xb5\xd1\x81\xd1\x82')),
                ('deleted', models.BooleanField(default=False, verbose_name=b'\xd0\x98\xd0\xb3\xd1\x80\xd0\xb0 \xd1\x83\xd0\xb4\xd0\xb0\xd0\xbb\xd0\xb5\xd0\xbd\xd0\xb0')),
                ('is_reported', models.BooleanField(default=False, verbose_name=b'\xd0\x9e\xd1\x82\xd1\x87\xd0\xb5\xd1\x82 \xd0\xbe\xd1\x82\xd0\xbf\xd1\x80\xd0\xb0\xd0\xb2\xd0\xbb\xd0\xb5\xd0\xbd')),
                ('coach', models.ForeignKey(related_name='coach', blank=True, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'verbose_name': '\u0438\u0433\u0440\u0430',
                'verbose_name_plural': '\u0438\u0433\u0440\u044b',
            },
            bases=('events.event',),
        ),
        migrations.AddField(
            model_name='gametype',
            name='sporttype',
            field=models.ForeignKey(verbose_name=b'\xd0\x92\xd0\xb8\xd0\xb4 \xd1\x81\xd0\xbf\xd0\xbe\xd1\x80\xd1\x82\xd0\xb0', to='events.SportType'),
        ),
        migrations.AddField(
            model_name='event',
            name='content_type',
            field=models.ForeignKey(editable=False, to='contenttypes.ContentType', null=True),
        ),
        migrations.AddField(
            model_name='event',
            name='court',
            field=models.ForeignKey(verbose_name=b'\xd0\x9f\xd0\xbb\xd0\xbe\xd1\x89\xd0\xb0\xd0\xb4\xd0\xba\xd0\xb0', to='courts.Court'),
        ),
        migrations.AddField(
            model_name='event',
            name='created_by',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='event',
            name='gametype',
            field=models.ForeignKey(verbose_name=b'\xd0\xa2\xd0\xb8\xd0\xbf \xd0\xb8\xd0\xb3\xd1\x80\xd1\x8b', to='events.GameType'),
        ),
        migrations.AddField(
            model_name='event',
            name='responsible_user',
            field=models.ForeignKey(related_name='responsible_games', verbose_name=b'\xd0\x9e\xd1\x82\xd0\xb2\xd0\xb5\xd1\x82\xd1\x81\xd1\x82\xd0\xb2\xd0\xb5\xd0\xbd\xd0\xbd\xd1\x8b\xd0\xb9', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='amplua',
            name='sporttype',
            field=models.ForeignKey(related_name='+', verbose_name=b'\xd0\x92\xd0\xb8\xd0\xb4 \xd1\x81\xd0\xbf\xd0\xbe\xd1\x80\xd1\x82\xd0\xb0', to='events.SportType'),
        ),
        migrations.AddField(
            model_name='usergameaction',
            name='game',
            field=models.ForeignKey(verbose_name=b'\xd0\x98\xd0\xb3\xd1\x80\xd0\xb0', to='events.Game'),
        ),
    ]
| [
"harchenko.grape@gmail.com"
] | harchenko.grape@gmail.com |
7b631e19e522191e7c94a877f0b9acf61a4e7f76 | 1e05da139b414823fcc52759da7f95ffc5965a60 | /mysite/settings.py | 8b53b2c600b816d7145769ce0461c2864e52d407 | [] | no_license | elabdesunil/django-polls | 15c30919727fa880a1d3fb1fcfd8c5e0044695a7 | 54f2d7d25ed9b4071a3fdd47b64d6fc92c69c230 | refs/heads/master | 2022-12-22T21:52:50.005901 | 2019-09-22T19:05:17 | 2019-09-22T19:05:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,164 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load it
# from the environment (e.g. os.environ) before any public deployment.
SECRET_KEY = 'sn*+slzg^=#w@%lu6kqsr%75r_$hk%yz9f2lt8syd-$th99$ta'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list: with DEBUG=True Django still accepts localhost-style hosts.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'polls.apps.PollsConfig',  # project app, registered via explicit AppConfig
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Project-level template directory in addition to per-app templates.
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
# SQLite file database — fine for development, not for production traffic.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/New_York'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| [
"sunilale0@gmail.com"
] | sunilale0@gmail.com |
142d006d519df3979de42375aa3d50a2ee755727 | 79f9aead4431895063d1537083a43c2f971a4b1a | /user_follow_rate/getid_from_mysql.py | 45d6398f9b06609eeccd41decf5d1362cf9cd124 | [] | no_license | mocorr/Machine_Learning | fd760a3121be38146dca045ff5cf77aeedfa91ea | 393751cc0e780f754faa7346c54da29c41a54bd6 | refs/heads/master | 2020-11-26T06:17:11.993842 | 2016-01-14T11:16:15 | 2016-01-14T11:16:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,719 | py | #!/usr/bin/env python
#coding=utf-8
import MySQLdb
from DBsetting import *
def connect_mysql(database):
    """Open a MySQL connection to ``database`` and return (connection, cursor).

    Credentials come from the DBsetting module constants.
    """
    connection = MySQLdb.connect(host=MYSQL_HOST, user=MYSQL_USER,
                                 passwd=MYSQL_PASSWORD, db=database,
                                 charset="utf8")
    return connection, connection.cursor()
def fetch_userid_from_listid(cursor, list_id):
    """Look up the user_id owning the given list; return it as a string, or None.

    NOTE(review): the query is built by string concatenation — confirm list_id
    is never user-controlled (SQL injection risk).
    """
    query = 'select user_id from user_info_list where list_id =' + list_id
    matches = cursor.execute(query)
    if matches == 0:
        return None
    return str(cursor.fetchone()[0])
def fetch_shopid_brandid_from_goodsid(cursor, good_id):
    """Return (shop_id, brand_id) for a goods row as strings, or (None, None).

    NOTE(review): string-concatenated SQL — confirm good_id is trusted input.
    """
    query = 'select shop_id, brand_id from goods_info_ext where goods_id=' + good_id
    if cursor.execute(query) == 0:
        return None, None
    shop_int, brand_int = cursor.fetchone()
    return str(shop_int), str(brand_int)
def close_mysql(conn):
    """Close the MySQL connection previously opened by connect_mysql."""
    conn.close()
def get_distinct_user_id(cursor):
    """Return every distinct user_id present in user_info_follow as a list."""
    query = 'select distinct user_id from user_info_follow'
    cursor.execute(query)
    # Each fetched row is a 1-tuple; unwrap to the bare id.
    return [row[0] for row in cursor.fetchall()]
def get_followIdTypeTime_from_userid(cursor, user_id):
    """Return (follow_object_id, follow_object_type_id, follow_create_time)
    rows for the given user, as a list of tuples.

    NOTE(review): string-concatenated SQL — confirm user_id is trusted input.
    """
    query = 'select follow_object_id, follow_object_type_id, follow_create_time from user_info_follow where user_id=' + user_id
    cursor.execute(query)
    return list(cursor.fetchall())
if __name__ == '__main__':
    # Manual smoke test: exercise the lookup helpers against the live database.
    # (Python 2 print statements — this module predates Python 3.)
    conn, cursor = connect_mysql('api_wave87_com')
    listid = '132'
    userid = fetch_userid_from_listid(cursor, listid)
    print 'userid get from list id: ', userid
    goodid = '15365'
    shopid, brandid = fetch_shopid_brandid_from_goodsid(cursor, goodid)
    print 'shopid and brandid get from goods id: ', shopid, brandid
    close_mysql(conn)
| [
"240083998@qq.com"
] | 240083998@qq.com |
e6af48993c7c26fd4ed95950dd100596814de47c | 05ceedee44c66ece52a9d7df9dc8ac2df536557b | /monero_glue/messages/StellarGetPublicKey.py | 0e6305747025ab95cfb7e0ed611cbf5e6901497d | [
"MIT"
] | permissive | tsusanka/monero-agent | 1e48042f7cbb77b3d3f6262c97de71da4f6beb3d | 526ca5a57714cdca3370021feda3ed5ad3e3ea1a | refs/heads/master | 2020-03-25T11:43:16.967931 | 2018-08-06T15:07:05 | 2018-08-06T15:07:05 | 143,745,130 | 1 | 0 | null | 2018-08-06T15:06:04 | 2018-08-06T15:06:03 | null | UTF-8 | Python | false | false | 614 | py | # Automatically generated by pb2py
# fmt: off
from .. import protobuf as p
if __debug__:
try:
from typing import List
except ImportError:
List = None # type: ignore
class StellarGetPublicKey(p.MessageType):
    # Request message asking the device for a Stellar public key.
    # Generated by pb2py (see file header) — avoid editing by hand.
    MESSAGE_WIRE_TYPE = 200
    FIELDS = {
        # address_n: repeated uvarint — presumably a BIP-32 derivation path
        # (Trezor convention); confirm against the .proto definition.
        1: ('address_n', p.UVarintType, p.FLAG_REPEATED),
        2: ('show_display', p.BoolType, 0),
    }

    def __init__(
        self,
        address_n: List[int] = None,
        show_display: bool = None,
    ) -> None:
        # Repeated fields default to an empty list rather than None.
        self.address_n = address_n if address_n is not None else []
        self.show_display = show_display
"dusan.klinec@gmail.com"
] | dusan.klinec@gmail.com |
3bcdff560e8830c41505379d37e1379df373635b | a9525859dcc421c446a0d501f2d30882fb73b707 | /node_modules/mongoose/node_modules/mongodb/node_modules/mongodb-core/node_modules/bson/build/config.gypi | c845bb30ecb4ece68143fb356035a3fb365a63db | [
"MIT",
"Apache-2.0"
] | permissive | TwilioDevEd/video-service-node | d5c3d3cdf8a96ef5a5d714585ee0e1101e5c1390 | 5527dd2bfb16e6606855fbcc76187a8bed66f559 | refs/heads/master | 2021-11-28T15:45:37.249071 | 2019-03-13T22:55:03 | 2019-03-13T22:55:03 | 34,689,393 | 4 | 4 | NOASSERTION | 2021-09-01T21:34:45 | 2015-04-27T20:26:09 | JavaScript | UTF-8 | Python | false | false | 3,231 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 1,
"host_arch": "x64",
"node_install_npm": "true",
"node_prefix": "",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_unsafe_optimizations": 0,
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"openssl_no_asm": 0,
"python": "/usr/bin/python",
"target_arch": "x64",
"v8_enable_gdbjit": 0,
"v8_no_strict_aliasing": 1,
"v8_use_snapshot": "false",
"want_separate_host_toolset": 0,
"nodedir": "/Users/kwhinnery/.node-gyp/0.10.33",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"save_dev": "",
"browser": "",
"viewer": "man",
"rollback": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"shell": "/bin/bash",
"parseable": "",
"shrinkwrap": "true",
"email": "kevin.whinnery@gmail.com",
"init_license": "ISC",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"registry": "https://registry.npmjs.org/",
"fetch_retries": "2",
"npat": "",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"spin": "true",
"cache_lock_retries": "10",
"cafile": "",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"proprietary_attribs": "true",
"json": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/kwhinnery/.npm-init.js",
"userconfig": "/Users/kwhinnery/.npmrc",
"node_version": "0.10.33",
"user": "1288690226",
"save": "true",
"editor": "vi",
"tag": "latest",
"global": "",
"username": "kwhinnery",
"optional": "true",
"bin_links": "true",
"force": "",
"searchopts": "",
"depth": "Infinity",
"rebuild_bundle": "true",
"searchsort": "name",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"strict_ssl": "true",
"dev": "",
"fetch_retry_factor": "10",
"group": "410487729",
"save_exact": "",
"cache_lock_stale": "60000",
"version": "",
"cache_min": "10",
"cache": "/Users/kwhinnery/.npm",
"searchexclude": "",
"color": "true",
"save_optional": "",
"user_agent": "npm/1.4.28 node/v0.10.33 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"umask": "18",
"git": "git",
"init_author_name": "",
"onload_script": "",
"tmp": "/var/folders/sl/7xmch_4s0x109yzxgl85pqg96czpjk/T",
"unsafe_perm": "true",
"link": "",
"prefix": "/usr/local"
}
}
| [
"kevin.whinnery@gmail.com"
] | kevin.whinnery@gmail.com |
945d0014a2407619a8a5caf94695cf35c28e0a16 | 55bdaf0f7c0642e21ed446a47d4ec7ab4a9451a9 | /tobuscando/dashboard/forms.py | fadfbdbe1815d0bb0f7a8afcab2cef5850a13fac | [] | no_license | edsonlb/tobuscando | 178863e8d27494c4b3638383360f99e03646891b | 178859b8fb0e85b932c4aaa33e5eac52a39ac831 | refs/heads/master | 2021-01-20T09:32:11.811733 | 2017-05-04T12:50:32 | 2017-05-04T12:50:32 | 90,261,543 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,404 | py | # coding: utf-8
from django import forms
from django.utils.translation import ugettext as _
from tobuscando.core.models import Person
from tobuscando.ads.models import Offer
class ProfileForm(forms.ModelForm):
    """Profile-editing form for ``Person``; account identifiers are rendered
    read-only and the password/last_login fields are hidden."""

    def __init__(self, *args, **kwargs):
        super(ProfileForm, self).__init__(*args, **kwargs)
        # Override the model's label for the first-name field.
        self.fields['first_name'].label = _(u'Nome')
        # Drop Django's default username help text on the profile page.
        self.fields['username'].help_text = None

    class Meta:
        model = Person
        fields = (
            'avatar', 'first_name', 'username', 'email', 'phone', 'cellphone',
            'zipcode', 'address', 'number', 'district', 'city', 'state', 'country',
            'language', 'facebook_link', 'twitter_link', 'gplus_link',
            'notification1', 'notification2', 'notification3', 'notification4',
            'date_joined', 'last_login', 'password'
        )
        widgets = {
            # Shown but not editable from this form.
            'username': forms.TextInput(attrs={'readonly': 'readonly'}),
            'email': forms.TextInput(attrs={'readonly': 'readonly'}),
            # Present in `fields` but never displayed.
            'password': forms.HiddenInput(),
            'last_login': forms.HiddenInput(),
            #'date_joined': forms.HiddenInput()
        }
class OfferResponseForm(forms.ModelForm):
    """Form for replying to an ``Offer``; the linkage fields (parent offer,
    person, ad) are fixed by the view, so they are rendered as hidden inputs."""

    class Meta:
        model = Offer
        # Explicitly opt in to every model field.  A ModelForm with neither
        # ``fields`` nor ``exclude`` raises ImproperlyConfigured on
        # Django >= 1.8; '__all__' preserves the old implicit behaviour.
        fields = '__all__'
        widgets = {
            'parent': forms.HiddenInput(),
            'person': forms.HiddenInput(),
            'ad': forms.HiddenInput(),
        }
| [
"oliveira.matheusde@gmail.com"
] | oliveira.matheusde@gmail.com |
eb7d6a4abda13af08dead3330c0e64c29fd40e93 | b012caadf2bbfa34db5d0144accc5aeb02f26c68 | /keystone/common/sql/contract_repo/versions/029_placeholder.py | a96cd6f3625240fefa55dfdb8f0df785e3aa29f3 | [
"Apache-2.0"
] | permissive | sapcc/keystone | c66345df04af12066ec27ad93959da7b0b742fdc | 03a0a8146a78682ede9eca12a5a7fdacde2035c8 | refs/heads/stable/train-m3 | 2023-08-20T07:22:57.504438 | 2023-03-06T15:56:44 | 2023-03-06T15:56:44 | 92,154,503 | 0 | 6 | Apache-2.0 | 2023-03-24T22:13:22 | 2017-05-23T09:27:56 | Python | UTF-8 | Python | false | false | 754 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This is a placeholder for Pike backports. Do not use this number for new
# Queens work. New Queens work starts after all the placeholders.
def upgrade(migrate_engine):
    """Intentional no-op: this slot is reserved for a potential Pike backport
    and only exists to keep the migration numbering contiguous."""
    pass
| [
"lbragstad@gmail.com"
] | lbragstad@gmail.com |
3deacc17b483fd79573c192526fc20b8ae69b30f | be2a81f03e8a2dac7d356dde7a3ffdcfe3f77e00 | /providers/com/biomedcentral/migrations/0002_favicon.py | f3d7c663a9a58c872689d4481f4d3d62cbe13f76 | [
"Apache-2.0"
] | permissive | Stevenholloway/SHARE | 4193bbd3ca50765a24bf21c0cc14438175fbb678 | b9759106d12c2ff548bad22c4be8650e9f41e61e | refs/heads/develop | 2021-01-21T19:13:35.205983 | 2017-02-23T14:45:46 | 2017-02-23T14:45:46 | 63,431,390 | 0 | 0 | null | 2016-07-15T15:17:45 | 2016-07-15T15:17:44 | null | UTF-8 | Python | false | false | 463 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-02-10 14:32
from __future__ import unicode_literals
from django.db import migrations
import share.robot
class Migration(migrations.Migration):
    # Data migration: runs the shared favicon helper for the
    # 'com.biomedcentral' harvester source (see migration name: favicon).

    dependencies = [
        ('com.biomedcentral', '0001_initial'),
        ('share', '0018_store_favicons'),  # introduces favicon storage
    ]

    operations = [
        migrations.RunPython(
            # Reusable migration callable parameterized by robot/source name.
            code=share.robot.RobotFaviconMigration('com.biomedcentral'),
        ),
    ]
| [
"chriskseto@gmail.com"
] | chriskseto@gmail.com |
50f1b2c6c3f6bec0a574850bc96f48c8683609c8 | cd0987589d3815de1dea8529a7705caac479e7e9 | /webkit/WebKitTools/Scripts/webkitpy/tool/steps/build.py | 10fe1a806ce51955b95b23099c0fc1bcef93962e | [] | no_license | azrul2202/WebKit-Smartphone | 0aab1ff641d74f15c0623f00c56806dbc9b59fc1 | 023d6fe819445369134dee793b69de36748e71d7 | refs/heads/master | 2021-01-15T09:24:31.288774 | 2011-07-11T11:12:44 | 2011-07-11T11:12:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,415 | py | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
from webkitpy.common.system.deprecated_logging import log
class Build(AbstractStep):
    """Tool step that builds WebKit in the configured style(s)."""

    @classmethod
    def options(cls):
        """Command-line options this step understands, on top of the base set."""
        return AbstractStep.options() + [
            Options.build,
            Options.quiet,
            Options.build_style,
        ]

    def build(self, build_style):
        """Run the port's build command for one style, raising on failure."""
        command = self.port().build_webkit_command(build_style=build_style)
        self._tool.executive.run_and_throw_if_fail(command, self._options.quiet)

    def run(self, state):
        # Building is opt-in; skip entirely unless --build was given.
        if not self._options.build:
            return
        log("Building WebKit")
        if self._options.build_style == "both":
            styles = ["debug", "release"]
        else:
            styles = [self._options.build_style]
        for style in styles:
            self.build(style)
| [
"sdevitt@rim.com"
] | sdevitt@rim.com |
fd970270a1867d7e4d1426afa5d8c81c3785cf53 | cbb26b8c9722e33eaabecfee954ae668206dc431 | /mp2-code/solve.py | 51cc469b5884cd5e70686580a9a781d06f3f1eda | [] | no_license | jordanwu97/CS440-AI | dfa6063d745ec032fc812b1c50b17c82180af8b6 | 893a42b4d1b2f708d08f4518896bd8aaddcf1af3 | refs/heads/master | 2020-04-18T09:15:18.587710 | 2019-04-23T00:32:13 | 2019-04-23T00:32:13 | 167,427,817 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,827 | py | import numpy as np
import time
# modified ALGORITHM X code from https://www.cs.mcgill.ca/~aassaf9/python/algorithm_x.html
# modified ALGORITHM X code from https://www.cs.mcgill.ca/~aassaf9/python/algorithm_x.html
class ALGOX(object):
    """Knuth's Algorithm X exact-cover solver over dict-of-sets structures.

    ``X`` is the list of constraint columns to cover; ``Y`` maps each
    candidate row to the list of columns that row satisfies.
    """

    def __init__(self, X, Y):
        self.X = ALGOX._preprocess(X, Y)
        self.Y = Y

    def solve(self):
        """Return (found, rows): one exact cover of X, if any exists."""
        return ALGOX._solve(self.X, self.Y)

    @staticmethod
    def _solve(X, Y, solution=None):
        # BUGFIX: the original used a mutable default argument (solution=[]),
        # which leaked partial solutions between independent solver instances.
        if solution is None:
            solution = []
        if not X:
            # Every column covered: snapshot and return the current selection.
            return True, list(solution)
        # Choose the column with the fewest candidate rows
        # ("fill out a position with minimum choice").
        c = min(X, key=lambda c: len(X[c]))
        # Iteratively try each row that covers that column.
        for r in list(X[c]):
            solution.append(r)
            cols = ALGOX._select(X, Y, r)
            ret, sol = ALGOX._solve(X, Y, solution)
            if ret:
                return True, sol
            ALGOX._deselect(X, Y, r, cols)
            solution.pop()
        return False, []

    @staticmethod
    def _select(X, Y, r):
        """Commit row r: remove its columns and every conflicting row."""
        cols = []
        for j in Y[r]:
            for i in X[j]:
                for k in Y[i]:
                    if k != j:
                        X[k].remove(i)
            cols.append(X.pop(j))
        return cols

    @staticmethod
    def _deselect(X, Y, r, cols):
        """Undo _select(r); columns are restored in reverse removal order."""
        for j in reversed(Y[r]):
            X[j] = cols.pop()
            for i in X[j]:
                for k in Y[i]:
                    if k != j:
                        X[k].add(i)

    @staticmethod
    def _preprocess(X, Y):
        """Invert Y into {column: set(rows covering that column)}."""
        Xnew = {j: set() for j in X}
        for i in Y:
            for j in Y[i]:
                Xnew[j].add(i)
        return Xnew
# check if pent can be added via element wise multiplication of pent and board
def can_add_pent(board, pent, coord):
if coord[0] < 0 or coord[1] < 0:
return False
if coord[0] + pent.shape[0] > board.shape[0] or coord[1] + pent.shape[1] > board.shape[1]:
return False
temp = np.multiply(board[coord[0]:coord[0]+pent.shape[0], coord[1]:coord[1]+pent.shape[1]], pent)
return not np.any(temp)
# add pent to board, if not possible, leave board unchanged
def add_pentomino(board, pent, coord):
# check for overlap
if not can_add_pent(board,pent,coord):
return False
board[coord[0]:coord[0]+pent.shape[0], coord[1]:coord[1]+pent.shape[1]] += pent
return True
# remove pent from board
def del_pentomino(board, pent, coord):
board[coord[0]:coord[0]+pent.shape[0], coord[1]:coord[1]+pent.shape[1]] -= pent
def generateAllPents(pents):
    """Return, for each input pent, the list of its distinct orientations
    (up to 4 rotations, each optionally mirrored — at most 8 per pent)."""
    def fingerprint(arr):
        # String form captures shape and values; hashing keeps the set small.
        return hash(str(arr))

    all_pents = []
    for pent in pents:
        seen = set()
        orientations = []
        rotated = pent
        for _ in range(4):          # four 90-degree rotations
            candidate = rotated
            for _ in range(2):      # original and left-right mirror
                key = fingerprint(candidate)
                if key not in seen:
                    seen.add(key)
                    orientations.append(candidate)
                candidate = np.fliplr(candidate)
            rotated = np.rot90(rotated)
        all_pents.append(orientations)
    return all_pents
def boardCoord2IDX(board, coord):
    """Flatten a (row, col) board coordinate to its row-major cell index."""
    row, col = coord
    return row * board.shape[1] + col
def generateMapping(board, all_pents):
    """Build the Algorithm X row dictionary.

    Each legal placement (pent, orientation, coordinate) maps to the list of
    constraint columns it satisfies: one negative "pent #i used" column
    followed by the indices of the board cells it covers.
    """
    Y = {}
    for pent_idx, orientations in enumerate(all_pents):
        for orient_idx, pent in enumerate(orientations):
            for coord, _ in np.ndenumerate(board):
                # Tentatively stamp the pent; skip coordinates where it cannot go.
                if not add_pentomino(board, pent, coord):
                    continue
                covered = [boardCoord2IDX(board, c) for c in np.argwhere(board > 0)]
                # -(pent_idx + 1) encodes "pent #pent_idx was used once".
                Y[(pent_idx, orient_idx, coord)] = [-(pent_idx + 1)] + covered
                # Restore the board before probing the next placement.
                del_pentomino(board, pent, coord)
    return Y
def solve(board, pents):
    """Tile ``board`` with ``pents`` by reducing the puzzle to exact cover and
    running Algorithm X; returns a list of (pent_array, coord) placements and
    prints the solved board."""
    # reformat board so empty space = 0, blocked off = -1
    board = board.astype(int) - 1
    # generate all pents (every distinct rotation/reflection of each piece)
    all_pents = generateAllPents(pents)
    # Y is subsets of numbers we want to chose to cover X
    Y = generateMapping(board, all_pents)
    # X is set of numbers we want to cover
    # [all pents used once ... , all coordinates used ... ]
    X = list(range(-len(pents),0)) + [ boardCoord2IDX(board,coord) for coord in np.argwhere(board==0)]
    _, sol = ALGOX(X,Y).solve()
    # select out correct pents for final answer
    final = [(all_pents[chosen[0]][chosen[1]], chosen[2]) for chosen in sol]
    # print final answer (stamps placements onto the local board copy only)
    [add_pentomino(board, pent, coord) for (pent, coord) in final]
    print (board)
    return final
| [
"jordan@Jordans-MacBook-Pro.local"
] | jordan@Jordans-MacBook-Pro.local |
14332c584973d8b33b49bd86857d732747b2129e | f8fed54194051791a13aab2316b719adde186d06 | /net/3d_pose_model.py | dd00d56d0b0ab569b59c17d94aed69dc8b9cb85b | [] | no_license | BulletforFreedom/pytorch_cv | cd80908db7b7cb55cb9ae63cbcb8c20f94a1df8b | 285fb72c5add2e98bb0c87ff9a5e7eaf5cf4804c | refs/heads/master | 2020-04-13T03:56:05.088056 | 2019-02-27T06:34:02 | 2019-02-27T06:34:02 | 162,946,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,998 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 16 10:59:53 2018
@author: lvsikai
Email: lyusikai@gmail.com
"""
import torch
import torch.nn as nn
class LinearBlock(nn.Module):
    """Residual block: two Linear -> BatchNorm -> ReLU -> Dropout stages,
    with the input added back onto the result (identity skip connection)."""

    def __init__(self, linearsize, dropout):
        super(LinearBlock, self).__init__()
        # Submodule creation order is preserved so weight initialization
        # draws from the RNG in the same sequence as before.
        self.relu = nn.ReLU(inplace=True)
        self.dropout = nn.Dropout(dropout)
        self.linear1 = nn.Linear(linearsize, linearsize)
        self.bn1 = nn.BatchNorm1d(linearsize)
        self.linear2 = nn.Linear(linearsize, linearsize)
        self.bn2 = nn.BatchNorm1d(linearsize)

    def forward(self, x):
        residual = x
        out = self.dropout(self.relu(self.bn1(self.linear1(x))))
        out = self.dropout(self.relu(self.bn2(self.linear2(out))))
        return residual + out
class LinearModel(nn.Module):
    """Stack of residual LinearBlocks between an input and an output Linear
    layer, sized entirely from the parsed network config object."""

    def __init__(self, config):
        super(LinearModel, self).__init__()
        self.cfg = config
        self.net_strucure = self.cfg.get_blocks()
        # Hyper-parameters arrive as strings from the config file.
        self.linear_size = int(self.net_strucure['linearsize'])
        self.num_linear_blocks = int(self.net_strucure['num_linear_blocks'])
        self.drop_out = float(self.net_strucure['drop_out'])
        self.linear1 = nn.Linear(self.cfg.get_final_inp_dim(), self.linear_size)
        self.batch_norm = nn.BatchNorm1d(self.linear_size)
        self.relu = nn.ReLU(inplace=True)
        self.dropout = nn.Dropout(self.drop_out)
        # Same construction order as before, so RNG-based init is unchanged.
        self.linearblocks = nn.ModuleList(
            [LinearBlock(self.linear_size, self.drop_out)
             for _ in range(self.num_linear_blocks)]
        )
        self.linear2 = nn.Linear(self.linear_size, self.cfg.get_out_dim())

    def forward(self, input):
        hidden = self.dropout(self.relu(self.batch_norm(self.linear1(input))))
        for block in self.linearblocks:
            hidden = block(hidden)
        return self.linear2(hidden)
if __name__ == '__main__':
    # Manual smoke test: build the model from config and run one forward pass.
    from src.configer import Configer
    cfg = Configer('3d_pose_baseline.cfg')
    inp = torch.rand(2, 32)
    model = LinearModel(cfg)
    model = model.cuda()
    # load ckpt
    # NOTE(review): restoring a checkpoint only when is_train() is True looks
    # inverted (eval usually loads weights too) — confirm the intent.
    if cfg.is_train():
        ckpt = torch.load(cfg.get_ckpt())
        start_epoch = ckpt['epoch']
        err_best = ckpt['err']
        glob_step = ckpt['step']
        lr_now = ckpt['lr']
        model.load_state_dict(ckpt['state_dict'])
        print(">>> ckpt loaded (epoch: {} | err: {})".format(start_epoch, err_best))
    else:
        # Fresh weights: Kaiming-normal init for every Linear layer.
        def weight_init(m):
            if isinstance(m, nn.Linear):
                nn.init.kaiming_normal_(m.weight)
        model.apply(weight_init)
    model.eval()
    # Variable is a no-op wrapper on PyTorch >= 0.4; kept for compatibility.
    from torch.autograd import Variable
    inputs = Variable(inp.cuda())
    outputs = model(inputs)
    print(outputs)
| [
"lyusikai@gmail.com"
] | lyusikai@gmail.com |
2a0b3a93e513b7f33f12ced12c7f3a932ee7c77e | 7111511ef0cca1bcf84a76d49419fad504d78f6e | /test331scaping_DictWriter.py | a15348ff338735c3fd8aa09bcf4f71bffd95733e | [] | no_license | blockchain99/pythonlecture | 7800033cd62251b0eec8cf3b93f253175d9cb2e8 | 198e1b6d68db72e4a5009f988c503958ad7ab444 | refs/heads/master | 2020-12-12T14:21:53.626918 | 2020-01-15T19:02:07 | 2020-01-15T19:02:07 | 234,148,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,469 | py | import requests
from bs4 import BeautifulSoup
from csv import writer, DictWriter
# Fetch the blog index page and save the raw HTML for inspection.
response = requests.get("https://www.rithmschool.com/blog")
# print(response.text)
with open('test331out.text', 'w') as file:
    file.write(response.text)
print("==============================================================")
#go to above url -> open developer tool in chrome.
# Parse the fetched HTML and collect every <article> element (one per post).
soup = BeautifulSoup(response.text, "html.parser")
articles = soup.find_all("article")
# print(articles)
print("-------------------csv DicWriter---------------------")
# Write one CSV row per article: title, link, publication date.
# NOTE(review): csv files should normally be opened with newline='' on
# Windows to avoid blank rows — confirm target platform.
with open("test331blogDict.csv", "w") as file:
    headers = ["title", "link", "date"]
    csv_dictwriter = DictWriter(file, fieldnames=headers)
    csv_dictwriter.writeheader()
    for article in articles:
        # get_text: access the inner text in an element("a")
        # print(article.find("a").get_text()) #anchor tag -> convert to text
        a_tag = article.find("a")
        title = a_tag.get_text()  # anchor tag -> inner text
        url = a_tag['href']
        # print(article.find("time")) #<time datetime="2019-10-22" pubdate=""></time>
        time = article.find("time")
        date = time['datetime']
        # print(date) #2019-09-03
        # print(title, url, date)
        # csv_writer.writerow(title, url, date) #TypeError: writerow() takes exactly one argument (3 given)
        csv_dictwriter.writerow({
            "title" : title,
            "link" : url,
            "date" : date
        })
| [
"shinebytheriver@yahoo.com"
] | shinebytheriver@yahoo.com |
238624f18e184ded3178a8511c294178faf325d0 | 6ee26ef4cc5224c9410208c4399698faffbac10b | /dominions/constants_tables.py | 5859f42f367d7b98b19c4db6a94e164735729507 | [
"Apache-2.0"
] | permissive | socialloser1/dominions-tools | 60bd872f7794f0df0d9d7e6376f33b188440f1db | c04fae655db8ca2240fc6f6ea3ac65a4c8a48eb7 | refs/heads/master | 2021-05-07T09:38:54.068262 | 2017-06-13T19:47:11 | 2017-06-13T19:47:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,649 | py | ###############################################################################
# dominions #
#-----------------------------------------------------------------------------#
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
###############################################################################
""" Tables of constants. """
__docformat__ = "reStructuredText"
from sqlalchemy import (
Column as _SQLA_Column,
Integer as _SQLA_Integer,
String as _SQLA_String,
)
from sqlalchemy.ext.declarative import (
declarative_base as _SQLA_declarative_base,
)
from dominions.DataTable import (
DataTable_CSV as _DataTable_CSV,
DataTableRow_NamedInteger as _DataTableRow_NamedInteger,
DataTable_NamedInteger as _DataTable_NamedInteger,
DataTableRow_NamedBits as _DataTableRow_NamedBits,
DataTable_NamedBits as _DataTable_NamedBits,
)
class AttributeKey( _DataTableRow_NamedInteger ):
    """ An attribute key. """
    __tablename__ = "attribute_keys"
class AttributeKeys_DataTable( _DataTable_NamedInteger, _DataTable_CSV ):
    """ A table of attribute keys. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Attribute Keys"
    _LABEL = "Attribute Keys"
    _FILE_NAME_BASE = "attribute-keys"
    _ROW_CLASS = AttributeKey
class Sound( _DataTableRow_NamedInteger ):
    """ A sound effect. """
    __tablename__ = "sounds"
    # The braces appear to list the mod commands this id is used with.
    _TITLE = "Sound {Spl: #sound, Wpn: #sound}"
class Sounds_DataTable( _DataTable_NamedInteger, _DataTable_CSV ):
    """ A table of sound effects. """
    _TITLE = "Sounds {Spl: #sound, Wpn: #sound}"
    _LABEL = "Sounds"
    _FILE_NAME_BASE = "sounds"
    _ROW_CLASS = Sound
class MonsterTag( _DataTableRow_NamedInteger ):
    """ A monster tag. """
    __tablename__ = "monster_tags"
    _TITLE = "Monster Group {Spl: #damage, Wpn: #dmg}"
class MonsterTags_DataTable( _DataTable_NamedInteger, _DataTable_CSV ):
    """ A table of monster tags. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Monster Tags {Spl: #damage, Wpn: #dmg}"
    _LABEL = "Monster Tags"
    _FILE_NAME_BASE = "monster-tags"
    _ROW_CLASS = MonsterTag
class MagicSchool( _DataTableRow_NamedInteger ):
    """ A magic school. """
    __tablename__ = "magic_schools"
    _TITLE = "School of Magic {Spl: #school}"
class MagicSchools_DataTable( _DataTable_NamedInteger, _DataTable_CSV ):
    """ A table of magic schools. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Schools of Magic {Spl: #school}"
    _LABEL = "Schools of Magic"
    _FILE_NAME_BASE = "magic-schools"
    _ROW_CLASS = MagicSchool
class MagicPath( _DataTableRow_NamedInteger ):
    """ A magic path. """
    __tablename__ = "magic_paths"
class MagicPaths_DataTable( _DataTable_NamedInteger, _DataTable_CSV ):
    """ A table of magic paths. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Magic Paths {Spl: #path, #damage; Wpn: #dmg}"
    _LABEL = "Magic Paths"
    _FILE_NAME_BASE = "magic-paths"
    _ROW_CLASS = MagicPath
class AnonymousProvinceEvent( _DataTableRow_NamedInteger ):
    """ An anonymous province event. """
    __tablename__ = "anon_province_events"
class AnonymousProvinceEvents_DataTable(
    _DataTable_NamedInteger, _DataTable_CSV
):
    """ A table of anonymous province events. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Anonymous Province Events {Spl: #damage, Wpn: #dmg}"
    _LABEL = "Anonymous Province Events"
    _FILE_NAME_BASE = "anon-province-events"
    _ROW_CLASS = AnonymousProvinceEvent
class SpecialUniqueSummon( _DataTableRow_NamedInteger ):
    """ A special unique summon. """
    __tablename__ = "special_unique_summons"
class SpecialUniqueSummons_DataTable(
    _DataTable_NamedInteger, _DataTable_CSV
):
    """ A table of special unique summons. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Special Unique Summons {Spl: #damage, Wpn: #dmg}"
    _LABEL = "Special Unique Summons"
    _FILE_NAME_BASE = "special-unique-summons"
    _ROW_CLASS = SpecialUniqueSummon
class TerrainSpecificSummon( _DataTableRow_NamedInteger ):
    """ A terrain-specific summon. """
    __tablename__ = "terrain_specific_summons"
class TerrainSpecificSummons_DataTable(
    _DataTable_NamedInteger, _DataTable_CSV
):
    """ A table of terrain-specific summons. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Terrain-Specific Summons {Spl: #damage, Wpn: #dmg}"
    _LABEL = "Terrain-Specific Summons"
    _FILE_NAME_BASE = "terrain-specific-summons"
    _ROW_CLASS = TerrainSpecificSummon
class OtherPlane( _DataTableRow_NamedInteger ):
    """ Another plane. """
    __tablename__ = "other_planes"
class OtherPlanes_DataTable(
    _DataTable_NamedInteger, _DataTable_CSV
):
    """ A table of other planes. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Other Planes {#damage, Wpn: #dmg}"
    _LABEL = "Other Planes"
    _FILE_NAME_BASE = "other-planes"
    _ROW_CLASS = OtherPlane
class MapTerrainType( _DataTableRow_NamedBits ):
    """ A map terrain type. """
    __tablename__ = "map_terrain_types"
class MapTerrainTypes_DataTable( _DataTable_NamedBits, _DataTable_CSV ):
    """ A bitmask table of map terrain types. """
    # Display title, short label, CSV file stem, and row type for this table.
    _TITLE = "Map Terrain Types"
    _LABEL = "Map Terrain Types"
    _FILE_NAME_BASE = "map-terrain-types"
    _ROW_CLASS = MapTerrainType
###############################################################################
# vim: set ft=python ts=4 sts=4 sw=4 et tw=79: #
| [
"the.eric.mcdonald@gmail.com"
] | the.eric.mcdonald@gmail.com |
d5beca5ed8e459e345d112583d4a63556cbbeb03 | 7dd25d238cf6fe9a9f88e8f602b661bf263c2af4 | /Nests/wf_nests.py | 6088808846dafe9630859077b5fca03a94446928 | [] | no_license | IzumovaD/WF-software-support | dc74e96a90b6c8c709858abbed447be7f540e855 | cfd4853def40f77ddfa9b09c15c3ddaca101ab81 | refs/heads/master | 2023-04-26T15:00:24.703979 | 2021-05-25T15:47:55 | 2021-05-25T15:47:55 | 356,993,050 | 0 | 0 | null | 2021-05-12T21:19:00 | 2021-04-11T22:24:42 | Python | UTF-8 | Python | false | false | 7,019 | py | import re
from collections import OrderedDict
from root_allomorphs import root_allomorphs as allomorphs
class Nest:
    """Word-formation nest: an ordered tree of derivationally related words,
    keyed by (word, depth), rooted at a single vertex word."""

    def __init__(self, vert):
        vert = vert.replace("\n", "")
        # Tree storage: (word, depth) -> list of child words, insertion-ordered.
        self.nest = OrderedDict([((vert, 0), [])])
        self.vertex = vert
        root = self.form_root()
        # Group of allomorphic roots this nest accepts.
        self.roots = self.form_roots(root)
    # Method for extracting the root morpheme of a word.
    def form_root(self):
        """Return the lower-cased root of ``self.vertex`` (marked with '+')."""
        pattern = re.compile(r"\+\w+")
        res = pattern.search(self.vertex)
        # Drop the leading '+' marker.
        root = res.group(0)[1:]
        root = root.lower()
        return root
    # Method for building the group of allomorphic roots.
    def form_roots(self, root):
        """Return the set of allomorphs of ``root``, or {root} if it has none."""
        res = set()
        # Check whether the root has allomorphs.
        for group in allomorphs:
            if root in group:
                res.update(group)
                return res
        # The root has no allomorphs; the group consists of the root alone.
        res.add(root)
        return res
#метод добавления слова в дерево
def add_word(self, word, tabs):
word = word.replace(" ", "")
word = word.replace("\n", "")
temp = [x for x in self.nest.keys() if x[1] == tabs - 3]
parent = temp[-1]
self.nest[parent].append(word)
self.nest[(word, tabs)] = []
#метод поиска слова в дереве
def find_word(self, word):
for key in self.nest:
if self.modify_word(key[0]) == word:
return key[0]
return False
#метод поиска корня в дереве
def find_root(self, root):
root = self.modify_word(root)
for elem in self.roots:
if elem == root:
return True
return False
def __iter__(self):
self.iterator = iter(self.nest)
return self
def __next__(self):
try:
key = next(self.iterator)
except StopIteration:
raise StopIteration
else:
return (key[0], key[1], self.nest[key])
#метод извлечения поддерева по заданной вершине
def restore_subtree(self, word, nest):
word = word.lower()
iterator = iter(nest)
key, tabs, value = next(iterator)
while self.modify_word(key) != word:
key, tabs, value = next(iterator)
subtree = Nest(key)
vertex_tab = tabs
key, tabs, value = next(iterator)
while tabs != vertex_tab:
subtree.nest[(key, tabs-vertex_tab)] = self.nest[(key, tabs)]
key, tabs, value = next(iterator)
return subtree
def __str__(self):
res = ""
tab = " "
for key in self.nest:
for i in range(0, key[1]):
res += tab
res += key[0] + "\n"
return res
def modify_word(self, word):
word = word.replace("+", "")
word = word.replace("-", "")
word = word.replace("*", "")
return word.lower()
#метод перевода цепочки в строку
def chain_to_str(self, chain, word):
string = ""
if word not in chain:
string += word + "\n"
else:
string += word + " --> " + self.chain_to_str(chain, chain[word])
return string
#метод формирования и печати цепочки по конечному слову
def restore_chain(self, word, chain, nest):
if word == self.vertex:
print(self.chain_to_str(chain, word))
return
for key, tabs, value in nest:
if word in value:
chain[key] = word
self.restore_chain(key, chain, nest)
class Nests:
    """Container holding every word-derivation tree parsed from the input."""
    def __init__(self, data):
        self.nests = []
        self.collect_nests(data)
    # Split the raw lines into trees: a line with no spaces starts a new tree.
    def collect_nests(self, data):
        current = Nest(data[0])
        for raw_line in data[1:]:
            depth = raw_line.count(" ")
            if depth:
                current.add_word(raw_line, depth)
            else:
                self.nests.append(current)
                current = Nest(raw_line)
        self.nests.append(current)
    # Return the first tree containing `word`; raise if none does.
    def find_word_nest(self, word):
        needle = word.lower()
        for candidate in self.nests:
            if candidate.find_word(needle) is not False:
                return candidate
        raise Exception("Такого слова нет ни в одном дереве.")
    # Return the first tree whose root group contains `root`; raise if none does.
    def find_root_nest(self, root):
        needle = root.lower()
        for candidate in self.nests:
            if candidate.find_root(needle):
                return candidate
        raise Exception("Такого корня нет ни в одном дереве.")
def user_interface(all_nests):
    """Interactive console menu over the parsed word-derivation trees."""
    while True:
        print("Выберите действие:")
        print("1 - Найти дерево по слову")
        print("2 - Найти дерево по корню")
        print("3 - Восстановление поддерева по начальному слову")
        print("4 - Восстановление цепочки по конечному слову")
        print("5 - Выход")
        choice = int(input())
        if choice == 5:
            break
        elif choice == 1:
            print("Введите слово:")
            word = input()
            try:
                tree = all_nests.find_word_nest(word)
            except Exception as err:
                print(err)
            else:
                print(tree)
        elif choice == 2:
            print("Введите корень:")
            root = input()
            try:
                tree = all_nests.find_root_nest(root)
            except Exception as err:
                print(err)
            else:
                print(tree)
        elif choice == 3:
            print("Введите слово:")
            word = input()
            try:
                tree = all_nests.find_word_nest(word)
            except Exception as err:
                print(err)
            else:
                # Print the extracted subtree rooted at the given word.
                print(tree.restore_subtree(word, tree))
        elif choice == 4:
            print("Введите слово:")
            word = input()
            try:
                tree = all_nests.find_word_nest(word)
            except Exception as err:
                print(err)
            else:
                # Resolve the stored (marked-up) form, then print the chain.
                tree.restore_chain(tree.find_word(word.lower()), {}, tree)
| [
"tannenwald@inbox.ru"
] | tannenwald@inbox.ru |
10667b823be167c063f7d2da9cc7727e91fa1da8 | 7860d9fba242d9bdcb7c06c32ee4064e4a7fa2f1 | /litex_boards/platforms/qmtech_ep4ce15_starter_kit.py | 3af64cc4ac1a000aa96a6cd0a7954f16b29cc2f6 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | litex-hub/litex-boards | ef1f200fd6d34c96621f4efa094ede874f4c34ab | b92c96b3a445fde31037f593a40fe621f85cb58c | refs/heads/master | 2023-09-03T15:09:11.198560 | 2023-08-30T15:22:11 | 2023-08-30T15:22:11 | 191,191,221 | 291 | 283 | BSD-2-Clause | 2023-09-03T20:32:58 | 2019-06-10T15:09:10 | Python | UTF-8 | Python | false | false | 5,087 | py | #
# This file is part of LiteX-Boards.
#
# Copyright (c) 2022 Franck Jullien <franck.jullien@gmail.com>
# SPDX-License-Identifier: BSD-2-Clause
from litex.build.generic_platform import *
from litex.build.altera import AlteraPlatform
from litex.build.altera.programmer import USBBlaster
# IOs ----------------------------------------------------------------------------------------------
# Pin map of the QMTech EP4CE15 starter kit (Cyclone IV, EP4CE15F23C8).
_io = [
    # Clk
    ("clk50", 0, Pins("T2"), IOStandard("3.3-V LVTTL")),

    # LED
    ("led", 0, Pins("E3"), IOStandard("3.3-V LVTTL")),

    # Button
    ("key", 0, Pins("J4"), IOStandard("3.3-V LVTTL")),

    ("serial", 0,
        Subsignal("tx", Pins("Y22"), IOStandard("3.3-V LVTTL")),
        Subsignal("rx", Pins("Y21"), IOStandard("3.3-V LVTTL"))
    ),

    # 7-segments display
    ("seven_seg_ctl", 0,
        Subsignal("dig", Pins("Y13 W13 V13")),
        Subsignal("segments", Pins("V15 U20 W20 Y17 W15 W17 U19")),
        Subsignal("dot", Pins("W19")),
        IOStandard("3.3-V LVTTL")
    ),

    # VGA
    ("vga", 0,
        Subsignal("hsync_n", Pins("AA13")),
        Subsignal("vsync_n", Pins("AB10")),
        Subsignal("r", Pins("AB19 AA19 AB20 AA20 AA21")),
        Subsignal("g", Pins("AB16 AA16 AB17 AA17 AA18 AB18")),
        Subsignal("b", Pins("AA14 AB13 AA15 AB14 AB15")),
        IOStandard("3.3-V LVTTL")
    ),

    # SPIFlash (W25Q64)
    # NOTE(review): cs_n and miso are both assigned pin "E2" below, and the
    # stray "# clk" comment suggests a copy slip — verify against the board
    # schematic before relying on this flash definition.
    ("spiflash", 0,
        # clk
        Subsignal("cs_n", Pins("E2")),
        Subsignal("clk",  Pins("K2")),
        Subsignal("mosi", Pins("D1")),
        Subsignal("miso", Pins("E2")),
        IOStandard("3.3-V LVTTL"),
    ),

    # SDR SDRAM
    ("sdram_clock", 0, Pins("Y6"), IOStandard("3.3-V LVTTL")),
    ("sdram", 0,
        Subsignal("a", Pins(
            "V2 V1 U2 U1 V3 V4 Y2 AA1",
            "Y3 V5 W1 Y4 V6")),
        Subsignal("ba",    Pins("Y1 W2")),
        Subsignal("cs_n",  Pins("AA3")),
        Subsignal("cke",   Pins("W6")),
        Subsignal("ras_n", Pins("AB3")),
        Subsignal("cas_n", Pins("AA4")),
        Subsignal("we_n",  Pins("AB4")),
        Subsignal("dq", Pins(
            "AA10 AB9 AA9 AB8 AA8 AB7 AA7 AB5",
            "Y7 W8 Y8 V9 V10 Y10 W10 V11")),
        Subsignal("dm", Pins("AA5 W7")),
        IOStandard("3.3-V LVTTL")
    ),

    # GMII Ethernet
    ("eth_clocks", 0,
        Subsignal("tx",  Pins("R22")),
        Subsignal("gtx", Pins("L21")),
        Subsignal("rx",  Pins("F21")),
        IOStandard("3.3-V LVTTL")
    ),
    ("eth", 0,
        Subsignal("rst_n",   Pins("N22")),
        Subsignal("mdio",    Pins("W21")),
        Subsignal("mdc",     Pins("W22")),
        Subsignal("rx_dv",   Pins("D22")),
        Subsignal("rx_er",   Pins("K22")),
        Subsignal("rx_data", Pins("D21 E22 E21 F22 H22 H21 J22 J21")),
        Subsignal("tx_en",   Pins("M22")),
        Subsignal("tx_er",   Pins("V21")),
        Subsignal("tx_data", Pins("M21 N21 P22 P21 R21 U22 U21 V22")),
        Subsignal("col",     Pins("K21")),
        Subsignal("crs",     Pins("L22")),
        IOStandard("3.3-V LVTTL")
    ),
]
_connectors = [
("J11", {
1: "R1", 7: "R2",
2: "P1", 8: "P2",
3: "N1", 9: "N2",
4: "M1", 10: "M2",
5: "-" , 11: "-",
6: "-" , 12: "-",
}),
("J10", {
1: "J1", 7: "J2",
2: "H1", 8: "H2",
3: "F1", 9: "F2",
4: "E1", 10: "D2",
5: "-" , 11: "-",
6: "-" , 12: "-",
}),
("JP1", {
1: "-", 2: "-",
3: "A8", 4: "B8",
5: "A7", 6: "B7",
7: "A6", 8: "B6",
9: "A5", 10: "B5",
11: "A4", 12: "B4",
13: "A3", 14: "B3",
15: "B1", 16: "B2",
17: "C1", 18: "C2",
}),
("J12", {
1: "-", 2: "-",
3: "C22", 4: "C21",
5: "B22", 6: "B21",
7: "H20", 8: "H19",
9: "F20", 10: "F19",
11: "C20", 12: "D20",
13: "C19", 14: "D19",
15: "C17", 16: "D17",
17: "A20", 18: "B20",
19: "A19", 20: "B19",
21: "A18", 22: "B18",
23: "A17", 24: "B17",
25: "A16", 26: "B16",
27: "A15", 28: "B15",
29: "A14", 30: "B14",
31: "A13", 32: "B13",
33: "A10", 34: "B10",
35: "A9", 36: "B9",
37: "-", 38: "-",
39: "-", 40: "-",
})
]
# Platform -----------------------------------------------------------------------------------------
class Platform(AlteraPlatform):
    """LiteX platform for the QMTech EP4CE15 starter kit (50 MHz system clock)."""
    default_clk_name   = "clk50"
    default_clk_period = 1e9/50e6  # 20 ns

    def __init__(self, toolchain="quartus"):
        AlteraPlatform.__init__(self, "EP4CE15F23C8", _io, _connectors, toolchain=toolchain)
        # Free the nCEO pin for use as a regular I/O after configuration.
        self.add_platform_command("set_global_assignment -name CYCLONEII_RESERVE_NCEO_AFTER_CONFIGURATION \"USE AS REGULAR IO\"")

    def create_programmer(self):
        # The board is programmed through a USB Blaster JTAG adapter.
        return USBBlaster()

    def do_finalize(self, fragment):
        AlteraPlatform.do_finalize(self, fragment)
        # Constrain the 50 MHz input clock for timing analysis.
        self.add_period_constraint(self.lookup_request("clk50", loose=True), 1e9/50e6)
| [
"franck.jullien@collshade.fr"
] | franck.jullien@collshade.fr |
0b7b57afcf61532938b60198598d11f741b12287 | 4e35423872fe989ccb1f876486b451326d89ba9a | /gustav/export/mixd.py | fcd6cafdaf310fe78156f94ce48be2bce90802e4 | [] | no_license | j042/gustav-alpha | 54efba1c60f2aed674285580c28f5ae0d4d500ec | f738dcd66fb70df376b8a954a75a9024155a5c7a | refs/heads/main | 2023-07-21T01:25:08.725352 | 2021-08-31T09:26:10 | 2021-08-31T09:26:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,045 | py | import logging
import struct
import os
import numpy as np
from .utils import *
def mixd(fname, mesh, space_time):
    """
    Write mixd file with given information.

    Currently supports triangle, quadrilateral, tetrahedron, and hexahedron
    semi-discrete mesh outputs (plus xns space-time, which writes the
    vertex block twice).

    Parameters
    -----------
    fname: str
      Output name; the extension picks the flavour: ".campiga" writes
      .coords/.connectivity/.boundary/.info, ".xns" writes mxyz/mien/
      mrng/minf (a "_" basename drops the name prefix entirely).
    mesh: `Mesh`
      Must expose `vertices` and `faces` (and `elements` for 3D meshes),
      plus `bc_names_` / `bc_global_indices_`.
    space_time: bool
      Only honoured for ".xns": duplicates the vertex block.

    Returns
    --------
    None
    """
    ####################
    ### Start Output ###
    ####################

    # Check input
    # First, mesh.  (The original tested `hasattr(mesh, ("vertices" and
    # "faces"))`, which short-circuits to checking only "faces".)
    if not (hasattr(mesh, "vertices") and hasattr(mesh, "faces")):
        raise TypeError("Your mesh object does not have `vertices` and `faces` "+
            "attributes!")

    # Second, BC_names and BC_global_indices must pair up one-to-one.
    if len(mesh.bc_names_) != len(mesh.bc_global_indices_):
        raise ValueError(
            'length of bc_names_ and bc_global_indices_ do not match!'
        )

    # Gather some data
    # -----------------
    dim = mesh.vertices.shape[1]

    # big endian struct formats used for all binary output
    big_endian_int = ">i"
    big_endian_double = ">d"

    # Split ext
    base, ext = os.path.splitext(fname)

    # open files
    if ext == ".campiga":
        vertices_file = open(base + ".coords", "wb")
        connectivity_file = open(base + ".connectivity", "wb")
        boundary_file = open(base + ".boundary", "wb")
        info_file = open(base + ".info", "w")

    elif ext == ".xns":
        # Special case if fname was "_.xns", output mxyz, mien, mrng, minf.
        if os.path.basename(base) == "_":
            logging.debug("Export - Congratulation!")
            logging.debug("Export - You've found a special export name.")
            logging.debug("Export - `_.xns` will be transformed into:")
            logging.debug("Export - mxyz, mien, mrng, minf.")
            prepend = "/" if os.path.isabs(base) else ""
            base = prepend + os.path.join(*base.split("/")[:-1]) + "/"
        else:
            base += "."

        vertices_file = open(base + "mxyz", "wb")
        connectivity_file = open(base + "mien", "wb")
        boundary_file = open(base + "mrng", "wb")
        info_file = open(base + "minf", "w")

    else:
        # Previously an unknown extension fell through and crashed below
        # with a NameError; fail fast with a clear message instead.
        raise ValueError("Unsupported extension: " + ext)

    # Write vertices
    for v in mesh.vertices.flatten():
        vertices_file.write(
            struct.pack(big_endian_double, v)
        )

    # For xns, spacetime meshes just have vertices twice.
    if space_time and ext == ".xns":
        for v in mesh.vertices.flatten():
            vertices_file.write(
                struct.pack(big_endian_double, v)
            )

    vertices_file.close()

    # Write connectivity
    #   2D: faces
    #   3D: elements
    quad = False
    if dim == 2:
        connectivity = mesh.faces
        boundary_width = 3
        mesh_type = "triangle"
        if connectivity.shape[1] == 4:
            quad = True
            boundary_width = 4
            mesh_type = "quadrilateral"
    elif dim == 3:
        connectivity = mesh.elements
        boundary_width = 4
        mesh_type = "tetrahedron"
        if connectivity.shape[1] == 8:
            quad = True
            boundary_width = 6
            mesh_type = "hexahedron"

    # Connectivity index begins with 1.
    for c in (connectivity.flatten() + 1):
        connectivity_file.write(
            struct.pack(big_endian_int, c)
        )
    connectivity_file.close()

    # Write boundary. Boundary index begins with 1.
    # Non-Boundary entries are all -1. This could be (-1 * neighbor_elem_ind),
    # but it isn't.
    boundaries = np.ones((connectivity.shape[0], boundary_width)) * - 1
    for i, bgi in enumerate(mesh.bc_global_indices_):
        (global_element_ind,
         local_subelement_ind) = bc_global_and_local(bgi, dim, quad=quad)
        boundaries[global_element_ind, local_subelement_ind] = i + 1

    for b in boundaries.flatten():
        boundary_file.write(
            struct.pack(big_endian_int, int(b))
        )
    boundary_file.close()

    # Conclude Info file
    # Start with general info
    info_file.write("# dim: "+ str(dim) + "\n")
    info_file.write("# mesh type: " + mesh_type + "\n\n")

    # Crucial info
    # Supports semi-descrete and xns space-time.
    st_factor = 2 if ext == ".xns" and space_time else 1
    info_file.write("nn "+ str(int(mesh.vertices.shape[0] * st_factor)) + "\n")
    info_file.write("ne "+ str(connectivity.shape[0]) + "\n")
    info_file.write("nsd "+ str(dim) + "\n")
    info_file.write("nen "+ str(int(connectivity.shape[1] * st_factor)) + "\n")
    if space_time and ext == ".xns":
        info_file.write("space-time on" + "\n\n\n")
    else:
        info_file.write("semi-discrete on" + "\n\n\n")

    # BC guide
    info_file.write("# Info: BCs should be referenced by the numbers stated "+\
        "in `< >`." + "\n")

    # BC info
    for i, bc in enumerate(mesh.bc_names_):
        info_file.write(
            "# Name of boundary <" + str(i + 1) + "> : " + bc + "\n"
        )

    # Signature
    info_file.write("\n\n\n" + "# MIXD Generated using `gustav`." + "\n")
    info_file.close()
| [
"jaewooklee042@gmail.com"
] | jaewooklee042@gmail.com |
deae57d8466c67c0588f984d9edd9a77a8bac904 | ed38a50a81aeb206e7c735971bb874eb481e2e82 | /2A_2/python/funkcje03.py | a39d5694b51d6b1c312abac8c1751b4b94a38b3a | [] | no_license | lo1cgsan/rok202021 | e70db45494d891f179c08ddf3ef1ac55de25e76f | 07af7ea54d61d03f851de675744bada9083ecdca | refs/heads/main | 2023-06-01T03:38:20.534102 | 2021-06-21T12:21:42 | 2021-06-21T12:21:42 | 347,921,728 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 520 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# funkcje02.py
#
def zwieksz(a):
    # Integers are immutable: rebinding the parameter inside the callee
    # never affects the caller's variable, so this can only *show* a + 2.
    print(a + 2)
def zwieksz2(b):
    # Lists are mutable: updating an element is visible to the caller.
    b[0] = b[0] + 2
    print(b)
def main(args):
    # Demonstrates argument passing: an int argument is effectively copied,
    # while a one-element list lets the callee mutate the caller's value.
    value = int(input("Podaj liczbę: "))  # local variable
    print(value)
    zwieksz(value)
    print(value)  # unchanged: zwieksz() only rebound its own local
    box = [1]  # one-element list
    box[0] = int(input("Podaj liczbę: "))
    zwieksz2(box)
    print(box)  # changed in place by zwieksz2()
    return 0
# Script entry point: the process exit status comes from main().
if __name__ == '__main__':
    import sys
    sys.exit(main(sys.argv))
| [
"lo1cgsan@gmail.com"
] | lo1cgsan@gmail.com |
093dd5adf5896e90311dd8632e63e6d69af42f39 | 7d7a08723a2aee4368cf94e6bbca008798432a3b | /1.5 Логические операции и операции сравнения/OperLogic&Comparison.py | 6f7cc0912f87d391c4bc1f287ee13d3a2dcce59e | [] | no_license | DmitryMedovschikov/Programming_on_Python.Bioinformatics_Institute | 3412766050916c6ed6d9395b706406169f3de67c | 4e3a80de71f8c4ef39413d34bbfef3cde19ab4dd | refs/heads/master | 2023-04-22T12:54:53.276004 | 2021-05-09T16:03:51 | 2021-05-09T16:03:51 | 360,144,629 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,100 | py | # Расставьте скобки в выражении: a and b or not a and not b
# Place the parentheses according to Python's operator precedence
# (not > and > or).
# Answer: ((a and b) or ((not a) and (not b)))

# Run this code in a Python 3 interpreter:
# x = 5
# y = 10
# y > x * x or y >= 2 * x and x < y
# Work out why the interpreter prints this particular result.
# Remember: arithmetic binds tighter than comparisons, which bind
# tighter than the logical operators.
x = 5
y = 10
print(y > x * x or y >= 2 * x and x < y)
# x*x=25, 2*x=10, 10>25=False, 10>=10=True, 5<10=True, True and True = True,
# False or True = True

# Evaluate the expression for the given values of a and b:
# a = True
# b = False
# a and b or not a and not b
a = True
b = False
print(a and b or not a and not b)
| [
"medovschikovds@gmail.com"
] | medovschikovds@gmail.com |
5e2cf673953c7e75da73e268ab8efdf6d8b2dc7a | d9639fe9687f71d9fa3eda95762e37768a60bc09 | /cproject/comments/urls.py | fe8ec102f8d2dd42d8e4fa7e20bf3b464d744137 | [] | no_license | AntonMash/comment_tree | bde747c726498453d8a30ceca1505c6e10349456 | 43f7e3b449f8d3e5a2194c2499dfa241ed4a0a33 | refs/heads/master | 2023-02-27T20:31:35.186754 | 2021-02-09T11:41:42 | 2021-02-09T11:41:42 | 337,386,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 306 | py | from django.urls import path
from .views import base_view,create_comment,create_child_comment
# URL routes for the comments app; the named routes are resolved via
# reverse()/{% url %} in templates.
urlpatterns =[
    path('post-comments/', base_view),
    path('create-comment/', create_comment, name='comment_create'),
    path('create-child-comment/', create_child_comment, name = 'comment_child_create'),
] | [
"giltias@inbox.ru"
] | giltias@inbox.ru |
f5a704200982ad9f39535af72df5277942e1c98c | 7925473a803f87f925cca98b56d3b04f5c00ef35 | /linear_regression_land.py | e937df90ed9f70abffc3a177521585ee454052b7 | [] | no_license | RakeshGourani/ZeroHunger | 3722b0307745e970aa3dcd3b7fc5feaa415bdc89 | 7d3f7c0b389d77619c4cc88276dc828d038e3b10 | refs/heads/master | 2023-09-01T21:38:29.445424 | 2019-09-28T06:53:53 | 2019-09-28T06:53:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,366 | py | import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
import seaborn as sns
import pickle
import os
from scipy import stats
scores = []
land_data = pd.read_csv("Land_data.csv")
def save_model(country_name, model):
    """Pickle *model* to ./land_data/<country_name>/model.sav, creating the
    directories as needed.

    The original wrapped two os.mkdir calls in bare `except: pass` blocks,
    which silently swallowed *every* error (including permission problems),
    and leaked the open file handle; os.makedirs(exist_ok=True) ignores only
    the "already exists" case, and the `with` block closes the file.
    """
    file_path = os.path.join(os.getcwd(), "land_data", country_name, "model.sav")
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, 'wb') as fh:
        pickle.dump(model, fh)
def get_country_rows(country_name):
    """Return the rows of the global land_data frame for one country."""
    mask = land_data['Area'] == country_name
    return land_data[mask]
data = pd.read_csv('Land_data.csv')
def predict(model, X_test, y_test):
    """Print predicted vs. actual values side by side (returns None).

    The trailing `pass` in the original was dead code, and the
    range(len(...)) indexing is replaced by the idiomatic zip().
    """
    y_pred = model.predict(X_test)
    for predicted, actual in zip(y_pred, y_test):
        print(predicted, actual)
def train(country_name, start_row, end_row):
    # Fits a LinearRegression on rows [start_row:end_row) of the global
    # `data` frame and persists the model via save_model().
    # NOTE(review): side effects — appends the test R^2 to the global
    # `scores` list and adds points to the current matplotlib figure.
    # `data` is a second read of the same Land_data.csv as `land_data`.
    global scores
    X = data[['Area Code', 'Year']].values[start_row:end_row]
    y = data[['Value']].values[start_row:end_row]
    plt.scatter(data['Year'][start_row:end_row], y)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    model = LinearRegression()
    model.fit(X_train, y_train)
    score = model.score(X_test, y_test)
    scores.append(score)
    save_model(country_name, model)
def get_input():
    """Prompt for "<country name> <year>" and return (name, year as int).

    Uses rsplit with maxsplit=1 so multi-word country names
    ("United States 1990") stay intact; the original split(' ') unpacking
    failed on them.  Single-word names behave exactly as before.
    """
    country_name, year = input('Enter Country Name and Year').strip().rsplit(' ', 1)
    return country_name, int(year)
def get_country_code(data):
    """Return the 'Area Code' of the first row of *data*.

    Fixed to read the *data* argument: the original ignored it and used the
    global `country_rows`, which only worked because the call site happened
    to pass that same frame.
    """
    return data.iloc[0]['Area Code']
def get_row_numbers(data):
    """Return (first_index + 1, last_index + 2) for the rows of *data*.

    Fixed to read the *data* argument: the original ignored it and used the
    global `country_rows`, which only worked because the call site happened
    to pass that same frame.
    """
    start_row = data.index[0] + 1
    end_row = data.index[-1] + 2
    return start_row, end_row
def get_all_country_names():
    """Return the unique country names present in the global land_data frame."""
    return land_data["Area"].unique()
# Train and persist one model per country, then report an average score.
for country_name in get_all_country_names():
    country_rows = get_country_rows(country_name)
    start_row, end_row = get_row_numbers(country_rows)
    country_code = get_country_code(country_rows)
    train(country_name, start_row, end_row)
# NOTE(review): the numerator drops the two *smallest* scores
# (sorted(scores)[2:]) but the denominator slices before sorting
# (scores[2:]) — presumably both were meant to be sorted(scores)[2:];
# confirm before trusting this average.
print(sum((sorted(scores)[2:]))/len(sorted(scores[2:])))
"sampathanurag3@gmail.com"
] | sampathanurag3@gmail.com |
6ccfe46e32542e484141f4e7a516562743c5f87c | 869f3cc7c4b5c0c0b138c76cf457ebc5f7a35525 | /tests/v1/test_db_config.py | cb9f7cf6475a018ad2409c8fafa2453c000c347c | [
"MIT"
] | permissive | Curti-s/ireporter-api | dc3c1dea0373243c0035e8d5200c0a33bc106ab5 | 57b225508f58fd33c848bc480a6dd5b7a5ea5790 | refs/heads/develop | 2022-12-10T11:24:10.653374 | 2018-12-08T15:42:01 | 2018-12-08T15:42:01 | 158,517,000 | 2 | 0 | null | 2022-12-08T01:27:49 | 2018-11-21T08:41:03 | Python | UTF-8 | Python | false | false | 1,101 | py | import unittest
from app import create_app
class TestDevelopmentConfig(unittest.TestCase):
    """Test if configuration is development"""
    def test_app_is_development(self):
        # Development mode: DEBUG on, pointing at the local dev database.
        app = create_app('development')
        self.assertTrue(app.config['DEBUG'] is True)
        self.assertTrue(
            app.config['DATABASE_URL'] == 'postgresql:///ireporter_api' )
class TestTestingConfig(unittest.TestCase):
    """Test if configuration is testing"""
    def test_app_is_testing(self):
        # Testing mode: TESTING and DEBUG on, separate test database.
        app = create_app('testing')
        self.assertTrue(app.config['TESTING'] is True)
        self.assertTrue(app.config['DEBUG'] is True)
        self.assertTrue(
            app.config['DATABASE_URL'] == 'postgresql:///ireporter_api_test')
# NOTE(review): the docstring says "development" — looks copy-pasted;
# this class exercises the *production* configuration.
class TestProductionConfig(unittest.TestCase):
    """Test if configuration is development"""
    def test_app_is_production(self):
        # Production mode: both TESTING and DEBUG must be off.
        app = create_app('production')
        self.assertTrue(app.config['TESTING'] is False)
        self.assertTrue(app.config['DEBUG'] is False)
# Allow running this module directly (python test_db_config.py).
if __name__ == '__main__':
    unittest.main()
| [
"matthewscurtis81@gmail.com"
] | matthewscurtis81@gmail.com |
79916b4df806f4901817a88123389f923ea0c09c | cc99bef17c1ad843b6514b79254a8ab10f1fd32e | /app.py | 7e53afc69b6f34d8ea7b6b819ed985de773b0a13 | [] | no_license | TiagoAltstadt/FileSorter | 6190a808748896204e0b50335852c4c700034f51 | 141b022a6c3587ba79d75c8b88346f4ac3bd9564 | refs/heads/master | 2023-07-14T14:57:07.276455 | 2021-08-24T18:51:58 | 2021-08-24T18:51:58 | 399,572,737 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,019 | py | # Import Module
import os
from datetime import datetime
# Folder Path
path = './data'
# Change the directory
# NOTE(review): chdir at import time affects the whole process and raises
# FileNotFoundError when ./data does not exist next to the CWD — confirm
# that is acceptable for how this script is launched.
os.chdir(path)
def changeFilesName():
    """Rename every entry of the current directory to its modification
    timestamp ('YYYY-MM-DD HH-MM-SS' plus the original extension).

    NOTE(review): two entries sharing the same mtime second map to the same
    target name and os.rename will overwrite on POSIX — confirm acceptable.

    The original enumerated os.listdir() but never used the counter; the
    loop now iterates the names directly.
    """
    for entry in os.listdir():
        mtime = os.path.getmtime(entry)
        stamp = datetime.fromtimestamp(mtime).strftime('%Y-%m-%d %H-%M-%S')
        extension = os.path.splitext(entry)[1]
        os.rename(entry, stamp + extension)
def detectDirectories():
    # Walks the current directory: reports directories, and renames every
    # plain file to its modification timestamp (same scheme as
    # changeFilesName).
    # NOTE(review): despite the name this function *renames* files as a
    # side effect — presumably intentional, since main() calls it with
    # changeFilesName() commented out, but confirm before "cleaning up".
    for count, filename in enumerate(os.listdir()):
        if os.path.isdir(filename):
            print(filename + ' is a directory!')
        else:
            print(filename + ' is a file')
            aux = os.path.getmtime(filename)
            hola = datetime.fromtimestamp(aux).strftime('%Y-%m-%d %H-%M-%S')
            extension = os.path.splitext(filename)[1]
            src = filename
            dst = hola + extension
            os.rename(src, dst)
def main():
    # Report directories and timestamp-rename plain files under ./data.
    detectDirectories()
    # changeFilesName()
# NOTE(review): main() runs at import time — there is no __main__ guard.
main()
| [
"tiagoaltstadt@gmail.com"
] | tiagoaltstadt@gmail.com |
621c24156116cefc947d5504a6bd59729de62186 | 5d74051293a4740c597abb016870a56a58cecf5b | /modules/persons/application/controllers/v1/delete_user_api.py | 76a6e1ba2a6489b962b624f13f7d278ed3e95ec2 | [
"BSD-3-Clause"
] | permissive | eduardolujan/hexagonal_architecture_django | 98e707148745f5a36f166c0584cfba21cca473f0 | 8055927cb460bc40f3a2651c01a9d1da696177e8 | refs/heads/develop | 2023-02-21T22:46:20.614779 | 2021-01-16T02:48:37 | 2021-01-16T02:48:37 | 305,813,872 | 5 | 2 | BSD-3-Clause | 2021-01-16T18:00:26 | 2020-10-20T19:32:46 | Python | UTF-8 | Python | false | false | 2,043 | py | # -*- coding: utf-8 -*-
from modules.shared.infrastructure.log import LoggerDecorator, PyLoggerService
from modules.shared.domain.http import status as http_status
from modules.shared.domain.requests import Request
from modules.shared.domain.responses import Response
from modules.shared.domain.serializers import SerializerManager
from modules.users.domain.repository import UserRepository
from modules.users.application.delete import UserDeleter as DeleteUserService
@LoggerDecorator(logger=PyLoggerService(file_path=__file__))
class DeleteUserApi:
    """
    Delete User API controller: validates the request, runs the
    UserDeleter application service and shapes the HTTP response.
    All collaborators are injected through the constructor.
    """

    def __init__(self,
                 request: Request,
                 response: Response,
                 request_serializer_manager: SerializerManager,
                 user_repository: UserRepository):
        # Http objects
        self.__request = request
        self.__response = response
        self.__request_serializer_manager = request_serializer_manager
        # Delete user
        self.__user_repository = user_repository

    def __call__(self, id: int) -> None:
        """
        Delete user by id
        @param id: user id
        @type id: int
        @return: HTTP response — 200 with a success payload, or 400 with
                 the error message (plus `errors` when the exception has an
                 `errors` attribute).
        """
        try:
            delete_user_data = dict(id=id)
            # Validate/convert the raw dict into the service's DTO.
            delete_user_dto = self.__request_serializer_manager.get_dto_from_dict(delete_user_data)
            delete_user = DeleteUserService(self.__user_repository)
            delete_user(**delete_user_dto)
            response_data = dict(
                success=True,
                message='All ok',
            )
            return self.__response(response_data, status=http_status.HTTP_200_OK)
        except Exception as err:
            # NOTE(review): `self.log` is presumably attached by
            # LoggerDecorator — verify that contract before refactoring.
            self.log.exception(f"Error in {__class__}::post, err:{err}")
            response_data = dict(
                success=False,
                message=f"{err}"
            )
            if hasattr(err, 'errors'):
                response_data.update(errors=err.errors)
            return self.__response(response_data, status=http_status.HTTP_400_BAD_REQUEST)
| [
"eduardo.lujan.p@gmail.com"
] | eduardo.lujan.p@gmail.com |
cec6074f119ad36ec8c85cd99c263eaff3b26f7f | 5be5c151664b5b83c0fda1c30771251ee2eb5ead | /Test/com/ruidge/liaoxuefeng/oo/object.py | 4229160298952164bd88be7f4d29326cb2ddfab2 | [] | no_license | ruidge/TestPython | 6aef46728a9f785568418c07db1eee5ce63903f6 | 00abaa85e55ea347c51bb70751b8d57dd5864876 | refs/heads/master | 2021-10-07T10:32:34.941971 | 2021-09-29T07:26:46 | 2021-09-29T07:26:46 | 21,946,708 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | # coding=utf-8
'''
Created on 2015年1月30日
@author: zhangrui6
'''
# Since Python 2.2, inheriting from `object` makes a class a "new-style"
# class; a class without that base remains a legacy "classic" class.
class Foo(object):
    pass
class Foo1:
    pass
if __name__ == "__main__":
print type(Foo), type(Foo1)
print isinstance(Foo, object)
print isinstance(Foo1, object)
print dir(Foo)
print dir(Foo1)
| [
"ruidge@gmail.com"
] | ruidge@gmail.com |
d733825c2e17c40b8f21ad9a095658449cf1a3ab | ea983836372ca1dfcefd4761292ea7123c9a9790 | /profiles/migrations/0027_auto_20201124_1435.py | 0234ac208e5fb52319061630670d3469bb1ad664 | [] | no_license | Code-Institute-Submissions/SweetHome | ca0cb12fa7893a877398ead8104efa6ac901cb55 | 0aaf7b42c111355e1d71f8f893ce84177e509cf3 | refs/heads/master | 2023-01-16T00:40:53.019417 | 2020-11-30T15:54:27 | 2020-11-30T15:54:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | # Generated by Django 3.1.2 on 2020-11-24 14:35
from django.db import migrations, models
class Migration(migrations.Migration):
    # Alters userprofile.gender in place.
    # NOTE(review): the field is an IntegerField but the choices are string
    # pairs ('Male', 'Male') — Django's choice validation will reject those
    # values for an integer column; confirm the intended field type.

    dependencies = [
        ('profiles', '0026_auto_20201124_0814'),
    ]

    operations = [
        migrations.AlterField(
            model_name='userprofile',
            name='gender',
            field=models.IntegerField(blank=True, choices=[('Male', 'Male'), ('Female', 'Female'), ('Other', 'Other')], null=True),
        ),
    ]
| [
"francesvugts@hotmail.com"
] | francesvugts@hotmail.com |
f5c4f35fd2c565576bde29aca9db25fcd3e650fd | 87288a9896a7223881522a7a1c9fd0bc94a58dd4 | /picfall.py | 8bf7f3fa08e97dafbcbde6935ab8af0ff26255ee | [] | no_license | danseagrave/picfall | acdfd2d8de6283d84247a3aa3c655ae74cc258a5 | 49b088083523252d3ad156fbe68e71354bc7ba40 | refs/heads/master | 2020-06-02T05:08:05.015479 | 2015-06-27T16:00:04 | 2015-06-27T16:00:04 | 38,165,466 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,436 | py | import os
import re
import urllib
from urlparse import urlunparse
from google.appengine.ext import webapp
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from google.appengine.api import urlfetch
from django.utils import simplejson
from twitter import twitter
from httphelper import HTTPHelper
from picServices import PicServiceCollection
# Module-level singletons shared by the classes below.
# NOTE(review): `twitter = twitter()` rebinds the imported class name to an
# instance, so the twitter *class* is no longer reachable under its name.
twitter = twitter()
httphelper = HTTPHelper()
picservices = PicServiceCollection()
# One picture referenced from a tweet (Python 2 / Google App Engine code).
class Pic:
    # Class-level defaults; instance attributes shadow them in __init__.
    url = ''
    id = ''
    pictype = ''
    thumburl = ''
    fullurl = ''
    picservice = None
    def __init__(self, url):
        #setup url
        self.url = url
        #get service
        picservice = picservices.getServiceForURL(self.url)
        #get data via service
        # NOTE(review): sets `picType` while the class default is `pictype`
        # (different capitalisation), so readers of `pictype` always see '';
        # the resolved service is also kept in a local, never on self.
        self.picType = picservice.name
        self.id = picservice.getPicID(self.url)
        self.thumburl = picservice.thumbnailURL(self.url)
        self.fullurl = picservice.fullURL(self.url)
# One tweet from a Twitter search result, with its picture links extracted.
class Tweet:
    # Class-level defaults; instances assign their own values in __init__.
    id = ''
    from_user = ''
    user_id = ''
    created_at = ''
    text = ''
    pics = []
    def __init__(self, jsonResult):
        # jsonResult is one entry of the search API's "results" array.
        self.id = jsonResult['id']
        self.from_user = jsonResult['from_user'].encode('utf-8')
        self.user_id = jsonResult['from_user_id']
        self.created_at = jsonResult['created_at'].encode('utf-8')
        self.text = jsonResult['text'].encode('utf-8')
        self.pics = self.extractPicData(self.text)
    def extractPicData(self, text):
        #get all urls
        allurls = httphelper.extractURLs(text)
        #keep only picture-service urls, wrapped as Pic objects
        picdata = [
            Pic(url)
            for url in allurls
            if picservices.isPicURL(url)
        ]
        #done
        return picdata
# App Engine datastore entity persisting one trend snapshot.
class Trend(db.Model):
    name = db.StringProperty()
    position = db.IntegerProperty()
    twitterTime = db.StringProperty()
    dateAdded = db.DateTimeProperty(auto_now_add=True)
# Lightweight in-memory trend; save() copies it into a Trend entity.
class SimpleTrend():
    name = ''
    position = 0
    twitterTime = ''
    def save(self):
        #copy the fields into a fresh datastore entity
        dbTrend = Trend()
        dbTrend.name = self.name
        dbTrend.position = self.position
        dbTrend.twitterTime = self.twitterTime
        #save
        dbTrend.put()
# Ordered collection of SimpleTrend objects plus the Twitter timestamp.
class TrendCollection():
    twitterTime = None
    trends = None
    def __init__(self):
        self.trends = []
    def append(self, trend):
        self.trends.append(trend)
    def saveAll(self):
        # NOTE(review): unimplemented stub — the body is a placeholder and
        # nothing is persisted.
        #save all trends in the trend collection...
        x = 1
# NOTE(review): dead/unfinished code.  __init__ assigns a local instead of
# self.requestedat, references undefined names (now, flitter, query) and
# returns a value from __init__ (a TypeError at runtime).  Left untouched
# because the intended behaviour cannot be inferred from here.
class TrendPicsData:
    requestedat = None
    trends = None
    def __init__(self,):
        #set request time
        requestedat = now()
        #get tweets
        url = flitter.constructPicSearch(query)
        results = twitter.performSearch(url)
        resultsData = twitter.resultsFromJSON(results)
        tweets = flitter.parseResults(resultsData['results'])
        return tweets
# JSON encoder that serialises a trend as {"name": ..., "position": ...}.
class TrendEncoder(simplejson.JSONEncoder):
    def default(self, trend):
        # Convert objects to a dictionary of their representation
        d = { 'name': trend.name,
             'position': trend.position,
        }
        #d.update(trend.__dict__)
        return d
# Facade over the twitter/picservices helpers: builds picture searches,
# parses results into Tweet objects and trend collections.
class Picfall:
    # Build a search URL restricted to known picture hosts; sinceid may be ''.
    def constructPicSearch(self, query, sinceid, rpp):
        orParam = "&ors=" + '%20'.join(picservices.basePicURLs)
        sinceParam = ''
        if len(sinceid) > 0:
            sinceParam = "&since_id=" + sinceid
        return twitter.constructSearchURL(query, rpp) + orParam + sinceParam
    # Wrap each raw JSON result in a Tweet object.
    def parseResults(self, results):
        tweets = []
        for jsonTweet in results:
            tweets.append(
                #{
                #    'from_user': tweet['from_user'],
                #    'text': tweet['text'],
                #    'time': tweet['created_at'],
                #    'pics': self.extractPicData(tweet['text']),
                #}
                Tweet(jsonTweet)
                )
        return tweets
    # Convert the trends JSON into a TrendCollection (Python 2: keys()[0]).
    def parseTrendsData(self, trendsData, saveToStore=False):
        #setup default return val
        trends = TrendCollection()
        #get the date key of the trends
        timeKey = trendsData['trends'].keys()[0]
        #set the date on the result
        trends.twitterTime = timeKey
        trends.lendata = len(trendsData['trends'][timeKey])
        #collect all the trends
        position = 1
        for trendData in trendsData['trends'][timeKey]:
            #create new trend
            trend = SimpleTrend()
            # - add a position number
            trend.position = position;
            # - correctly encode the name
            trend.name = trendData['name'].encode('utf-8');
            # - set the twittertime
            trend.twitterTime = timeKey
            #add the trend to the result set
            trends.append(trend)
            #inc the position counter
            position = position + 1;
            # NOTE(review): SimpleTrend has save(), not put() — this branch
            # raises AttributeError when saveToStore is True; confirm intent.
            if saveToStore:
                trend.put();
        return trends
    # Attach an HTML element id ('trend-<position>') to each trend.
    def addHTMLIDsToTrends(self, trends):
        for trend in trends.trends:
            trend.id = 'trend-' + str(trend.position)
        return trends
    def getPicService(self, url):
        return picservices.getServiceForURL(url)
    # NOTE(review): constructPicSearch takes (query, sinceid, rpp) — this
    # one-argument call raises TypeError at runtime; unfinished code path.
    def getPicTweets(self, query):
        url = self.constructPicSearch(query)
        results = twitter.performSearch(url)
        resultsData = twitter.resultsFromJSON(results)
        tweets = self.parseResults(resultsData['results'])
        return tweets
    # Expects trends to be an iterable of dicts with 'name'/'pics' keys.
    def AddTweetsToTrends(self, trends):
        for trend in trends:
            trend['pics'] = self.getPicTweets(trend['name'])
        return trends
| [
"dan@dipsy2"
] | dan@dipsy2 |
b7c682b6962704e09f1e2495f4bd5b4a6334957c | ffe193727325099bea51bed9c5be44cf0cdbb20d | /k-way_merge.py | 6fff8f7b0f2a90d3f2091053bd75755f970e6e19 | [] | no_license | ivanezeigbo/statistics | 46e58a0d3564e235d085958ce24eaffb017e6bdd | 9780955300b3fa32af0646bae27b4d72536d9478 | refs/heads/master | 2021-01-19T05:10:32.147709 | 2018-04-20T10:55:03 | 2018-04-20T10:55:03 | 65,373,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,324 | py | #import pdb; pdb.set_trace()
from time import clock
import random; import math
random.seed(501)
import matplotlib.pyplot as plt
def insertion(list):
    """In-place insertion sort; returns the same (now sorted) list."""
    for end in range(1, len(list)):
        pos = end  # walk the newly considered element left to its slot
        while pos and list[pos - 1] > list[pos]:
            list[pos - 1], list[pos] = list[pos], list[pos - 1]
            pos -= 1
    return list
def merging(first_half, second_half):
    """Merge two sorted lists into a new sorted list (ties favour first_half)."""
    merged = []
    i = j = 0
    while i < len(first_half) and j < len(second_half):
        if second_half[j] < first_half[i]:
            merged.append(second_half[j])
            j += 1
        else:
            merged.append(first_half[i])
            i += 1
    # At most one of these still has elements left.
    merged.extend(first_half[i:])
    merged.extend(second_half[j:])
    return merged
def merge(divide):
    """Merge a list of sorted sub-lists into one sorted list.

    Left-folds ``merging`` over ``divide``.  Improvements over the
    original: the redundant ``ind``/``ind2`` index bookkeeping is gone,
    and a single-element ``divide`` is returned as-is instead of raising
    IndexError on ``divide[1]``.
    """
    sorted_list = divide[0]
    for part in divide[1:]:
        sorted_list = merging(sorted_list, part)
    return sorted_list
def mergesort(list):
    """k-way merge sort: cut the list into ~k equal slices, sort each
    recursively, then merge the sorted slices.

    NOTE(review): reads the module-level global ``k`` (set by the
    benchmark loop at the bottom of this file) and shadows the builtin
    ``list`` — both worth cleaning up.
    """
    if len(list) < k: #if list less than k, algorithm performs insertion/shell sort
        if len(list) == 1:
            return list
        if len(list) == 0: #in case there are no elements in list
            print("No element in list")
            return list
        else:
            return insertion(list)
    cutoff = int(round(len(list) / k)) #integar call here also allows for times when you have an odd list
    copy = cutoff
    divide = []
    # First slice, then the remaining slices of size `cutoff` in turn.
    divide.append(mergesort(list[:cutoff]))
    indx = cutoff * 2
    while indx <= len(list):
        divide.append(mergesort(list[copy : indx]))
        copy, indx = indx, indx + cutoff
    # Any leftover tail shorter than `cutoff` becomes one more slice.
    if copy != len(list):
        divide.append(mergesort(list[copy:]))
    return merge(divide)
x = []  # x coordinate: the tested values of k
y = []  # y coordinate: mean running time for each k
A = random.sample(range(-2000, 2001), 1000)  # fixed input (seeded above)
# NOTE(review): time.clock() was removed in Python 3.8; use
# time.perf_counter() if this script must run on modern Python.
for k in range(2, 31):  # `k` is read as a global by mergesort()
    run_time = 0
    for i in range(1000):
        start = clock()
        mergesort(A)
        end = clock()
        run_time += end - start
    run_time = run_time/1000  # average over the 1000 runs
    x.append(k)
    y.append(run_time)
plt.plot(x, y, 'r')
plt.ylabel("Running time for k")
plt.xlabel("Values of k")
plt.title("Graph for running time for k")
plt.show()
#end = clock()
#print("\nRunning time is:", end - start)
| [
"noreply@github.com"
] | noreply@github.com |
8da27b16ac1aedc55dd46d8dc3adb9357b99bcd0 | bb84fde639d202c4c1e6970074dc45fa5144c426 | /DZ06/6task_01.py | c9624019d48f912b13f6328c70e74417e2a0fcd0 | [] | no_license | 5kyliner/WebAcademyProject | 7cdd44f80df8936ff73df1944e097dac92fe5c51 | 0182508d64abdac0b5373b86b58f1c040459dc26 | refs/heads/master | 2020-03-19T02:10:12.524695 | 2018-07-08T20:36:31 | 2018-07-08T20:36:31 | 135,605,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 745 | py | # Task01
"""
1. Найти номер и значение первого положительного элемента списка.
"""
def find_first_positive(a):
    """Return ``(index, value)`` of the first element > 0 in *a*.

    Returns ``None`` (implicitly) when no positive element exists,
    printing a message in that case.

    Bug fixed: the original printed the "no positives" message every
    time it met a *zero* element mid-scan, and stayed silent for an
    all-negative list; the check now happens once, after the scan.
    """
    for i, value in enumerate(a):
        if value > 0:
            return (i, value)
    print('Положительных элементов нет')
if __name__ == '__main__':
    # Smoke tests (plain asserts; run the module directly to execute them).
    assert find_first_positive([-1, 2, 5]) == (1, 2)
    assert find_first_positive([1, 2, 5]) == (0, 1)
    assert find_first_positive([-1, -2, 5]) == (2, 5)
    assert find_first_positive([-1, -2, -5]) is None
"kipperbot@gmail.com"
] | kipperbot@gmail.com |
d5b778e30438fb5003e9ab8f5be37d0e342c02cc | 380712a4d3436b5997cebdaf2d6bdd5227ffef99 | /06_using_classess.py | d142dc0ceb8bdaa3387069624f66825710ea5553 | [] | no_license | rifqirosyidi/tkinter-basic | 76b80095a0063a5e184fa12a1fb9193f3ea91fb6 | 75d9ae83af4b555335b95ac177bdd361529550ed | refs/heads/master | 2020-08-30T08:52:43.162243 | 2019-11-01T13:32:50 | 2019-11-01T13:32:50 | 218,325,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 531 | py | from tkinter import *
class SomeClass:
    """Small Tk demo: a frame holding a Print button and a Close button."""

    def __init__(self, master):
        # Container frame packed into the parent window.
        self.frame = Frame(master)
        self.frame.pack()
        # Left button wired to the print_message callback below.
        self.print_button = Button(self.frame, text="Print Me", command=self.print_message)
        self.print_button.pack(side=LEFT)
        # Right button ends the Tk mainloop via frame.quit.
        self.close_button = Button(self.frame, text="Close Me", command=self.frame.quit)
        self.close_button.pack(side=LEFT)

    def print_message(self):
        """Callback for the Print button: write a message to stdout."""
        print("Print Me, Hi You Click Print Me")

# Build the root window, attach the widgets, and enter the event loop.
root = Tk()
b = SomeClass(root)
root.mainloop()
"rief.rosyidi@gmail.com"
] | rief.rosyidi@gmail.com |
e51540b0300e36e9583ef8b4b9f38144c1a81ec8 | 7e4fd3a8ef590bf5175031d7dadd52a060c36aaa | /logging_learn.py | 9b22765abf463cbc55e865c24106b9b4259335e6 | [] | no_license | rickyhwung/python3 | fdbc3e7021c0214b59e69603827a2482ef0811c6 | bd127419c8716737f52a9f1fd0a149107b2cf48b | refs/heads/master | 2021-06-20T11:08:04.981280 | 2019-09-02T00:23:42 | 2019-09-02T00:23:42 | 190,026,308 | 0 | 0 | null | 2021-06-10T23:35:05 | 2019-06-03T14:53:47 | JavaScript | UTF-8 | Python | false | false | 1,614 | py | import logging
import logging.handlers
import time

# Formatter presets: entries 1-5 are identical; entry 6 additionally
# records the source path and line number of the logging call.
format_dict = {
    1 : logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
    2 : logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
    3 : logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
    4 : logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
    5 : logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
    6 : logging.Formatter('%(asctime)s - %(name)s - %(levelname)s -%(pathname)s - %(lineno)d - %(message)s'),
}

# Create a logger
logger = logging.getLogger('mylogger')
logger.setLevel(logging.DEBUG)

myapp = logging.getLogger('myapp')
myapp.setLevel(logging.DEBUG)

# Create a handler that writes log records to a file
fh = logging.FileHandler('test.log')
fh.setLevel(logging.DEBUG)

# Add a TimedRotatingFileHandler:
# rotate the log file every second (when='S', interval=1)
# and keep 3 old log files.
fh_date = logging.handlers.TimedRotatingFileHandler("log/myapp.log", when='S', interval=1, backupCount=3)
fh_date.setLevel(logging.DEBUG)

# Create a second handler that writes to the console
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)

# Give every handler the same (verbose) output format
formatter = format_dict[6]
fh.setFormatter(formatter)
fh_date.setFormatter(formatter)
ch.setFormatter(formatter)

# Attach the handlers to the loggers.
# NOTE(review): 'mylogger' gets fh, ch AND the rotating handler, while
# 'myapp' only gets the rotating handler — its records never reach the
# console or test.log.
logger.addHandler(fh)
logger.addHandler(ch)
logger.addHandler(fh_date)
myapp.addHandler(fh_date)
# logger.addHandler(fh_date)

# Emit a record every 0.1 s forever; stop with Ctrl-C.
while True:
    time.sleep(0.1)
    myapp.info("test")
    logger.info('foorbar')
"rickyhwung@163.com"
] | rickyhwung@163.com |
c87b53e73526500ff0f09df41d71d7f7fd37e251 | 936032b889517d13f58af2e9bc60700b9f371dac | /Borsellino.Langton.py | e51019b7126c99e263225bd08820236bd9a605b9 | [] | no_license | mborsel1/LangtonAnt | 5587b2fbb37a077525d956c502d8565db9c98976 | 2a4e78c46807bcc45da2e2e3a4348b4b38d3dccf | refs/heads/master | 2021-04-26T23:39:42.883183 | 2018-03-04T22:17:18 | 2018-03-04T22:17:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,797 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 14 10:58:05 2018
@author: Michael Borsellino
"""
import matplotlib
matplotlib.use('TkAgg')
import pylab as PL
import scipy as SP
width = 100
height = 100
def init():
    """Reset the simulation: step counter, ant position/heading, grid."""
    global time, x1, y1, config, state, lastx, lasty
    time = 0
    # (lastx, lasty) is the ant's heading as a unit step; start moving right.
    lastx, lasty = 1, 0
    # The ant starts at the centre of the grid (Python 2 int division).
    x1, y1 = width/2, height/2
    config = SP.zeros([height, width])
    # All cells start white (0).  NOTE(review): the array is allocated as
    # [height, width] but indexed config[x, y] throughout — this only
    # works because width == height (both 100).  The loop below is also
    # redundant, since SP.zeros already fills the grid with 0.
    for x in xrange(width):
        for y in xrange(height):
            state = 0
            config[x, y] = state
def draw():
    """Render the grid (white=0, black=1) with the step count as title."""
    PL.cla()
    # Binary colormap: 0 -> white, 1 -> black.
    PL.pcolor(config, vmin = 0, vmax = 1, cmap = PL.cm.binary)
    PL.axis('image')
    PL.title('t = ' + str(time))
def step():
    """Advance the ant one step (Langton's ant rules).

    On a white cell: flip it to black and turn one way; on a black cell:
    flip it to white and turn the other way; then move one cell forward
    in the new heading (lastx, lasty).
    """
    global time, x1, y1, state, config, lastx, lasty
    time += 1
    state = config[x1, y1]
    if state == 0: #if white
        state = 1 #change to black
        # Rotate the heading counter-clockwise: up->left, down->right,
        # right->up, left->down.
        if lastx == 0: #if did not move horizontal
            if lasty == 1: #and moved up
                lastx, lasty = -1, 0 #move left
            else: #and moved down
                lastx, lasty = 1, 0 #move right
        elif lastx == 1: #if moved right
            lastx, lasty = 0, 1 #move up
        else: #if moved left
            lastx, lasty = 0, -1 #move down
    else: #if black
        state = 0 #change to white
        # Rotate the heading the opposite way (clockwise).
        if lastx == 0: #if did not move horization
            if lasty == 1: #and moved up
                lastx, lasty = 1, 0 #move right
            else: #and moved down
                lastx, lasty = -1, 0 #move left
        elif lastx == 1: #if moved right
            lastx, lasty = 0, -1 #move down
        else: #if moved left
            lastx, lasty = 0, 1 #move up
    # Write back the flipped cell, then step forward.
    config[x1, y1] = state
    x1, y1 = x1 + lastx, y1 + lasty
import pycxsimulator
pycxsimulator.GUI().start(func=[init,draw,step]) | [
"noreply@github.com"
] | noreply@github.com |
8c702e90c34470a77861f3a53678d7024f517190 | 8453bad2f70c942c72888d0f089b31717916b62e | /Tries/multiStringSearch.py | 0b76aa66acc65d3cd924c92bf4c98f54e7661e0d | [] | no_license | sagnikghoshcr7/DSA-AlgoExpert-Practice_Solutions | 2dc59fc809c3e356d48ae2e0103d3e308111905d | fe05e162f6244aed4b9471612e53eb2e1c3d9356 | refs/heads/master | 2022-12-27T12:50:26.117284 | 2020-10-13T05:51:22 | 2020-10-13T05:51:22 | 282,289,863 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,679 | py | """
Multi String Search
Write a function that takes in a big string and an array of small strings, all of which are smaller
in length than the big string. The function should return an array of booleans, where each boolean
represents whether the small string at that index in the array of small strings is contained in the big string.
Note that you can't use language-built-in string-matching methods.
Sample Input: "this is a big string", ["this", "yo", "is", "a", "bigger", "string", "kappa"]
Sample Output: [true, false, true, true, false, true, false]
"""
# SOLUTION 1
# O(bns) time | O(n) space
def multiStringSearch1(bigString, smallStrings):
    """Solution 1 (brute force): test each small string independently.

    O(bns) time | O(n) space.
    """
    results = []
    for candidate in smallStrings:
        results.append(isInBigString1(bigString, candidate))
    return results
def isInBigString1(bigString, smallString):
    """Return True if *smallString* occurs in *bigString* at any offset."""
    last_start = len(bigString) - len(smallString)
    for start in range(len(bigString)):
        # No room left for a full match beyond this offset.
        if start > last_start:
            break
        if isInBigStringHelper1(bigString, smallString, start):
            return True
    return False
def isInBigStringHelper1(bigString, smallString, startIdx):
    """Compare *smallString* against bigString[startIdx:] from both ends
    toward the middle; True iff every character matches."""
    lo = 0
    hi = len(smallString) - 1
    while lo <= hi:
        if (smallString[lo] != bigString[startIdx + lo]
                or smallString[hi] != bigString[startIdx + hi]):
            return False
        lo += 1
        hi -= 1
    return True
# SOLUTION 2
# O(b^2 + ns) time | O(b^2 + n) space
def multiStringSearch2(bigString, smallStrings):
    """Solution 2: build a suffix trie of bigString, then look up each word.

    O(b^2 + ns) time | O(b^2 + n) space.
    """
    suffixTrie = ModifiedSuffixTrie(bigString)
    results = []
    for word in smallStrings:
        results.append(suffixTrie.contains(word))
    return results
class ModifiedSuffixTrie:
    """Trie containing every suffix of the input string (no end symbols),
    so ``contains`` answers substring queries."""

    def __init__(self, string):
        self.root = {}
        self.populateModifiedSuffixTrieFrom(string)

    def populateModifiedSuffixTrieFrom(self, string):
        """Insert each suffix string[i:] into the trie."""
        for start in range(len(string)):
            self.insertSubstringStartingAt(start, string)

    def insertSubstringStartingAt(self, i, string):
        """Walk/extend the trie along the characters of string[i:]."""
        node = self.root
        for ch in string[i:]:
            node = node.setdefault(ch, {})

    def contains(self, string):
        """True iff *string* is a substring of the original input."""
        node = self.root
        for ch in string:
            if ch not in node:
                return False
            node = node[ch]
        return True
# SOLUTION 3
# O(ns + bs) time | O(ns) space
def multiStringSearch3(bigString, smallStrings):
    """Solution 3: trie of the small strings, scanned from every offset
    of bigString.

    O(ns + bs) time | O(ns) space.
    """
    smallTrie = Trie()
    for word in smallStrings:
        smallTrie.insert(word)
    found = {}
    for start in range(len(bigString)):
        findSmallStringsIn3(bigString, start, smallTrie, found)
    return [word in found for word in smallStrings]
def findSmallStringsIn3(string, startIdx, trie, containedStrings):
    """Walk *trie* along string[startIdx:], recording (in
    *containedStrings*) every complete word passed on the way down."""
    node = trie.root
    for idx in range(startIdx, len(string)):
        ch = string[idx]
        if ch not in node:
            return
        node = node[ch]
        # A stored end symbol means a whole small string ends here.
        if trie.endSymbol in node:
            containedStrings[node[trie.endSymbol]] = True
class Trie:
    """Character trie; the end symbol '*' maps to the stored word itself."""

    def __init__(self):
        self.root = {}
        self.endSymbol = "*"

    def insert(self, string):
        """Add *string* to the trie, marking its end with the end symbol."""
        node = self.root
        for ch in string:
            node = node.setdefault(ch, {})
        node[self.endSymbol] = string
| [
"noreply@github.com"
] | noreply@github.com |
51fbd3042c1ab812d5c5f8d4532d7226469186bd | d0d845cc5c77ec62cb5f5268527efadc5ff68e12 | /tests/linsys_test.py | 01b8acbc61b5dff2c53bf5ee4ce03f50f6297486 | [
"MIT"
] | permissive | madhavajay/ud953 | 2134a267ccf15ff95d717b9d76633bfd83ea5e40 | 6c101ae15adefa98ad4950275b52ef03419a0f40 | refs/heads/master | 2021-01-21T04:44:51.628018 | 2016-06-18T08:58:20 | 2016-06-18T08:58:20 | 50,235,584 | 2 | 6 | null | 2016-06-18T09:02:22 | 2016-01-23T11:29:14 | Python | UTF-8 | Python | false | false | 6,192 | py | # -*- coding: utf-8 -*-
# Author: github.com/madhavajay
"""This is a test for the Linear System Class"""
from decimal import Decimal, getcontext
from vector import Vector
from line import Line
from plane import Plane
from linsys import LinearSystem
# set the decimal precision
getcontext().prec = 30
def test_linsys_basepoint():
    """A Line's basepoint is found by zeroing all but the first nonzero
    coordinate: x + 2y = 2 has basepoint (2, 0)."""
    # NOTE(review): the planes/system below are never used by the
    # assertion; only `system[0] = plane_1` exercises __setitem__.
    plane_1 = Plane(Vector([1, 1, 1]), 1)
    plane_2 = Plane(Vector([0, 1, 0]), 2)
    plane_3 = Plane(Vector([1, 1, -1]), 3)
    plane_4 = Plane(Vector([1, 0, -2]), 2)
    system = LinearSystem([plane_1, plane_2, plane_3, plane_4])
    system[0] = plane_1
    vector1 = Vector([1, 2])
    constant = 2
    answer = Vector([2, 0])
    line = Line(vector1, constant)
    basepoint = line.basepoint
    assert basepoint == answer
def test_linsys_swap_row():
    """swap_rows(i, j) must exchange the two indexed planes in place,
    leaving all other rows untouched (including when i > j)."""
    plane_1 = Plane(Vector([1, 1, 1]), 1)
    plane_2 = Plane(Vector([0, 1, 0]), 2)
    plane_3 = Plane(Vector([1, 1, -1]), 3)
    plane_4 = Plane(Vector([1, 0, -2]), 2)
    lin_sys = LinearSystem([plane_1, plane_2, plane_3, plane_4])
    lin_sys.swap_rows(0, 1)
    assert lin_sys[0] == plane_2 # swapped
    assert lin_sys[1] == plane_1 # swapped
    assert lin_sys[2] == plane_3
    assert lin_sys[3] == plane_4
    lin_sys.swap_rows(1, 3)
    assert lin_sys[0] == plane_2
    assert lin_sys[1] == plane_4 # swapped
    assert lin_sys[2] == plane_3
    assert lin_sys[3] == plane_1 # swapped
    # Swapping with the indices reversed must behave identically.
    lin_sys.swap_rows(3, 1)
    assert lin_sys[0] == plane_2
    assert lin_sys[1] == plane_1 # swapped
    assert lin_sys[2] == plane_3
    assert lin_sys[3] == plane_4
def test_linsys_multiply_row():
    """multiply_coefficient_and_row(c, i) must scale row i by c in place;
    c=1 is a no-op, c=-1 negates, c=10 scales both normal and constant."""
    plane_1 = Plane(Vector([1, 1, 1]), 1)
    plane_2 = Plane(Vector([0, 1, 0]), 2)
    plane_3 = Plane(Vector([1, 1, -1]), 3)
    plane_4 = Plane(Vector([1, 0, -2]), 2)
    # same as the end of the last test
    lin_sys = LinearSystem([plane_2, plane_1, plane_3, plane_4])
    lin_sys.multiply_coefficient_and_row(1, 0)
    assert lin_sys[0] == plane_2
    assert lin_sys[1] == plane_1
    assert lin_sys[2] == plane_3
    assert lin_sys[3] == plane_4
    lin_sys.multiply_coefficient_and_row(-1, 2)
    new_plane_3 = Plane(Vector([-1, -1, 1]), -3)
    assert lin_sys[0] == plane_2
    assert lin_sys[1] == plane_1
    assert lin_sys[2] == new_plane_3
    assert lin_sys[3] == plane_4
    lin_sys.multiply_coefficient_and_row(10, 1)
    new_plane_1 = Plane(Vector([10, 10, 10]), 10)
    assert lin_sys[0] == plane_2
    assert lin_sys[1] == new_plane_1
    assert lin_sys[2] == new_plane_3
    assert lin_sys[3] == plane_4
def test_linsys_multiply_row_add():
    """add_multiple_times_row_to_row(c, src, dst) must add c * row[src]
    to row[dst] in place; c=0 is a no-op and row[src] is never changed."""
    plane_2 = Plane(Vector([0, 1, 0]), 2)
    new_plane_1 = Plane(Vector([10, 10, 10]), 10)
    new_plane_3 = Plane(Vector([-1, -1, 1]), -3)
    plane_4 = Plane(Vector([1, 0, -2]), 2)
    # same as the end of the last test
    lin_sys = LinearSystem([plane_2, new_plane_1, new_plane_3, plane_4])
    # multiply the first row by 0 and add to the second row
    # this should have no affect
    lin_sys.add_multiple_times_row_to_row(0, 0, 1)
    assert lin_sys[0] == plane_2
    assert lin_sys[1] == new_plane_1
    assert lin_sys[2] == new_plane_3
    assert lin_sys[3] == plane_4
    # multiply the first row by 1 and add it to the second row
    lin_sys.add_multiple_times_row_to_row(1, 0, 1)
    plane_1_added = Plane(Vector([10, 11, 10]), 12)
    assert lin_sys[0] == plane_2
    assert lin_sys[1] == plane_1_added
    assert lin_sys[2] == new_plane_3
    assert lin_sys[3] == plane_4
    # multiply the second row by -1 and add to the first row
    lin_sys.add_multiple_times_row_to_row(-1, 1, 0)
    plane_2_subtracted = Plane(Vector([-10, -10, -10]), -10)
    assert lin_sys[0] == plane_2_subtracted
    assert lin_sys[1] == plane_1_added
    assert lin_sys[2] == new_plane_3
    assert lin_sys[3] == plane_4
def test_triangular_form():
    """compute_triangular_form() must return an upper-triangular system
    (each successive row starts with one more leading zero)."""
    plane_1 = Plane(Vector([0, 1, 1]), 1)
    plane_2 = Plane(Vector([1, -1, 1]), 2)
    plane_3 = Plane(Vector([1, 2, -5]), 3)
    lin_sys = LinearSystem([plane_1, plane_2, plane_3])
    triangular = lin_sys.compute_triangular_form()
    assert triangular[0] == Plane(Vector([1, -1, 1]), 2)
    assert triangular[1] == Plane(Vector([0, 1, 1]), 1)
    assert triangular[2] == Plane(Vector([0, 0, -9]), -2)
def test_rref_form():
    """compute_rref_form() must fully reduce the system: each row has a
    leading 1 with zeros elsewhere, giving the solution directly."""
    plane_1 = Plane(Vector([0, 1, 1]), 1)
    plane_2 = Plane(Vector([1, -1, 1]), 2)
    plane_3 = Plane(Vector([1, 2, -5]), 3)
    lin_sys = LinearSystem([plane_1, plane_2, plane_3])
    rref = lin_sys.compute_rref_form()
    # Expected unique solution: x = 23/9, y = 7/9, z = 2/9.
    assert rref[0] == Plane(Vector([1, 0, 0]), Decimal(23) / Decimal(9))
    assert rref[1] == Plane(Vector([0, 1, 0]), Decimal(7) / Decimal(9))
    assert rref[2] == Plane(Vector([0, 0, 1]), Decimal(2) / Decimal(9))
def test_no_consistent_solutions():
    """An inconsistent system (contradictory planes) must report that it
    has no consistent solutions."""
    plane_1 = Plane(Vector([1, 1, -1]), 2)
    plane_2 = Plane(Vector([2, 3, -1]), 0)
    plane_3 = Plane(Vector([3, 4, -2]), 1)
    lin_sys_1 = LinearSystem([plane_1, plane_2, plane_3])
    solutions_1 = lin_sys_1.system_solutions()
    assert solutions_1 == 'system has no consistent solutions'
def test_infinite_solutions():
    """An underdetermined system (a dependent row) must report that it
    has infinitely many solutions."""
    plane_4 = Plane(Vector([1, 1, 1]), 3)
    plane_5 = Plane(Vector([2, 4, 1]), 8)
    plane_6 = Plane(Vector([6, 10, 4]), 22)
    lin_sys_2 = LinearSystem([plane_4, plane_5, plane_6])
    solutions_2 = lin_sys_2.system_solutions()
    assert solutions_2 == 'system has infinite solutions'
def test_single_solution():
    """An (over)determined consistent system must report its unique
    solution as a formatted string."""
    plane_7 = Plane(Vector([1, 1, 1]), 1)
    plane_8 = Plane(Vector([0, 1, 0]), 2)
    plane_9 = Plane(Vector([1, 1, -1]), 3)
    plane_10 = Plane(Vector([1, 0, -2]), 2)
    lin_sys_3 = LinearSystem([plane_7, plane_8, plane_9, plane_10])
    solutions_3 = lin_sys_3.system_solutions()
    assert solutions_3 == 'solution is: a = 0.000, b = 2.000, c = -1.000'
| [
"me@madhavajay.com"
] | me@madhavajay.com |
650b83baea8f8fa8775490ba7da86be009c5d21d | 601c25757c20f0f399d6abbf948638f93008a897 | /to_coco_train.py | 01702e9d9c457ef271ae4bc84977b6638eb6ba94 | [] | no_license | aositeluofu/ZTE_challenge_sort_top9 | 25954fb07e6e306213ecd782c33a52374b95afe5 | 1e372c88c8628100ee61957c5f50ff07a0cae54f | refs/heads/master | 2023-01-23T01:14:00.007763 | 2020-11-22T13:42:44 | 2020-11-22T13:42:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,023 | py | import json
from os.path import join,dirname,realpath
import os
import cv2
import numpy as np
def file2array(path, delimiter=','):
    """Read a delimiter-separated text file of integers into a 2-D array.

    Blank lines are skipped; the resulting rows are sorted by their first
    column and returned as an int numpy array.

    Improvements over the original: the file handle is closed via a
    context manager even if parsing raises, and the dead pre-assignment
    of ``recordlist`` is gone.  Behaviour (including the final
    ``astype(int)``) is otherwise unchanged.
    """
    with open(path, 'r', encoding='utf-8') as fp:
        content = fp.read()
    # One row of ints per non-blank line.
    recordlist = [[int(field) for field in row.split(delimiter)]
                  for row in content.splitlines() if row.strip()]
    M = np.array(recordlist)
    M = M[M[:, 0].argsort(), :]  # sort rows by first column (frame id)
    return M.astype(int)
root=dirname(realpath(__file__))
# txt_dir='rec_init_score0.8_nms0.3_0.85_crop_ensemble_more_fair0.4_0.5'
# txt_dir='after_i0c2_rec_mmdet_81.9_4_score0.7_iout0.55_B'
txt_dir='after_rec_dif_i0c2_B_rec_74.8_3_score0.7_iout0.55_B_75.592'
# txt_dir='after_rec_dif_i0instead_cut_c2_B_rec_74.8_3_score0.7_iout0.55_B_75.592'
# txt_dir='rec_c2_rec_mmdet_81.2_3_score0.8_iout0.55'
# txt_dir='rec_mmdet_11e_0.9_0_81.2'
# txt_dir='rec_c2_rec_mmdet_81.5_4_score0.8_iout0.55_82.173'
# txt_dir='rec_c2_rec_mmdet_81.5_4_score0.8_iout0.45'
# txt_dir='rec_new'
out_name=join(root,'track_B_train.json')
size={"Track1.txt":(1550,734),\
"Track4.txt":(1920,980),\
"Track5.txt":(1400,559),\
"Track9.txt":(1116,874),\
"Track10.txt":(615,593),\
"Track2.txt":(1550,734),\
"Track3.txt":(1116,874),\
"Track6.txt":(1400,559),\
"Track8.txt":(928,620),\
"Track11.txt":(615,593),\
"Track12.txt":(1728,824)}
# Assemble a COCO-format annotation dict: one 'images' entry per frame
# image, one 'annotations' entry per tracked box in that frame.
json_out = {}
json_out['images'] = []
json_out['annotations'] = []
# dataset='A-data'
dataset = 'B-data'
img_id = 0   # running COCO image id
anno_id = 0  # running COCO annotation id
for lists in os.listdir(join(root, dataset)):  # one directory per track sequence
    path = os.path.join(join(root, dataset), lists)
    # Tracking results for this sequence: int rows starting with
    # (frame, object_id, x, y, w, h, ...), sorted by frame by file2array.
    M = file2array(join(root, txt_dir, lists + '.txt'))
    s = size[lists + '.txt']  # (width, height) of this sequence's frames
    frame_dict = {}
    for f in range(1, M[-1, 0] + 1):
        frame_dict[f] = {}  # indexed by frame id, then by object id
    for i in range(len(M)):
        l = M[i]
        frame_dict[l[0]][l[1]] = l.copy()
    for img_name in os.listdir(path):
        img_dict = {}
        # img=cv2.imread(join(path,img_name),0)
        # img_dict['file_name']=lists+'/img1/'+img_name
        img_dict['file_name'] = lists + '/' + img_name
        img_dict['id'] = img_id
        img_dict['height'] = s[1]
        img_dict['width'] = s[0]
        json_out['images'].append(img_dict)
        print('img_id', img_id)
        # Frame number is the image file name without its extension.
        # NOTE(review): a frame number larger than M[-1,0] would raise
        # KeyError below — assumes every image frame appears in the
        # tracking results.
        img_frame = int(img_name.split('.')[0])
        for o_key in frame_dict[img_frame].keys():
            anno = {}
            # anno['segmentation']=[[float(frame_dict[img_frame][o_key][2]),float(frame_dict[img_frame][o_key][3]),\
            # float(frame_dict[img_frame][o_key][2])+1,float(frame_dict[img_frame][o_key][3]),\
            # float(frame_dict[img_frame][o_key][2])+1,float(frame_dict[img_frame][o_key][3])+1,\
            # float(frame_dict[img_frame][o_key][2]),float(frame_dict[img_frame][o_key][3])+1]]
            anno['image_id'] = img_id
            anno['id'] = anno_id
            anno['category_id'] = 1  # every box is labelled category 1 ("person")
            anno_id = anno_id + 1
            # COCO area = w * h; bbox = [x, y, w, h] as floats.
            anno['area'] = frame_dict[img_frame][o_key][4]*frame_dict[img_frame][o_key][5]*1.0
            anno['bbox'] = [float(frame_dict[img_frame][o_key][2]), float(frame_dict[img_frame][o_key][3]),
                            float(frame_dict[img_frame][o_key][4]), float(frame_dict[img_frame][o_key][5])]
            json_out['annotations'].append(anno)
        img_id = img_id + 1
json_out["categories"]= [{"supercategory": "person", "id": 1, "name": "person"}, {"supercategory": "vehicle", "id": 2, "name": "bicycle"}, {"supercategory": "vehicle", "id": 3, "name": "car"}, {"supercategory": "vehicle", "id": 4, "name": "motorcycle"}, {"supercategory": "vehicle", "id": 5, "name": "airplane"}, {"supercategory": "vehicle", "id": 6, "name": "bus"}, {"supercategory": "vehicle", "id": 7, "name": "train"}, {"supercategory": "vehicle", "id": 8, "name": "truck"}, {"supercategory": "vehicle", "id": 9, "name": "boat"}, {"supercategory": "outdoor", "id": 10, "name": "traffic light"}, {"supercategory": "outdoor", "id": 11, "name": "fire hydrant"}, {"supercategory": "outdoor", "id": 13, "name": "stop sign"}, {"supercategory": "outdoor", "id": 14, "name": "parking meter"}, {"supercategory": "outdoor", "id": 15, "name": "bench"}, {"supercategory": "animal", "id": 16, "name": "bird"}, {"supercategory": "animal", "id": 17, "name": "cat"}, {"supercategory": "animal", "id": 18, "name": "dog"}, {"supercategory": "animal", "id": 19, "name": "horse"}, {"supercategory": "animal", "id": 20, "name": "sheep"}, {"supercategory": "animal", "id": 21, "name": "cow"}, {"supercategory": "animal", "id": 22, "name": "elephant"}, {"supercategory": "animal", "id": 23, "name": "bear"}, {"supercategory": "animal", "id": 24, "name": "zebra"}, {"supercategory": "animal", "id": 25, "name": "giraffe"}, {"supercategory": "accessory", "id": 27, "name": "backpack"}, {"supercategory": "accessory", "id": 28, "name": "umbrella"}, {"supercategory": "accessory", "id": 31, "name": "handbag"}, {"supercategory": "accessory", "id": 32, "name": "tie"}, {"supercategory": "accessory", "id": 33, "name": "suitcase"}, {"supercategory": "sports", "id": 34, "name": "frisbee"}, {"supercategory": "sports", "id": 35, "name": "skis"}, {"supercategory": "sports", "id": 36, "name": "snowboard"}, {"supercategory": "sports", "id": 37, "name": "sports ball"}, {"supercategory": "sports", "id": 38, "name": "kite"}, 
{"supercategory": "sports", "id": 39, "name": "baseball bat"}, {"supercategory": "sports", "id": 40, "name": "baseball glove"}, {"supercategory": "sports", "id": 41, "name": "skateboard"}, {"supercategory": "sports", "id": 42, "name": "surfboard"}, {"supercategory": "sports", "id": 43, "name": "tennis racket"}, {"supercategory": "kitchen", "id": 44, "name": "bottle"}, {"supercategory": "kitchen", "id": 46, "name": "wine glass"}, {"supercategory": "kitchen", "id": 47, "name": "cup"}, {"supercategory": "kitchen", "id": 48, "name": "fork"}, {"supercategory": "kitchen", "id": 49, "name": "knife"}, {"supercategory": "kitchen", "id": 50, "name": "spoon"}, {"supercategory": "kitchen", "id": 51, "name": "bowl"}, {"supercategory": "food", "id": 52, "name": "banana"}, {"supercategory": "food", "id": 53, "name": "apple"}, {"supercategory": "food", "id": 54, "name": "sandwich"}, {"supercategory": "food", "id": 55, "name": "orange"}, {"supercategory": "food", "id": 56, "name": "broccoli"}, {"supercategory": "food", "id": 57, "name": "carrot"}, {"supercategory": "food", "id": 58, "name": "hot dog"}, {"supercategory": "food", "id": 59, "name": "pizza"}, {"supercategory": "food", "id": 60, "name": "donut"}, {"supercategory": "food", "id": 61, "name": "cake"}, {"supercategory": "furniture", "id": 62, "name": "chair"}, {"supercategory": "furniture", "id": 63, "name": "couch"}, {"supercategory": "furniture", "id": 64, "name": "potted plant"}, {"supercategory": "furniture", "id": 65, "name": "bed"}, {"supercategory": "furniture", "id": 67, "name": "dining table"}, {"supercategory": "furniture", "id": 70, "name": "toilet"}, {"supercategory": "electronic", "id": 72, "name": "tv"}, {"supercategory": "electronic", "id": 73, "name": "laptop"}, {"supercategory": "electronic", "id": 74, "name": "mouse"}, {"supercategory": "electronic", "id": 75, "name": "remote"}, {"supercategory": "electronic", "id": 76, "name": "keyboard"}, {"supercategory": "electronic", "id": 77, "name": "cell phone"}, 
{"supercategory": "appliance", "id": 78, "name": "microwave"}, {"supercategory": "appliance", "id": 79, "name": "oven"}, {"supercategory": "appliance", "id": 80, "name": "toaster"}]
# Write the assembled COCO-style dict to disk.
out_name = join(root, 'track_B_train.json')
with open(out_name, "w") as f:
    json.dump(json_out, f)
print(out_name + " 加载入文件完成...")  # "... written to file"
"759379768@qq.com"
] | 759379768@qq.com |
e5efff36442daca1dc76753963d41d3814d762fb | ca590c3b7503ef9a1c954f046aaba48789a3b714 | /basic_app/views.py | fb7ede572b217a00df186addb60a305a6b54ea9e | [] | no_license | arpitgupta1906/django-loginform-practice | 24b357f3d40828ef566a3cdb1d3f704c3811baf4 | 899029a06a0fc669a2efda9b66fdf8c56167a637 | refs/heads/master | 2020-06-04T07:45:54.084173 | 2019-06-14T11:21:38 | 2019-06-14T11:21:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,345 | py | from django.shortcuts import render
from basic_app.forms import UserForm,UserProfileInfoForm
from django.contrib.auth import authenticate,login,logout
from django.http import HttpResponseRedirect,HttpResponse
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
# Create your views here.
def index(request):
    """Render the public landing page."""
    return render(request, 'basic_app/index.html')
@login_required
def special(request):
    """Plain-text page reachable only by authenticated users."""
    return HttpResponse("You are logged in,nice")
@login_required
def user_logout(request):
    """Log the current user out and redirect to the index page."""
    logout(request)
    return HttpResponseRedirect(reverse('index'))
def register(request):
    """Handle user sign-up.

    On POST: validate both the user and profile forms, hash the raw
    password, link the profile to the new user, store an optional
    profile picture, and flag success.  On GET: show blank forms.
    """
    registered = False
    if request.method == "POST":
        user_form = UserForm(data=request.POST)
        profile_form = UserProfileInfoForm(data=request.POST)
        if user_form.is_valid() and profile_form.is_valid():
            user = user_form.save()
            user.set_password(user.password)  # hash the raw password
            user.save()
            # commit=False: the profile still needs its user FK set.
            profile = profile_form.save(commit=False)
            profile.user = user
            if 'profile_pic' in request.FILES:
                profile.profile_pic = request.FILES['profile_pic']
            profile.save()
            registered = True
        else:
            print(user_form.errors, profile_form.errors)
    else:
        user_form = UserForm()
        profile_form = UserProfileInfoForm()
    return render(request, 'basic_app/registration.html', {'user_form': user_form,
                                                           'profile_form': profile_form,
                                                           'registered': registered})
def user_login(request):
    """Authenticate a user from the login form.

    On POST: authenticate the submitted credentials; redirect active
    users to the index page, reject inactive accounts and bad
    credentials.  On GET: render the login form.

    Bug fixed: the success branch previously did
    ``HttpResponseRedirect(reverse='index')`` — passing the ``reverse``
    function as a keyword argument instead of calling it — which raised
    TypeError on every successful login.  ``reverse('index')`` is now
    called to resolve the URL.
    """
    if request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        user = authenticate(username=username, password=password)
        if user:
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect(reverse('index'))
            else:
                return HttpResponse("ACCOUNT IN ACTIVE")
        else:
            # NOTE(review): logging the raw password is a security risk;
            # consider removing it from this message.
            print("someone tried to login and failed")
            print("Username:{} and password:{}".format(username, password))
            return HttpResponse("invalid login details supplied")
    else:
        return render(request, 'basic_app/login.html', {})
| [
"gupta.25@iitj.ac.in"
] | gupta.25@iitj.ac.in |
81d64fbe8e61f3bfd56fd9fe45446ed82ad92b0e | 3ee5f3f013cbb6ab8620c973c191ccc5e5d47aec | /nps_examples_py/setup.py | e7d39e367baae191821864d00af970b90237b4ce | [] | no_license | nps-ros2/nps-ros2-examples | 96837de908c7c76089f1eda6c10fb28c23452fdf | 557244746b370f04288a7de74b4b12991cf331e0 | refs/heads/master | 2020-05-07T20:51:12.577750 | 2019-09-10T17:43:42 | 2019-09-10T17:43:42 | 180,880,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 916 | py | from setuptools import find_packages
from setuptools import setup
package_name = 'nps_examples_py'

# Standard ament/colcon Python package manifest for the ROS 2 examples.
setup(
    name=package_name,
    version='0.6.2',
    packages=find_packages(exclude=['test']),
    data_files=[
        # Register the package with the ament resource index and install
        # the package manifest alongside it.
        ('share/ament_index/resource_index/packages',
            ['resource/' + package_name]),
        ('share/' + package_name, ['package.xml']),
    ],
    install_requires=['setuptools'],
    zip_safe=True,
    author='your name',
    author_email='you@yours.com',
    maintainer='your name',
    maintainer_email='you@yours.com',
    keywords=['ROS'],
    classifiers=[
        'Programming Language :: Python'
    ],
    description=(
        'Adapted from ROS2 demos.'
    ),
    license='your license',
    # Executables exposed as `ros2 run nps_examples_py <name>`.
    entry_points={
        'console_scripts': [
            'listener = nps_examples_py.topics.listener:main',
            'talker = nps_examples_py.topics.talker:main'
        ],
    },
)
| [
"bdallen@nps.edu"
] | bdallen@nps.edu |
16e39520ada57ae4d129e3811101334697cef212 | de3d9ba254b1e8047817fff9fec9b3762e138817 | /micro_bit/micro_bit资料收集/播放音乐粉刷匠.py | 85d1f6fb6cd5966cb7c83265861063df60a1a8c5 | [] | no_license | chenjiegd/microbit_Repository | 3343401cf4f6f9a8011fa7a2ebe0023a6a76c4e1 | 8b37e4c378712a53f20b5a68ed62e96d2c5c19d3 | refs/heads/master | 2020-04-17T17:25:43.179476 | 2019-06-06T01:24:25 | 2019-06-06T01:24:25 | 166,781,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 887 | py | from microbit import *
import music
# Show a quaver icon on the LED matrix while the melody plays.
display.show(Image.MUSIC_QUAVER)
# Melody as NOTE:BEATS strings — e.g. "G4:2" plays pitch G4 for two
# beats (see the explanatory string at the end of this file).
tune = ["G4:2", "E4:2", "G4:2", "E4:2", "G4:2", "E4:2", "C4:4", "D4:2", "F4:2",
        "E4:2", "D4:2", "G4:4", "E1:4", "G4:2", "E4:2", "G4:2", "E4:2", "G4:2",
        "E4:2", "C4:4", "D4:2", "F4:2", "E4:2", "D4:2", "C4:4", "E1:4", "D4:2",
        "D4:2", "F4:2", "F4:2", "E4:2", "C4:2", "G4:4", "D4:2", "F4:2", "E4:2",
        "D4:2", "G4:4", "E1:4", "G4:2", "E4:2", "G4:2", "E4:2", "G4:2", "E4:2",
        "C4:4", "D4:2", "F4:2", "E4:2", "D4:2", "C4:4"]
music.play(tune)
'''
这节课学习使用micro:bit机器人来演奏乐曲《粉刷匠》。
import是导入,这里的意思是导入music库函数,并制作一个音符列表来创建一个旋律,
例如列表中G4:2表示以2为节拍播放G4音调,C4:4表示以4为节拍播放C4音调,后面的也是依次类推。
''' | [
"noreply@github.com"
] | noreply@github.com |
271d40935f16d86274c80cad2bde0c58c0bb8ccc | 953fba52bb8f05339786e108f60b05e57e3f2093 | /guia/capitulo_5/cap5_project/settings.py | 4262851f96a3f7f869550cf9eb327664e9750168 | [] | no_license | student10github/primer_repositorio_cursdba | 2759eced078f97e0960cfb9031609dc125b5f5ed | 6b007c6c0e77bc4366473e9de1b6081f9b074159 | refs/heads/main | 2023-01-04T06:26:51.299247 | 2020-10-24T17:35:05 | 2020-10-24T17:35:05 | 306,871,381 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,096 | py | """
Django settings for cap5_project project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'j$mzm5e7*zwdyof%#cpjpp^fe8rfpt9gxy)0!e%d1wc4^7h$40'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cap5_app',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cap5_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cap5_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| [
"student10github@gmail.com"
] | student10github@gmail.com |
b5a11e94c69d8671ad535398c8b61a5819cfd8cc | 45ed74f2493a9b82a28aaa8f92bc4961bdfd804f | /catkin_ws_lab_kf/build/robot_pose_ekf-master/cmake/robot_pose_ekf-genmsg-context.py | 16c05cfcd43b7f4a3a4b7f83e55e65430a87c61f | [] | no_license | fishflying3891/udacity-robotics-software-engineer | c21c66ce36f3bdee894ca0ce7571060bfc3333ce | 4a792c7d19339b3808c128ce8714f3dcc3ac64b9 | refs/heads/main | 2023-07-24T21:07:32.995069 | 2021-09-08T04:11:56 | 2021-09-08T04:11:56 | 380,644,749 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | # generated from genmsg/cmake/pkg-genmsg.context.in
# Catkin/genmsg build context for the ``robot_pose_ekf`` package.
# NOTE(review): this file is auto-generated (from pkg-genmsg.context.in);
# the absolute paths below are build-machine specific.
messages_str = ""  # no .msg files generated by this package
services_str = "/home/workspace/udacity-robotics-software-engineer/catkin_ws_lab_kf/src/robot_pose_ekf-master/srv/GetStatus.srv"
pkg_name = "robot_pose_ekf"
dependencies_str = "std_msgs"  # message packages this package depends on
langs = "gencpp;geneus;genlisp;gennodejs;genpy"  # target generator languages
dep_include_paths_str = "std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python2"
package_has_static_sources = '' == 'TRUE'  # evaluates to False
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
"ethan.fei.yu@gmail.com"
] | ethan.fei.yu@gmail.com |
ecfb22a1f524765000e6d2826f9a05fc2854cefb | d818cbfba933f65e26ac6abe3865ed3f956eba5e | /user_group/tests.py | 0c374cf55703d59f04e1e3c82c5e098ca0e64d81 | [] | no_license | gwachhamit/MyProject | 27798c95c776cbb35a72c6c8b70492e1663474da | e499852a411aa9791740b207394625b66b3c2175 | refs/heads/master | 2020-04-25T16:03:31.579961 | 2019-02-27T10:55:11 | 2019-02-27T10:55:11 | 172,897,636 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,842 | py | from django.test import TestCase
from django.urls import reverse
from django.contrib.auth.models import User, Group
from rest_framework import status
from rest_framework.test import APITestCase
from rest_framework.test import APIClient
from rest_framework.test import force_authenticate
import json
# Create your tests here.
class TestGroup(APITestCase):
    """CRUD API tests for the Django auth ``Group`` endpoints.

    Each test resolves a named URL (``group-list``, ``group-show``, ...)
    and asserts only on the HTTP status code, not the response payload.
    """
    def setUp(self):
        # Fixture group to read/update/delete, plus a payload used for
        # both create and update requests.
        self.customer = Group.objects.create(name='customer')
        self.new_group = {'name': 'developer','permissions':[]}
    def test_get_all_groups(self):
        # Listing all groups should succeed with 200.
        url = reverse('group-list')
        # get API response
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_get_group(self):
        # Retrieving a single existing group by primary key.
        # get API response
        response = self.client.get(reverse('group-show',kwargs={'pk':self.customer.pk}))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_post_group(self):
        # Creating a new group from the JSON payload returns 201.
        # get API response
        response = self.client.post(
            reverse('group-create'),
            data=json.dumps(self.new_group),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_put_group(self):
        # Full update of the fixture group with the new payload.
        response = self.client.put(
            reverse('group-update',kwargs={'pk':self.customer.pk}),
            data=json.dumps(self.new_group),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_delete_user(self):
        # NOTE(review): despite the name this deletes a *group*; a clearer
        # name would be ``test_delete_group``.
        # get API response
        response = self.client.delete(
            reverse('group-delete',kwargs={'pk':self.customer.pk})
        )
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
| [
"genuineaametax@gmail.com"
] | genuineaametax@gmail.com |
c3e3e185d6859d2945a795226ced30bc8ddb66b3 | 1ccc531e77b10fb75a31fdc441db84f6180c81b3 | /example-project/process_items.py | d106ca2d489f5338d423eeef7e97d5cf1fab35b1 | [
"MIT",
"Giftware"
] | permissive | lnlantian/Scrapy | aba7acccb9c03b2c8189229c4eb25f07eecc2389 | fedaa39e7fa8b839701a21f179cd292898a25642 | refs/heads/master | 2021-01-20T18:14:54.851395 | 2017-02-26T01:36:51 | 2017-02-26T01:36:51 | 64,541,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,926 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A script to process items from a redis queue."""
from __future__ import print_function, unicode_literals
import argparse
import json
import logging
import pprint
import sys
import time
from scrapy_redis import get_redis
logger = logging.getLogger('process_items')
def process_items(r, keys, timeout, limit=0, log_every=1000, wait=.1):
    """Process items from a redis queue.

    Parameters
    ----------
    r : Redis
        Redis connection instance.
    keys : list
        List of keys to read the items from.
    timeout: int
        Read timeout.
    limit : int
        Maximum number of items to process; 0 means no limit.
    log_every : int
        Emit a progress log line every this many processed items.
    wait : float
        Seconds to sleep before polling again after a read timeout.
    """
    # ``limit=0`` means "run forever".
    limit = limit or float('inf')
    processed = 0
    while processed < limit:
        # Change ``blpop`` to ``brpop`` to process as LIFO.
        ret = r.blpop(keys, timeout)
        # ``blpop`` returns None when the timeout expired with no data;
        # back off briefly and poll again.
        if ret is None:
            time.sleep(wait)
            continue
        source, data = ret
        try:
            item = json.loads(data)
        except Exception:
            # Malformed payloads are logged and skipped, never fatal.
            logger.exception("Failed to load item:\n%r", pprint.pformat(data))
            continue
        try:
            # Items may come from different spiders with different field
            # names, hence the name/title and url/link fallbacks.
            name = item.get('name') or item.get('title')
            url = item.get('url') or item.get('link')
            logger.debug("[%s] Processing item: %s <%s>", source, name, url)
        except KeyError:
            logger.exception("[%s] Failed to process item:\n%r",
                             source, pprint.pformat(item))
            continue
        processed += 1
        if processed % log_every == 0:
            logger.info("Processed %s items", processed)
def main():
    """Parse CLI arguments and consume queue items until interrupted.

    Returns a process exit code: 0 on success or Ctrl-C, 2 on an
    unhandled exception.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('key', help="Redis key where items are stored")
    parser.add_argument('--host')
    parser.add_argument('--port')
    parser.add_argument('--timeout', type=int, default=5)
    parser.add_argument('--limit', type=int, default=0)
    parser.add_argument('--progress-every', type=int, default=100)
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()
    # Only forward connection settings the user actually supplied so that
    # ``get_redis`` falls back to its own defaults otherwise.
    params = {}
    if args.host:
        params['host'] = args.host
    if args.port:
        params['port'] = args.port
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
    r = get_redis(**params)
    # Resolve the actual server host for the startup log line.
    host = r.connection_pool.get_connection('info').host
    logger.info("Waiting for items in '%s' (server: %s)", args.key, host)
    kwargs = {
        'keys': [args.key],
        'timeout': args.timeout,
        'limit': args.limit,
        'log_every': args.progress_every,
    }
    try:
        process_items(r, **kwargs)
        retcode = 0  # ok
    except KeyboardInterrupt:
        retcode = 0  # ok
    except Exception:
        logger.exception("Unhandled exception")
        retcode = 2
    return retcode
if __name__ == '__main__':
sys.exit(main())
| [
"darkrho@gmail.com"
] | darkrho@gmail.com |
b102bf04f8e4c9b68a9fdf1a1ebe79d2b472e02c | ac58676ef85ba33c1b50ba48af55412565a16e17 | /Day20.py | 2ce96a0d8d4cd763f3498bd719685427d1de714a | [] | no_license | Rupam-Shil/30_days_of_competative_python | 51e4bd66c8704169cdb6aa65c49cca99a664e853 | 6bf7f25020be1bec871729362e24b48e18e9dfa3 | refs/heads/main | 2023-03-30T02:46:04.744683 | 2021-03-28T06:20:48 | 2021-03-28T06:20:48 | 336,993,444 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 264 | py | '''Find a square of number without using multiplication and division operator'''
def calcSquare(num):
    """Return ``num`` squared using repeated addition only.

    The exercise forbids the multiplication and division operators, so the
    square is built by adding ``|num|`` to an accumulator ``|num|`` times.
    Unlike the naive version, this also handles negative inputs correctly
    (``range`` of a negative number is empty, which would return 0), since
    (-n)**2 == n**2.
    """
    # abs() without importing anything, and without * or /.
    magnitude = num if num >= 0 else -num
    total = 0
    for _ in range(magnitude):
        total += magnitude
    return total
num = int(input("Please enter a no:"))
print("the square of {} is {}".format(num, calcSquare(num)))
| [
"noreply@github.com"
] | noreply@github.com |
ed7e12c276248207dcadefe405fbe058b20652dd | 258e47d8e55db0fb12437aa1e7f9860a8bef6623 | /agilex/configuracion_agilex/doctype/tipo_de_documento/tipo_de_documento_dashboard.py | 6f3e46469731e8b52ecba7c28fb32c310398f215 | [
"MIT"
] | permissive | Nirchains/agilex | 003894bed211c71004f37beb22fd96fc1df6576f | 04470873abdea5d0023a1ccadf02a932fb3e834b | refs/heads/master | 2021-06-12T11:23:48.027599 | 2021-05-28T21:48:00 | 2021-05-28T21:48:00 | 166,990,550 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py | from frappe import _
def get_data():
	"""Dashboard configuration for the ``Tipo de Documento`` doctype.

	NOTE(review): ``heatmap_message`` mentions Time Sheets/projects and
	looks copy-pasted from another doctype's dashboard; it is harmless
	while ``heatmap`` is False, but worth correcting.
	"""
	return {
		'heatmap': False,
		'heatmap_message': _('This is based on the Time Sheets created against this project'),
		'fieldname': 'tipo_de_documento',
		# Link tile: Expediente records filtered by this document type.
		'transactions': [
			{
				'label': _('Ver expedientes'),
				'items': ['Expediente']
			}
		]
	}
"nirchains@gmail.com"
] | nirchains@gmail.com |
3306d61cfb77c241204cf96a9bb4ef072b7ab5ed | 2b0187b6214da482fe253e0e7a5632d32f1db000 | /blog/migrations/0001_initial.py | 4b2ab9a1ba58d0c4432c3820b24950d1a224d544 | [] | no_license | KedroBoss/byexample | 910eae1f0ba2aa3031331c7c6d9a4991664f7af5 | 995dd8cf64084602a11a3c0b0071d90d6595bf73 | refs/heads/master | 2021-01-11T21:44:50.768250 | 2017-01-19T09:40:54 | 2017-01-19T09:40:54 | 78,844,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,436 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-03 11:14
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the blog ``Post`` model.

    Produced by ``makemigrations``; avoid hand-editing beyond comments.
    """
    initial = True
    dependencies = [
        # Posts reference the configured (swappable) user model as author.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=250)),
                # Slug only needs to be unique per publication date.
                ('slug', models.SlugField(max_length=250, unique_for_date='published')),
                ('body', models.TextField()),
                ('published', models.DateTimeField(default=django.utils.timezone.now)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('status', models.CharField(choices=[('published', 'Published'), ('draft', 'Draft')], default='draft', max_length=10)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blog_post', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                # Newest posts first by default.
                'ordering': ('-published',),
            },
        ),
    ]
| [
"dimmak90@gmail.com"
] | dimmak90@gmail.com |
855f7cf66e2f45a2fe4d5bc6c25db3575a14ec1d | a00ed711e3e08b50ad6e91cc07a2cddc4a1de5ea | /airflow/providers/amazon/aws/example_dags/example_redshift_to_s3.py | 8116e02dc165ce82f017a21ede850dece6254ec9 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | ishiis/airflow | 4305794e36b611d01f49e3f2401be3dc49782670 | 292440d54f4db84aaf0c5a98cf5fcf34303f2fa8 | refs/heads/master | 2022-07-30T00:51:28.806940 | 2022-07-14T12:07:11 | 2022-07-14T12:07:11 | 209,801,072 | 1 | 0 | Apache-2.0 | 2019-09-20T13:47:26 | 2019-09-20T13:47:26 | null | UTF-8 | Python | false | false | 1,575 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from os import getenv
from airflow import DAG
from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
# Target S3 location and source Redshift table, overridable via env vars.
S3_BUCKET_NAME = getenv("S3_BUCKET_NAME", "s3_bucket_name")
S3_KEY = getenv("S3_KEY", "s3_key")
REDSHIFT_TABLE = getenv("REDSHIFT_TABLE", "redshift_table")
# Example DAG demonstrating RedshiftToS3Operator; the [START]/[END]
# markers below are consumed by the Airflow docs build — keep them.
with DAG(
    dag_id="example_redshift_to_s3",
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,  # manual trigger only
    catchup=False,
    tags=['example'],
) as dag:
    # [START howto_transfer_redshift_to_s3]
    task_transfer_redshift_to_s3 = RedshiftToS3Operator(
        task_id='transfer_redshift_to_s3',
        s3_bucket=S3_BUCKET_NAME,
        s3_key=S3_KEY,
        schema='PUBLIC',
        table=REDSHIFT_TABLE,
    )
    # [END howto_transfer_redshift_to_s3]
| [
"noreply@github.com"
] | noreply@github.com |
5ae06310b3013350540c1be6333d058b349c1008 | 22a5d022ee3abeb4d6d8ef439bf5ada22c9eb686 | /ssmode/bar_chart.py | 8e32470e92756ad42883342dfee6934735519870 | [
"MIT"
] | permissive | skyselect/ssmode | 269d297c981bbc49769db8c088af10e8671bf8ff | 95da748bc62779db8308bfd05470292948040410 | refs/heads/master | 2021-03-23T20:08:54.487573 | 2020-03-31T15:34:25 | 2020-03-31T15:34:25 | 247,480,707 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | from .constants import colors
def style_bar_chart(ptl_fig, ytitle=''):
    """Apply the house style to a bar-chart figure, mutating it in place.

    ``ptl_fig`` is presumably a plotly ``Figure`` whose traces are bars
    (TODO confirm at call sites); ``ytitle`` is the y-axis label.
    Returns the same figure object so calls can be chained.
    """
    # Add axis title and style the legend
    ptl_fig.layout.yaxis = {"title": ytitle, "titlefont": {"size": 12}}
    ptl_fig.layout.legend = {"xanchor":"center", "yanchor":"top", "x":0.5,"y":-0.15, "orientation":"h"}
    ptl_fig.layout.font = dict(family='Graphik, Arial, sans-serif', size=11, color='#666666')
    # Color the bars, need to modify color library with more colors.
    # Bars named 'hide' are skipped and do not consume a palette color,
    # so visible bars get consecutive colors from ``colors``.
    i=0
    for bar in ptl_fig.data:
        if bar.name != 'hide':
            bar.marker = {"color": colors[i % len(colors)]}
            bar.textposition='auto'
            bar.textfont={"size": 11, "color": "#FFFFFF", "family": "Graphik, Arial, sans-serif"}
            bar.hoverinfo='text'
            i+=1
    # Format hovering: single unified hover per x position.
    ptl_fig.layout.hovermode = "x"
    ptl_fig.layout.hoverlabel = {"namelength": -1, "bgcolor":"#F1F2F5"}
    return ptl_fig
"rasmuskisel@Rasmuss-MacBook-Pro.local"
] | rasmuskisel@Rasmuss-MacBook-Pro.local |
1b1e93bc39f957e9273260f3bc1e19ce4bea3d8c | 0bda0c021c7ca651646d15cba9a54bc82b836838 | /SSX_model_A.py | 85124f9da6930b92f748d5dd6504985f0c4fd3b1 | [] | no_license | nanders4/SSX-Files | 89666ad802adcd510788fc79fd374c65104a4bfe | 958393ffe0e0a2b7aa28a453c0374af82a9013d8 | refs/heads/master | 2020-03-25T03:00:06.397897 | 2018-08-03T15:44:54 | 2018-08-03T15:44:54 | 143,317,608 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,747 | py | """SSX_model_A.py
This is the *simplest* model we will consider for modelling spheromaks evolving in the SSX wind tunnel.
Major simplificiations fall in two categories
Geometry
--------
We consider a square duct using parity bases (sin/cos) in all directions.
Equations
---------
The equations themselves are those from Schaffner et al (2014), with the following simplifications
* hall term off
* constant eta instead of Spitzer
* no wall recycling term
* no mass diffusion
For this first model, rather than kinematic viscosity nu and thermal
diffusivitiy chi varying with density rho as they should, we are here
holding them *constant*. This dramatically simplifies the form of the
equations in Dedalus.
We use the vector potential, and enforce the Coulomb Gauge, div(A) = 0.
"""
import os
import sys
import time
import numpy as np
import dedalus.public as de
from dedalus.extras import flow_tools
from matplotlib import pyplot
import matplotlib as mpl
from spheromak import spheromak_A
import logging
logger = logging.getLogger(__name__)
#grid dimensions
nx = 72
ny = 72
nz = 540
#simulation space parameters
r = 1
length = 10
# for 3D runs, you can divide the work up over two dimensions (x and y).
# The product of the two elements of mesh *must* equal the number
# of cores used.
# mesh = None
mesh = [20,14]
kappa = 0.0001 #temperature diffusivity*rho0
mu = 0.0001 #viscosity*rho0
eta = 0.01 #magnetic diffusivity
rho0 = 0.001
gamma = 5./3. #ideal gas adiabatic index
x = de.SinCos('x', nx, interval=(-r, r))
y = de.SinCos('y', ny, interval=(-r, r))
z = de.SinCos('z', nz, interval=(0,length))
domain = de.Domain([x,y,z],grid_dtype='float', mesh=mesh)
SSX = de.IVP(domain, variables=['lnrho','T', 'vx', 'vy', 'vz', 'Ax', 'Ay', 'Az', 'phi'])
SSX.meta['T','lnrho']['x', 'y', 'z']['parity'] = 1
SSX.meta['phi']['x', 'y', 'z']['parity'] = -1
SSX.meta['vx']['y', 'z']['parity'] = 1
SSX.meta['vx']['x']['parity'] = -1
SSX.meta['vy']['x', 'z']['parity'] = 1
SSX.meta['vy']['y']['parity'] = -1
SSX.meta['vz']['x', 'y']['parity'] = 1
SSX.meta['vz']['z']['parity'] = -1
SSX.meta['Ax']['y', 'z']['parity'] = -1
SSX.meta['Ax']['x']['parity'] = 1
SSX.meta['Ay']['x', 'z']['parity'] = -1
SSX.meta['Ay']['y']['parity'] = 1
SSX.meta['Az']['x', 'y']['parity'] = -1
SSX.meta['Az']['z']['parity'] = 1
SSX.parameters['mu'] = mu
SSX.parameters['chi'] = kappa/rho0
SSX.parameters['nu'] = mu/rho0
SSX.parameters['eta'] = eta
SSX.parameters['gamma'] = gamma
SSX.substitutions['divv'] = "dx(vx) + dy(vy) + dz(vz)"
SSX.substitutions['vdotgrad(A)'] = "vx*dx(A) + vy*dy(A) + vz*dz(A)"
SSX.substitutions['Bdotgrad(A)'] = "Bx*dx(A) + By*dy(A) + Bz*dz(A)"
SSX.substitutions['Lap(A)'] = "dx(dx(A)) + dy(dy(A)) + dz(dz(A))"
SSX.substitutions['Bx'] = "dy(Az) - dz(Ay)"
SSX.substitutions['By'] = "dz(Ax) - dx(Az)"
SSX.substitutions['Bz'] = "dx(Ay) - dy(Ax)"
# Coulomb Gauge implies J = -Laplacian(A)
SSX.substitutions['jx'] = "-Lap(Ax)"
SSX.substitutions['jy'] = "-Lap(Ay)"
SSX.substitutions['jz'] = "-Lap(Az)"
SSX.substitutions['J2'] = "jx**2 + jy**2 + jz**2"
SSX.substitutions['rho'] = "exp(lnrho)"
# Continuity
SSX.add_equation("dt(lnrho) + divv = - vdotgrad(lnrho)")
# Momentum
SSX.add_equation("dt(vx) + dx(T) - nu*Lap(vx) = T*dx(lnrho) - vdotgrad(vx) + (jy*Bz - jz*By)/rho")
SSX.add_equation("dt(vy) + dy(T) - nu*Lap(vy) = T*dy(lnrho) - vdotgrad(vy) + (jz*Bx - jx*Bz)/rho")
SSX.add_equation("dt(vz) + dz(T) - nu*Lap(vz) = T*dz(lnrho) - vdotgrad(vz) + (jx*By - jy*Bx)/rho")
# MHD equations: A
SSX.add_equation("dt(Ax) + eta*jx + dx(phi) = vy*Bz - vz*By")
SSX.add_equation("dt(Ay) + eta*jy + dy(phi) = vz*Bx - vx*Bz")
SSX.add_equation("dt(Az) + eta*jz + dz(phi) = vx*By - vy*Bx")
SSX.add_equation("dx(Ax) + dy(Ay) + dz(Az) = 0", condition="(nx != 0) or (ny != 0) or (nz != 0)")
SSX.add_equation("phi = 0", condition="(nx == 0) and (ny == 0) and (nz == 0)")
# Energy
SSX.add_equation("dt(T) - (gamma - 1) * chi*Lap(T) = - (gamma - 1) * T * divv - vdotgrad(T) + (gamma - 1)*eta*J2")
solver = SSX.build_solver(de.timesteppers.RK443)
# Initial timestep
dt = 5e-6
# Integration parameters
solver.stop_sim_time = 50
solver.stop_wall_time = 60*60*10 #in seconds
solver.stop_iteration = np.inf
# Initial conditions
Ax = solver.state['Ax']
Ay = solver.state['Ay']
Az = solver.state['Az']
lnrho = solver.state['lnrho']
T = solver.state['T']
x = domain.grid(0)
y = domain.grid(1)
z = domain.grid(2)
fullGrid = x*y*z
# Initial condition parameters
R = r
L = R
lambda_rho = L # half-width of transition region for initial conditions
rho_min = 0.011
T0 = 0.1
#Spheromak initial conditions
#Vector potential
aa_x, aa_y, aa_z = spheromak_A(domain, center=(0,0, 0), R=R, L=L)
Ax['g'] = aa_x
Ay['g'] = aa_y
Az['g'] = aa_z
#Density
for i in range(x.shape[0]):
xVal = x[i,0,0]
for j in range(y.shape[1]):
yVal = y[0,j,0]
for k in range(z.shape[2]):
zVal = z[0,0,k]
if((zVal<=(2*lambda_rho)) and (np.sqrt(xVal**2 + yVal**2)<R)):
fullGrid[i][j][k] = (1 + rho_min)/2 + (1 - rho_min)/2*np.cos(zVal * np.pi/(2*lambda_rho)) #rho_min + rho_min*np.cos(zVal*np.pi/(2*lambda_rho))
else:
fullGrid[i][j][k] = rho_min
rho0 = domain.new_field()
rho0['g'] = fullGrid
#Temperature
lnrho['g'] = np.log(rho0['g'])
T['g'] = T0 * rho0['g']**(gamma - 1)
# analysis output
wall_dt_checkpoints = 60*55
output_cadence = .5 # This is in simulation time units
checkpoint = solver.evaluator.add_file_handler('checkpoints2', max_writes=1, wall_dt=wall_dt_checkpoints, mode='overwrite')
checkpoint.add_system(solver.state, layout='c')
field_writes = solver.evaluator.add_file_handler('fields2', max_writes=50, sim_dt = output_cadence, mode='overwrite')
field_writes.add_task('vx')
field_writes.add_task('vy')
field_writes.add_task('vz')
field_writes.add_task('Bx')
field_writes.add_task('By')
field_writes.add_task('Bz')
field_writes.add_task("exp(lnrho)", name='rho')
field_writes.add_task('T')
# Flow properties
flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
flow.add_property("sqrt(vx*vx + vy*vy + vz*vz) / nu", name='Re')
flow.add_property("sqrt(vx*vx + vy*vy + vz*vz) / sqrt(T)", name='Ma')
char_time = 50. # this should be set to a characteristic time in the problem (the alfven crossing time of the tube, for example)
CFL_safety = 0.3
CFL = flow_tools.CFL(solver, initial_dt=dt, cadence=1, safety=CFL_safety,
max_change=1.5, min_change=0.5, max_dt=output_cadence, threshold=0.05)
CFL.add_velocities(('vx', 'vy', 'vz'))
good_solution = True
# Main loop
try:
logger.info('Starting loop')
start_time = time.time()
while solver.ok and good_solution:
#dt = CFL.compute_dt()
solver.step(dt)
if (solver.iteration-1) % 1 == 0:
logger_string = 'iter: {:d}, t/tb: {:.2e}, dt/tb: {:.2e}'.format(solver.iteration, solver.sim_time/char_time, dt/char_time)
Re_avg = flow.grid_average('Re')
logger_string += ' Max Re = {:.2g}, Avg Re = {:.2g}, Max Ma = {:.1g}'.format(flow.max('Re'), Re_avg, flow.max('Ma'))
logger.info(logger_string)
if not np.isfinite(Re_avg):
good_solution = False
logger.info("Terminating run. Trapped on Reynolds = {}".format(Re_avg))
except:
logger.error('Exception raised, triggering end of main loop.')
raise
finally:
end_time = time.time()
logger.info('Iterations: %i' %solver.iteration)
logger.info('Sim end time: %f' %solver.sim_time)
logger.info('Run time: %.2f sec' %(end_time-start_time))
logger.info('Run time: %f cpu-hr' %((end_time-start_time)/60/60*domain.dist.comm_cart.size))
logger.info('Iter/sec: {:g}'.format(solver.iteration/(end_time-start_time)))
| [
"nanderson321@gmail.com"
] | nanderson321@gmail.com |
41a53bbfa73c42d13714aa95f8a6f780a4bd9f0f | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/299/66464/submittedfiles/testes.py | 83cd0f4164df46ab44cfb4ae691bbced548efa8a | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | # -*- coding: utf-8 -*-
print("Nikolas Sivini Borges Galvão")
print("20")
print(11+1037)
print((9*35+160)/5)
# Volume of a cylinder: pi * r**2 * h with r=5, h=3.
print(3.14159*5**2*3)
print((2+5)**2)
#add
# Count up from 0 in steps of 2, printing each value until x reaches 100.
x=0
y=2
while x<100:
    x=x+y
    print(x)
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
48a03867f4833bc8161bc39682ab3974887a8612 | d0fe1112743cc36b2089b695fb7c527a3b8bb9f7 | /LifeCycleAnalyzer/Simulators/__init__.py | da272a632315bffa45e44941c0af211cb81b23f6 | [
"MIT"
] | permissive | vd1371/GIAMS | bfff465c69f02a5dd1a2544bfe7170087a8e181d | cf8b7cb028b6cc6cd7facd6f45dd288067e9ff65 | refs/heads/master | 2023-04-14T07:09:00.801898 | 2022-08-01T01:16:06 | 2022-08-01T01:16:06 | 278,012,609 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | from .MainSimulator import MainSimulator
from .DummyRiskAnalyzer import DummyRiskAnalyzer
from .EnvSimulator import EnvSimulator | [
"vd1371@gmail.com"
] | vd1371@gmail.com |
31fcfdec26c80808e1bec53fe1961534b35fc880 | 0d24036dcf8736c0392a1ee1c2f3b45633221d8a | /etc/src/genpy-bgp-oper/cisco_ios_xr_ipv4_bgp_oper/bgp/instances/instance/instance_standby/default_vrf/afs/af/advertised_path_xr/advertised_path/bgp_path_bag_pb2.py | 3e00a023d4edf24ceacb989bcfb6ea9a39192b3d | [] | no_license | mspiez/telemetry_collector | c4b97c6686748fc20748898a25e9fc756d2d0b63 | 52ed12c06debfe04181f0bfea9854a66ed8bb3df | refs/heads/master | 2020-12-19T23:28:08.358956 | 2020-05-02T19:54:38 | 2020-05-02T19:54:38 | 235,883,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 177,156 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: cisco_ios_xr_ipv4_bgp_oper/bgp/instances/instance/instance_standby/default_vrf/afs/af/advertised_path_xr/advertised_path/bgp_path_bag.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='cisco_ios_xr_ipv4_bgp_oper/bgp/instances/instance/instance_standby/default_vrf/afs/af/advertised_path_xr/advertised_path/bgp_path_bag.proto',
package='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path',
syntax='proto3',
serialized_pb=_b('\n\x8b\x01\x63isco_ios_xr_ipv4_bgp_oper/bgp/instances/instance/instance_standby/default_vrf/afs/af/advertised_path_xr/advertised_path/bgp_path_bag.proto\x12xcisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path\"\x89\x01\n\x11\x62gp_path_bag_KEYS\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x66_name\x18\x02 \x01(\t\x12\n\n\x02rd\x18\x03 \x01(\t\x12\x0f\n\x07network\x18\x04 \x01(\t\x12\x15\n\rprefix_length\x18\x05 \x01(\r\x12\x18\n\x10neighbor_address\x18\x06 \x01(\t\"\x98\x06\n\x0c\x62gp_path_bag\x12\x0f\n\x07no_path\x18\x32 \x01(\x08\x12\x0f\n\x07\x61\x66_name\x18\x33 \x01(\t\x12\xa1\x01\n\x10neighbor_address\x18\x34 \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x1b\n\x13process_instance_id\x18\x35 \x01(\r\x12\x11\n\tlabel_oor\x18\x36 \x01(\x08\x12\x1a\n\x12label_o_or_version\x18\x37 \x01(\r\x12\x12\n\nlabel_fail\x18\x38 \x01(\x08\x12\x9e\x01\n\x10path_information\x18\x39 \x01(\x0b\x32\x83\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_\x12\xa8\x01\n\x1a\x61ttributes_after_policy_in\x18: \x01(\x0b\x32\x83\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_\x12\x1b\n\x13route_distinguisher\x18; \x01(\t\x12\"\n\x1asource_route_distinguisher\x18< \x01(\t\x12\x16\n\x0eprefix_version\x18= \x01(\r\x12\x10\n\x08vrf_name\x18> \x01(\t\x12\x17\n\x0fsource_vrf_name\x18? 
\x01(\t\x12\x12\n\nsrcaf_name\x18@ \x01(\t\"&\n\x15IPV4TunnelAddressType\x12\r\n\x05value\x18\x01 \x01(\t\"#\n\x12IPV4MDTAddressType\x12\r\n\x05value\x18\x01 \x01(\t\"(\n\x17RTConstraintAddressType\x12\r\n\x05value\x18\x01 \x01(\t\" \n\x0fIPV6AddressType\x12\r\n\x05value\x18\x01 \x01(\t\"\x1f\n\x0eMACAddressType\x12\r\n\x05value\x18\x01 \x01(\t\"\xfc\x02\n\x13\x62gp_attr_rnh_addr_t\x12\x0b\n\x03len\x18\x01 \x01(\t\x12\x14\n\x0cipv4_address\x18\x02 \x01(\t\x12\xa0\x01\n\x0cipv6_address\x18\x03 \x01(\x0b\x32\x89\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType\x12\x9e\x01\n\x0bmac_address\x18\x04 \x01(\x0b\x32\x88\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.MACAddressType\"\xda\x01\n\x12\x62gp_evpn_gw_addr_t\x12\x0b\n\x03len\x18\x01 \x01(\t\x12\x14\n\x0cipv4_address\x18\x02 \x01(\t\x12\xa0\x01\n\x0cipv6_address\x18\x03 \x01(\x0b\x32\x89\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType\")\n\x10\x62gp_l2vpn_addr_t\x12\x15\n\rl2vpn_address\x18\x01 \x01(\x0c\"#\n\x14\x62gp_l2vpn_evpn_esi_t\x12\x0b\n\x03\x65si\x18\x01 \x01(\x0c\"%\n\x14L2VPNEVPNAddressType\x12\r\n\x05value\x18\x01 \x01(\t\".\n\x15\x62gp_l2vpn_mspw_addr_t\x12\x15\n\rl2vpn_address\x18\x01 \x01(\x0c\"$\n\x13IPV6MVPNAddressType\x12\r\n\x05value\x18\x01 \x01(\t\"$\n\x13IPV4MVPNAddressType\x12\r\n\x05value\x18\x01 \x01(\t\"!\n\x10LS_LSAddressType\x12\r\n\x05value\x18\x01 \x01(\t\"(\n\x17IPv4FlowspecAddressType\x12\r\n\x05value\x18\x01 \x01(\t\"(\n\x17IPv6FlowspecAddressType\x12\r\n\x05value\x18\x01 \x01(\t\"\xd7\x19\n\x0c\x62gp_addrtype\x12\x0b\n\x03\x61\x66i\x18\x01 \x01(\t\x12\x14\n\x0cipv4_address\x18\x02 \x01(\t\x12\x1a\n\x12ipv4_mcast_address\x18\x03 \x01(\t\x12\x1a\n\x12ipv4_label_address\x18\x04 
\x01(\t\x12\xad\x01\n\x13ipv4_tunnel_address\x18\x05 \x01(\x0b\x32\x8f\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4TunnelAddressType\x12\xa7\x01\n\x10ipv4_mdt_address\x18\x06 \x01(\x0b\x32\x8c\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4MDTAddressType\x12\x18\n\x10ipv4_vpn_address\x18\x07 \x01(\t\x12\x1d\n\x15ipv4_vpna_mcastddress\x18\x08 \x01(\t\x12\xa0\x01\n\x0cipv6_address\x18\t \x01(\x0b\x32\x89\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType\x12\xa6\x01\n\x12ipv6_mcast_address\x18\n \x01(\x0b\x32\x89\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType\x12\xa6\x01\n\x12ipv6_label_address\x18\x0b \x01(\x0b\x32\x89\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType\x12\xa4\x01\n\x10ipv6_vpn_address\x18\x0c \x01(\x0b\x32\x89\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType\x12\xaa\x01\n\x16ipv6_vpn_mcast_address\x18\r \x01(\x0b\x32\x89\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType\x12\xa7\x01\n\x12l2_vpnvpls_address\x18\x0e \x01(\x0b\x32\x8a\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_addr_t\x12\xb1\x01\n\x15rt_constraint_address\x18\x0f \x01(\x0b\x32\x91\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.RTConstraintAddressType\x12\xa9\x01\n\x11ipv6_mvpn_address\x18\x10 
\x01(\x0b\x32\x8d\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6MVPNAddressType\x12\xa9\x01\n\x11ipv4_mvpn_address\x18\x11 \x01(\x0b\x32\x8d\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4MVPNAddressType\x12\xac\x01\n\x13l2_vpn_evpn_address\x18\x12 \x01(\x0b\x32\x8e\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.L2VPNEVPNAddressType\x12\xa2\x01\n\rls_ls_address\x18\x13 \x01(\x0b\x32\x8a\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.LS_LSAddressType\x12\xad\x01\n\x13l2_vpn_mspw_address\x18\x14 \x01(\x0b\x32\x8f\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_mspw_addr_t\x12\xb1\x01\n\x15ipv4_flowspec_address\x18\x15 \x01(\x0b\x32\x91\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv4FlowspecAddressType\x12\xb1\x01\n\x15ipv6_flowspec_address\x18\x16 \x01(\x0b\x32\x91\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv6FlowspecAddressType\x12\xb5\x01\n\x19ipv4_vpn_flowspec_address\x18\x17 \x01(\x0b\x32\x91\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv4FlowspecAddressType\x12\xb5\x01\n\x19ipv6_vpn_flowspec_address\x18\x18 \x01(\x0b\x32\x91\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv6FlowspecAddressType\"\xc1\x01\n\x0e\x62gp_prefixtype\x12\x97\x01\n\x06prefix\x18\x01 
\x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x15\n\rprefix_length\x18\x02 \x01(\r\"\x91\x02\n\rbgp_te_tunnel\x12\x13\n\x0btunnel_name\x18\x01 \x01(\x0c\x12\x15\n\rhas_te_tunnel\x18\x02 \x01(\x08\x12\x14\n\x0cis_tunnel_up\x18\x03 \x01(\x08\x12\x1c\n\x14is_tunnel_info_stale\x18\x04 \x01(\x08\x12\x1c\n\x14is_tunnel_registered\x18\x05 \x01(\x08\x12\x1a\n\x12tunnel_v6_required\x18\x06 \x01(\x08\x12\x19\n\x11tunnel_v6_enabled\x18\x07 \x01(\x08\x12\x15\n\rbinding_label\x18\x08 \x01(\r\x12\x18\n\x10tunnel_if_handle\x18\t \x01(\r\x12\x1a\n\x12last_tunnel_update\x18\n \x01(\r\"9\n\x14\x62gp_pedistlbl_entry_\x12\x12\n\npe_address\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\r\"\xec\t\n\x16\x62gp_common_attr_entry_\x12\x19\n\x11is_metric_present\x18\x01 \x01(\x08\x12\x1d\n\x15is_local_pref_present\x18\x02 \x01(\x08\x12#\n\x1bis_atomic_aggregate_present\x18\x03 \x01(\x08\x12\x1d\n\x15is_aggregator_present\x18\x04 \x01(\x08\x12\x19\n\x11is_origin_present\x18\x05 \x01(\x08\x12\x1a\n\x12is_as_path_present\x18\x06 \x01(\x08\x12\x1c\n\x14is_community_present\x18\x07 \x01(\x08\x12%\n\x1dis_extended_community_present\x18\x08 \x01(\x08\x12\x16\n\x0eis_ssa_present\x18\t \x01(\x08\x12\x1c\n\x14is_connector_present\x18\n \x01(\x08\x12\x17\n\x0fis_pmsi_present\x18\x0b \x01(\x08\x12\x18\n\x10is_pppmp_present\x18\x0c \x01(\x08\x12\x17\n\x0fis_aigp_present\x18\r \x01(\x08\x12)\n!is_pe_distinguisher_label_present\x18\x0e \x01(\x08\x12\x1f\n\x17is_ls_attribute_present\x18\x0f \x01(\x08\x12\x1e\n\x16is_label_index_present\x18\x10 \x01(\x08\x12\x13\n\x0bneighbor_as\x18\x11 \x01(\r\x12\x15\n\raggregator_as\x18\x12 \x01(\r\x12\x1a\n\x12\x61ggregator_address\x18\x13 \x01(\t\x12\x0e\n\x06metric\x18\x14 \x01(\r\x12\x18\n\x10local_preference\x18\x15 \x01(\r\x12\x0e\n\x06origin\x18\x16 \x01(\r\x12\x0f\n\x07\x61s_path\x18\x17 \x03(\r\x12\x11\n\tcommunity\x18\x18 \x03(\r\x12 
\n\x18\x65xtended_community_flags\x18\x19 \x01(\r\x12\x1a\n\x12\x65xtended_community\x18\x1a \x03(\r\x12\x1a\n\x12unknown_attributes\x18\x1b \x03(\r\x12\x14\n\x0c\x63luster_list\x18\x1c \x03(\r\x12\x12\n\noriginator\x18\x1d \x01(\t\x12\x1b\n\x13l2_t_pv3_session_id\x18\x1e \x01(\r\x12\x17\n\x0fl2_t_pv3_cookie\x18\x1f \x03(\r\x12\x16\n\x0e\x63onnector_type\x18 \x01(\r\x12\x17\n\x0f\x63onnector_value\x18! \x03(\r\x12\x19\n\x11\x61igp_metric_value\x18\" \x01(\x04\x12\x11\n\tpmsi_type\x18# \x01(\r\x12\x12\n\npmsi_flags\x18$ \x01(\r\x12\x12\n\npmsi_label\x18% \x01(\r\x12\x12\n\npmsi_value\x18& \x03(\r\x12\x12\n\nppm_pvalue\x18\' \x01(\r\x12\xaf\x01\n\x16pe_distinguisher_label\x18( \x03(\x0b\x32\x8e\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_pedistlbl_entry_\x12\x0f\n\x07ls_attr\x18) \x03(\r\x12\x18\n\x10label_index_attr\x18* \x03(\r\"\xa3\n\n\tbgp_attr_\x12\x18\n\x10is_as_path2_byte\x18\x01 \x01(\x08\x12&\n\x1eis_application_gateway_present\x18\x02 \x01(\x08\x12\x1b\n\x13is_attr_set_present\x18\x03 \x01(\x08\x12\x1c\n\x14set_aigp_inbound_igp\x18\x04 \x01(\x08\x12\x1f\n\x17set_aigp_inbound_metric\x18\x05 \x01(\x08\x12\x16\n\x0eis_rnh_present\x18\x06 \x01(\x08\x12\x19\n\x11is_ribrnh_present\x18\x07 \x01(\x08\x12\x1c\n\x14\x61ttribute_key_number\x18\x08 \x01(\r\x12!\n\x19\x61ttribute_reuse_id_config\x18\t \x01(\x08\x12!\n\x19\x61ttribute_reuse_id_max_id\x18\n \x01(\r\x12\x1f\n\x17\x61ttribute_reuse_id_node\x18\x0b \x01(\r\x12\"\n\x1a\x61ttribute_reuse_id_current\x18\x0c \x01(\r\x12\x1f\n\x17\x61ttribute_reuse_id_keys\x18\r \x01(\r\x12&\n\x1e\x61ttribute_reuse_id_recover_sec\x18\x0e \x01(\r\x12\x19\n\x11vpn_distinguisher\x18\x0f \x01(\r\x12\xac\x01\n\x11\x63ommon_attributes\x18\x10 \x01(\x0b\x32\x90\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_\x12\xa3\x01\n\x08\x61ttr_set\x18\x11 
\x01(\x0b\x32\x90\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_\x12\x10\n\x08rnh_type\x18\x12 \x01(\r\x12\x0f\n\x07rnh_len\x18\x13 \x01(\r\x12\x14\n\x0crnh_addr_len\x18\x14 \x01(\r\x12\xa0\x01\n\x08rnh_addr\x18\x15 \x01(\x0b\x32\x8d\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_rnh_addr_t\x12\x14\n\x0cribrnh_table\x18\x16 \x01(\r\x12\x12\n\nribrnh_mac\x18\x17 \x01(\t\x12\x9a\x01\n\tribrnh_ip\x18\x18 \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x16\n\x0eribrnhip_table\x18\x19 \x01(\r\x12\x12\n\nribrnh_vni\x18\x1a \x01(\r\x12\x14\n\x0cribrnh_encap\x18\x1b \x01(\r\"\xe9\x01\n\x11\x62gp_advinfo_type_\x12\x13\n\x0bis_neighbor\x18\x01 \x01(\x08\x12\xa1\x01\n\x10neighbor_address\x18\x02 \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x1b\n\x13update_group_number\x18\x03 \x01(\r\"\xd0\'\n\tbgp_path_\x12\x9d\x01\n\nbgp_prefix\x18\x01 \x01(\x0b\x32\x88\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_prefixtype\x12\xa1\x01\n\x10neighbor_address\x18\x02 \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x12\n\nroute_type\x18\x03 \x01(\t\x12\x15\n\ris_path_valid\x18\x04 \x01(\x08\x12\x16\n\x0eis_path_damped\x18\x05 \x01(\x08\x12\x1c\n\x14is_path_history_held\x18\x06 \x01(\x08\x12\x18\n\x10is_internal_path\x18\x07 \x01(\x08\x12\x14\n\x0cis_best_path\x18\x08 \x01(\x08\x12\x12\n\nis_as_best\x18\t \x01(\x08\x12\x17\n\x0fis_spkr_as_best\x18\n 
\x01(\x08\x12\x17\n\x0fis_partial_best\x18\x0b \x01(\x08\x12!\n\x19is_aggregation_suppressed\x18\x0c \x01(\x08\x12\x1a\n\x12is_import_dampened\x18\r \x01(\x08\x12\x19\n\x11is_import_suspect\x18\x0e \x01(\x08\x12\x1e\n\x16is_path_not_advertised\x18\x0f \x01(\x08\x12(\n is_path_not_advertised_to_ebg_ps\x18\x10 \x01(\x08\x12(\n is_path_advertised_local_as_only\x18\x11 \x01(\x08\x12$\n\x1cis_path_from_route_reflector\x18\x12 \x01(\x08\x12\x1d\n\x15is_path_received_only\x18\x13 \x01(\x08\x12%\n\x1dis_received_path_not_modified\x18\x14 \x01(\x08\x12\x1f\n\x17is_path_locally_sourced\x18\x15 \x01(\x08\x12\x1f\n\x17is_path_local_aggregate\x18\x16 \x01(\x08\x12$\n\x1cis_path_from_network_command\x18\x17 \x01(\x08\x12)\n!is_path_from_redistribute_command\x18\x18 \x01(\x08\x12\x18\n\x10is_path_imported\x18\x19 \x01(\x08\x12\x1c\n\x14is_path_reoriginated\x18\x1a \x01(\x08\x12&\n\x1eis_path_reoriginated_stitching\x18\x1b \x01(\x08\x12\x18\n\x10is_path_vpn_only\x18\x1c \x01(\x08\x12\'\n\x1fis_path_from_confederation_peer\x18\x1d \x01(\x08\x12\x1f\n\x17is_path_synced_with_igp\x18\x1e \x01(\x08\x12\x19\n\x11is_path_multipath\x18\x1f \x01(\x08\x12\x1d\n\x15is_path_imp_candidate\x18 \x01(\x08\x12\x15\n\ris_path_stale\x18! 
\x01(\x08\x12 \n\x18is_path_long_lived_stale\x18\" \x01(\x08\x12\x16\n\x0eis_path_backup\x18# \x01(\x08\x12(\n is_path_backup_protect_multipath\x18$ \x01(\x08\x12\x1d\n\x15is_path_best_external\x18% \x01(\x08\x12\x1f\n\x17is_path_additional_path\x18& \x01(\x08\x12!\n\x19is_path_nexthop_discarded\x18\' \x01(\x08\x12\x99\x01\n\x08next_hop\x18( \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x9d\x01\n\x0bnhte_tunnel\x18) \x01(\x0b\x32\x87\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel\x12\x16\n\x0ehas_rcvd_label\x18* \x01(\x08\x12\x12\n\nrcvd_label\x18+ \x01(\r\x12\x1b\n\x13has_local_net_label\x18, \x01(\x08\x12\x13\n\x0blocal_label\x18- \x01(\r\x12\x12\n\nigp_metric\x18. \x01(\r\x12\x13\n\x0bpath_weight\x18/ \x01(\r\x12\x1a\n\x12neighbor_router_id\x18\x30 \x01(\t\x12\x1a\n\x12has_mdt_group_addr\x18\x31 \x01(\x08\x12\x9f\x01\n\x0emdt_group_addr\x18\x32 \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x13\n\x0bl2_vpn_size\x18\x33 \x01(\r\x12\x0f\n\x07has_esi\x18\x34 \x01(\x08\x12\xa8\x01\n\x0fl2_vpn_evpn_esi\x18\x35 \x01(\x0b\x32\x8e\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_evpn_esi_t\x12\x13\n\x0bhas_gw_addr\x18\x36 \x01(\x08\x12\x9e\x01\n\x07gw_addr\x18\x37 \x01(\x0b\x32\x8c\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_evpn_gw_addr_t\x12\x18\n\x10has_second_label\x18\x38 \x01(\x08\x12\x14\n\x0csecond_label\x18\x39 \x01(\r\x12\"\n\x1al2vpn_circuit_status_value\x18: \x03(\r\x12\x17\n\x0fpath_flap_count\x18; \x01(\r\x12 \n\x18seconds_since_first_flap\x18< 
\x01(\r\x12\x1a\n\x12time_to_unsuppress\x18= \x01(\r\x12\x16\n\x0e\x64\x61mpen_penalty\x18> \x01(\r\x12\x15\n\rhalflife_time\x18? \x01(\r\x12\x18\n\x10suppress_penalty\x18@ \x01(\r\x12\x13\n\x0breuse_value\x18\x41 \x01(\r\x12\x1d\n\x15maximum_suppress_time\x18\x42 \x01(\r\x12\x1c\n\x14\x62\x65st_path_comp_stage\x18\x43 \x01(\t\x12\xa6\x01\n\x15\x62\x65st_path_comp_winner\x18\x44 \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12 \n\x18\x62\x65st_path_id_comp_winner\x18\x45 \x01(\r\x12\x12\n\npath_flags\x18\x46 \x01(\x04\x12\x19\n\x11path_import_flags\x18G \x01(\r\x12\x14\n\x0c\x62\x65st_path_id\x18H \x01(\r\x12\x15\n\rlocal_path_id\x18I \x01(\r\x12\x14\n\x0crcvd_path_id\x18J \x01(\r\x12\x1a\n\x12path_table_version\x18K \x01(\r\x12\xaf\x01\n\x19local_peers_advertised_to\x18L \x03(\x0b\x32\x8b\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_\x12\xac\x01\n\x16pe_peers_advertised_to\x18M \x03(\x0b\x32\x8b\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_\x12\xad\x01\n\x17\x62\x65st_path_orr_bitfields\x18N \x03(\x0b\x32\x8b\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_\x12\xac\x01\n\x16\x61\x64\x64_path_orr_bitfields\x18O \x03(\x0b\x32\x8b\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_\x12\x17\n\x0f\x61\x64vertisedto_pe\x18P \x01(\x08\x12\x12\n\nrib_failed\x18Q \x01(\x08\x12\"\n\x1asn_rpki_origin_as_validity\x18R \x01(\r\x12$\n\x1cshow_rpki_origin_as_validity\x18S \x01(\x08\x12\x1e\n\x16ibgp_signaled_validity\x18T \x01(\x08\x12*\n\"rpki_origin_as_validation_disabled\x18U 
\x01(\x08\x12\x17\n\x0f\x61\x63\x63\x65pt_own_path\x18V \x01(\x08\x12(\n accept_own_self_originated_p_ath\x18W \x01(\x08\x12\x13\n\x0b\x61igp_metric\x18X \x01(\x04\x12\x15\n\rmvpn_sfs_path\x18Y \x01(\x08\x12\x1a\n\x12\x66spec_invalid_path\x18Z \x01(\x08\x12\x19\n\x11has_mvpn_nbr_addr\x18[ \x01(\x08\x12\x9e\x01\n\rmvpn_nbr_addr\x18\\ \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x1d\n\x15has_mvpn_nexthop_addr\x18] \x01(\x08\x12\xa2\x01\n\x11mvpn_nexthop_addr\x18^ \x01(\x0b\x32\x86\x01.cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype\x12\x15\n\rhas_mvpn_pmsi\x18_ \x01(\x08\x12\x16\n\x0emvpn_pmsi_type\x18` \x01(\r\x12\x17\n\x0fmvpn_pmsi_flags\x18\x61 \x01(\r\x12\x17\n\x0fmvpn_pmsi_label\x18\x62 \x01(\r\x12\x17\n\x0fmvpn_pmsi_value\x18\x63 \x03(\r\x12\x18\n\x10has_mvpn_extcomm\x18\x64 \x01(\x08\x12\x1a\n\x12\x65xtended_community\x18\x65 \x03(\r\x12\x17\n\x0fmvpn_path_flags\x18\x66 \x01(\r\x12\x10\n\x08local_nh\x18g \x01(\x08\x12\x1c\n\x14rt_set_limit_enabled\x18h \x01(\x08\x12\x16\n\x0epath_rt_set_id\x18i \x01(\r\x12\x1f\n\x17path_rt_set_route_count\x18j \x01(\r\x12#\n\x1bis_path_af_install_eligible\x18k \x01(\x08\x12\x19\n\x11is_permanent_path\x18l \x01(\x08\x12\x19\n\x11graceful_shutdown\x18m \x01(\x08\x12!\n\x19labeled_unicast_safi_path\x18n \x01(\x08\x62\x06proto3')
)
_BGP_PATH_BAG_KEYS = _descriptor.Descriptor(
name='bgp_path_bag_KEYS',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_KEYS',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='instance_name', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_KEYS.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='af_name', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_KEYS.af_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rd', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_KEYS.rd', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='network', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_KEYS.network', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='prefix_length', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_KEYS.prefix_length', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='neighbor_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_KEYS.neighbor_address', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=267,
serialized_end=404,
)
_BGP_PATH_BAG = _descriptor.Descriptor(
name='bgp_path_bag',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='no_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.no_path', index=0,
number=50, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='af_name', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.af_name', index=1,
number=51, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='neighbor_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.neighbor_address', index=2,
number=52, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='process_instance_id', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.process_instance_id', index=3,
number=53, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label_oor', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.label_oor', index=4,
number=54, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label_o_or_version', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.label_o_or_version', index=5,
number=55, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label_fail', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.label_fail', index=6,
number=56, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path_information', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.path_information', index=7,
number=57, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='attributes_after_policy_in', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.attributes_after_policy_in', index=8,
number=58, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='route_distinguisher', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.route_distinguisher', index=9,
number=59, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='source_route_distinguisher', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.source_route_distinguisher', index=10,
number=60, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='prefix_version', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.prefix_version', index=11,
number=61, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='vrf_name', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.vrf_name', index=12,
number=62, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='source_vrf_name', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.source_vrf_name', index=13,
number=63, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='srcaf_name', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag.srcaf_name', index=14,
number=64, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=407,
serialized_end=1199,
)
_IPV4TUNNELADDRESSTYPE = _descriptor.Descriptor(
name='IPV4TunnelAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4TunnelAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4TunnelAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1201,
serialized_end=1239,
)
_IPV4MDTADDRESSTYPE = _descriptor.Descriptor(
name='IPV4MDTAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4MDTAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4MDTAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1241,
serialized_end=1276,
)
_RTCONSTRAINTADDRESSTYPE = _descriptor.Descriptor(
name='RTConstraintAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.RTConstraintAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.RTConstraintAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1278,
serialized_end=1318,
)
_IPV6ADDRESSTYPE = _descriptor.Descriptor(
name='IPV6AddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1320,
serialized_end=1352,
)
_MACADDRESSTYPE = _descriptor.Descriptor(
name='MACAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.MACAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.MACAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1354,
serialized_end=1385,
)
_BGP_ATTR_RNH_ADDR_T = _descriptor.Descriptor(
name='bgp_attr_rnh_addr_t',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_rnh_addr_t',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='len', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_rnh_addr_t.len', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_rnh_addr_t.ipv4_address', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_rnh_addr_t.ipv6_address', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mac_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_rnh_addr_t.mac_address', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1388,
serialized_end=1768,
)
_BGP_EVPN_GW_ADDR_T = _descriptor.Descriptor(
name='bgp_evpn_gw_addr_t',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_evpn_gw_addr_t',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='len', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_evpn_gw_addr_t.len', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_evpn_gw_addr_t.ipv4_address', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_evpn_gw_addr_t.ipv6_address', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1771,
serialized_end=1989,
)
_BGP_L2VPN_ADDR_T = _descriptor.Descriptor(
name='bgp_l2vpn_addr_t',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_addr_t',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='l2vpn_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_addr_t.l2vpn_address', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1991,
serialized_end=2032,
)
_BGP_L2VPN_EVPN_ESI_T = _descriptor.Descriptor(
name='bgp_l2vpn_evpn_esi_t',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_evpn_esi_t',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='esi', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_evpn_esi_t.esi', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2034,
serialized_end=2069,
)
_L2VPNEVPNADDRESSTYPE = _descriptor.Descriptor(
name='L2VPNEVPNAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.L2VPNEVPNAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.L2VPNEVPNAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2071,
serialized_end=2108,
)
_BGP_L2VPN_MSPW_ADDR_T = _descriptor.Descriptor(
name='bgp_l2vpn_mspw_addr_t',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_mspw_addr_t',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='l2vpn_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_mspw_addr_t.l2vpn_address', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2110,
serialized_end=2156,
)
_IPV6MVPNADDRESSTYPE = _descriptor.Descriptor(
name='IPV6MVPNAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6MVPNAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6MVPNAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2158,
serialized_end=2194,
)
_IPV4MVPNADDRESSTYPE = _descriptor.Descriptor(
name='IPV4MVPNAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4MVPNAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4MVPNAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2196,
serialized_end=2232,
)
_LS_LSADDRESSTYPE = _descriptor.Descriptor(
name='LS_LSAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.LS_LSAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.LS_LSAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2234,
serialized_end=2267,
)
_IPV4FLOWSPECADDRESSTYPE = _descriptor.Descriptor(
name='IPv4FlowspecAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv4FlowspecAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv4FlowspecAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2269,
serialized_end=2309,
)
_IPV6FLOWSPECADDRESSTYPE = _descriptor.Descriptor(
name='IPv6FlowspecAddressType',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv6FlowspecAddressType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv6FlowspecAddressType.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2311,
serialized_end=2351,
)
_BGP_ADDRTYPE = _descriptor.Descriptor(
name='bgp_addrtype',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='afi', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.afi', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_address', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_mcast_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_mcast_address', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_label_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_label_address', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_tunnel_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_tunnel_address', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_mdt_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_mdt_address', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_vpn_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_vpn_address', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_vpna_mcastddress', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_vpna_mcastddress', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv6_address', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_mcast_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv6_mcast_address', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_label_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv6_label_address', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_vpn_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv6_vpn_address', index=11,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_vpn_mcast_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv6_vpn_mcast_address', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='l2_vpnvpls_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.l2_vpnvpls_address', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rt_constraint_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.rt_constraint_address', index=14,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_mvpn_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv6_mvpn_address', index=15,
number=16, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_mvpn_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_mvpn_address', index=16,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='l2_vpn_evpn_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.l2_vpn_evpn_address', index=17,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ls_ls_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ls_ls_address', index=18,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='l2_vpn_mspw_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.l2_vpn_mspw_address', index=19,
number=20, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_flowspec_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_flowspec_address', index=20,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_flowspec_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv6_flowspec_address', index=21,
number=22, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv4_vpn_flowspec_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv4_vpn_flowspec_address', index=22,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ipv6_vpn_flowspec_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype.ipv6_vpn_flowspec_address', index=23,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2354,
serialized_end=5641,
)
_BGP_PREFIXTYPE = _descriptor.Descriptor(
name='bgp_prefixtype',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_prefixtype',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='prefix', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_prefixtype.prefix', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='prefix_length', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_prefixtype.prefix_length', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5644,
serialized_end=5837,
)
_BGP_TE_TUNNEL = _descriptor.Descriptor(
name='bgp_te_tunnel',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tunnel_name', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.tunnel_name', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_te_tunnel', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.has_te_tunnel', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_tunnel_up', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.is_tunnel_up', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_tunnel_info_stale', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.is_tunnel_info_stale', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_tunnel_registered', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.is_tunnel_registered', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tunnel_v6_required', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.tunnel_v6_required', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tunnel_v6_enabled', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.tunnel_v6_enabled', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.binding_label', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tunnel_if_handle', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.tunnel_if_handle', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='last_tunnel_update', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel.last_tunnel_update', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5840,
serialized_end=6113,
)
# Generated by the protocol buffer compiler (protoc) -- do not hand-edit.
# Descriptor for the nested message ``bgp_pedistlbl_entry_``: a single
# PE-distinguisher-label entry under the IOS-XR BGP advertised-path
# operational data.  Two fields: ``pe_address`` (field 1, proto string --
# type=9/cpp_type=9) and ``label`` (field 2, proto uint32 --
# type=13/cpp_type=3).  ``serialized_start``/``serialized_end`` are byte
# offsets into this module's serialized FileDescriptorProto and must stay
# in sync with it -- another reason never to edit these tables by hand.
_BGP_PEDISTLBL_ENTRY_ = _descriptor.Descriptor(
  name='bgp_pedistlbl_entry_',
  full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_pedistlbl_entry_',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pe_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_pedistlbl_entry_.pe_address', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_pedistlbl_entry_.label', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6115,
  serialized_end=6172,
)
# Generated by the protocol buffer compiler (protoc) -- do not hand-edit.
# Descriptor for the nested message ``bgp_common_attr_entry_``: the common
# BGP path-attribute record of the advertised-path oper data.  Layout, as
# shown by the field numbers below: fields 1-16 are ``is_*_present`` /
# presence booleans (type=8/cpp_type=7), fields 17-39 carry the attribute
# values themselves (uint32 scalars and repeated uint32 arrays; label=3
# marks repeated fields), field 40 is a repeated sub-message
# (``pe_distinguisher_label``, type=11/cpp_type=10 -- presumably the
# ``bgp_pedistlbl_entry_`` message above; confirm against the linking code
# at the bottom of the generated module), and fields 41-42 are repeated
# uint32 attribute blobs.  ``serialized_start``/``serialized_end`` are byte
# offsets into the module's serialized FileDescriptorProto.
_BGP_COMMON_ATTR_ENTRY_ = _descriptor.Descriptor(
  name='bgp_common_attr_entry_',
  full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    # --- presence flags for the optional attributes (fields 1-16) ---
    _descriptor.FieldDescriptor(
      name='is_metric_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_metric_present', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_local_pref_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_local_pref_present', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_atomic_aggregate_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_atomic_aggregate_present', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_aggregator_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_aggregator_present', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_origin_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_origin_present', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_as_path_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_as_path_present', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_community_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_community_present', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_extended_community_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_extended_community_present', index=7,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_ssa_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_ssa_present', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_connector_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_connector_present', index=9,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_pmsi_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_pmsi_present', index=10,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_pppmp_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_pppmp_present', index=11,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_aigp_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_aigp_present', index=12,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_pe_distinguisher_label_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_pe_distinguisher_label_present', index=13,
      number=14, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_ls_attribute_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_ls_attribute_present', index=14,
      number=15, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_label_index_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.is_label_index_present', index=15,
      number=16, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # --- attribute values (fields 17 onward) ---
    _descriptor.FieldDescriptor(
      name='neighbor_as', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.neighbor_as', index=16,
      number=17, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='aggregator_as', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.aggregator_as', index=17,
      number=18, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='aggregator_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.aggregator_address', index=18,
      number=19, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='metric', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.metric', index=19,
      number=20, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='local_preference', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.local_preference', index=20,
      number=21, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='origin', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.origin', index=21,
      number=22, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='as_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.as_path', index=22,
      number=23, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='community', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.community', index=23,
      number=24, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='extended_community_flags', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.extended_community_flags', index=24,
      number=25, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='extended_community', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.extended_community', index=25,
      number=26, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='unknown_attributes', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.unknown_attributes', index=26,
      number=27, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cluster_list', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.cluster_list', index=27,
      number=28, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='originator', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.originator', index=28,
      number=29, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='l2_t_pv3_session_id', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.l2_t_pv3_session_id', index=29,
      number=30, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='l2_t_pv3_cookie', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.l2_t_pv3_cookie', index=30,
      number=31, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='connector_type', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.connector_type', index=31,
      number=32, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='connector_value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.connector_value', index=32,
      number=33, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # aigp_metric_value is the lone 64-bit scalar here (type=4 == uint64).
    _descriptor.FieldDescriptor(
      name='aigp_metric_value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.aigp_metric_value', index=33,
      number=34, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pmsi_type', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.pmsi_type', index=34,
      number=35, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pmsi_flags', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.pmsi_flags', index=35,
      number=36, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pmsi_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.pmsi_label', index=36,
      number=37, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pmsi_value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.pmsi_value', index=37,
      number=38, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ppm_pvalue', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.ppm_pvalue', index=38,
      number=39, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # Repeated sub-message field (type=11/cpp_type=10); its message_type is
    # wired up later by the generated linking code, not here.
    _descriptor.FieldDescriptor(
      name='pe_distinguisher_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.pe_distinguisher_label', index=39,
      number=40, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ls_attr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.ls_attr', index=40,
      number=41, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='label_index_attr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_.label_index_attr', index=41,
      number=42, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6175,
  serialized_end=7435,
)
# Generated by the protocol buffer compiler (protoc) -- do not hand-edit.
# Descriptor for the nested message ``bgp_attr_``: per-path BGP attribute
# bookkeeping for the advertised-path oper data.  Fields 1-7 are boolean
# flags (type=8/cpp_type=7), fields 8-15 and 18-20/22/25-27 are uint32
# scalars (type=13/cpp_type=3), and fields 16, 17, 21 and 24
# (``common_attributes``, ``attr_set``, ``rnh_addr``, ``ribrnh_ip``) are
# singular sub-messages (type=11/cpp_type=10) whose message_type is linked
# up by later generated code.  ``serialized_start``/``serialized_end`` are
# byte offsets into the module's serialized FileDescriptorProto.
_BGP_ATTR_ = _descriptor.Descriptor(
  name='bgp_attr_',
  full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='is_as_path2_byte', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.is_as_path2_byte', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_application_gateway_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.is_application_gateway_present', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_attr_set_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.is_attr_set_present', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='set_aigp_inbound_igp', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.set_aigp_inbound_igp', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='set_aigp_inbound_metric', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.set_aigp_inbound_metric', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_rnh_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.is_rnh_present', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_ribrnh_present', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.is_ribrnh_present', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attribute_key_number', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.attribute_key_number', index=7,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # --- attribute reuse-id bookkeeping (fields 9-14) ---
    _descriptor.FieldDescriptor(
      name='attribute_reuse_id_config', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.attribute_reuse_id_config', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attribute_reuse_id_max_id', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.attribute_reuse_id_max_id', index=9,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attribute_reuse_id_node', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.attribute_reuse_id_node', index=10,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attribute_reuse_id_current', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.attribute_reuse_id_current', index=11,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attribute_reuse_id_keys', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.attribute_reuse_id_keys', index=12,
      number=13, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attribute_reuse_id_recover_sec', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.attribute_reuse_id_recover_sec', index=13,
      number=14, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='vpn_distinguisher', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.vpn_distinguisher', index=14,
      number=15, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='common_attributes', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.common_attributes', index=15,
      number=16, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attr_set', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.attr_set', index=16,
      number=17, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # --- route next-hop (rnh) and RIB next-hop (ribrnh) fields ---
    _descriptor.FieldDescriptor(
      name='rnh_type', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.rnh_type', index=17,
      number=18, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rnh_len', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.rnh_len', index=18,
      number=19, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rnh_addr_len', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.rnh_addr_len', index=19,
      number=20, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rnh_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.rnh_addr', index=20,
      number=21, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ribrnh_table', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.ribrnh_table', index=21,
      number=22, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ribrnh_mac', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.ribrnh_mac', index=22,
      number=23, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ribrnh_ip', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.ribrnh_ip', index=23,
      number=24, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ribrnhip_table', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.ribrnhip_table', index=24,
      number=25, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ribrnh_vni', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.ribrnh_vni', index=25,
      number=26, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ribrnh_encap', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_.ribrnh_encap', index=26,
      number=27, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7438,
  serialized_end=8753,
)
# Auto-generated protobuf Descriptor for the nested message type
# `bgp_advinfo_type_` under .advertised_path (IOS-XR IPv4 BGP oper model).
# NOTE(review): this file appears to be protoc output — do not hand-edit the
# literal values below (field numbers, types, serialized offsets); they must
# stay in sync with the serialized file descriptor defined elsewhere in this
# module.
_BGP_ADVINFO_TYPE_ = _descriptor.Descriptor(
  name='bgp_advinfo_type_',
  full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    # Field 1: bool (type=8 is TYPE_BOOL) — whether the advertisement target
    # is a neighbor.
    _descriptor.FieldDescriptor(
      name='is_neighbor', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_.is_neighbor', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # Field 2: embedded message (type=11 is TYPE_MESSAGE) — the neighbor's
    # address; message_type is resolved later by the generated linkage code.
    _descriptor.FieldDescriptor(
      name='neighbor_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_.neighbor_address', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    # Field 3: uint32 (type=13 is TYPE_UINT32) — update-group identifier.
    _descriptor.FieldDescriptor(
      name='update_group_number', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_.update_group_number', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message's definition inside the module's serialized
  # FileDescriptorProto.
  serialized_start=8756,
  serialized_end=8989,
  )
_BGP_PATH_ = _descriptor.Descriptor(
name='bgp_path_',
full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bgp_prefix', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.bgp_prefix', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='neighbor_address', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.neighbor_address', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='route_type', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.route_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_valid', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_valid', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_damped', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_damped', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_history_held', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_history_held', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_internal_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_internal_path', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_best_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_best_path', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_as_best', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_as_best', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_spkr_as_best', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_spkr_as_best', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_partial_best', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_partial_best', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_aggregation_suppressed', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_aggregation_suppressed', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_import_dampened', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_import_dampened', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_import_suspect', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_import_suspect', index=13,
number=14, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_not_advertised', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_not_advertised', index=14,
number=15, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_not_advertised_to_ebg_ps', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_not_advertised_to_ebg_ps', index=15,
number=16, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_advertised_local_as_only', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_advertised_local_as_only', index=16,
number=17, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_from_route_reflector', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_from_route_reflector', index=17,
number=18, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_received_only', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_received_only', index=18,
number=19, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_received_path_not_modified', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_received_path_not_modified', index=19,
number=20, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_locally_sourced', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_locally_sourced', index=20,
number=21, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_local_aggregate', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_local_aggregate', index=21,
number=22, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_from_network_command', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_from_network_command', index=22,
number=23, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_from_redistribute_command', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_from_redistribute_command', index=23,
number=24, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_imported', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_imported', index=24,
number=25, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_reoriginated', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_reoriginated', index=25,
number=26, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_reoriginated_stitching', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_reoriginated_stitching', index=26,
number=27, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_vpn_only', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_vpn_only', index=27,
number=28, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_from_confederation_peer', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_from_confederation_peer', index=28,
number=29, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_synced_with_igp', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_synced_with_igp', index=29,
number=30, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_multipath', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_multipath', index=30,
number=31, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_imp_candidate', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_imp_candidate', index=31,
number=32, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_stale', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_stale', index=32,
number=33, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_long_lived_stale', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_long_lived_stale', index=33,
number=34, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_backup', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_backup', index=34,
number=35, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_backup_protect_multipath', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_backup_protect_multipath', index=35,
number=36, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_best_external', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_best_external', index=36,
number=37, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_additional_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_additional_path', index=37,
number=38, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_nexthop_discarded', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_nexthop_discarded', index=38,
number=39, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='next_hop', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.next_hop', index=39,
number=40, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='nhte_tunnel', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.nhte_tunnel', index=40,
number=41, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_rcvd_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_rcvd_label', index=41,
number=42, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rcvd_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.rcvd_label', index=42,
number=43, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_local_net_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_local_net_label', index=43,
number=44, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='local_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.local_label', index=44,
number=45, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='igp_metric', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.igp_metric', index=45,
number=46, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path_weight', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.path_weight', index=46,
number=47, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='neighbor_router_id', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.neighbor_router_id', index=47,
number=48, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_mdt_group_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_mdt_group_addr', index=48,
number=49, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mdt_group_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mdt_group_addr', index=49,
number=50, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='l2_vpn_size', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.l2_vpn_size', index=50,
number=51, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_esi', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_esi', index=51,
number=52, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='l2_vpn_evpn_esi', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.l2_vpn_evpn_esi', index=52,
number=53, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_gw_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_gw_addr', index=53,
number=54, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gw_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.gw_addr', index=54,
number=55, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_second_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_second_label', index=55,
number=56, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='second_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.second_label', index=56,
number=57, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='l2vpn_circuit_status_value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.l2vpn_circuit_status_value', index=57,
number=58, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path_flap_count', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.path_flap_count', index=58,
number=59, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='seconds_since_first_flap', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.seconds_since_first_flap', index=59,
number=60, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='time_to_unsuppress', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.time_to_unsuppress', index=60,
number=61, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dampen_penalty', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.dampen_penalty', index=61,
number=62, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='halflife_time', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.halflife_time', index=62,
number=63, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='suppress_penalty', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.suppress_penalty', index=63,
number=64, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reuse_value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.reuse_value', index=64,
number=65, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='maximum_suppress_time', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.maximum_suppress_time', index=65,
number=66, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='best_path_comp_stage', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.best_path_comp_stage', index=66,
number=67, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='best_path_comp_winner', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.best_path_comp_winner', index=67,
number=68, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='best_path_id_comp_winner', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.best_path_id_comp_winner', index=68,
number=69, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path_flags', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.path_flags', index=69,
number=70, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path_import_flags', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.path_import_flags', index=70,
number=71, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='best_path_id', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.best_path_id', index=71,
number=72, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='local_path_id', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.local_path_id', index=72,
number=73, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rcvd_path_id', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.rcvd_path_id', index=73,
number=74, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path_table_version', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.path_table_version', index=74,
number=75, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='local_peers_advertised_to', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.local_peers_advertised_to', index=75,
number=76, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pe_peers_advertised_to', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.pe_peers_advertised_to', index=76,
number=77, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='best_path_orr_bitfields', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.best_path_orr_bitfields', index=77,
number=78, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='add_path_orr_bitfields', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.add_path_orr_bitfields', index=78,
number=79, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='advertisedto_pe', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.advertisedto_pe', index=79,
number=80, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rib_failed', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.rib_failed', index=80,
number=81, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sn_rpki_origin_as_validity', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.sn_rpki_origin_as_validity', index=81,
number=82, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='show_rpki_origin_as_validity', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.show_rpki_origin_as_validity', index=82,
number=83, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ibgp_signaled_validity', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.ibgp_signaled_validity', index=83,
number=84, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rpki_origin_as_validation_disabled', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.rpki_origin_as_validation_disabled', index=84,
number=85, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='accept_own_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.accept_own_path', index=85,
number=86, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='accept_own_self_originated_p_ath', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.accept_own_self_originated_p_ath', index=86,
number=87, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='aigp_metric', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.aigp_metric', index=87,
number=88, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvpn_sfs_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mvpn_sfs_path', index=88,
number=89, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fspec_invalid_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.fspec_invalid_path', index=89,
number=90, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_mvpn_nbr_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_mvpn_nbr_addr', index=90,
number=91, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvpn_nbr_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mvpn_nbr_addr', index=91,
number=92, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_mvpn_nexthop_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_mvpn_nexthop_addr', index=92,
number=93, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvpn_nexthop_addr', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mvpn_nexthop_addr', index=93,
number=94, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_mvpn_pmsi', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_mvpn_pmsi', index=94,
number=95, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvpn_pmsi_type', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mvpn_pmsi_type', index=95,
number=96, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvpn_pmsi_flags', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mvpn_pmsi_flags', index=96,
number=97, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvpn_pmsi_label', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mvpn_pmsi_label', index=97,
number=98, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvpn_pmsi_value', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mvpn_pmsi_value', index=98,
number=99, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_mvpn_extcomm', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.has_mvpn_extcomm', index=99,
number=100, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='extended_community', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.extended_community', index=100,
number=101, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvpn_path_flags', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.mvpn_path_flags', index=101,
number=102, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='local_nh', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.local_nh', index=102,
number=103, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rt_set_limit_enabled', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.rt_set_limit_enabled', index=103,
number=104, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path_rt_set_id', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.path_rt_set_id', index=104,
number=105, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='path_rt_set_route_count', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.path_rt_set_route_count', index=105,
number=106, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_path_af_install_eligible', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_path_af_install_eligible', index=106,
number=107, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='is_permanent_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.is_permanent_path', index=107,
number=108, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='graceful_shutdown', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.graceful_shutdown', index=108,
number=109, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='labeled_unicast_safi_path', full_name='cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_.labeled_unicast_safi_path', index=109,
number=110, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8992,
serialized_end=14064,
)
# --- Descriptor cross-linking (machine-generated by protoc; do not edit) ---
# protoc constructs every message-typed FieldDescriptor above with
# message_type=None, then resolves the references here once all Descriptor
# objects exist.  Each assignment points a field at the Descriptor of the
# message type it carries on the wire.
_BGP_PATH_BAG.fields_by_name['neighbor_address'].message_type = _BGP_ADDRTYPE
_BGP_PATH_BAG.fields_by_name['path_information'].message_type = _BGP_PATH_
_BGP_PATH_BAG.fields_by_name['attributes_after_policy_in'].message_type = _BGP_ATTR_
_BGP_ATTR_RNH_ADDR_T.fields_by_name['ipv6_address'].message_type = _IPV6ADDRESSTYPE
_BGP_ATTR_RNH_ADDR_T.fields_by_name['mac_address'].message_type = _MACADDRESSTYPE
_BGP_EVPN_GW_ADDR_T.fields_by_name['ipv6_address'].message_type = _IPV6ADDRESSTYPE
# bgp_addrtype is a discriminated address container: one sub-message field
# per BGP address family (tunnel, MDT, IPv6 variants, L2VPN, EVPN, link-state,
# flowspec, ...).
_BGP_ADDRTYPE.fields_by_name['ipv4_tunnel_address'].message_type = _IPV4TUNNELADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv4_mdt_address'].message_type = _IPV4MDTADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv6_address'].message_type = _IPV6ADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv6_mcast_address'].message_type = _IPV6ADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv6_label_address'].message_type = _IPV6ADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv6_vpn_address'].message_type = _IPV6ADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv6_vpn_mcast_address'].message_type = _IPV6ADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['l2_vpnvpls_address'].message_type = _BGP_L2VPN_ADDR_T
_BGP_ADDRTYPE.fields_by_name['rt_constraint_address'].message_type = _RTCONSTRAINTADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv6_mvpn_address'].message_type = _IPV6MVPNADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv4_mvpn_address'].message_type = _IPV4MVPNADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['l2_vpn_evpn_address'].message_type = _L2VPNEVPNADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ls_ls_address'].message_type = _LS_LSADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['l2_vpn_mspw_address'].message_type = _BGP_L2VPN_MSPW_ADDR_T
_BGP_ADDRTYPE.fields_by_name['ipv4_flowspec_address'].message_type = _IPV4FLOWSPECADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv6_flowspec_address'].message_type = _IPV6FLOWSPECADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv4_vpn_flowspec_address'].message_type = _IPV4FLOWSPECADDRESSTYPE
_BGP_ADDRTYPE.fields_by_name['ipv6_vpn_flowspec_address'].message_type = _IPV6FLOWSPECADDRESSTYPE
_BGP_PREFIXTYPE.fields_by_name['prefix'].message_type = _BGP_ADDRTYPE
_BGP_COMMON_ATTR_ENTRY_.fields_by_name['pe_distinguisher_label'].message_type = _BGP_PEDISTLBL_ENTRY_
_BGP_ATTR_.fields_by_name['common_attributes'].message_type = _BGP_COMMON_ATTR_ENTRY_
_BGP_ATTR_.fields_by_name['attr_set'].message_type = _BGP_COMMON_ATTR_ENTRY_
_BGP_ATTR_.fields_by_name['rnh_addr'].message_type = _BGP_ATTR_RNH_ADDR_T
_BGP_ATTR_.fields_by_name['ribrnh_ip'].message_type = _BGP_ADDRTYPE
_BGP_ADVINFO_TYPE_.fields_by_name['neighbor_address'].message_type = _BGP_ADDRTYPE
# bgp_path_ is the per-path record: prefix, neighbor, next hop, TE tunnel,
# EVPN ESI/gateway, best-path competition winner, advertisement target lists
# (repeated bgp_advinfo_type_), and MVPN neighbor/next-hop addresses.
_BGP_PATH_.fields_by_name['bgp_prefix'].message_type = _BGP_PREFIXTYPE
_BGP_PATH_.fields_by_name['neighbor_address'].message_type = _BGP_ADDRTYPE
_BGP_PATH_.fields_by_name['next_hop'].message_type = _BGP_ADDRTYPE
_BGP_PATH_.fields_by_name['nhte_tunnel'].message_type = _BGP_TE_TUNNEL
_BGP_PATH_.fields_by_name['mdt_group_addr'].message_type = _BGP_ADDRTYPE
_BGP_PATH_.fields_by_name['l2_vpn_evpn_esi'].message_type = _BGP_L2VPN_EVPN_ESI_T
_BGP_PATH_.fields_by_name['gw_addr'].message_type = _BGP_EVPN_GW_ADDR_T
_BGP_PATH_.fields_by_name['best_path_comp_winner'].message_type = _BGP_ADDRTYPE
_BGP_PATH_.fields_by_name['local_peers_advertised_to'].message_type = _BGP_ADVINFO_TYPE_
_BGP_PATH_.fields_by_name['pe_peers_advertised_to'].message_type = _BGP_ADVINFO_TYPE_
_BGP_PATH_.fields_by_name['best_path_orr_bitfields'].message_type = _BGP_ADVINFO_TYPE_
_BGP_PATH_.fields_by_name['add_path_orr_bitfields'].message_type = _BGP_ADVINFO_TYPE_
_BGP_PATH_.fields_by_name['mvpn_nbr_addr'].message_type = _BGP_ADDRTYPE
_BGP_PATH_.fields_by_name['mvpn_nexthop_addr'].message_type = _BGP_ADDRTYPE
# --- File-level registration (machine-generated by protoc; do not edit) ---
# Expose each top-level message Descriptor on the FileDescriptor by its
# .proto message name, then register the completed file with the default
# symbol database so messages can be resolved by full name at runtime.
DESCRIPTOR.message_types_by_name['bgp_path_bag_KEYS'] = _BGP_PATH_BAG_KEYS
DESCRIPTOR.message_types_by_name['bgp_path_bag'] = _BGP_PATH_BAG
DESCRIPTOR.message_types_by_name['IPV4TunnelAddressType'] = _IPV4TUNNELADDRESSTYPE
DESCRIPTOR.message_types_by_name['IPV4MDTAddressType'] = _IPV4MDTADDRESSTYPE
DESCRIPTOR.message_types_by_name['RTConstraintAddressType'] = _RTCONSTRAINTADDRESSTYPE
DESCRIPTOR.message_types_by_name['IPV6AddressType'] = _IPV6ADDRESSTYPE
DESCRIPTOR.message_types_by_name['MACAddressType'] = _MACADDRESSTYPE
DESCRIPTOR.message_types_by_name['bgp_attr_rnh_addr_t'] = _BGP_ATTR_RNH_ADDR_T
DESCRIPTOR.message_types_by_name['bgp_evpn_gw_addr_t'] = _BGP_EVPN_GW_ADDR_T
DESCRIPTOR.message_types_by_name['bgp_l2vpn_addr_t'] = _BGP_L2VPN_ADDR_T
DESCRIPTOR.message_types_by_name['bgp_l2vpn_evpn_esi_t'] = _BGP_L2VPN_EVPN_ESI_T
DESCRIPTOR.message_types_by_name['L2VPNEVPNAddressType'] = _L2VPNEVPNADDRESSTYPE
DESCRIPTOR.message_types_by_name['bgp_l2vpn_mspw_addr_t'] = _BGP_L2VPN_MSPW_ADDR_T
DESCRIPTOR.message_types_by_name['IPV6MVPNAddressType'] = _IPV6MVPNADDRESSTYPE
DESCRIPTOR.message_types_by_name['IPV4MVPNAddressType'] = _IPV4MVPNADDRESSTYPE
DESCRIPTOR.message_types_by_name['LS_LSAddressType'] = _LS_LSADDRESSTYPE
DESCRIPTOR.message_types_by_name['IPv4FlowspecAddressType'] = _IPV4FLOWSPECADDRESSTYPE
DESCRIPTOR.message_types_by_name['IPv6FlowspecAddressType'] = _IPV6FLOWSPECADDRESSTYPE
DESCRIPTOR.message_types_by_name['bgp_addrtype'] = _BGP_ADDRTYPE
DESCRIPTOR.message_types_by_name['bgp_prefixtype'] = _BGP_PREFIXTYPE
DESCRIPTOR.message_types_by_name['bgp_te_tunnel'] = _BGP_TE_TUNNEL
DESCRIPTOR.message_types_by_name['bgp_pedistlbl_entry_'] = _BGP_PEDISTLBL_ENTRY_
DESCRIPTOR.message_types_by_name['bgp_common_attr_entry_'] = _BGP_COMMON_ATTR_ENTRY_
DESCRIPTOR.message_types_by_name['bgp_attr_'] = _BGP_ATTR_
DESCRIPTOR.message_types_by_name['bgp_advinfo_type_'] = _BGP_ADVINFO_TYPE_
DESCRIPTOR.message_types_by_name['bgp_path_'] = _BGP_PATH_
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# --- Concrete message classes (machine-generated by protoc; do not edit) ---
# For each Descriptor above, build the usable Python message class via the
# GeneratedProtocolMessageType metaclass and register it with the symbol
# database.  The `# @@protoc_insertion_point(class_scope:...)` comments are
# markers consumed by protoc plugins that inject code into generated files;
# they must be kept intact and in place.
bgp_path_bag_KEYS = _reflection.GeneratedProtocolMessageType('bgp_path_bag_KEYS', (_message.Message,), dict(
  DESCRIPTOR = _BGP_PATH_BAG_KEYS,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_KEYS)
  ))
_sym_db.RegisterMessage(bgp_path_bag_KEYS)
bgp_path_bag = _reflection.GeneratedProtocolMessageType('bgp_path_bag', (_message.Message,), dict(
  DESCRIPTOR = _BGP_PATH_BAG,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag)
  ))
_sym_db.RegisterMessage(bgp_path_bag)
IPV4TunnelAddressType = _reflection.GeneratedProtocolMessageType('IPV4TunnelAddressType', (_message.Message,), dict(
  DESCRIPTOR = _IPV4TUNNELADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4TunnelAddressType)
  ))
_sym_db.RegisterMessage(IPV4TunnelAddressType)
IPV4MDTAddressType = _reflection.GeneratedProtocolMessageType('IPV4MDTAddressType', (_message.Message,), dict(
  DESCRIPTOR = _IPV4MDTADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4MDTAddressType)
  ))
_sym_db.RegisterMessage(IPV4MDTAddressType)
RTConstraintAddressType = _reflection.GeneratedProtocolMessageType('RTConstraintAddressType', (_message.Message,), dict(
  DESCRIPTOR = _RTCONSTRAINTADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.RTConstraintAddressType)
  ))
_sym_db.RegisterMessage(RTConstraintAddressType)
IPV6AddressType = _reflection.GeneratedProtocolMessageType('IPV6AddressType', (_message.Message,), dict(
  DESCRIPTOR = _IPV6ADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6AddressType)
  ))
_sym_db.RegisterMessage(IPV6AddressType)
MACAddressType = _reflection.GeneratedProtocolMessageType('MACAddressType', (_message.Message,), dict(
  DESCRIPTOR = _MACADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.MACAddressType)
  ))
_sym_db.RegisterMessage(MACAddressType)
bgp_attr_rnh_addr_t = _reflection.GeneratedProtocolMessageType('bgp_attr_rnh_addr_t', (_message.Message,), dict(
  DESCRIPTOR = _BGP_ATTR_RNH_ADDR_T,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_rnh_addr_t)
  ))
_sym_db.RegisterMessage(bgp_attr_rnh_addr_t)
bgp_evpn_gw_addr_t = _reflection.GeneratedProtocolMessageType('bgp_evpn_gw_addr_t', (_message.Message,), dict(
  DESCRIPTOR = _BGP_EVPN_GW_ADDR_T,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_evpn_gw_addr_t)
  ))
_sym_db.RegisterMessage(bgp_evpn_gw_addr_t)
bgp_l2vpn_addr_t = _reflection.GeneratedProtocolMessageType('bgp_l2vpn_addr_t', (_message.Message,), dict(
  DESCRIPTOR = _BGP_L2VPN_ADDR_T,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_addr_t)
  ))
_sym_db.RegisterMessage(bgp_l2vpn_addr_t)
bgp_l2vpn_evpn_esi_t = _reflection.GeneratedProtocolMessageType('bgp_l2vpn_evpn_esi_t', (_message.Message,), dict(
  DESCRIPTOR = _BGP_L2VPN_EVPN_ESI_T,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_evpn_esi_t)
  ))
_sym_db.RegisterMessage(bgp_l2vpn_evpn_esi_t)
L2VPNEVPNAddressType = _reflection.GeneratedProtocolMessageType('L2VPNEVPNAddressType', (_message.Message,), dict(
  DESCRIPTOR = _L2VPNEVPNADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.L2VPNEVPNAddressType)
  ))
_sym_db.RegisterMessage(L2VPNEVPNAddressType)
bgp_l2vpn_mspw_addr_t = _reflection.GeneratedProtocolMessageType('bgp_l2vpn_mspw_addr_t', (_message.Message,), dict(
  DESCRIPTOR = _BGP_L2VPN_MSPW_ADDR_T,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_l2vpn_mspw_addr_t)
  ))
_sym_db.RegisterMessage(bgp_l2vpn_mspw_addr_t)
IPV6MVPNAddressType = _reflection.GeneratedProtocolMessageType('IPV6MVPNAddressType', (_message.Message,), dict(
  DESCRIPTOR = _IPV6MVPNADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV6MVPNAddressType)
  ))
_sym_db.RegisterMessage(IPV6MVPNAddressType)
IPV4MVPNAddressType = _reflection.GeneratedProtocolMessageType('IPV4MVPNAddressType', (_message.Message,), dict(
  DESCRIPTOR = _IPV4MVPNADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPV4MVPNAddressType)
  ))
_sym_db.RegisterMessage(IPV4MVPNAddressType)
LS_LSAddressType = _reflection.GeneratedProtocolMessageType('LS_LSAddressType', (_message.Message,), dict(
  DESCRIPTOR = _LS_LSADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.LS_LSAddressType)
  ))
_sym_db.RegisterMessage(LS_LSAddressType)
IPv4FlowspecAddressType = _reflection.GeneratedProtocolMessageType('IPv4FlowspecAddressType', (_message.Message,), dict(
  DESCRIPTOR = _IPV4FLOWSPECADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv4FlowspecAddressType)
  ))
_sym_db.RegisterMessage(IPv4FlowspecAddressType)
IPv6FlowspecAddressType = _reflection.GeneratedProtocolMessageType('IPv6FlowspecAddressType', (_message.Message,), dict(
  DESCRIPTOR = _IPV6FLOWSPECADDRESSTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.IPv6FlowspecAddressType)
  ))
_sym_db.RegisterMessage(IPv6FlowspecAddressType)
bgp_addrtype = _reflection.GeneratedProtocolMessageType('bgp_addrtype', (_message.Message,), dict(
  DESCRIPTOR = _BGP_ADDRTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_addrtype)
  ))
_sym_db.RegisterMessage(bgp_addrtype)
bgp_prefixtype = _reflection.GeneratedProtocolMessageType('bgp_prefixtype', (_message.Message,), dict(
  DESCRIPTOR = _BGP_PREFIXTYPE,
  __module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
  # @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_prefixtype)
  ))
_sym_db.RegisterMessage(bgp_prefixtype)
bgp_te_tunnel = _reflection.GeneratedProtocolMessageType('bgp_te_tunnel', (_message.Message,), dict(
DESCRIPTOR = _BGP_TE_TUNNEL,
__module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_te_tunnel)
))
_sym_db.RegisterMessage(bgp_te_tunnel)
bgp_pedistlbl_entry_ = _reflection.GeneratedProtocolMessageType('bgp_pedistlbl_entry_', (_message.Message,), dict(
DESCRIPTOR = _BGP_PEDISTLBL_ENTRY_,
__module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_pedistlbl_entry_)
))
_sym_db.RegisterMessage(bgp_pedistlbl_entry_)
bgp_common_attr_entry_ = _reflection.GeneratedProtocolMessageType('bgp_common_attr_entry_', (_message.Message,), dict(
DESCRIPTOR = _BGP_COMMON_ATTR_ENTRY_,
__module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_common_attr_entry_)
))
_sym_db.RegisterMessage(bgp_common_attr_entry_)
bgp_attr_ = _reflection.GeneratedProtocolMessageType('bgp_attr_', (_message.Message,), dict(
DESCRIPTOR = _BGP_ATTR_,
__module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_attr_)
))
_sym_db.RegisterMessage(bgp_attr_)
bgp_advinfo_type_ = _reflection.GeneratedProtocolMessageType('bgp_advinfo_type_', (_message.Message,), dict(
DESCRIPTOR = _BGP_ADVINFO_TYPE_,
__module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_advinfo_type_)
))
_sym_db.RegisterMessage(bgp_advinfo_type_)
bgp_path_ = _reflection.GeneratedProtocolMessageType('bgp_path_', (_message.Message,), dict(
DESCRIPTOR = _BGP_PATH_,
__module__ = 'cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_bag_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_ipv4_bgp_oper.bgp.instances.instance.instance_standby.default_vrf.afs.af.advertised_path_xr.advertised_path.bgp_path_)
))
_sym_db.RegisterMessage(bgp_path_)
# @@protoc_insertion_point(module_scope)
| [
"mspiez@gmail.com"
] | mspiez@gmail.com |
f421a6af40ae5171cceff2d4962cb7c99889310d | fe87192240c3d5ffe7deb5c9f2b7f02f347a2c00 | /peptide-permable/analyze_result.py | 69ef531b3d8aa7f477fdaf44fe9133b385513008 | [] | no_license | leexa90/dl_dev_course | ccfae0bbef4790b0b75fc9da0679f23c1da3bcf5 | 10a9e826cd7e752ce607deadc63826b313de39d2 | refs/heads/master | 2022-08-17T05:07:35.280305 | 2017-12-07T09:04:14 | 2017-12-07T09:04:14 | 105,847,852 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,368 | py | import pandas as pd
import os
# Collect all 'results5*.csv' result files and stack them into a single
# DataFrame; the added 'diff' column records which file each row came from,
# and dict_files maps that index back to the file name.
files = sorted([x for x in os.listdir('results') if ('.csv' in x and 'results5' in x)])
dict_files = {}
data = pd.read_csv('results/'+files[0])
data['diff'] = 0
dict_files[0] = files[0]
counter =1
for i in files[1:]:
    print i  # NOTE(review): Python 2 print statement -- this script is Py2-only
    counter += 1
    dict_files[counter] = i
    temp = pd.read_csv('results/'+i)
    temp['diff'] = counter
    data = pd.concat([data,temp])
import numpy as np
import matplotlib.pyplot as plt
# Quick sanity histogram of the log10 predicted probabilities.
size= np.log10(data.prob)
plt.hist(size,bins=100)
# Amino-acid index -> one-letter code lookup table.
dictt_inv = {0: 'A', 1: 'C', 2: 'E', 3: 'D', 4: 'G', 5: 'F', 6: 'I', 7: 'H', 8: 'K', 9: 'M',
             10: 'L', 11: 'N', 12: 'Q', 13: 'P', 14: 'S', 15: 'R', 16: 'T', 17: 'W', 18: 'V', 19: 'Y'}
def string(arr):
    """Map a sequence of amino-acid indices to its one-letter string.

    Uses the module-level ``dictt_inv`` index -> letter table.  Replaces the
    original quadratic ``+=`` accumulation with a single ``str.join``.

    Args:
        arr: iterable of integer indices (keys of ``dictt_inv``).

    Returns:
        The concatenated one-letter amino-acid sequence.
    """
    return ''.join(dictt_inv[i] for i in arr)
# Reference p53 peptide, as letters and as the numeric encoding used above.
p53_seq='ETFSDLWKLLPEN'
p53_seq_vec = np.array([2., 16., 5., 14., 3., 10., 17., 8., 10., 10., 13., 2., 11.])
# Standard error of the 60 cross-validation folds' predictions per row.
data['var'] = map(np.std, np.array(data[['fold' +str(x) for x in range(0,60)]]))
data['var'] = data['var']/(59**.5)
# Rows sorted by predicted probability (ascending), keeping the 13 position
# columns plus bookkeeping columns.
best = data.sort_values('prob')[list(data.keys()[0:13])+['diff','prob']].reset_index(drop=True)
def get_diff(x):
    """Indices of the three positions most likely mutated vs. the p53 seed.

    Compares row ``x`` (columns '0'..'12') against the module-level
    ``p53_seq_vec`` and returns the last three argsort indices of the
    mismatch mask (i.e. the mismatching positions, stably ordered).
    """
    columns = [str(pos) for pos in range(13)]
    mismatch = p53_seq_vec != x[columns].values
    return np.argsort(mismatch)[-3:]
# Print the seed sequence next to each of the nine highest-probability
# predicted sequences (best is sorted ascending, so index from the end).
for i in range(1,10):
    print p53_seq
    #print best.iloc[-i][range(0,13)].values, best.iloc[-i].prob,'\n'
    #print np.argsort(p53_seq_vec != best.iloc[-i][range(0,13)].values)[-3:],'\n'
    print string(best.iloc[-i][range(0,13)].values), best.iloc[-i].prob,'\n'
#best['prob'] = np.log10(best['prob']+0.01)
for diff in pd.unique(data[data['prob']-data['var'] >= 0.60]['diff']):
above_30 = data[data['prob']-data['var'] >= 0.60]
above_30 = above_30[above_30['diff']== diff]
score = np.zeros((13,20))
float_formatter = lambda x: "%.3f" % x
np.set_printoptions(formatter={'float_kind':float_formatter})
for aa in range(0,20):
for pos in range(0,13):
score[pos,aa] = np.sum(above_30[above_30[str(pos)] == aa].prob)/np.sum(above_30.prob)
import matplotlib as mpl
from matplotlib.text import TextPath
from matplotlib.patches import PathPatch
from matplotlib.font_manager import FontProperties
fp = FontProperties(family="monospace", weight="bold")
globscale = 1.35
LETTERS = {
"A" : TextPath((-0.35, 0), "A", size=1, prop=fp),
"C" : TextPath((-0.35, 0), "C", size=1, prop=fp),
"E" : TextPath((-0.35, 0), "E", size=1, prop=fp),
"D" : TextPath((-0.35, 0), "D", size=1, prop=fp) ,
"G" : TextPath((-0.35, 0), "G", size=1, prop=fp),
"F" : TextPath((-0.35, 0), "F", size=1, prop=fp),
"I" : TextPath((-0.35, 0), "I", size=1, prop=fp),
"H" : TextPath((-0.35, 0), "H", size=1, prop=fp) ,
"K" : TextPath((-0.35, 0), "K", size=1, prop=fp),
"M" : TextPath((-0.35, 0), "M", size=1, prop=fp),
"L" : TextPath((-0.35, 0.003), "L", size=1, prop=fp),
"N" : TextPath((-0.35, 0), "N", size=1, prop=fp) ,
"Q" : TextPath((-0.35, 0.01), "Q", size=1, prop=fp),
"P" : TextPath((-0.35, 0), "P", size=1, prop=fp),
"S" : TextPath((-0.35, 0.01), "S", size=1, prop=fp),
"R" : TextPath((-0.35, 0), "R", size=1, prop=fp),
"T" : TextPath((-0.35, 0), "T", size=1, prop=fp),
"W" : TextPath((-0.35, 0), "W", size=1, prop=fp),
"V" : TextPath((-0.35, 0), "V", size=1, prop=fp),
"Y" : TextPath((-0.35, 0), "Y", size=1, prop=fp) }
COLOR_SCHEME = {'A': 'grey', 'C': 'lightBlue', 'E': 'red', 'D': 'red',
'G': 'grey', 'F': 'green', 'I': 'grey', 'H': 'blue', 'K': 'blue',
'M': 'grey', 'L': 'grey', 'N': 'lightBlue', 'Q': 'lightBlue', 'P': 'orange',
'S': 'lightBlue', 'R': 'blue', 'T': 'lightBlue', 'W': 'green', 'V': 'grey',
'Y': 'green'}
def letterAt(letter, x, y, yscale=1, ax=None):
    """Draw one sequence-logo glyph at (x, y), vertically scaled by yscale.

    Glyph paths and fill colours come from the module-level LETTERS and
    COLOR_SCHEME tables; the patch is added to ``ax`` and returned.
    """
    text = LETTERS[letter]
    # NOTE(review): ax.transData is dereferenced here, so ax=None would
    # already raise before the guard below -- the default is effectively
    # unusable.
    t = mpl.transforms.Affine2D().scale(1*globscale, yscale*globscale) + \
        mpl.transforms.Affine2D().translate(x,y) + ax.transData
    p = PathPatch(text, lw=0, fc=COLOR_SCHEME[letter], transform=t)
    if ax != None:
        ax.add_artist(p)
    return p
def plot(thres=0.05,name='temp'):
    """Render the per-position residue-probability logo and save it as PNG.

    Relies on names bound by the enclosing per-``diff`` loop: ``score``
    (13x20 probability matrix), ``above_30``, ``diff`` and ``dict_files``
    -- presumably this def sits inside that loop; original indentation was
    lost in this copy, so confirm against the source repository.

    Args:
        thres: minimum probability for a letter to be drawn.
        name: output file name, without the '.png' extension.
    """
    fig, ax = plt.subplots(figsize=(10,8))
    # Stack letters bottom-up at each of the 13 positions, smallest first.
    for i in range(0,13):
        y = 0
        for aa in np.argsort(score[i,:]):#for aa in range(0,20)[::-1]:
            temp_score = score[i,aa]
            if temp_score >= thres:
                letter = dictt_inv[aa]
                a=letterAt(letter,i+1,y,temp_score,ax)
                y += temp_score
    plt.xlim((0,14))
    plt.ylim((-0.1,1))
    plt.title(dict_files[diff]+',num samples:'+str(len(above_30)))
    plt.xlabel('peptide position')
    plt.ylabel('probabilities')
    plt.tight_layout()
    plt.xticks(range(1,14),['E1', 'T2', 'F3', 'S4', 'D5', 'L6', 'W7', 'K8', 'L9', 'L10', 'P11', 'E12', 'N13'])
    # Draw the seed p53 sequence as small letters below the axis.
    for i in range(0,13):
        a=letterAt(p53_seq[i],i+1,-0.1,0.09,ax)
    plt.plot((0,14),(0,0),color='black',linewidth='5')
    plt.savefig(name+'.png',dpi=300)
    #plt.show()
    plt.close()
# Emit one logo per result file at a 5% drawing threshold.
for i in (5,):
    plot(i*1.0/100,'Fig_60percent%s_thres%s_var'%(diff,i))
| [
"lee.x.a90@gmail.com"
] | lee.x.a90@gmail.com |
e209526ee9554ff6548df32367dd10813a5f4f0c | f90987f8084f984fbafef11322eeeeeeb9277b3c | /dfpipe/pipe.py | 87b48a3794e6ec8123d6c581fb4f5e009192ea69 | [
"Apache-2.0"
] | permissive | bw4sz/gae-dataflow | ad04a11bc04adfbeef2b5bf6248d8dce41e5d017 | b26b296f8cae30c67d5e5a85bb45ad8e56ede14f | refs/heads/master | 2021-01-01T16:57:45.743717 | 2017-05-04T16:54:42 | 2017-05-04T16:54:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,500 | py | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Define and launch a Dataflow pipeline to analyze recent tweets stored
in the Datastore.
"""
from __future__ import absolute_import
import datetime
import json
import logging
import re
import apache_beam as beam
from apache_beam import combiners
from apache_beam.io.gcp.bigquery import parse_table_schema_from_json
from apache_beam.io.gcp.datastore.v1.datastoreio import ReadFromDatastore
from apache_beam.pvalue import AsDict
from apache_beam.pvalue import AsSingleton
from google.cloud.proto.datastore.v1 import query_pb2
from googledatastore import helper as datastore_helper, PropertyFilter
logging.basicConfig(level=logging.INFO)
class WordExtractingDoFn(beam.DoFn):
    """Parse each tweet text into words, removing some 'stopwords'.

    Emits one element per distinct (lower-cased) word of the tweet, after
    filtering a fixed stopword list and every single ASCII letter.
    """

    def process(self, element):
        """Extract the filtered word set from a Datastore tweet entity.

        Args:
            element: Datastore entity whose optional 'text' property holds
                the tweet body.

        Returns:
            A list of distinct lower-cased non-stopword words.
        """
        content_value = element.properties.get('text', None)
        text_line = ''
        if content_value:
            text_line = content_value.string_value

        words = set([x.lower() for x in re.findall(r'[A-Za-z\']+', text_line)])
        # Same stopword list as CoOccurExtractingDoFn.  BUG FIX: the original
        # was missing the comma between 'your' and 'who', so the adjacent
        # string literals fused into the single useless stopword 'yourwho'
        # and 'who' was never filtered.
        stopwords = [
            'a', 'amp', 'an', 'and', 'are', 'as', 'at', 'be', 'been',
            'but', 'by', 'co', 'do', 'for', 'has', 'have', 'he', 'her', 'his',
            'https', 'if', 'in', 'is', 'it', 'me', 'my', 'no', 'not', 'of', 'on',
            'or', 'rt', 's', 'she', 'so', 't', 'than', 'that', 'the', 'they',
            'this', 'to', 'us', 'was', 'we', 'what', 'with', 'you', 'your',
            'who', 'when', 'via']
        # temp
        stopwords += ['lead', 'scoopit']
        # Also drop every single ASCII letter ('a'..'z').
        stopwords += list(map(chr, range(97, 123)))
        return list(words - set(stopwords))
class CoOccurExtractingDoFn(beam.DoFn):
    """Emit every unordered pair (bigram) of distinct non-stopword words
    found in a tweet's text.
    """

    def process(self, element):
        """Return the alphabetically ordered 2-combinations of kept words."""
        import itertools

        text_prop = element.properties.get('text', None)
        text_line = text_prop.string_value if text_prop else ''

        tokens = set(w.lower() for w in re.findall(r'[A-Za-z\']+', text_line))
        stopwords = set([
            'a', 'amp', 'an', 'and', 'are', 'as', 'at', 'be', 'been',
            'but', 'by', 'co', 'do', 'for', 'has', 'have', 'he', 'her', 'his',
            'https', 'if', 'in', 'is', 'it', 'me', 'my', 'no', 'not', 'of', 'on',
            'or', 'rt', 's', 'she', 'so', 't', 'than', 'that', 'the', 'they',
            'this', 'to', 'us', 'was', 'we', 'what', 'with', 'you', 'your',
            'who', 'when', 'via'])
        # temp
        stopwords.update(['lead', 'scoopit'])
        # Drop every single ASCII letter as well.
        stopwords.update(chr(c) for c in range(97, 123))

        kept = sorted(tokens - stopwords)
        return list(itertools.combinations(kept, 2))
class URLExtractingDoFn(beam.DoFn):
    """Extract the urls from each tweet."""

    def process(self, element):
        """Return the lower-cased expanded URLs of a tweet entity.

        Implicitly returns None (emitting nothing downstream) when the
        entity has no 'urls' property.
        """
        url_content = element.properties.get('urls', None)
        if url_content:
            urls = url_content.array_value.values
            links = []
            for u in urls:
                links.append(u.string_value.lower())
            return links
def make_query(kind):
    """Creates a Cloud Datastore query to retrieve all entities with a
    'created_at' date > N days ago.

    Args:
        kind: the Datastore entity kind to query (e.g. 'Tweet').

    Returns:
        A populated query_pb2.Query protobuf.
    """
    days = 4  # look-back window, in days
    now = datetime.datetime.now()
    earlier = now - datetime.timedelta(days=days)
    query = query_pb2.Query()
    query.kind.add().name = kind
    # datastore_helper fills in the protobuf property-filter structure.
    datastore_helper.set_property_filter(query.filter, 'created_at',
                                         PropertyFilter.GREATER_THAN,
                                         earlier)
    return query
def process_datastore_tweets(project, dataset, pipeline_options):
    """Creates a pipeline that reads tweets from Cloud Datastore from the last
    N days. The pipeline finds the top most-used words, the top most-tweeted
    URLs, ranks word co-occurrences by an 'interestingness' metric (similar to
    on tf* idf).

    NOTE(review): the Map lambdas below use Python 2 tuple-parameter
    unpacking, so this module is Python-2 only.
    """
    # One timestamp string stamped on every output row of this run.
    ts = str(datetime.datetime.utcnow())
    p = beam.Pipeline(options=pipeline_options)

    # Create a query to read entities from datastore.
    query = make_query('Tweet')

    # Read entities from Cloud Datastore into a PCollection.
    lines = (p
             | 'read from datastore' >> ReadFromDatastore(project, query, None))

    # Total tweet count, used to turn per-word counts into percentages.
    global_count = AsSingleton(
        lines
        | 'global count' >> beam.combiners.Count.Globally())

    # Count the occurrences of each word.
    percents = (lines
                | 'split' >> (beam.ParDo(WordExtractingDoFn())
                              .with_output_types(unicode))
                | 'pair_with_one' >> beam.Map(lambda x: (x, 1))
                | 'group' >> beam.GroupByKey()
                | 'count' >> beam.Map(lambda (word, ones): (word, sum(ones)))
                | 'in tweets percent' >> beam.Map(
                    lambda (word, wsum), gc: (word, float(wsum) / gc), global_count))
    top_percents = (percents
                    | 'top 500' >> combiners.Top.Of(500, lambda x, y: x[1] < y[1])
                    )
    # Count the occurrences of each expanded url in the tweets
    url_counts = (lines
                  | 'geturls' >> (beam.ParDo(URLExtractingDoFn())
                                  .with_output_types(unicode))
                  | 'urls_pair_with_one' >> beam.Map(lambda x: (x, 1))
                  | 'urls_group' >> beam.GroupByKey()
                  | 'urls_count' >> beam.Map(lambda (word, ones): (word, sum(ones)))
                  | 'urls top 300' >> combiners.Top.Of(300, lambda x, y: x[1] < y[1])
                  )

    # Define some inline helper functions.

    def join_cinfo(cooccur, percents):
        """Calculate a co-occurence ranking."""
        import math

        word1 = cooccur[0][0]
        word2 = cooccur[0][1]
        try:
            word1_percent = percents[word1]
            weight1 = 1 / word1_percent
            word2_percent = percents[word2]
            weight2 = 1 / word2_percent
            return (cooccur[0], cooccur[1], cooccur[1] *
                    math.log(min(weight1, weight2)))
        except:
            # NOTE(review): bare except silently maps any failure (missing
            # word, zero percentage, ...) to a rank of 0.
            return 0

    def generate_cooccur_schema():
        """BigQuery schema for the word co-occurrence table."""
        json_str = json.dumps({'fields': [
            {'name': 'w1', 'type': 'STRING', 'mode': 'NULLABLE'},
            {'name': 'w2', 'type': 'STRING', 'mode': 'NULLABLE'},
            {'name': 'count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
            {'name': 'log_weight', 'type': 'FLOAT', 'mode': 'NULLABLE'},
            {'name': 'ts', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'}]})
        return parse_table_schema_from_json(json_str)

    def generate_url_schema():
        """BigQuery schema for the urls count table."""
        json_str = json.dumps({'fields': [
            {'name': 'url', 'type': 'STRING', 'mode': 'NULLABLE'},
            {'name': 'count', 'type': 'INTEGER', 'mode': 'NULLABLE'},
            {'name': 'ts', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'}]})
        return parse_table_schema_from_json(json_str)

    def generate_wc_schema():
        """BigQuery schema for the word count table."""
        json_str = json.dumps({'fields': [
            {'name': 'word', 'type': 'STRING', 'mode': 'NULLABLE'},
            {'name': 'percent', 'type': 'FLOAT', 'mode': 'NULLABLE'},
            {'name': 'ts', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'}]})
        return parse_table_schema_from_json(json_str)

    # Now build the rest of the pipeline.
    # Calculate the word co-occurence scores.
    cooccur_rankings = (lines
                        | 'getcooccur' >> (beam.ParDo(CoOccurExtractingDoFn()))
                        | 'co_pair_with_one' >> beam.Map(lambda x: (x, 1))
                        | 'co_group' >> beam.GroupByKey()
                        | 'co_count' >> beam.Map(lambda (wordts, ones): (wordts, sum(ones)))
                        | 'weights' >> beam.Map(join_cinfo, AsDict(percents))
                        | 'co top 300' >> combiners.Top.Of(300, lambda x, y: x[2] < y[2])
                        )

    # Format the counts into a PCollection of strings.
    wc_records = top_percents | 'format' >> beam.FlatMap(
        lambda x: [{'word': xx[0], 'percent': xx[1], 'ts': ts} for xx in x])
    url_records = url_counts | 'urls_format' >> beam.FlatMap(
        lambda x: [{'url': xx[0], 'count': xx[1], 'ts': ts} for xx in x])
    co_records = cooccur_rankings | 'co_format' >> beam.FlatMap(
        lambda x: [{'w1': xx[0][0], 'w2': xx[0][1], 'count': xx[1],
                    'log_weight': xx[2], 'ts': ts} for xx in x])

    # Write the results to three BigQuery tables.
    wc_records | 'wc_write_bq' >> beam.io.Write(
        beam.io.BigQuerySink(
            '%s:%s.word_counts' % (project, dataset),
            schema=generate_wc_schema(),
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND))
    url_records | 'urls_write_bq' >> beam.io.Write(
        beam.io.BigQuerySink(
            '%s:%s.urls' % (project, dataset),
            schema=generate_url_schema(),
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND))
    co_records | 'co_write_bq' >> beam.io.Write(
        beam.io.BigQuerySink(
            '%s:%s.word_cooccur' % (project, dataset),
            schema=generate_cooccur_schema(),
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND))

    # Actually run the pipeline.
    return p.run()
| [
"amyu@google.com"
] | amyu@google.com |
f7c55e0e1f70031e5e6cb304b08ede6bb0e96d1e | 42209d0278c429d4b8c22b23109b577e10665570 | /rgb2flow_script.py | 1f44b08f1970135093c108836c2d2090e75c53d0 | [] | no_license | YunwenHuang/Single-shot-Spatio-temporal-action-detection | 342520ec4bbc88ba7df94df71f9dbee7afa6fb86 | 959f811b12ca5583b602765e5968f32ebad92ce0 | refs/heads/master | 2020-03-18T21:11:25.955033 | 2017-10-24T16:19:38 | 2017-10-24T16:19:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,258 | py | # Imports
from glob import glob
from utils import ImageUtils
import os
import yaml
import h5py
import io
import numpy as np
import multiprocessing
from PIL import Image
import cv2
from scipy.io import loadmat
from joblib import Parallel, delayed
import pdb
import argparse
# Global Vars
PARALLEL = False
parser = argparse.ArgumentParser()
parser.add_argument('-debug', action='store_true')
debug = parser.parse_args().debug
# Load Config File
with open('config.yaml', 'r') as f:
config = yaml.load(f)
# Load Annotations and Do mapping
annotations = loadmat(config['ucf_annotations'])
annot_mapping = {}
for idx in range(len(annotations['annot'][0])):
example_name = annotations['annot'][0][idx][1][0]
example_name = example_name.split('/')[-1]
annot_mapping[example_name] = idx
print("Finished mapping")
# Initialize H5py tree structure
rootDir = config['ucf_rootDir']
flowDir = config['ucf_flowDir']
datasetDir = config['ucf_dataset']
tinyDatasetDir = config['ucf_tinyDataset']
f = h5py.File(datasetDir, 'w')
train = f.create_group('train')
test = f.create_group('test')
tiny_datset = h5py.File(tinyDatasetDir, 'w')
#Define Conversion Method
def Convert(actionPath, example, action, test_set, id, tiny_set):
    """Read one example's frames, compute optical flow, and write to HDF5.

    Args:
        actionPath: directory of the action class containing this example.
        example: example directory name (also the annotation key).
        action: action class name (used for the flow output path).
        test_set: collection of indices assigned to the test split.
        id: this example's index; NOTE(review) shadows the `id` builtin.
        tiny_set: boolean -- whether to also copy into the tiny dataset
            (despite the name, this is a flag, not a set).

    Writes into the module-level `train`/`test`/`tiny_datset` HDF5 groups.
    """
    print("Started: ", example)
    examplePath = os.path.join(actionPath, example)
    frames = []
    images = []
    compressedFlowImages = []
    # Collect raw JPEG bytes and decoded arrays for every frame.
    # NOTE(review): file handles from open() are never closed.
    for frame in sorted(glob(examplePath + "/*.jpg")):
        im = open(frame, 'rb').read()
        frames.append(im)
        images.append(np.array(Image.open(io.BytesIO(im))))
    if not os.path.exists(os.path.join(flowDir, action, example)):
        os.makedirs(os.path.join(flowDir, action, example))
    flowFrames = ImageUtils.ComputeOpticalFlow(np.array(images), os.path.join(flowDir, action, example))
    # Re-encode each flow frame as JPEG bytes for compact HDF5 storage.
    for i, ff in enumerate(flowFrames):
        r, buf = cv2.imencode('.jpg', ff)
        compressedFlowImages.append(buf.tostring())
    if id in test_set:
        ex = test.create_group(example)
    else:
        ex = train.create_group(example)
    if debug:
        pdb.set_trace()
    ex.create_dataset("rgb", data=frames)
    ex.create_dataset("flow", data=compressedFlowImages)
    annots = ex.create_group("annot")
    # Examples without annotations are written without the annot datasets.
    if example not in annot_mapping:
        print("EXCEPTION: ", example)
        return
    example_id = annot_mapping[example]
    annots.create_dataset('action', data=annotations['annot'][0][example_id][2][0][0][2][0][0])
    annots.create_dataset('startFrame', data=annotations['annot'][0][example_id][2][0][0][1][0][0])
    annots.create_dataset('endFrame', data=annotations['annot'][0][example_id][2][0][0][0][0][0])
    annots.create_dataset('bboxes', data=annotations['annot'][0][example_id][2][0][0][3])
    if tiny_set:
        tiny_ex = tiny_datset.create_group(example)
        tiny_ex.create_dataset("rgb", data=frames)
        tiny_ex.create_dataset("flow", data=compressedFlowImages)
        tiny_annots = tiny_ex.create_group("annot")
        tiny_annots.create_dataset('action', data=annotations['annot'][0][example_id][2][0][0][2][0][0])
        tiny_annots.create_dataset('startFrame', data=annotations['annot'][0][example_id][2][0][0][1][0][0])
        tiny_annots.create_dataset('endFrame', data=annotations['annot'][0][example_id][2][0][0][0][0][0])
        tiny_annots.create_dataset('bboxes', data=annotations['annot'][0][example_id][2][0][0][3])
    print(example, ", IsTrain: ", id in test_set)
# Pick (up to) 3 of the 24 action classes for the tiny dataset.
# NOTE(review): replace=True can pick the same class twice, yielding fewer
# than 3 distinct classes.
tiny_set = np.random.choice(range(24), 3, replace=True)
print(tiny_set)
for label, action in enumerate(sorted(os.listdir(rootDir))):
    actionPath = os.path.join(rootDir, action)
    if os.path.isdir(actionPath):
        if PARALLEL:
            # NOTE(review): this branch passes only 3 of Convert's 6 required
            # arguments and would raise a TypeError if PARALLEL were enabled.
            num_cores = multiprocessing.cpu_count() - 1
            Parallel(n_jobs=num_cores)(delayed(Convert)(actionPath, example, action) for example in os.listdir(os.path.join(rootDir, action)))
        else:
            examples_files = sorted(os.listdir(os.path.join(rootDir, action)))
            # Do 2:1 Split
            # NOTE(review): replace=True here too, so the test set may hold
            # duplicates and the split is only approximately 2:1.
            test_set = np.random.choice(range(len(examples_files)), int(len(examples_files) / 3.0), replace=True)
            for id, example in enumerate(examples_files):
                Convert(actionPath, example, action, test_set, id, label in tiny_set)
| [
"alaaelnouby@gmail.com"
] | alaaelnouby@gmail.com |
98adc2f86d297b8a79c2eb2c4ad0528d7e435bc9 | a056e699bb03614563dc9090c4c3bc65479fc2d9 | /buffered_normal.py | 052155b79cc21ef651f9092a14a4e1be4c1a18a0 | [
"BSD-3-Clause"
] | permissive | iandees/marblecutter | 278890faaf7a4d7b604bf2520aff8adb3c5d1b95 | 779b9c597bbd69ca3044f2c246721dc4eeeef61d | refs/heads/mapzen | 2021-01-25T04:36:13.210028 | 2017-06-15T19:35:53 | 2017-06-15T19:35:53 | 93,455,208 | 0 | 0 | null | 2017-06-05T23:13:17 | 2017-06-05T23:13:17 | null | UTF-8 | Python | false | false | 1,243 | py | # noqa
# coding=utf-8
from __future__ import division
import logging
from StringIO import StringIO
import numpy as np
from PIL import Image
from normal import render_normal
LOG = logging.getLogger(__name__)
BUFFER = 4
COLLAR = 2
CONTENT_TYPE = 'image/png'
EXT = 'png'
NAME = 'Buffered Normal'
def render(tile, data_with_buffers):  # noqa
    """Render a buffered normal-map tile as a PNG.

    Args:
        tile: tile spec, forwarded to render_normal.
        data_with_buffers: ``(data, buffers)`` pair -- ``data`` is a
            (bands, rows, cols) array whose first band is used, and
            ``buffers`` is the [left, bottom, right, top] buffer widths in
            pixels.

    Returns:
        A ``(content_type, png_bytes)`` tuple.
    """
    # Python-3-friendly unpacking (the original used a Py2 tuple parameter).
    data, buffers = data_with_buffers

    # Shrink each buffer by the collar width, never below zero; materialize
    # a list so it can be indexed and mutated below.
    buffers = [max(0, b - COLLAR) for b in buffers]
    # Trim the collar from the single-band elevation data.
    data = data[0][buffers[3]:data.shape[1] - buffers[1],
                   buffers[0]:data.shape[2] - buffers[2]]

    if buffers[0] == 0:
        # empty left buffer; repeat the leftmost columns
        data = np.hstack((data[:, :COLLAR], data))

    if buffers[2] == 0:
        # empty right buffer; repeat the rightmost columns
        data = np.hstack((data, data[:, -COLLAR:]))

    if buffers[3] == 0:
        # empty top buffer; repeat the topmost rows
        data = np.vstack((data[:COLLAR], data))
        buffers[3] = COLLAR

    if buffers[1] == 0:
        # empty bottom buffer; repeat the bottommost rows.
        # BUG FIX: the original reused `rows` from the top-buffer branch,
        # which raised a NameError when only the bottom buffer was empty and
        # duplicated the *top* rows when both were.
        data = np.vstack((data, data[-COLLAR:]))
        buffers[1] = COLLAR

    imgarr = render_normal(tile, data, buffers)

    out = StringIO()
    im = Image.fromarray(imgarr, 'RGBA')
    im.save(out, 'png')

    return (CONTENT_TYPE, out.getvalue())
| [
"seth@mojodna.net"
] | seth@mojodna.net |
00432f0cdfde323754f5d5b31594e5f419a4260f | 26bd2d94a849fff0d5428c752b132a4780b96577 | /lib/PvGithubFormat.py | 15e78f500512fa5d028ac9964c64453deaee47a5 | [] | no_license | kaosdg/pivotalreleased | f3ec70d059e458d0a5f4854a2209906ad7dc9785 | 55039665cfcabf91a17793bd69c40be3fc396948 | refs/heads/master | 2021-01-22T14:15:49.823569 | 2014-02-21T15:35:30 | 2014-02-21T15:35:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,579 | py | from PvFormat import PvFormat
from datetime import datetime
from cStringIO import StringIO
class PvGithubFormat(PvFormat):
    """Render Pivotal Tracker release information as GitHub Markdown."""

    def format_project_details(self, project):
        """Markdown header block for a project."""
        parts = ['# %s\n' % project.get('header'),
                 '### %s\n' % project.get('name')]
        if project.get('description'):
            parts.append('###### %s\n' % project.get('description'))
        return ''.join(parts)

    def format_iteration_details(self, iteration):
        """Markdown summary block for a single iteration."""
        stories = iteration.get('stories')
        start_date = datetime.strptime(iteration.get('start'), "%Y-%m-%dT%H:%M:%SZ")
        end_date = datetime.strptime(iteration.get('finish'), "%Y-%m-%dT%H:%M:%SZ")
        parts = ["## %s\n" % iteration.get('header'),
                 "#### Iteration Number : %s\n" % iteration.get('number'),
                 "#### Iteration Start : %s\n" % start_date.strftime('%A, %B %d %Y'),
                 "#### Iteration Finish : %s\n" % end_date.strftime('%A, %B %d %Y'),
                 "#### Team Strength : %s\n" % iteration.get('team_strength'),
                 "#### Number of Stories: %s\n" % len(stories),
                 "#### Iteration Points : %s\n" % PvFormat.get_iteration_points(stories)]
        return ''.join(parts)

    def format_story_details(self, story_type):
        """Markdown section heading for a story type."""
        return "### %sS\n" % story_type.upper()

    def format_bug(self, bug):
        """A bug renders like any other story."""
        return self.format_story(bug)

    def format_chore(self, chore):
        """A chore renders like any other story."""
        return self.format_story(chore)

    def format_feature(self, feature):
        """A feature renders like any other story."""
        return self.format_story(feature)

    def format_story(self, story):
        """One bullet line linking a story to its tracker URL."""
        return "+ [[#%s]](%s) - %s\n" % (story.get('id'), story.get('url'), story.get('name'))

    @classmethod
    def footer(cls):
        """Closing attribution block appended to the release notes."""
        return ("---\n"
                "###### Release notes Generated by "
                "[pivotalmakerelease](https://github.com/kaosdg/pivotalreleased)"
                "\n")
"karl.catigbe@viacom.com"
] | karl.catigbe@viacom.com |
0e12cb4aaa4ad20db8b1aa3d191e74e7f3cb902b | 6d683f971154d319c5e32a20c2a3ac87c21bf7b5 | /OOPbase64/base64.py | 1d00282d9771f80ba76f0ef5055a92373c6051ac | [] | no_license | DevinMcF/csc200 | c52c66b20c038e1c3429a1e9a84648ee676f5358 | 5209357404d02470a86f345297957d664d9198d2 | refs/heads/master | 2020-12-21T17:12:41.803688 | 2020-02-13T16:11:08 | 2020-02-13T16:11:08 | 236,499,505 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | class Base64Converter:
def __init__(self):
"""
Create a string containing the Base64 digits for encoding and a
dictionary containing the numerical value of each digit character
for decoding.
"""
self.digits = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
self.digpos = {}
for pos, dig in enumerate(self.digits):
self.digpos[dig] = pos
| [
"devin.ryan.mcfarlane@gmail.com"
] | devin.ryan.mcfarlane@gmail.com |
082444ad271b4e2d34e2eed2760bd53b0c01a64b | 5a45b19e36b8bf2f524676b5255827d6c69ec614 | /cvdaTA.py | 1649b867d290a2d52152e67872ead03c9d4b21ea | [] | no_license | Ipsitbhatt/C-sharp- | 806db3e90cb2321e1ba32c29dd9035ca3d53e346 | acc6da84e0db4592902378e1c9be96ed499aedc4 | refs/heads/master | 2020-04-16T17:48:36.280318 | 2019-03-28T05:16:32 | 2019-03-28T05:16:32 | 165,789,738 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Mar 28 09:46:14 2019
@author: dit
"""
import cv2
import numpy as np
import matplotlib.pyplot as plt
# NOTE(review): cv2.imread returns BGR channel order, so displaying `image`
# with matplotlib (which expects RGB) swaps red/blue, and COLOR_RGB2GRAY is
# being applied to BGR data -- confirm intent.
image = cv2.imread('C:\\Users\\dit\\Pictures\\cat.jpeg')
image1 = cv2.cvtColor(image,cv2.COLOR_RGB2GRAY)  # grayscale copy (currently unused)
plt.imshow(image)
"noreply@github.com"
] | noreply@github.com |
79a88831525618e5295eaf8ea636b39e46629ee8 | 7f5e2326fbf378ec5e9657f4ad22bbcede3f7f93 | /search/searchAgents.py | d06f12cf61ba98a0d4deb896c1ab65ed94fe40eb | [] | no_license | EmmmaHan/CS188 | b30cf2a587b464aa201b75f15fee119abd7fde9e | 125504812d3f75a4bbf1ae0ed138db9192052d21 | refs/heads/master | 2022-01-11T12:15:49.905561 | 2019-06-06T03:16:43 | 2019-06-06T03:16:43 | 190,487,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,559 | py | # searchAgents.py
# ---------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel (pabbeel@cs.berkeley.edu).
"""
This file contains all of the agents that can be selected to control Pacman. To
select an agent, use the '-p' option when running pacman.py. Arguments can be
passed to your agent using '-a'. For example, to load a SearchAgent that uses
depth first search (dfs), run the following command:
> python pacman.py -p SearchAgent -a fn=depthFirstSearch
Commands to invoke other search strategies can be found in the project
description.
Please only change the parts of the file you are asked to. Look for the lines
that say
"*** YOUR CODE HERE ***"
The parts you fill in start about 3/4 of the way down. Follow the project
description for details.
Good luck and happy searching!
"""
from game import Directions
from game import Agent
from game import Actions
import util
import time
import search
class GoWestAgent(Agent):
    "An agent that heads West whenever that move is legal."

    def getAction(self, state):
        "Receives a GameState (defined in pacman.py) and picks a move."
        legal = state.getLegalPacmanActions()
        return Directions.WEST if Directions.WEST in legal else Directions.STOP
#######################################################
# This portion is written for you, but will only work #
# after you fill in parts of search.py #
#######################################################
class SearchAgent(Agent):
    """
    This very general search agent finds a path using a supplied search
    algorithm for a supplied search problem, then returns actions to follow that
    path.
    As a default, this agent runs DFS on a PositionSearchProblem to find
    location (1,1)
    Options for fn include:
    depthFirstSearch or dfs
    breadthFirstSearch or bfs
    Note: You should NOT change any code in SearchAgent
    """
    def __init__(self, fn='depthFirstSearch', prob='PositionSearchProblem', heuristic='nullHeuristic'):
        # Warning: some advanced Python magic is employed below to find the right functions and problems
        # Get the search function from the name and heuristic
        if fn not in dir(search):
            raise AttributeError(fn + ' is not a search function in search.py.')
        func = getattr(search, fn)
        # A search function that declares a 'heuristic' parameter is informed
        # (e.g. A*); anything else is called with the problem alone.
        if 'heuristic' not in func.__code__.co_varnames:
            print('[SearchAgent] using function ' + fn)
            self.searchFunction = func
        else:
            # Resolve the heuristic by name: this module's globals first,
            # then search.py.
            if heuristic in globals().keys():
                heur = globals()[heuristic]
            elif heuristic in dir(search):
                heur = getattr(search, heuristic)
            else:
                raise AttributeError(heuristic + ' is not a function in searchAgents.py or search.py.')
            print('[SearchAgent] using function %s and heuristic %s' % (fn, heuristic))
            # Note: this bit of Python trickery combines the search algorithm and the heuristic
            self.searchFunction = lambda x: func(x, heuristic=heur)
        # Get the search problem type from the name
        if prob not in globals().keys() or not prob.endswith('Problem'):
            raise AttributeError(prob + ' is not a search problem type in SearchAgents.py.')
        self.searchType = globals()[prob]
        print('[SearchAgent] using problem type ' + prob)
    def registerInitialState(self, state):
        """
        This is the first time that the agent sees the layout of the game
        board. Here, we choose a path to the goal. In this phase, the agent
        should compute the path to the goal and store it in a local variable.
        All of the work is done in this method!
        state: a GameState object (pacman.py)
        """
        if self.searchFunction == None: raise Exception("No search function provided for SearchAgent")
        starttime = time.time()
        problem = self.searchType(state) # Makes a new search problem
        self.actions = self.searchFunction(problem) # Find a path
        totalCost = problem.getCostOfActions(self.actions)
        print('Path found with total cost of %d in %.1f seconds' % (totalCost, time.time() - starttime))
        # _expanded is optional bookkeeping some problems maintain for display.
        if '_expanded' in dir(problem): print('Search nodes expanded: %d' % problem._expanded)
    def getAction(self, state):
        """
        Returns the next action in the path chosen earlier (in
        registerInitialState). Return Directions.STOP if there is no further
        action to take.
        state: a GameState object (pacman.py)
        """
        # Lazily create the cursor into self.actions on the first call.
        if 'actionIndex' not in dir(self): self.actionIndex = 0
        i = self.actionIndex
        self.actionIndex += 1
        if i < len(self.actions):
            return self.actions[i]
        else:
            return Directions.STOP
class PositionSearchProblem(search.SearchProblem):
    """
    A search problem defines the state space, start state, goal test, successor
    function and cost function. This search problem can be used to find paths
    to a particular point on the pacman board.
    The state space consists of (x,y) positions in a pacman game.
    Note: this search problem is fully specified; you should NOT change it.
    """
    def __init__(self, gameState, costFn = lambda x: 1, goal=(1,1), start=None, warn=True, visualize=True):
        """
        Stores the start and goal.
        gameState: A GameState object (pacman.py)
        costFn: A function from a search state (tuple) to a non-negative number
        goal: A position in the gameState
        """
        self.walls = gameState.getWalls()
        self.startState = gameState.getPacmanPosition()
        # An explicit start position overrides Pacman's current position.
        if start != None: self.startState = start
        self.goal = goal
        self.costFn = costFn
        self.visualize = visualize
        if warn and (gameState.getNumFood() != 1 or not gameState.hasFood(*goal)):
            print('Warning: this does not look like a regular search maze')
        # For display purposes
        self._visited, self._visitedlist, self._expanded = {}, [], 0 # DO NOT CHANGE
    def getStartState(self):
        return self.startState
    def isGoalState(self, state):
        isGoal = state == self.goal
        # For display purposes only
        if isGoal and self.visualize:
            self._visitedlist.append(state)
            import __main__
            if '_display' in dir(__main__):
                if 'drawExpandedCells' in dir(__main__._display): #@UndefinedVariable
                    __main__._display.drawExpandedCells(self._visitedlist) #@UndefinedVariable
        return isGoal
    def getSuccessors(self, state):
        """
        Returns successor states, the actions they require, and a cost of 1.
        As noted in search.py:
        For a given state, this should return a list of triples,
        (successor, action, stepCost), where 'successor' is a
        successor to the current state, 'action' is the action
        required to get there, and 'stepCost' is the incremental
        cost of expanding to that successor
        """
        successors = []
        for action in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]:
            x,y = state
            dx, dy = Actions.directionToVector(action)
            nextx, nexty = int(x + dx), int(y + dy)
            if not self.walls[nextx][nexty]:
                nextState = (nextx, nexty)
                cost = self.costFn(nextState)
                successors.append( ( nextState, action, cost) )
        # Bookkeeping for display purposes
        self._expanded += 1 # DO NOT CHANGE
        if state not in self._visited:
            self._visited[state] = True
            self._visitedlist.append(state)
        return successors
    def getCostOfActions(self, actions):
        """
        Returns the cost of a particular sequence of actions. If those actions
        include an illegal move, return 999999.
        """
        if actions == None: return 999999
        x,y= self.getStartState()
        cost = 0
        for action in actions:
            # Figure out the next state and see whether it's legal
            dx, dy = Actions.directionToVector(action)
            x, y = int(x + dx), int(y + dy)
            if self.walls[x][y]: return 999999
            cost += self.costFn((x,y))
        return cost
class StayEastSearchAgent(SearchAgent):
    """
    An agent for position search with a cost function that penalizes being in
    positions on the West side of the board.
    The cost function for stepping into a position (x,y) is 1/2^x.
    """
    def __init__(self):
        self.searchFunction = search.uniformCostSearch
        def costFn(pos):
            # Stepping into column x costs (1/2)^x, so eastern squares are cheap.
            return 0.5 ** pos[0]
        self.searchType = lambda state: PositionSearchProblem(state, costFn, (1, 1), None, False)
class StayWestSearchAgent(SearchAgent):
    """
    An agent for position search with a cost function that penalizes being in
    positions on the East side of the board.
    The cost function for stepping into a position (x,y) is 2^x.
    """
    def __init__(self):
        self.searchFunction = search.uniformCostSearch
        def costFn(pos):
            # Stepping into column x costs 2^x, so eastern squares are expensive.
            return 2 ** pos[0]
        self.searchType = lambda state: PositionSearchProblem(state, costFn)
def manhattanHeuristic(position, problem, info={}):
    "The Manhattan distance heuristic for a PositionSearchProblem"
    px, py = position
    gx, gy = problem.goal
    return abs(px - gx) + abs(py - gy)
def euclideanHeuristic(position, problem, info={}):
    "The Euclidean distance heuristic for a PositionSearchProblem"
    dx = position[0] - problem.goal[0]
    dy = position[1] - problem.goal[1]
    return (dx * dx + dy * dy) ** 0.5
#####################################################
# This portion is incomplete. Time to write code! #
#####################################################
class CornersProblem(search.SearchProblem):
    """
    This search problem finds paths through all four corners of a layout.
    A search state is a pair (position, reached) where 'reached' is a 4-tuple
    of 0/1 flags, one per corner in self.corners order.
    """
    def __init__(self, startingGameState):
        """
        Stores the walls, pacman's starting position and corners.
        """
        self.walls = startingGameState.getWalls()
        self.startingPosition = startingGameState.getPacmanPosition()
        top, right = self.walls.height-2, self.walls.width-2
        self.corners = ((1,1), (1,top), (right, 1), (right, top))
        for corner in self.corners:
            if not startingGameState.hasFood(*corner):
                print('Warning: no food in corner ' + str(corner))
        self._expanded = 0 # DO NOT CHANGE; Number of search nodes expanded
        # Extra bookkeeping used by the search state and the heuristic.
        self.reached_corners = (0,0,0,0)
        self.startingGameState = startingGameState
    def getStartState(self):
        """
        Returns the start state (in your state space, not the full Pacman state
        space)
        """
        return (self.startingPosition, self.reached_corners)
    def isGoalState(self, state):
        """
        Returns whether this search state is a goal state of the problem.
        """
        return state[1] == (1,1,1,1)
    def getSuccessors(self, state):
        """
        Returns successor states, the actions they require, and a cost of 1.
        Each successor is ((position, reached), action, 1).
        """
        position, reached = state
        x, y = position
        successors = []
        for action in (Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST):
            dx, dy = Actions.directionToVector(action)
            nextx, nexty = int(x + dx), int(y + dy)
            if self.walls[nextx][nexty]:
                continue
            # Flip the flag for any corner we just stepped onto.
            updated = tuple(
                1 if (nextx, nexty) == self.corners[i] else reached[i]
                for i in range(len(self.corners)))
            successors.append((((nextx, nexty), updated), action, 1))
        self._expanded += 1 # DO NOT CHANGE
        return successors
    def getCostOfActions(self, actions):
        """
        Returns the cost of a particular sequence of actions. If those actions
        include an illegal move, return 999999. This is implemented for you.
        """
        if actions == None: return 999999
        x,y= self.startingPosition
        for action in actions:
            dx, dy = Actions.directionToVector(action)
            x, y = int(x + dx), int(y + dy)
            if self.walls[x][y]: return 999999
        return len(actions)
def cornersHeuristic(state, problem):
    """
    A heuristic for the CornersProblem: the maze distance from the current
    position to the farthest corner not yet visited (0 when every corner is
    done).  Every unreached corner must still be visited, so the distance to
    any single one of them is a lower bound on the remaining cost.
    """
    corners = problem.corners # These are the corner coordinates
    walls = problem.walls # These are the walls of the maze, as a Grid (game.py)
    position, reached = state
    pending = [corners[i] for i in range(len(corners)) if reached[i] == 0]
    if not pending:
        return 0
    return max(mazeDistance(position, corner, problem.startingGameState)
               for corner in pending)
class AStarCornersAgent(SearchAgent):
    "A SearchAgent for CornersProblem using A* and your cornersHeuristic"
    def __init__(self):
        self.searchFunction = lambda prob: search.aStarSearch(prob, cornersHeuristic)
        self.searchType = CornersProblem
class FoodSearchProblem:
    """
    A search problem for collecting all of the food (dots) on a Pacman board.
    A search state is a pair ( pacmanPosition, foodGrid ):
    pacmanPosition: an (x,y) tuple of integers
    foodGrid: a Grid (see game.py) of True/False marking remaining food
    """
    def __init__(self, startingGameState):
        self.start = (startingGameState.getPacmanPosition(), startingGameState.getFood())
        self.walls = startingGameState.getWalls()
        self.startingGameState = startingGameState
        self._expanded = 0 # DO NOT CHANGE
        self.heuristicInfo = {} # A dictionary for the heuristic to store information
    def getStartState(self):
        return self.start
    def isGoalState(self, state):
        # Goal: no food cells left in the grid.
        return state[1].count() == 0
    def getSuccessors(self, state):
        "Returns successor states, the actions they require, and a cost of 1."
        self._expanded += 1 # DO NOT CHANGE
        (x, y), food = state
        successors = []
        for direction in (Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST):
            dx, dy = Actions.directionToVector(direction)
            nextx, nexty = int(x + dx), int(y + dy)
            if self.walls[nextx][nexty]:
                continue
            # Moving onto a square eats whatever food is there.
            remaining = food.copy()
            remaining[nextx][nexty] = False
            successors.append((((nextx, nexty), remaining), direction, 1))
        return successors
    def getCostOfActions(self, actions):
        """Returns the cost of a particular sequence of actions. If those actions
        include an illegal move, return 999999"""
        x, y = self.getStartState()[0]
        cost = 0
        for action in actions:
            # Figure out the next state and see whether it's legal.
            dx, dy = Actions.directionToVector(action)
            x, y = int(x + dx), int(y + dy)
            if self.walls[x][y]:
                return 999999
            cost += 1
        return cost
class AStarFoodSearchAgent(SearchAgent):
    "A SearchAgent for FoodSearchProblem using A* and your foodHeuristic"
    def __init__(self):
        # Bind A* to foodHeuristic; SearchAgent.registerInitialState does the rest.
        self.searchFunction = lambda prob: search.aStarSearch(prob, foodHeuristic)
        self.searchType = FoodSearchProblem
def foodHeuristic(state, problem):
    """
    Heuristic for the FoodSearchProblem: maze distance to the closest
    remaining dot, plus one step for every other remaining dot.

    The state is a tuple ( pacmanPosition, foodGrid ) where foodGrid is a Grid
    (see game.py) of either True or False; foodGrid.asList() yields the
    coordinates of the remaining food.

    Bug fixes relative to the original version:
    * the running minimum was assigned to a typo'd variable ('least'), so the
      value returned was the distance to an arbitrary dot, not the closest;
    * foodGrid.asList().count(True) is always 0 (asList() returns coordinate
      tuples, not booleans), so the food count never contributed;
    * the 'foodLeft > 0' branch computed a value but never returned it.
    """
    position, foodGrid = state
    foodList = foodGrid.asList()  # coordinates of all remaining dots
    if not foodList:
        return 0  # already at a goal state
    leastDistance = min(
        mazeDistance(position, food, problem.startingGameState)
        for food in foodList)
    # After reaching the closest dot, each other dot still costs at least one
    # more step, so this remains a lower bound on the true remaining cost.
    return leastDistance + len(foodList) - 1
class ClosestDotSearchAgent(SearchAgent):
    "Search for all food using a sequence of searches"
    def registerInitialState(self, state):
        """Greedily chain shortest paths to the closest dot until no food is left."""
        self.actions = []
        currentState = state
        while(currentState.getFood().count() > 0):
            nextPathSegment = self.findPathToClosestDot(currentState) # The missing piece
            self.actions += nextPathSegment
            # Replay the segment to advance currentState, validating each move.
            for action in nextPathSegment:
                legal = currentState.getLegalActions()
                if action not in legal:
                    t = (str(action), str(currentState))
                    raise Exception('findPathToClosestDot returned an illegal move: %s!\n%s' % t)
                currentState = currentState.generateSuccessor(0, action)
        self.actionIndex = 0
        print('Path found with cost %d.' % len(self.actions))
    def findPathToClosestDot(self, gameState):
        """
        Returns a path (a list of actions) to the closest dot, starting from
        gameState.
        """
        # Cleanup: the original fetched startPosition/food/walls from the
        # gameState but never used them; those dead locals are removed.
        # AnyFoodSearchProblem's goal test accepts any food square and every
        # step costs 1, so an optimal search returns a shortest path to the
        # closest dot.  (aStarSearch is called without an explicit heuristic.)
        problem = AnyFoodSearchProblem(gameState)
        return search.aStarSearch(problem)
class AnyFoodSearchProblem(PositionSearchProblem):
    """
    A search problem for finding a path to any food.

    Identical to PositionSearchProblem except for the goal test: any square
    that still contains food counts as a goal.  Used as the inner problem of
    ClosestDotSearchAgent.findPathToClosestDot.
    """
    def __init__(self, gameState):
        "Stores information from the gameState. You don't need to change this."
        # Keep the food grid so the goal test can consult it.
        self.food = gameState.getFood()
        # Everything below mirrors PositionSearchProblem's setup.
        self.walls = gameState.getWalls()
        self.startState = gameState.getPacmanPosition()
        self.costFn = lambda x: 1
        self._visited, self._visitedlist, self._expanded = {}, [], 0 # DO NOT CHANGE
    def isGoalState(self, state):
        """
        The state is Pacman's position; it is a goal iff that square still
        holds food.
        """
        x, y = state
        return True if self.food[x][y] else False
def mazeDistance(point1, point2, gameState):
    """
    Returns the maze distance between any two points, using the search
    functions you have already built.  The gameState can be any game state --
    Pacman's position in that state is ignored.
    Example usage: mazeDistance( (2,4), (5,6), gameState)
    """
    x1, y1 = point1
    x2, y2 = point2
    walls = gameState.getWalls()
    assert not walls[x1][y1], 'point1 is a wall: ' + str(point1)
    assert not walls[x2][y2], 'point2 is a wall: ' + str(point2)
    maze_problem = PositionSearchProblem(
        gameState, start=point1, goal=point2, warn=False, visualize=False)
    # BFS on unit step costs yields a shortest path; its length is the distance.
    return len(search.bfs(maze_problem))
| [
"emmahan@Emmas-MacBook-Pro-2.local"
] | emmahan@Emmas-MacBook-Pro-2.local |
6a592e70e7ba4b842e80cb01828081e643c30039 | 1022d1db3f02fd9f780c3234daa9954203859d38 | /DBcontroller/clientdbController.py | 9d198caadc446dfad058e25897f5b9563a0d399c | [] | no_license | riyajoe/Gym-Project | 8db42d65396f957929ffab4804293dd1bb27a9da | 9fea3d1a03bbad3b21af800873af937bb8609c79 | refs/heads/main | 2023-06-07T02:51:19.286935 | 2021-06-20T14:14:52 | 2021-06-20T14:14:52 | 354,658,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 748 | py | from models.Client import Client
from app import db
class clientdbController:
    """Data-access helpers for Client rows (thin SQLAlchemy session wrapper)."""

    def __init__(self):
        # No per-instance state; all access goes through the module-level db.
        pass

    def addClient(self, Client):
        # NOTE: the parameter intentionally shadows the imported Client model;
        # the name is kept for backward compatibility with existing callers.
        db.create_all()
        db.session.add(Client)
        db.session.commit()

    def getClients(self):
        """Return every Client row."""
        return db.session.query(Client).all()

    def getClientByid(self, id):
        # BUG FIX: 'db.session.query' is a method and must be called with the
        # model before '.filter'; the original accessed '.filter' on the bound
        # method object and raised AttributeError at runtime.
        return db.session.query(Client).filter(Client.memberid == id)

    def getClientByMembership(self, id):
        """Return the membership column for the client with the given id."""
        return db.session.query(Client.membership).filter(Client.memberid == id).all()

    def updateClientByMembership(self, id, membershipObj):
        # BUG FIX: Query.update() returns the affected row count (an int); the
        # original chained '.all()' onto it, which raised AttributeError.
        db.session.query(Client.membership).filter(Client.memberid == id).update(
            {Client.membership: membershipObj})
        db.session.commit()
| [
"joeriyamary@outlook.com"
] | joeriyamary@outlook.com |
469e579b0a396a30e46ed93bc267b76bed2218c9 | b088d5dc4321f9f145c7bceb20a0b9479b374c65 | /level1&2/42883.py | 8169482655042d081bd9380cf7217e0935b0e85c | [] | no_license | heojungeun/codingtestPractice | 55bfc2b13791f5cb3133b0815991a0c696f8482c | 65d668bf6df82967f89d4ec4eb3a1e11de603729 | refs/heads/master | 2022-09-17T00:34:05.887237 | 2020-05-30T06:45:30 | 2020-05-30T06:45:30 | 261,093,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,404 | py | def solution(number, k):
    # Greedy "largest number after deleting k digits": keep a stack of kept
    # digits, popping smaller ones while deletions (k) remain.
    # The commented block below is the original O(C(n,k)) brute force, kept
    # for reference only.
    # import itertools
    # dig = []
    # for i in range(0,len(number)):
    #     dig.append(i)
    # dig = list(itertools.combinations(dig,k))
    # lenn = len(number)
    # arr = []
    # for x in dig:
    #     tmp = ''
    #     for i in range(lenn):
    #         if i in x:
    #             continue
    #         tmp += number[i]
    #     arr.append(int(tmp))
    # answer = str(max(arr))
    st = []
    for x in number:
        if k==0 or not st:
            st.append(x)
        else:
            if st[-1] < x:
                # NOTE(review): st is popped while iterating reversed(st);
                # this happens to keep yielding the new last element after
                # each pop, but it is fragile -- verify before changing.
                tmp = reversed(st)
                for e in tmp:
                    if e < x:
                        st.pop()
                        k -= 1
                        if k==0 or not st:
                            st.append(x)
                            break
                    else:
                        st.append(x)
                        break
            else:
                st.append(x)
    # Deletions left over after the scan: drop digits from the tail.
    while k > 0:
        st.pop()
        k -= 1
    answer = "".join(st)
    return answer
def standardsolution(number,k):
    """Greedy stack solution: largest number string obtainable by removing
    exactly k digits from `number` while preserving digit order."""
    kept = []
    budget = k
    for idx, digit in enumerate(number):
        # Pop smaller kept digits while deletions remain.
        while budget > 0 and kept and kept[-1] < digit:
            kept.pop()
            budget -= 1
        if budget == 0:
            # Nothing left to delete: the rest of the input is kept verbatim.
            kept.append(number[idx:])
            break
        kept.append(digit)
    if budget > 0:
        # Deletions remain after the scan: trim from the tail.
        kept = kept[:-budget]
    return "".join(kept)
# Ad-hoc smoke test for solution(); expected printed output: "2".
n = "12"
nk = 1
print(solution(n,nk)) | [
"heocube@naver.com"
] | heocube@naver.com |
d91e62fc90665328bcd80d2dec48265a00a287c2 | dbeeb70d1f6dc4522ec69c54fad5a455f32649eb | /jadrn023/login.cgi | 552d43acd529496a9a64140868b14cab50e5584d | [] | no_license | gsivakumar608/Web-Application | 294cc8f45a92b94596212ce6d122d9e251b6090a | ea8c6ff93d28e1c60adb42116a7adda3546e5b36 | refs/heads/master | 2021-01-09T09:39:02.716133 | 2016-07-13T20:16:51 | 2016-07-13T20:16:51 | 63,276,378 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,696 | cgi | #!/usr/bin/perl
use CGI;
use CGI::Session;
use CGI::Carp qw (fatalsToBrowser);
use Crypt::SaltedHash;
##---------------------------- MAIN ---------------------------------------
# File-scoped CGI object: created in authenticate_user() and reused by
# send_to_main().
my $q;

# Dispatch on the credential check: authenticated users get the main page,
# everyone else the login-error redirect.
authenticate_user() ? send_to_main() : send_to_login_error();
###########################################################################
###########################################################################
sub authenticate_user {
    # Return 1 when the submitted user/password pair matches a record in the
    # salted-hash password file, 0 otherwise.  Also creates the file-scoped
    # CGI object ($q) that send_to_main() uses later.
    $q = new CGI;
    my $user     = $q->param("user");
    my $password = $q->param("password");

    # Hygiene fix: use a lexical filehandle with 3-arg open instead of the
    # bareword DATA handle, which Perl reserves for the __DATA__ section.
    open my $fh, '<', '/srv/www/cgi-bin/jadrn023/passwords.dat'
        or die "Cannot open file.";
    my @file_lines = <$fh>;
    close $fh;

    my $OK = 0;    # not authorized until a record matches
    foreach my $line (@file_lines) {
        chomp $line;
        # Records are stored as "username=saltedhash".
        my ($stored_user, $stored_pass) = split /=/, $line;
        if ($stored_user eq $user
                && Crypt::SaltedHash->validate($stored_pass, $password)) {
            $OK = 1;
            last;
        }
    }
    return $OK;
}
###########################################################################
###########################################################################
sub send_to_login_error {
    # Send a minimal page that immediately redirects to the static error page.
    # Bug fix: a CGI response requires a blank line between the header block
    # and the body; without it the browser treats the markup as malformed
    # header lines.
    print <<END;
Content-type: text/html

<html>
<head>
<meta http-equiv="refresh"
content="0; url=http://jadran.sdsu.edu/~jadrn023/proj1/error.html" />
</head><body></body>
</html>
END
}
###########################################################################
###########################################################################
sub send_to_main {
    # Start a brand-new server-side session and emit the main inventory page.
    # args are DRIVER, CGI OBJECT, SESSION LOCATION
    # default for undef is FILE, NEW SESSION, /TMP
    # for login.html, don't look for any existing session.
    # Always start a new one. Send a cookie to the browser.
    # Default expiration is when the browser is closed.
    # WATCH YOUR COOKIE NAMES! USE JADRNXXX_SID
    my $session = new CGI::Session(undef, undef, {Directory=>'/tmp'});
    $session->expires('+1d');
    # NOTE(review): the cookie name 'jadrn000SID' does not match the jadrn023
    # account used everywhere else in this script -- confirm it is intentional.
    my $cookie = $q->cookie(jadrn000SID => $session->id);
    print $q->header( -cookie=>$cookie ); #send cookie with session ID to browser
    # NOTE(review): $sid is captured but never interpolated into the page below.
    my $sid = $session->id;
    # The heredoc below is emitted verbatim; bare '$ ' before <li> tags are
    # literal dollar signs (price labels), not Perl interpolations.
    print <<END;
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>Cameras</title>
<meta http-equiv="content-type"
content="text/html;charset=utf-8" />
<link rel="stylesheet" type="text/css" href="/~jadrn023/proj1/css/style.css" />
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script type="text/javascript" src="/~jadrn023/proj1/js/ajax_healper.js"></script>
<script type="text/javascript" src="/~jadrn023/proj1/js/validation.js"></script>
</head>
<body>
<h1> Cameras </h1>
<div class="progressTextDiv"> Submitting Form...</div>
<div class="successCenter" id ="confirmation"></div>
<form id="addProductForm"
name="Validate"
action="http://jadran.sdsu.edu/perl/jadrn023/proj1/confirm.cgi"
method="post"
enctype="multipart/form-data">
<div id="menu">
<ul>
<li><a class="selected" href="/~jadrn023/proj1/newInventory.html">New Inventory</a></li>
<li><a href="/~jadrn023/proj1/editInventory.html">Edit Inventory</a></li>
<li><a href="/~jadrn023/proj1/deleteInventory.html">Delete Inventory</a></li>
</ul>
</div>
<!-- <div id= "content"> -->
<ul class="inlineobjects">
<li><label class="title">SKU:<span class="astric">*</span></label></li>
<li><input type="text" name="sku" id="sku" size="25" maxlength="6"/></li>
</ul>
<ul class="inlineobjects">
<li><label class="title">Category:<span class="astric">*</span></label></li>
<li><input type="text" name="category" id="category" size="25" /></li>
</ul>
<ul class="inlineobjects">
<li><label class="title">Vender:<span class="astric">*</span></label></li>
<li><input type="text" name="vender" id="vender" size="25" /></li>
</ul>
<ul class="inlineobjects">
<li><label class="title">Manufacturer's Identifier:<span class="astric">*</span></label></li>
<li><input type="text" name="manufacturersidentifier" id="manufacturersidentifier" size="25" /></li>
</ul>
<ul class="inlineobjects">
<li><label class="title">Description:<span class="astric">*</span></label></li>
<li><textarea rows="4" cols="50" name="description" id="description" ></textarea></li>
</ul>
<ul class="inlineobjects">
<li><label class="title">Product Features:<span class="astric">*</span></label></li>
<li><textarea rows="4" cols="50" name="productfeatures" id="productfeatures" ></textarea></li>
</ul>
<ul class="inlineobjects">
<li><label class="title">Cost:<span class="astric">*</span></label></li>
$ <li><input type="text" name="cost" id="cost" size="25" /></li>
</ul>
<ul class="inlineobjects">
<li><label class="title">Retail:<span class="astric">*</span></label></li>
$ <li><input type="text" name="retail" id="retail" size="25" /></li>
</ul>
<ul class="inlineobjects">
<li><label class="title">Product Image:<span class="astric">*</span></label></li>
<li><input type="file" name="productimage" id="productimage" /></li>
</ul>
<div id="error_message">
</div>
<div id="button">
<input type="submit" value="Submit" name="submit" class="formbutton" />
<input type="reset" value="Clear" name="reset" class="formbutton" />
</div>
</form>
</body>
</html>
END
}
###########################################################################
| [
"gsivakumar.608@gmail.com"
] | gsivakumar.608@gmail.com |
7c890c6f90d89e38a402fe7197ee5d893f440d9e | a174ff975f1cb1bcea094e64ece15c080bf9dcb9 | /libtorch/v1.7.0/arm64/src/libtorch-1.7/torch/version.py | f014665d30998adf28e2054d34f6d86d25172565 | [] | no_license | sugarme/gotch-docker | d4006c98c593fd126c0d84bcb5d0dd16a609046a | a4e28d5377016b2b4034e03ae39b30a8549feec6 | refs/heads/master | 2023-04-09T04:59:24.646331 | 2021-04-22T05:42:14 | 2021-04-22T05:42:14 | 355,382,320 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | __version__ = '1.7.0a0+6394982'
# Build metadata recorded for this torch distribution.
debug = True  # debug build flag
cuda = None  # None: no CUDA toolkit version recorded for this build
git_version = '6394982d1389f9ce2e488ae4c9b4e3c0321ca978'  # source commit hash
hip = None  # None: no ROCm/HIP version recorded for this build
| [
"thangtran@hotmail.com"
] | thangtran@hotmail.com |
64bc3026ee1dc94c42227f42402230d18c9dd555 | 53708ab28946feb229c6dda31dda053139b543af | /api/serializers.py | b714607d26c6ba8c2cf8815838e70aa25fafd5bb | [] | no_license | KzmMthr/foodgram-project | 26d82fb5036600185316b16e6452ef71288e55aa | 420d3ba8170bc40b43987bb4a30ade5f128b12b9 | refs/heads/master | 2023-04-25T21:37:27.133756 | 2021-05-25T06:04:04 | 2021-05-25T06:04:04 | 346,710,246 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,051 | py | from django.contrib.auth import get_user_model
from rest_framework import serializers
from api.models import Favorite, Purchase, Subscribe
from recipes.models import Ingredient, Recipe
User = get_user_model()
class IngredientSerializer(serializers.ModelSerializer):
    """Serializes every field of the Ingredient model."""
    class Meta:
        fields = '__all__'
        model = Ingredient
class FavoriteSerializer(serializers.ModelSerializer):
    """Serializer for adding a recipe to a user's favorites.

    The client sends only the recipe id; the author defaults to the
    authenticated request user.
    """
    # Incoming 'id' is the primary key of the recipe being favorited.
    id = serializers.SlugRelatedField(
        slug_field='id', queryset=Recipe.objects.all(), source='recipe')
    author = serializers.PrimaryKeyRelatedField(
        read_only=True, default=serializers.CurrentUserDefault())
    class Meta:
        fields = ('id', 'author')
        model = Favorite
    def create(self, validated_data):
        # 'author' is read-only, so it is normally absent from validated_data;
        # fall back to the request user in that case.
        if 'author' not in validated_data:
            validated_data['author'] = self.context['request'].user
        return Favorite.objects.create(**validated_data)
class SubscribeSerializer(serializers.ModelSerializer):
    """Serializer for subscribing the request user to another user (author).

    The client sends only the target user's id; the follower defaults to the
    authenticated request user.
    """
    # Incoming 'id' is the primary key of the user being followed.
    id = serializers.SlugRelatedField(
        slug_field='id', queryset=User.objects.all(), source='author')
    follower = serializers.PrimaryKeyRelatedField(
        read_only=True, default=serializers.CurrentUserDefault())
    class Meta:
        fields = ['id', 'follower']
        model = Subscribe
    def create(self, validated_data):
        # 'follower' is read-only, so it is normally absent from
        # validated_data; fall back to the request user in that case.
        if 'follower' not in validated_data:
            validated_data['follower'] = self.context['request'].user
        return Subscribe.objects.create(**validated_data)
class PurchaseSerializer(serializers.ModelSerializer):
    """Serializer for adding a recipe to a user's purchase (shopping) list.

    Mirrors FavoriteSerializer: the client sends only the recipe id and the
    author defaults to the authenticated request user.
    """
    # Incoming 'id' is the primary key of the recipe being purchased.
    id = serializers.SlugRelatedField(
        slug_field='id', queryset=Recipe.objects.all(), source='recipe')
    author = serializers.PrimaryKeyRelatedField(
        read_only=True, default=serializers.CurrentUserDefault())
    class Meta:
        fields = ('id', 'author')
        model = Purchase
    def create(self, validated_data):
        # 'author' is read-only, so it is normally absent from validated_data;
        # fall back to the request user in that case.
        if 'author' not in validated_data:
            validated_data['author'] = self.context['request'].user
        return Purchase.objects.create(**validated_data)
| [
"gurkinnn@yandex.ru"
] | gurkinnn@yandex.ru |
b16642c37a87340c9129682da5a7cdc83f42cf28 | 09d8ededcaea85350aaa35ea240063f40cb82308 | /test/functional/rpc_users.py | 816b11734136e1a13c535e846c05ab29fa0e9bc3 | [
"MIT"
] | permissive | XaviFortes/PipoCoin | 89886c42490c3c0adbc95a0c60f9391192f7e5b5 | 0755b00fa600adb0ffa4de0b2746a66f3d0fefb7 | refs/heads/main | 2023-04-25T12:16:54.200733 | 2021-05-14T17:52:30 | 2021-05-14T17:52:30 | 367,409,612 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,985 | py | #!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiple RPC users."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import str_to_b64str, assert_equal
import os
import http.client
import urllib.parse
class HTTPBasicsTest(BitcoinTestFramework):
    """Exercise RPC authentication with rpcauth and rpcuser/rpcpassword pairs."""

    def set_test_params(self):
        self.num_nodes = 2

    def setup_chain(self):
        super().setup_chain()
        # Append rpcauth entries (node0) and rpcuser/rpcpassword (node1) to the
        # per-node config files before the nodes start.
        rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
        rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
        rpcuser = "rpcuser=rpcuser💻"
        rpcpassword = "rpcpassword=rpcpassword🔑"
        with open(os.path.join(self.options.tmpdir+"/node0", "pipocoin.conf"), 'a', encoding='utf8') as f:
            f.write(rpcauth+"\n")
            f.write(rpcauth2+"\n")
        with open(os.path.join(self.options.tmpdir+"/node1", "pipocoin.conf"), 'a', encoding='utf8') as f:
            f.write(rpcuser+"\n")
            f.write(rpcpassword+"\n")

    def _rpc_status(self, url, authpair):
        """POST a minimal RPC with HTTP Basic auth 'user:pass'; return the HTTP status.

        Extracted from run_test, where the same 7-line stanza was repeated nine
        times with only the credentials varying.
        """
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        status = conn.getresponse().status
        conn.close()
        return status

    def run_test(self):
        ##################################################
        # Check correctness of the rpcauth config option #
        ##################################################
        url = urllib.parse.urlparse(self.nodes[0].url)

        # Old authpair from the node's own credentials.
        authpair = url.username + ':' + url.password
        # New authpairs generated via the share/rpcuser tool.
        password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
        password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="

        assert_equal(self._rpc_status(url, authpair), 200)                      # old authpair works
        assert_equal(self._rpc_status(url, "rt:" + password), 200)              # rpcauth pair works
        assert_equal(self._rpc_status(url, "rtwrong:" + password), 401)         # wrong login name
        assert_equal(self._rpc_status(url, "rt:" + password + "wrong"), 401)    # wrong password for rt
        assert_equal(self._rpc_status(url, "rt2:" + password2), 200)            # second rpcauth user
        assert_equal(self._rpc_status(url, "rt2:" + password2 + "wrong"), 401)  # wrong password for rt2

        ###############################################################
        # Check correctness of the rpcuser/rpcpassword config options #
        ###############################################################
        url = urllib.parse.urlparse(self.nodes[1].url)

        assert_equal(self._rpc_status(url, "rpcuser💻:rpcpassword🔑"), 200)     # configured pair works
        assert_equal(self._rpc_status(url, "rpcuserwrong:rpcpassword"), 401)    # wrong login name
        assert_equal(self._rpc_status(url, "rpcuser:rpcpasswordwrong"), 401)    # wrong password
# Script entry point: run the HTTP basic-auth RPC test suite directly.
if __name__ == '__main__':
    HTTPBasicsTest ().main ()
| [
"itsfortes@gmail.com"
] | itsfortes@gmail.com |
3abf0e9fd120a67af60e15d7712ed088d34fffcd | fda573d072a89359486b16b4e7145d2b2843c576 | /src/models/pseudonet_focal.py | f72ec03ad52c42547251113501b9503ff15f39f6 | [] | no_license | jessekim-ck/Oregon-wildlife | 1117ec8decc4fbcb9356d105cac1c3d46904d983 | ca8e837a8c483495514f9bcc6c781659ab6aa9c7 | refs/heads/master | 2022-12-01T10:21:59.526677 | 2020-08-20T05:10:25 | 2020-08-20T05:10:25 | 286,944,354 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,244 | py | import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision import transforms
from .utils import multi_focal_loss
from src.models import BaseModel
from src.backbones import EfficientNet
from src.datasets import BaseDataset
from src.datasets import PseudoDataset
class PseudoNetFocal(BaseModel):
    """EfficientNet-B0 classifier trained with a multi-class focal loss.

    Also supports pseudo-labelling: confident predictions on the test split
    can be recycled into an extra training dataloader.
    """

    def __init__(self, args):
        super().__init__()
        self.args = args
        self.dataset = BaseDataset

        # Augmentation pipeline applied to (pseudo-)training samples.
        augmentations = [
            transforms.Resize((256, 256)),
            transforms.RandomCrop((224, 224)),
            transforms.RandomHorizontalFlip(p=0.5),
            transforms.RandomRotation(30),
            transforms.ColorJitter(),
            transforms.ToTensor(),
            transforms.RandomErasing(p=0.5, value="random"),
        ]
        self.train_transform = transforms.Compose(augmentations)

        # Deterministic pipeline used at evaluation time.
        self.test_transform = transforms.Compose([
            transforms.Resize((224, 224)),
            transforms.ToTensor(),
        ])

        # Backbone plus a 20-way classification head.
        self.feature = EfficientNet.from_name("efficientnet-b0")
        self.fc = nn.Linear(self.feature.out_channels, 20)

    def forward(self, x):
        """Return raw class logits for a batch of images."""
        return self.fc(self.feature(x))

    def get_cost(self, data):
        """Compute the focal loss and per-sample predictions for one batch.

        Returns a ``(cost, preds)`` pair where ``preds`` holds numpy arrays of
        paths, ground-truth ids, predicted ids and prediction scores.
        """
        paths, imgs, cls_ids = data
        logits = self(imgs.cuda())
        cost = multi_focal_loss(logits, cls_ids.cuda())

        with torch.no_grad():
            scores, predicted = torch.max(torch.sigmoid(logits), dim=1)

        preds = {
            "paths": np.array(paths),
            "cls_ids": cls_ids.numpy(),
            "cls_ids_pred": predicted.cpu().numpy(),
            "pred_scores": scores.cpu().numpy(),
        }
        return cost, preds

    def get_pseudo_train_dataloader(self):
        """Build a dataloader over confidently pseudo-labelled test samples."""
        pseudo_set = PseudoDataset(
            model=self,
            dataloader=self.get_test_dataloader(),
            transform=self.train_transform,
            th=0.9,  # confidence threshold for accepting a pseudo label
        )
        return DataLoader(
            dataset=pseudo_set,
            batch_size=self.args.batch_size,
            shuffle=True,
            num_workers=self.args.num_workers,
            pin_memory=True,
        )
| [
"jessekim.ck.94@gmail.com"
] | jessekim.ck.94@gmail.com |
42fcf0dd82fc975c09922d023f79af57d7249813 | 5aa26394708ecad0210706c9f5e12ddf72c3e238 | /tests/backend/test_decorators.py | a7edb392f9f3349dd5a796deda45b1a997256a68 | [
"MIT"
] | permissive | ZaxR/busy-beaver | 59ac99c48ddb2f71572a9e454af7ae1a0621c844 | ffe1250d0156f71d1053f37c8070ca0dd888348f | refs/heads/master | 2020-04-20T17:01:06.953568 | 2019-01-31T18:19:25 | 2019-01-31T18:19:25 | 168,976,872 | 0 | 0 | MIT | 2019-02-03T18:13:31 | 2019-02-03T18:13:30 | null | UTF-8 | Python | false | false | 1,869 | py | import pytest
import responder
from busy_beaver import db
from busy_beaver.backend.decorators import authentication_required
from busy_beaver.models import ApiUser
# Token persisted for the test ApiUser, and the matching Authorization header
# that test clients send to authenticated endpoints.
TOKEN = "test_token_to_insert"
AUTH_HEADER = {"Authorization": f"token {TOKEN}"}
@pytest.fixture(scope="module")
def api():
    """Responder app exposing one open route and two auth-guarded routes."""
    app = responder.API()

    @app.route("/no-auth")
    def no_auth(req, resp):
        resp.text = "hello, world!"

    @app.route("/auth-required")
    @authentication_required
    def auth_required(req, resp, user):
        resp.text = "hello, world!"

    @app.route("/more-auth/{greeting}")
    @authentication_required
    def more_auth(req, resp, user, *, greeting):
        resp.text = f"echo greeting: {greeting}"

    return app
@pytest.fixture
def persist_api_user():
    """Persist an ApiUser inside nested transactions, rolling back afterwards."""
    outer = db.session.begin_nested()
    db.session.begin_nested()  # inner savepoint absorbs the commit below
    api_user = ApiUser(username="test", token=TOKEN)
    db.session.add(api_user)
    db.session.commit()
    db.session.refresh(api_user)
    yield api_user
    outer.rollback()  # undo everything so the next test starts clean
def test_no_auth_endpoint(api):
    """The open route answers without any credentials."""
    response = api.requests.get("/no-auth")
    assert response.text == "hello, world!"
def test_auth_endpoint_without_headers(api):
    """A guarded route rejects requests carrying no Authorization header."""
    response = api.requests.get("/auth-required")
    assert response.status_code == 401
    assert "Missing header: Authorization" in response.text
def test_auth_endpoint_incorrect_token(api):
    """A guarded route rejects tokens that match no persisted user."""
    bad_header = {"Authorization": "token not-there"}
    response = api.requests.get("/auth-required", headers=bad_header)
    assert response.status_code == 401
    assert "Invalid token" in response.text
def test_auth_endpoint_success(api, persist_api_user):
    """A guarded route accepts the token of a persisted user."""
    response = api.requests.get("/auth-required", headers=AUTH_HEADER)
    assert response.status_code == 200
def test_auth_endpoint_with_url_variable(api, persist_api_user):
    """URL path parameters are forwarded to the authenticated view."""
    greeting = "asdfbadsf"
    response = api.requests.get(f"/more-auth/{greeting}", headers=AUTH_HEADER)
    assert greeting in response.text
| [
"noreply@github.com"
] | noreply@github.com |
33a71d0a1b09888ba5713b6614017fcbeb58681d | e2ee8df2cde2fb40e1b136a01cde7d4f10ed1a11 | /2019/6/solution.py | a51a36f123bd52999d82ee558d86046a582ced1a | [] | no_license | yaodingyd/AdventOfCode | e2b160ec2c0d3aaaf3bea0955d197696f414439f | 49c592b3c75c73d4f845c992ef06961410255f05 | refs/heads/master | 2020-09-27T01:29:26.460108 | 2020-01-03T15:23:45 | 2020-01-03T16:28:46 | 226,390,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,375 | py | def main():
l = []
with open('input.txt') as file:
for line in file:
l.append(line.strip('\n').split(')'))
#print(part_one(l))
print(part_two(l)-2)
def part_one(l):
    """Return the total count of direct and indirect orbits (AoC 2019 day 6).

    ``l`` is a list of ``[center, satellite]`` pairs.  Every body contributes
    one orbit per ancestor on its path back to ``COM``.

    The original implementation shadowed the builtin ``sum``, accumulated via
    a ``nonlocal`` hack in a nested closure, and recursed once per tree level
    (risking RecursionError on deep maps); this version iterates with an
    explicit stack instead.
    """
    # Map each center body to the satellites orbiting it directly.
    children = {}
    for center, satellite in l:
        children.setdefault(center, []).append(satellite)

    total = 0
    # Depth-first walk from the universal Center Of Mass; each satellite at
    # depth d adds d orbits (its direct orbit plus d-1 indirect ones).
    stack = [('COM', 1)]
    while stack:
        body, depth = stack.pop()
        for satellite in children.get(body, []):
            total += depth
            stack.append((satellite, depth + 1))
    return total
# NOTE: a broken duplicate of part_two() was removed here.  It declared
# ``nonlocal sum`` inside a nested helper although no ``sum`` variable was
# bound in the enclosing function's scope, which is a SyntaxError and would
# prevent the module from being imported at all.  It was also immediately
# shadowed by the working part_two() definition below, so removing it is the
# only behavior-compatible fix.
def part_two(l):
    """Return the combined hop counts from YOU and SAN up to their first
    common ancestor in the orbit map (the caller subtracts 2 for transfers).

    Walks both ancestor chains upward one step at a time, recording each
    visited body's distance, and stops at the first body seen from both
    sides.  Returns None if the chains never meet.
    """
    # Map each satellite to the body it directly orbits (child -> parent).
    parent = {satellite: center for center, satellite in l}

    you_node, san_node = 'YOU', 'SAN'
    you_dist = {you_node: 0}
    san_dist = {san_node: 0}

    while True:
        # Advance YOU's chain one ancestor, checking for a meeting point.
        if you_node in parent:
            up = parent[you_node]
            you_dist[up] = you_dist[you_node] + 1
            if up in san_dist:
                return you_dist[up] + san_dist[up]
            you_node = up
        # Advance SAN's chain one ancestor, checking for a meeting point.
        if san_node in parent:
            up = parent[san_node]
            san_dist[up] = san_dist[san_node] + 1
            if up in you_dist:
                return you_dist[up] + san_dist[up]
            san_node = up
        # Both chains exhausted without meeting: no common ancestor.
        if you_node not in parent and san_node not in parent:
            return
# Run the solver when executed as a script.
if __name__ == '__main__':
    main()
"yao.ding@compass.com"
] | yao.ding@compass.com |
a41a1d0985e9c6ccd90ab996db6283cf92386ea4 | 26c909d5ccf36193a72e9034707b69edbfd67789 | /138_copy_list_with_random_pointer.py | f36b458b137745a6c6b3243e2c6aa71783fc7325 | [] | no_license | zeroohub/leetcode | 39a835476eedea5bf8f434a15efb5e73495209f9 | cfefa073d6c6f664a835b87369dbba0203b91e58 | refs/heads/master | 2020-03-22T03:51:22.247932 | 2019-02-21T11:07:13 | 2019-02-21T11:07:13 | 139,456,288 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,957 | py | # -*- coding: utf-8 -*-
from data_structure import *
from collections import defaultdict
class Solution(object):
    """Two-pass deep copy of a linked list with random pointers.

    Pass 1 clones the ``next`` chain, remembering for every original node
    which clones will need to point at its copy.  Pass 2 walks both lists in
    lockstep and resolves those pending random pointers.
    """

    def copyRandomList(self, head):
        # original node -> list of clones whose .random must target its copy
        pending = defaultdict(list)
        dummy = RandomListNode(0)
        tail = dummy

        # Pass 1: clone the next-chain and record random-pointer requests.
        src = head
        while src:
            clone = RandomListNode(src.label)
            tail.next = clone
            tail = clone
            if src.random:
                pending[src.random].append(clone)
            src = src.next

        # Pass 2: walk originals and clones together, wiring random pointers.
        src, copy = head, dummy.next
        while src:
            if src in pending:
                for clone in pending[src]:
                    clone.random = copy
            src = src.next
            copy = copy.next

        return dummy.next
class Solution(object):
    """Recursive deep copy with memoisation.

    Each original node maps to exactly one clone, so nodes reachable through
    both ``next`` and ``random`` are cloned only once and cycles through
    ``random`` terminate.
    """

    def __init__(self):
        # original node -> its already-created clone
        self.cloned = {}

    def copyRandomList(self, head):
        if not head:
            return None
        if head in self.cloned:
            return self.cloned[head]
        # Register the clone BEFORE recursing so back-references resolve.
        clone = RandomListNode(head.label)
        self.cloned[head] = clone
        clone.next = self.copyRandomList(head.next)
        clone.random = self.copyRandomList(head.random)
        return clone
class Solution(object):
    """O(1) extra-space deep copy via node interleaving.

    Pass 1 splices a clone directly after every original node, producing
    A -> A' -> B -> B' -> ...  Pass 2 exploits that layout: the clone of
    ``original.random`` is always ``original.random.next``.  Pass 3 unzips
    the interleaved chain, restoring the input list and returning the copy.
    """
    def copyRandomList(self, head):
        if not head:
            return None
        temp_head = head
        # Pass 1: insert a clone right after each original node.
        while head:
            node = RandomListNode(head.label)
            node.next = head.next
            head.next = node
            head = node.next
        head = temp_head
        # Pass 2: clone.random = clone of original.random (its .next).
        while head:
            if head.random:
                node = head.next
                node.random = head.random.next
            head = head.next.next
        head = temp_head
        new_head = head.next
        # Pass 3: detach clones from originals, restoring the input list.
        while head:
            node = head.next
            head.next = node.next
            head = head.next
            # Last clone's next is None once the originals are exhausted.
            node.next = head.next if head else None
        return new_head
| [
"spamzero@yeah.net"
] | spamzero@yeah.net |
0370c61edcc25acc8cac391cd63b6b9f324944bc | 877e4821c6c62eb4017f7652c25a03342c8c4884 | /cbs_whitelist/white_list_sort.py | 41ebbdf34f445ccb664540d4a7c01a4fdcc8225e | [
"MIT"
] | permissive | ForrestLi/py_strategy | f1380dda26174220584744b5b47b014ecd3f6e1c | dab2b8afb9d9577219d4571cb36b408a5d82fee8 | refs/heads/main | 2023-01-19T19:43:06.931568 | 2020-11-22T04:08:07 | 2020-11-22T04:08:07 | 314,944,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,105 | py | '''
Created on Nov 15, 2020
@author: Forrest Li
'''
import statistics
import operator
cbs_ch_d={'XHKG:02233': 'NAN', '000048': ['23.16', '33.14'], 'XHKG:00613': 'NAN', '600570': ['21.16', '56.91'], '300122': ['41.66', '69.49'], '600031': ['23.93', '35.12'], '600764': ['47.9', '66.7'], 'XHKG:00119': 'NAN', '600516': ['33.94', '81.85'], 'XHKG:00743': 'NAN', '600587': ['22.79', '21.55'], '600745': ['23.86', '34.79'], '300016': ['35.97', '73.97'], '005670': 'NAN', '300308': ['37.01', '37.58'], '002161': ['30.07', '21.92'], 'XHKG:00124': 'NAN', '601100': ['30.38', '52.41'], '000672': ['28.5', '58.15'], '600801': ['35.39', '51.68'], '000885': ['41.62', '41.83'], '300107': ['52.47', '75.52'], '000830': ['22.65', '47.49'], '000567': [], '300276': ['25.36', '20.35'], '601003': ['21.36', '59.57'], '000779': ['33.13', '73.13'], '300132': ['45.81', '67.9'], '002611': ['42.79', '49.76'], '003960': 'NAN', '300205': ['31.64', '32.53'], '000961': ['13.99', '12.32'], '002016': ['33.95', '71.7'], '600673': ['15.1', '54.65'], '300123': ['18.56', '24.44'], 'XHKG:01918': 'NAN', '600215': ['15.36', '19.48'], '300226': ['27.69', '28.08'], '000025': ['41.21', '46.01'], '200025': 'NAN', '600753': ['45.58', '74.91'], '600781': ['66.63', '52.87'], '006580': 'NAN', '000560': ['37.08', '19.07'], '600282': ['26.16', '56.48'], '001390': 'NAN', '600731': ['25.5', '38.01'], '600702': ['31.84', '42.31'], '600853': ['18.45', '23.47'], '600456': ['24.22', '18.51'], '000789': ['44.15', '60.38'], '600782': ['28.45', '65.21'], '600768': ['44.52', '67.37'], '600728': ['35.42', '37.17'], '600287': ['36.43', '37.36'], 'XHKG:03347': 'NAN', '300347': ['61.82', '64.9'], '600328': ['24.89', '42.53'], '002299': ['47.35', '26.75'], '600160': ['41.09', '63.12'], '002097': ['21.39', '21.19'], '600250': ['33.3', '53.93'], '002182': ['38.98', '32.55'], '600810': ['31.29', '31.99'], '600985': ['47.8', '56.07'], '000736': ['26.86', '37.18'], '600260': ['29.08', '41.76'], '000705': ['37.03', '36.02'], '002190': ['47.1', '23.95'], '300012': ['60.36', '43.97'], '000795': ['42.87', '53.19'], '002135': 
['16.19', '22.94'], '600512': ['39.56', '42.93'], '002214': ['35.66', '31.01'], '002189': ['30.45', '42.74'], '601225': ['39.04', '61.6'], '600295': ['22.91', '28.77'], '002632': ['53.6', '58.71'], '600368': ['24.01', '29.95'], '600585': ['60.92', '79.78', '93.27', '94.96', '94.23', '92.99', '94.27', '94.96', '95.91', '95.35', '94.23'], 'XHKG:00914': 'NAN', '300236': ['50.0', '51.88'], '002384': ['27.96', '30.85'], '000906': ['44.38', '43.21'], '000757': ['45.78', '48.67'], '002458': ['71.16', '17.3'], '000656': ['31.86', '27.17'], '002746': ['81.96', '45.75'], '600132': ['44.67', '67.15'], '002475': ['57.46', '52.46'], '002127': ['80.32', '83.12'], '300198': ['44.22', '31.08'], '000661': ['74.81', '74.61'], '000061': ['19.86', '18.25'], '600466': ['28.56', '27.27'], '300285': ['66.58', '60.75'], 'XHKG:00581': 'NAN', '600052': ['51.1', '39.34'], 'XHKG:02007': 'NAN', '600763': ['79.52', '89.5'], '600846': ['37.83', '34.97'], '002605': ['49.03', '45.62'], '600559': ['54.97', '58.16'], '002541': ['32.31', '29.71'], '002599': ['28.02', '43.79'], '003230': 'NAN', '002080': ['27.99', '31.06'], '002175': ['31.61', '14.67'], '002088': ['50.18', '68.45'], '002099': ['49.83', '57.01'], '600809': ['63.97', '64.01'], '003090': 'NAN', 'XHKG:02382': 'NAN', '002057': ['58.7', '69.33'], '601016': ['21.87', '21.96'], '002648': ['39.41', '65.58'], '002371': ['31.99', '27.72'], '002601': ['35.5', '62.81'], '601012': ['70.69', '71.27', '54.99', '74.49', '74.83', '68.96', '71.78', '74.49', '79.38', '76.62', '74.83'], '002438': ['30.26', '25.32'], '000682': ['44.5', '61.03'], '000951': ['26.76', '39.34'], '600567': ['28.96', '49.5'], '300232': ['65.25', '54.41'], '601058': ['31.69', '30.09'], '002645': ['48.98', '56.85'], '300316': ['59.66', '50.1'], '002332': ['40.49', '47.3'], '600426': ['45.6', '48.35'], '300014': ['53.77', '45.64'], 'XHKG:00512': 'NAN', '600277': ['32.42', '43.74'], 'XHKG:00535': 'NAN', 'XHKG:01813': 'NAN', '600486': ['54.24', '63.15'], 'XHKG:01169': 'NAN', 
'300200': ['56.88', '38.92'], '002461': ['29.95', '32.58'], '601888': ['85.28', '89.36'], '600436': ['80.02', '83.23'], '601677': ['45.34', '45.08'], '002402': ['60.34', '69.64'], '601588': ['24.99', '28.62'], 'XHKG:00588': 'NAN', 'XHKG:01600': 'NAN', '600668': ['53.7', '58.7'], '000596': ['61.41', '68.71'], '200596': 'NAN', '600325': ['25.71', '21.93'], '000537': ['30.11', '35.02'], '000858': ['79.34', '83.35', '85.32', '85.99', '88.15', '87.29', '90.6', '85.99', '88.12', '88.33', '88.15'], 'XHKG:00189': 'NAN', '300003': ['64.23', '57.53'], '002439': ['65.43', '69.0'], '600956': ['22.93', '28.07'], 'XHKG:00956': 'NAN', '300137': ['64.74', '76.57'], '600519': ['88.47', '92.81', '93.62', '80.41', '84.69', '96.79', '98.03', '80.41', '85.57', '82.16', '84.69'], '002600': ['32.53', '59.04'], '300038': ['39.96', '56.68'], '300184': ['38.12', '53.07'], '002602': ['63.5', '62.3'], '601318': [], 'XHKG:02318': 'NAN', '600491': ['21.32', '20.39'], 'XHKG:01098': 'NAN', '000636': ['23.61', '37.75'], '600452': ['58.09', '52.72'], '600507': ['55.06', '70.62'], '002507': ['78.79', '78.02'], '300088': ['48.27', '49.36'], '300015': ['77.21', '77.58'], '300059': [], '000756': ['32.64', '42.06'], 'XHKG:00719': 'NAN', '300357': ['89.9', '88.82'], '600161': ['43.62', '88.83'], '000568': ['83.07', '85.29'], '601601': [], 'XHKG:02601': 'NAN', '002110': ['67.63', '83.69'], '600309': ['48.9', '60.65'], '002373': ['63.26', '66.35'], 'XHKG:00881': 'NAN', '002511': ['57.81', '58.14'], '002714': ['65.28', '68.74', '31.49', '77.49', '82.17', '31.29', '52.5', '77.49', '85.26', '84.98', '82.17'], '002035': ['73.36', '71.48'], 'XHKG:00700': 'NAN', '002020': ['46.72', '52.38'], '002139': ['55.11', '59.23'], '300383': ['54.27', '38.32'], '002262': ['73.35', '77.53'], '002221': ['38.71', '47.8'], '600667': ['36.87', '40.2'], 'XHKG:01061': 'NAN', '600340': ['39.04', '35.91'], 'XHKG:00095': 'NAN', '600577': ['46.66', '54.32'], '600995': ['45.42', '49.26'], 'XHKG:02020': 'NAN', 'XHKG:00384': 'NAN', 
'300365': ['78.14', '72.77'], '300031': ['69.37', '61.13'], '601799': ['54.28', '53.95'], 'XHKG:00098': 'NAN', '600529': ['62.45', '60.84'], '600276': ['90.39', '87.75', '84.51', '93.19', '92.87', '90.27', '93.66', '93.19', '90.95', '93.1', '92.87'], '600438': ['52.06', '54.93'], '002637': ['44.16', '32.15'], '300021': ['42.48', '37.58'], '600064': ['36.17', '34.9'], '600872': ['62.23', '62.92'], 'XHKG:00240': 'NAN', '601233': ['49.59', '53.53'], '002587': ['69.14', '59.87']}
cbs_hk_d={'02233': ['28.0', '57.27'], '00613': [], '00119': ['21.49', '35.7'], '00743': ['26.99', '44.49'], '00124': ['44.29', '48.62'], '01918': ['33.32', '33.14', '34.84', '36.33', '34.84', '35.29', '42.64', '33.82', '43.64', '43.39', '42.4'], '03347': ['60.69', '63.91'], '00914': ['61.29', '79.42'], '00581': ['42.23', '80.98'], '02007': ['37.56', '41.42'], '02382': ['68.95', '76.27'], '00512': ['35.39', '42.15'], '00535': ['44.58', '38.93'], '01813': ['41.47', '41.88'], '01169': ['69.28', '73.44', '74.06', '81.51', '82.81', '80.45', '79.95', '79.18', '78.67', '81.74', '86.3'], '00588': ['25.8', '29.7'], '01600': ['45.52', '43.17'], '00189': ['51.38', '64.94'], '00956': ['25.71', '31.62'], '02318': [], '01098': [], '00719': ['36.95', '47.78'], '02601': [], '00881': ['47.17', '55.58'], '00700': ['71.39', '75.28', '68.92', '67.65', '70.81', '66.4', '68.82', '65.84', '66.8', '69.83', '67.37'], '01061': ['69.96', '75.16'], '00095': ['39.56', '38.09'], '02020': ['85.47', '87.15', '87.18', '83.36', '80.56', '91.85', '87.29', '89.27', '90.55', '84.66', '86.15'], '00384': ['45.67', '48.77'], '00098': ['45.16', '42.86'], '00240': ['56.65', '56.43']}
# Build the whitelist: keep instruments whose mean CBS score exceeds the
# threshold, then print them sorted ascending by that mean.
#
# Fixes over the original: the ``if 'NAN' in v: pass`` guard was a no-op
# (evidently a skip was intended, so it is now part of the filter), and
# ``statistics.mean`` was computed twice per instrument.
CBS_THRESHOLD = 65

cbs_total_d = {}
for code, scores in cbs_hk_d.items():
    # Only non-empty score lists without 'NAN' placeholders qualify.
    if not isinstance(scores, list) or not scores or 'NAN' in scores:
        continue
    print(scores)  # keep the original per-instrument trace output
    mean_score = statistics.mean(float(s) for s in scores)
    if mean_score > CBS_THRESHOLD:
        cbs_total_d[code] = mean_score

# Ascending sort by mean score for the final whitelist report.
sorted_x = sorted(cbs_total_d.items(), key=operator.itemgetter(1))
print(sorted_x)
"willpowerli@163.com"
] | willpowerli@163.com |
5cb834ff5d5f2ab8c49ef50d3374996233a78f63 | d30d0778a7a37408bed757c6bfd26dbee7b18d66 | /movies/urls.py | 0fa57be7c5d173f5d921125c8ca1df83246110b0 | [
"MIT"
] | permissive | AliAxghar/DjangoRestSimpleJwt | 8a6f33b46106a54fb93ac50edb8d9869aba65e59 | f64e6c5506185da19b90d5301938e016907925ac | refs/heads/main | 2023-06-24T01:21:47.104979 | 2021-07-29T19:26:35 | 2021-07-29T19:26:35 | 390,830,374 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 514 | py | from django.contrib import admin
from django.urls import path, include
from django.conf.urls import url
from rest_framework_simplejwt.views import (
TokenObtainPairView,
TokenRefreshView,
)
# Root URL configuration: Django admin, the project's api app, health-check
# endpoints, and the SimpleJWT endpoints for issuing and refreshing tokens.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('api.urls')),
    url(r'^health_check/', include('health_check.urls')),
    # POST username/password -> access + refresh token pair.
    url(r'^api/token/$', TokenObtainPairView.as_view(), name='token_obtain_pair'),
    # POST refresh token -> new access token.
    url(r'^api/token/refresh/$', TokenRefreshView.as_view(), name='token_refresh'),
]
| [
"d.ali679asghar@gmail.com"
] | d.ali679asghar@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.