hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1a08401fb30f5417d31f50f1d14aadf818b0ffd5
| 1,056
|
py
|
Python
|
arsenyinfo/src/utils.py
|
cortwave/camera-model-identification
|
b2cbac93308bd6e1bc9d38391f5e97f48da99263
|
[
"BSD-2-Clause"
] | 6
|
2018-02-09T11:40:29.000Z
|
2021-06-14T06:08:50.000Z
|
arsenyinfo/src/utils.py
|
cortwave/camera-model-identification
|
b2cbac93308bd6e1bc9d38391f5e97f48da99263
|
[
"BSD-2-Clause"
] | null | null | null |
arsenyinfo/src/utils.py
|
cortwave/camera-model-identification
|
b2cbac93308bd6e1bc9d38391f5e97f48da99263
|
[
"BSD-2-Clause"
] | 7
|
2018-02-09T11:41:11.000Z
|
2021-06-14T06:08:52.000Z
|
import logging
import subprocess
logging.basicConfig(level=logging.INFO,
                    format='%(levelname)s: %(name)s: %(message)s (%(asctime)s; %(filename)s:%(lineno)d)',
                    datefmt="%Y-%m-%d %H:%M:%S", )
logger = logging.getLogger(__name__)
def get_img_attributes(fname):
    """Return ``(quality, software)`` parsed from ImageMagick's ``identify -verbose``.

    Parameters
    ----------
    fname : str
        Path to the image file to inspect.

    Returns
    -------
    tuple[int, str]
        JPEG quality (0 when it cannot be parsed) and the lowercased
        "Software" tag value ('' when absent or unparsable).
    """
    # ToDo: this should be refactored to be faster (one subprocess per image)
    proc = subprocess.run(['identify', '-verbose', fname],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          )
    lines = proc.stdout.decode().split('\n')
    try:
        # e.g. "  Quality: 92" -> 92
        quality = int(next(x for x in lines if 'Quality' in x).split(': ')[-1])
    except Exception:
        logger.exception(f'Can not parse {fname} quality')
        quality = 0
    try:
        soft_line = next((x for x in lines if 'Software' in x), None)
        soft = soft_line.split(': ')[-1].lower() if soft_line else ''
    except Exception:
        logger.exception(f'Can not parse {fname} software')
        soft = ''
    return quality, soft
| 29.333333
| 105
| 0.535038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 273
| 0.258523
|
1a097db7feea5ecc6f10469e09b8bc2bd7a26dae
| 1,292
|
py
|
Python
|
4.logRegression/plot2D.py
|
zhaolongkzz/Machine-Learning
|
8ec62a4d469db125fd45534dc0217af4cbbf603d
|
[
"MIT"
] | null | null | null |
4.logRegression/plot2D.py
|
zhaolongkzz/Machine-Learning
|
8ec62a4d469db125fd45534dc0217af4cbbf603d
|
[
"MIT"
] | null | null | null |
4.logRegression/plot2D.py
|
zhaolongkzz/Machine-Learning
|
8ec62a4d469db125fd45534dc0217af4cbbf603d
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
# !/usr/bin/python3.6
# Scatter-plots the logistic-regression training points in 2D and overlays
# the linear decision boundary implied by a fixed weight vector.
from numpy import *
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
import logRegress, regIteration
dataMat,labelMat=logRegress.loadDataSet()
dataArr = array(dataMat)
# NOTE(review): this stochastic-gradient result is immediately shadowed by the
# hard-coded weight vectors further below — confirm which weights are intended.
weights = regIteration.stocGradAscent0(dataArr,labelMat)
n = shape(dataArr)[0] #number of points to create
xcord1 = []; ycord1 = []
xcord2 = []; ycord2 = []
markers =[]  # unused
colors =[]  # unused
# split the points by class label so each class gets its own colour/marker;
# columns 1 and 2 are the plotted features (column 0 is the bias term)
for i in range(n):
    if int(labelMat[i])== 1:
        xcord1.append(dataArr[i,1]); ycord1.append(dataArr[i,2])
    else:
        xcord2.append(dataArr[i,1]); ycord2.append(dataArr[i,2])
fig = plt.figure()
ax = fig.add_subplot(111)
#ax.scatter(xcord,ycord, c=colors, s=markers)
type1 = ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
type2 = ax.scatter(xcord2, ycord2, s=30, c='green')
x = arange(-3.0, 3.0, 0.1)
#weights = [-2.9, 0.72, 1.29]
#weights = [-5, 1.09, 1.42]
weights = [13.03822793, 1.32877317, -1.96702074]
weights = [4.12, 0.48, -0.6168]
# boundary where w0 + w1*x + w2*y = 0  =>  y = (-w0 - w1*x) / w2
y = (-weights[0]-weights[1]*x)/weights[2]
type3 = ax.plot(x, y)
#ax.legend([type1, type2, type3], ["Did Not Like", "Liked in Small Doses", "Liked in Large Doses"], loc=2)
#ax.axis([-5000,100000,-2,25])
plt.xlabel('X1')
plt.ylabel('X2')
plt.show()
| 30.761905
| 107
| 0.643189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 338
| 0.26161
|
1a0a2f514672571fd2b6146c24727b30c87165eb
| 2,596
|
py
|
Python
|
talos/distribute/distribute_run.py
|
abhijithneilabraham/talos
|
4f60dbbbedede240a086a7a6cd1e7a2b17db87dd
|
[
"MIT"
] | null | null | null |
talos/distribute/distribute_run.py
|
abhijithneilabraham/talos
|
4f60dbbbedede240a086a7a6cd1e7a2b17db87dd
|
[
"MIT"
] | null | null | null |
talos/distribute/distribute_run.py
|
abhijithneilabraham/talos
|
4f60dbbbedede240a086a7a6cd1e7a2b17db87dd
|
[
"MIT"
] | null | null | null |
import json
import threading
from .distribute_params import run_scan_with_split_params
from .distribute_utils import return_current_machine_id, ssh_connect, ssh_file_transfer, ssh_run
from .distribute_database import update_db
def run_central_machine(self, n_splits, run_central_node):
    '''Run the hyperparameter split assigned to the central machine (id 0).

    Parameters
    ----------
    n_splits | `int` | total number of parameter splits across all machines
    run_central_node | `bool` | whether the central node takes part in the run

    Returns
    -------
    None.
    '''
    # runs the experiment in central machine
    machine_id = 0
    run_scan_with_split_params(self, n_splits, run_central_node, machine_id)
def distribute_run(self):
    '''Coordinate a distributed Scan run across the configured machines.

    On the central machine (id 0): pushes a per-machine config file to each
    remote over ssh, then runs (in parallel threads) the central split when
    enabled, the periodic database updater, and one ssh runner per remote.

    Reads `self.config_data`; keys used: 'run_central_node' (optional bool),
    'database' (with optional 'DB_UPDATE_INTERVAL'), 'machines'.

    Returns
    -------
    None.
    '''
    config = self.config_data
    if 'run_central_node' in config:
        run_central_node = config['run_central_node']
    else:
        run_central_node = False
    # seconds between central database merges (default 5)
    update_db_n_seconds = 5
    if 'DB_UPDATE_INTERVAL' in config['database']:
        update_db_n_seconds = int(config['database']['DB_UPDATE_INTERVAL'])
    n_splits = len(config['machines'])
    if run_central_node:
        n_splits += 1
    current_machine_id = str(return_current_machine_id(self))
    if current_machine_id == str(0):
        clients = ssh_connect(self)
        for machine_id, client in clients.items():
            # shallow-copy so the shared config_data dict is not mutated
            # in place (the original aliased it and leaked
            # 'current_machine_id' into self.config_data)
            new_config = dict(config)
            new_config['current_machine_id'] = machine_id
            with open('tmp/remote_config.json', 'w') as outfile:
                json.dump(new_config, outfile)
            ssh_file_transfer(self, client, machine_id)
        threads = []
        if run_central_node:
            t = threading.Thread(
                target=run_central_machine,
                args=(self, n_splits, run_central_node),
            )
            t.start()
            threads.append(t)
        t = threading.Thread(
            target=update_db,
            args=(self, update_db_n_seconds, current_machine_id),
        )
        t.start()
        threads.append(t)
        for machine_id, client in clients.items():
            t = threading.Thread(
                target=ssh_run,
                args=(self,
                      client,
                      machine_id,
                      ),
            )
            t.start()
            threads.append(t)
        # wait for every worker thread before returning
        for t in threads:
            t.join()
| 24.961538
| 96
| 0.582049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 633
| 0.243837
|
1a0b60342365dfb5d7137cd8463e182aeaeff08e
| 8,063
|
py
|
Python
|
src/oci/database_management/models/sql_tuning_advisor_task_summary_finding_counts.py
|
ezequielramos/oci-python-sdk
|
cc4235cf217beaf9feed75760e9ce82610222762
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 3
|
2020-09-10T22:09:45.000Z
|
2021-12-24T17:00:07.000Z
|
src/oci/database_management/models/sql_tuning_advisor_task_summary_finding_counts.py
|
ezequielramos/oci-python-sdk
|
cc4235cf217beaf9feed75760e9ce82610222762
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/database_management/models/sql_tuning_advisor_task_summary_finding_counts.py
|
ezequielramos/oci-python-sdk
|
cc4235cf217beaf9feed75760e9ce82610222762
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class SqlTuningAdvisorTaskSummaryFindingCounts(object):
    """
    The finding counts data for the SQL Tuning Advisor summary report.
    """
    # NOTE(review): this looks like an auto-generated OCI SDK model — if so,
    # changes belong in the generator, not this file. TODO confirm.
    def __init__(self, **kwargs):
        """
        Initializes a new SqlTuningAdvisorTaskSummaryFindingCounts object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):
        :param recommended_sql_profile:
            The value to assign to the recommended_sql_profile property of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type recommended_sql_profile: int
        :param implemented_sql_profile:
            The value to assign to the implemented_sql_profile property of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type implemented_sql_profile: int
        :param index:
            The value to assign to the index property of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type index: int
        :param restructure:
            The value to assign to the restructure property of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type restructure: int
        :param statistics:
            The value to assign to the statistics property of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type statistics: int
        :param alternate_plan:
            The value to assign to the alternate_plan property of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type alternate_plan: int
        """
        # attribute name -> declared swagger type; presumably consumed by the
        # @init_model_state_from_kwargs decorator when populating from kwargs
        self.swagger_types = {
            'recommended_sql_profile': 'int',
            'implemented_sql_profile': 'int',
            'index': 'int',
            'restructure': 'int',
            'statistics': 'int',
            'alternate_plan': 'int'
        }
        # python attribute name -> wire (camelCase) field name
        self.attribute_map = {
            'recommended_sql_profile': 'recommendedSqlProfile',
            'implemented_sql_profile': 'implementedSqlProfile',
            'index': 'index',
            'restructure': 'restructure',
            'statistics': 'statistics',
            'alternate_plan': 'alternatePlan'
        }
        self._recommended_sql_profile = None
        self._implemented_sql_profile = None
        self._index = None
        self._restructure = None
        self._statistics = None
        self._alternate_plan = None
    @property
    def recommended_sql_profile(self):
        """
        **[Required]** Gets the recommended_sql_profile of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with recommended SQL profiles.
        :return: The recommended_sql_profile of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :rtype: int
        """
        return self._recommended_sql_profile
    @recommended_sql_profile.setter
    def recommended_sql_profile(self, recommended_sql_profile):
        """
        Sets the recommended_sql_profile of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with recommended SQL profiles.
        :param recommended_sql_profile: The recommended_sql_profile of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type: int
        """
        self._recommended_sql_profile = recommended_sql_profile
    @property
    def implemented_sql_profile(self):
        """
        **[Required]** Gets the implemented_sql_profile of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with implemented SQL profiles.
        :return: The implemented_sql_profile of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :rtype: int
        """
        return self._implemented_sql_profile
    @implemented_sql_profile.setter
    def implemented_sql_profile(self, implemented_sql_profile):
        """
        Sets the implemented_sql_profile of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with implemented SQL profiles.
        :param implemented_sql_profile: The implemented_sql_profile of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type: int
        """
        self._implemented_sql_profile = implemented_sql_profile
    @property
    def index(self):
        """
        **[Required]** Gets the index of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with index recommendations.
        :return: The index of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :rtype: int
        """
        return self._index
    @index.setter
    def index(self, index):
        """
        Sets the index of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with index recommendations.
        :param index: The index of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type: int
        """
        self._index = index
    @property
    def restructure(self):
        """
        **[Required]** Gets the restructure of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with restructure SQL recommendations.
        :return: The restructure of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :rtype: int
        """
        return self._restructure
    @restructure.setter
    def restructure(self, restructure):
        """
        Sets the restructure of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with restructure SQL recommendations.
        :param restructure: The restructure of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type: int
        """
        self._restructure = restructure
    @property
    def statistics(self):
        """
        **[Required]** Gets the statistics of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with stale/missing optimizer statistics recommendations.
        :return: The statistics of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :rtype: int
        """
        return self._statistics
    @statistics.setter
    def statistics(self, statistics):
        """
        Sets the statistics of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with stale/missing optimizer statistics recommendations.
        :param statistics: The statistics of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type: int
        """
        self._statistics = statistics
    @property
    def alternate_plan(self):
        """
        **[Required]** Gets the alternate_plan of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with alternative plan recommendations.
        :return: The alternate_plan of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :rtype: int
        """
        return self._alternate_plan
    @alternate_plan.setter
    def alternate_plan(self, alternate_plan):
        """
        Sets the alternate_plan of this SqlTuningAdvisorTaskSummaryFindingCounts.
        The count of distinct SQL statements with alternative plan recommendations.
        :param alternate_plan: The alternate_plan of this SqlTuningAdvisorTaskSummaryFindingCounts.
        :type: int
        """
        self._alternate_plan = alternate_plan
    def __repr__(self):
        return formatted_flat_dict(self)
    # equality compares the full instance state (all private attributes)
    def __eq__(self, other):
        if other is None:
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self == other
| 35.676991
| 245
| 0.693538
| 7,523
| 0.933027
| 0
| 0
| 7,553
| 0.936748
| 0
| 0
| 5,692
| 0.705941
|
1a0bb86d8ca4b904367d6aab9aa4d773f2edd6c4
| 2,220
|
py
|
Python
|
main.py
|
Araime/wine
|
3ab07d38b4321475ec6daf50e5d52c474fcd14cc
|
[
"MIT"
] | null | null | null |
main.py
|
Araime/wine
|
3ab07d38b4321475ec6daf50e5d52c474fcd14cc
|
[
"MIT"
] | null | null | null |
main.py
|
Araime/wine
|
3ab07d38b4321475ec6daf50e5d52c474fcd14cc
|
[
"MIT"
] | null | null | null |
import datetime
import pandas
import collections
import argparse
from collections import OrderedDict
from jinja2 import Environment, FileSystemLoader, select_autoescape
from http.server import HTTPServer, SimpleHTTPRequestHandler
def get_age(foundation_year):
    """Return how many calendar years have passed since *foundation_year*."""
    current_year = datetime.datetime.now().year
    return current_year - foundation_year
def get_years_caption(age):
    """Return the Russian plural form of 'год' appropriate for *age*.

    Russian pluralization depends on the last two digits of the number:
    11-14 always take 'лет'; otherwise a trailing 1 takes 'год', a
    trailing 2-4 takes 'года', everything else takes 'лет'.
    (The original checked `age != 12` instead of `age % 100`, so e.g.
    112 wrongly produced 'года'.)
    """
    if 11 <= age % 100 <= 14:
        return 'лет'
    if age % 10 == 1:
        return 'год'
    if 2 <= age % 10 <= 4:
        return 'года'
    return 'лет'
def get_ordered_wines(path_to_file):
    """Read the wine spreadsheet and group its rows by the 'Категория' column.

    Returns an OrderedDict mapping category name -> list of row dicts,
    with categories sorted alphabetically.
    """
    records = pandas.read_excel(
        path_to_file,
        sheet_name='Лист1',
        na_values=' ',
        keep_default_na=False
    ).to_dict(orient='records')
    grouped = collections.defaultdict(list)
    for record in records:
        grouped[record.get('Категория')].append(record)
    return OrderedDict(sorted(grouped.items()))
if __name__ == '__main__':
    # CLI: the single positional argument is the path to the .xlsx catalogue.
    parser = argparse.ArgumentParser(description='Программа принимает файл в формате xlsx'
                                                 ' для отображения на сайте', )
    parser.add_argument('file_path', help='Необходимо в качестве аргумента при запуске указать'
                                          ' полный путь к файлу')
    args = parser.parse_args()
    path_to_file = args.file_path
    # winery founded in 1920; build the "Уже N лет с вами" banner text
    foundation_year = 1920
    age = get_age(foundation_year)
    age_in_years = get_years_caption(age)
    age_label = f'Уже {age} {age_in_years} с вами'
    ordered_wines = get_ordered_wines(path_to_file)
    # render template.html from the current directory into index.html
    env = Environment(
        loader=FileSystemLoader('.'),
        autoescape=select_autoescape(['html', 'xml'])
    )
    template = env.get_template('template.html')
    rendered_page = template.render(
        age_label=age_label,
        ordered_wines=ordered_wines
    )
    with open('index.html', 'w', encoding='utf8') as file:
        file.write(rendered_page)
    # serve the generated page (and static assets) on port 8000; blocks forever
    server = HTTPServer(('0.0.0.0', 8000), SimpleHTTPRequestHandler)
    server.serve_forever()
| 29.6
| 95
| 0.660811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 446
| 0.188743
|
1a0c55e81de2d1138689ce8a17aa9a9f32891ab7
| 2,703
|
py
|
Python
|
src/genie/libs/parser/junos/tests/ShowOspfStatistics/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/junos/tests/ShowOspfStatistics/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/junos/tests/ShowOspfStatistics/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Golden (expected) parsed result for the Junos `show ospf statistics`
# parser equality test: counters, error counts, and per-packet-type stats.
expected_output = {
    "ospf-statistics-information": {
        "ospf-statistics": {
            "dbds-retransmit": "203656",
            "dbds-retransmit-5seconds": "0",
            "flood-queue-depth": "0",
            "lsas-acknowledged": "225554974",
            "lsas-acknowledged-5seconds": "0",
            "lsas-flooded": "66582263",
            "lsas-flooded-5seconds": "0",
            "lsas-high-prio-flooded": "375568998",
            "lsas-high-prio-flooded-5seconds": "0",
            "lsas-nbr-transmit": "3423982",
            "lsas-nbr-transmit-5seconds": "0",
            "lsas-requested": "3517",
            "lsas-requested-5seconds": "0",
            "lsas-retransmit": "8064643",
            "lsas-retransmit-5seconds": "0",
            "ospf-errors": {
                "subnet-mismatch-error": "12"
            },
            "packet-statistics": [
                {
                    "ospf-packet-type": "Hello",
                    "packets-received": "5703920",
                    "packets-received-5seconds": "3",
                    "packets-sent": "6202169",
                    "packets-sent-5seconds": "0"
                },
                {
                    "ospf-packet-type": "DbD",
                    "packets-received": "185459",
                    "packets-received-5seconds": "0",
                    "packets-sent": "212983",
                    "packets-sent-5seconds": "0"
                },
                {
                    "ospf-packet-type": "LSReq",
                    "packets-received": "208",
                    "packets-received-5seconds": "0",
                    "packets-sent": "214",
                    "packets-sent-5seconds": "0"
                },
                {
                    "ospf-packet-type": "LSUpdate",
                    "packets-received": "16742100",
                    "packets-received-5seconds": "0",
                    "packets-sent": "15671465",
                    "packets-sent-5seconds": "0"
                },
                {
                    "ospf-packet-type": "LSAck",
                    "packets-received": "2964236",
                    "packets-received-5seconds": "0",
                    "packets-sent": "5229203",
                    "packets-sent-5seconds": "0"
                }
            ],
            "total-database-summaries": "0",
            "total-linkstate-request": "0",
            "total-retransmits": "0"
        }
    }
}
| 41.584615
| 57
| 0.371439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,254
| 0.463929
|
1a0cfe1974d3fead0e36d406bfbe33d55d632379
| 6,981
|
py
|
Python
|
marc_5gempower/run_5gempower.py
|
arled-papa/marc
|
cb94636d786e215195e914b37131277f835bcf52
|
[
"Apache-2.0"
] | 1
|
2021-11-30T00:07:28.000Z
|
2021-11-30T00:07:28.000Z
|
marc_5gempower/run_5gempower.py
|
arled-papa/marc
|
cb94636d786e215195e914b37131277f835bcf52
|
[
"Apache-2.0"
] | null | null | null |
marc_5gempower/run_5gempower.py
|
arled-papa/marc
|
cb94636d786e215195e914b37131277f835bcf52
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
#
# Copyright (c) 2021 Arled Papa
# Author: Arled Papa <arled.papa@tum.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from marc_5gempower.agent_func import empower_agent
import marc_5gempower.measurement_helper_func.measurement_func as util_stats_report
import time
import asyncio
import aiomultiprocess as mp
controller_ip = "Your Controller's PC IP"  # Place your own controller ip
controller_port = 2210  # Typical 5G-EmPOWER controller port
measurement_time = 600  # Place your own measurement time (currently 10 min)
"""
This dictionary stores all the agent ids that are recognized by the 5G-EmPOWER controller.
Since the controller only accepts agents that registered at the database this has to be done beforehand
Currently 100 agent ids are registered as follows
"""
# The 100 pre-registered MACs follow an exact base-9 pattern: agent i maps to
# b'\x00\x00\x00' followed by the three base-9 digits of i, one digit per byte
# (e.g. 9 -> b'\x00\x00\x00\x00\x01\x00', 81 -> b'\x00\x00\x00\x01\x00\x00').
# Generate them instead of hand-maintaining a 100-entry literal.
agntMAC = {i: bytes([0, 0, 0, i // 81, (i // 9) % 9, i % 9]) for i in range(100)}
# Function that terminates all processes once the measurement time has been finalized
def terminate_all_processes(processes):
    """Wait out the configured measurement window, then terminate every process.

    Parameters
    ----------
    processes : list
        Process handles started for this measurement run.
    """
    # sleep instead of the previous `while time.time() < target: pass`
    # busy-wait, which pinned a CPU core for the whole window
    time.sleep(measurement_time)
    for proc in processes:
        proc.terminate()
"""
Function that initiates the run of 5G-EmPOWER.
Args:
agents: The number of 5G-EmPOWER agents to initiate
users: The number of users per each initiated FlexRAN agent
delay: Binary that indicated if agent related delay measurement are taking place
"""
async def run_5gempower(args=None):
print(args)
processes = []
# Generate the user activations and de-activations from the data plane according to the configuration setup
util_stats_report.update_configuration_file(args["users"])
measure_agent_delay = False
if args["agents"]:
for agnt in range(int(args["agents"])):
if agnt == int(args["agents"]) - 1:
# In case that delay is True measure the initialization delay of the FlexRAN agent
if args["delay"] == "True":
measure_agent_delay = True
print("Start measuring agent's delay")
# Each 5G-EmPOWER agent is initiated as a process targeting the 5G-EmPOWER agent function
task = mp.Process(target=empower_agent.enode_agent, args=(controller_ip, controller_port, args["users"],
args["agents"], agntMAC[agnt],
measure_agent_delay))
# Append the process to the list of processes
processes.append(task)
# Task initiation
task.start()
time.sleep(0.5)
# Once the measurement has finalized terminate all tasks
terminate_all_processes(processes)
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(run_5gempower())
loop.close()
| 56.756098
| 116
| 0.627847
| 0
| 0
| 0
| 0
| 0
| 0
| 1,344
| 0.192523
| 4,559
| 0.653058
|
1a0dcd546c9fb9cfb2c22a03b6cf3ce13d629047
| 3,531
|
py
|
Python
|
jina/peapods/peas/gateway/grpc/__init__.py
|
yk/jina
|
ab66e233e74b956390f266881ff5dc4e0110d3ff
|
[
"Apache-2.0"
] | 1
|
2020-12-23T08:58:49.000Z
|
2020-12-23T08:58:49.000Z
|
jina/peapods/peas/gateway/grpc/__init__.py
|
yk/jina
|
ab66e233e74b956390f266881ff5dc4e0110d3ff
|
[
"Apache-2.0"
] | null | null | null |
jina/peapods/peas/gateway/grpc/__init__.py
|
yk/jina
|
ab66e233e74b956390f266881ff5dc4e0110d3ff
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import argparse
import os
from multiprocessing.synchronize import Event
from typing import Union, Dict
import grpc
import zmq.asyncio
from .async_call import AsyncPrefetchCall
from ... import BasePea
from ....zmq import send_message_async, recv_message_async, _init_socket
from .....enums import SocketType
from .....proto import jina_pb2
from .....proto import jina_pb2_grpc
__all__ = ['GatewayPea']
class GatewayPea(BasePea):
    # gRPC gateway pea: serves JinaRPC requests and concurrently listens on a
    # zmq PAIR control socket for a TERMINATE command that stops the server.
    def __init__(self,
                 args: Union['argparse.Namespace', Dict],
                 ctrl_addr: str,
                 ctrl_with_ipc: bool,
                 **kwargs):
        super().__init__(args, **kwargs)
        self.ctrl_addr = ctrl_addr  # zmq endpoint bound for shutdown messages
        self.ctrl_with_ipc = ctrl_with_ipc  # whether the control channel uses IPC transport
    def run(self, is_ready_event: 'Event'):
        """Do NOT override this method when inheriting from :class:`GatewayPea`"""
        # drives the async loop body and guarantees teardown on any exit path
        try:
            asyncio.run(self._loop_body(is_ready_event))
        except KeyboardInterrupt:
            self.logger.info('Loop interrupted by user')
        except SystemError as ex:
            self.logger.error(f'SystemError interrupted pea loop {repr(ex)}')
        except Exception as ex:
            self.logger.critical(f'unknown exception: {repr(ex)}', exc_info=True)
        finally:
            self._teardown()
    async def _wait_for_shutdown(self):
        """Do NOT override this method when inheriting from :class:`GatewayPea`"""
        # blocks until a TERMINATE control message arrives, then stops the
        # server and acknowledges with a SUCCESS status
        with zmq.asyncio.Context() as ctx, \
                _init_socket(ctx, self.ctrl_addr, None, SocketType.PAIR_BIND, use_ipc=True)[0] as sock:
            msg = await recv_message_async(sock)
            if msg.request.command == 'TERMINATE':
                msg.envelope.status.code = jina_pb2.StatusProto.SUCCESS
            await self.serve_terminate()
            await send_message_async(sock, msg)
    async def serve_terminate(self):
        """Shutdown the server with async interface
        This method needs to be overridden when inherited from :class:`GatewayPea`
        """
        await self.server.stop(0)
    async def serve_forever(self, is_ready_event: 'Event'):
        """Serve an async service forever
        This method needs to be overridden when inherited from :class:`GatewayPea`
        """
        # drop proxy env vars unless explicitly enabled (not supported on Windows)
        if not self.args.proxy and os.name != 'nt':
            os.unsetenv('http_proxy')
            os.unsetenv('https_proxy')
        self.server = grpc.aio.server(options=[('grpc.max_send_message_length', self.args.max_message_size),
                                               ('grpc.max_receive_message_length', self.args.max_message_size)])
        jina_pb2_grpc.add_JinaRPCServicer_to_server(AsyncPrefetchCall(self.args), self.server)
        bind_addr = f'{self.args.host}:{self.args.port_expose}'
        self.server.add_insecure_port(bind_addr)
        await self.server.start()
        self.logger.success(f'{self.__class__.__name__} is listening at: {bind_addr}')
        # TODO: proper handling of set_ready
        is_ready_event.set()
        await self.server.wait_for_termination()
    async def _loop_body(self, is_ready_event: 'Event'):
        """Do NOT override this method when inheriting from :class:`GatewayPea`"""
        # run the server and the shutdown listener concurrently; cancellation
        # from the main process triggers a clean server stop
        try:
            await asyncio.gather(self.serve_forever(is_ready_event), self._wait_for_shutdown())
        except asyncio.CancelledError:
            self.logger.warning('received terminate ctrl message from main process')
            await self.serve_terminate()
    def __enter__(self):
        return self
| 38.802198
| 112
| 0.652223
| 3,109
| 0.880487
| 0
| 0
| 0
| 0
| 2,152
| 0.609459
| 937
| 0.265364
|
1a0ddf6aed80f212b94b5faabe9879bd5b5f6957
| 895
|
py
|
Python
|
Spell Compendium/scr/Spell1059 - Improvisation.py
|
Sagenlicht/ToEE_Mods
|
a4b07f300df6067f834e09fcbc4c788f1f4e417b
|
[
"MIT"
] | 1
|
2021-04-26T08:03:56.000Z
|
2021-04-26T08:03:56.000Z
|
Spell Compendium/scr/Spell1059 - Improvisation.py
|
Sagenlicht/ToEE_Mods
|
a4b07f300df6067f834e09fcbc4c788f1f4e417b
|
[
"MIT"
] | 2
|
2021-06-11T05:55:01.000Z
|
2021-08-03T23:41:02.000Z
|
Spell Compendium/scr/Spell1059 - Improvisation.py
|
Sagenlicht/ToEE_Mods
|
a4b07f300df6067f834e09fcbc4c788f1f4e417b
|
[
"MIT"
] | 1
|
2021-05-17T15:37:58.000Z
|
2021-05-17T15:37:58.000Z
|
from toee import *
def OnBeginSpellCast(spell):
    # Engine hook (Python 2 / ToEE scripting): fired when casting begins;
    # only logs the cast for debugging.
    print "Improvisation OnBeginSpellCast"
    print "spell.target_list=", spell.target_list
    print "spell.caster=", spell.caster, " caster.level= ", spell.caster_level
def OnSpellEffect(spell):
    # Applies the Improvisation buff condition to the first target, attaches a
    # particle effect, and immediately ends the spell bookkeeping.
    print "Improvisation OnSpellEffect"
    spell.duration = spell.caster_level #1 round/cl
    spellTarget = spell.target_list[0]
    bonusPool = spell.caster_level * 2 #Luck Pool is twice casterlevel
    bonusToAdd = spell.caster_level/2 #single bonus cannot exceed half casterlevel (Python 2 int division floors)
    spellTarget.obj.condition_add_with_args('sp-Improvisation', spell.id, spell.duration, bonusToAdd, bonusPool, 0, 0, 0)
    spellTarget.partsys_id = game.particles('sp-Heroism', spellTarget.obj)
    spell.spell_end(spell.id)
def OnBeginRound(spell):
    # Engine hook: fired each round while the spell is active; debug log only.
    print "Improvisation OnBeginRound"
def OnEndSpellCast(spell):
    # Engine hook: fired when the spell ends; debug log only.
    print "Improvisation OnEndSpellCast"
| 35.8
| 121
| 0.755307
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 286
| 0.319553
|
1a0ee9a3148043007875afdc8ae0b227516a59d4
| 131,586
|
py
|
Python
|
pybind/slxos/v17r_2_00/mpls_state/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v17r_2_00/mpls_state/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v17r_2_00/mpls_state/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import admin_group_lists
import auto_bandwidth
class secondary_path(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls-operational - based on the path /mpls-state/lsp/secondary-path. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: MPLS LSP secondary path information
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__path_name','__instance_id','__path_up','__active','__committed','__is_new_instance','__is_current_secondary','__is_selected_secondary','__config_adaptive','__config_reoptimize_timer_configured','__config_reoptimize_timer','__config_reoptimize_timer_count','__config_tspec_mtu_configured','__config_tspec_mtu','__config_cos_configured','__config_cos','__config_mtu_configured','__config_mtu','__config_tie_breaking_configured','__config_tie_break_random','__config_tie_break_least_fill','__config_tie_break_most_fill','__config_cspf_disabled','__config_rro_disabled','__config_hot_standby','__config_pinned','__config_persistent','__config_soft_prempt','__config_priority_configured','__config_setup_prority','__config_holding_prority','__config_hop_limit_configured','__config_hop_limit','__config_traffic_eng_rate_configured','__config_traffic_eng_mean_rate','__config_traffic_eng_max_rate','__config_traffic_eng_max_burst','__config_abw_configured','__config_cspf_computation_mode','__config_admin_group_configured','__admin_group_lists','__auto_bandwidth',)
_yang_name = 'secondary-path'
_rest_name = 'secondary-path'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__path_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="path-name", rest_name="path-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
self.__is_selected_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-selected-secondary", rest_name="is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_cos = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-cos", rest_name="config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_traffic_eng_max_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-rate", rest_name="config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_cos_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cos-configured", rest_name="config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_traffic_eng_max_burst = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-burst", rest_name="config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_soft_prempt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-soft-prempt", rest_name="config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__committed = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="committed", rest_name="committed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_holding_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-holding-prority", rest_name="config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__instance_id = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_cspf_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cspf-disabled", rest_name="config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_hop_limit_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hop-limit-configured", rest_name="config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_pinned = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-pinned", rest_name="config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-mtu-configured", rest_name="config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_persistent = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-persistent", rest_name="config-persistent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_reoptimize_timer_count = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer-count", rest_name="config-reoptimize-timer-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__auto_bandwidth = YANGDynClass(base=auto_bandwidth.auto_bandwidth, is_container='container', presence=False, yang_name="auto-bandwidth", rest_name="auto-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-auto-bandwidth-config-auto-bandwidth-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
self.__is_new_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-new-instance", rest_name="is-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tie_break_least_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-least-fill", rest_name="config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_cspf_computation_mode = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'cspf-computation-mode-default': {'value': 1}, u'cspf-computation-mode-use-bypass-metric': {'value': 2}, u'cspf-computation-mode-use-igp-metric-global': {'value': 7}, u'cspf-computation-mode-use-igp-metric': {'value': 5}, u'cspf-computation-mode-use-te-metric': {'value': 4}, u'cspf-computation-mode-use-bypass-liberal': {'value': 3}, u'cspf-computation-mode-use-te-metric-global': {'value': 6}},), is_leaf=True, yang_name="config-cspf-computation-mode", rest_name="config-cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='lsp-cspf-computation-mode', is_config=False)
self.__config_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer", rest_name="config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_traffic_eng_rate_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-traffic-eng-rate-configured", rest_name="config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tie_breaking_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-breaking-configured", rest_name="config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-mtu", rest_name="config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__admin_group_lists = YANGDynClass(base=YANGListType("list_type",admin_group_lists.admin_group_lists, yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='list-type', extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}), is_container='list', yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
self.__active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", rest_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_traffic_eng_mean_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-mean-rate", rest_name="config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_hot_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hot-standby", rest_name="config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_reoptimize_timer_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-reoptimize-timer-configured", rest_name="config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_adaptive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-adaptive", rest_name="config-adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_priority_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-priority-configured", rest_name="config-priority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tspec_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tspec-mtu-configured", rest_name="config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tie_break_most_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-most-fill", rest_name="config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__path_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="path-up", rest_name="path-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_setup_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-setup-prority", rest_name="config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__config_abw_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-abw-configured", rest_name="config-abw-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_rro_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-rro-disabled", rest_name="config-rro-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_admin_group_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-admin-group-configured", rest_name="config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-hop-limit", rest_name="config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__is_current_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-current-secondary", rest_name="is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tie_break_random = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-random", rest_name="config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tspec_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-tspec-mtu", rest_name="config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'mpls-state', u'lsp', u'secondary-path']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'mpls-state', u'lsp', u'secondary-path']
def _get_path_name(self):
"""
Getter method for path_name, mapped from YANG variable /mpls_state/lsp/secondary_path/path_name (string)
YANG Description: lsp_sec_path_path_name
"""
return self.__path_name
def _set_path_name(self, v, load=False):
"""
Setter method for path_name, mapped from YANG variable /mpls_state/lsp/secondary_path/path_name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_path_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_path_name() directly.
YANG Description: lsp_sec_path_path_name
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="path-name", rest_name="path-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """path_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="path-name", rest_name="path-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)""",
})
self.__path_name = t
if hasattr(self, '_set'):
self._set()
def _unset_path_name(self):
self.__path_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="path-name", rest_name="path-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
def _get_instance_id(self):
"""
Getter method for instance_id, mapped from YANG variable /mpls_state/lsp/secondary_path/instance_id (boolean)
YANG Description: lsp_sec_path_instance_id
"""
return self.__instance_id
def _set_instance_id(self, v, load=False):
"""
Setter method for instance_id, mapped from YANG variable /mpls_state/lsp/secondary_path/instance_id (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_instance_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_instance_id() directly.
YANG Description: lsp_sec_path_instance_id
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """instance_id must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__instance_id = t
if hasattr(self, '_set'):
self._set()
def _unset_instance_id(self):
self.__instance_id = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_path_up(self):
"""
Getter method for path_up, mapped from YANG variable /mpls_state/lsp/secondary_path/path_up (boolean)
YANG Description: lsp_sec_path_up
"""
return self.__path_up
def _set_path_up(self, v, load=False):
"""
Setter method for path_up, mapped from YANG variable /mpls_state/lsp/secondary_path/path_up (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_path_up is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_path_up() directly.
YANG Description: lsp_sec_path_up
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="path-up", rest_name="path-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """path_up must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="path-up", rest_name="path-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__path_up = t
if hasattr(self, '_set'):
self._set()
def _unset_path_up(self):
self.__path_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="path-up", rest_name="path-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_active(self):
"""
Getter method for active, mapped from YANG variable /mpls_state/lsp/secondary_path/active (boolean)
YANG Description: lsp_sec_path_active
"""
return self.__active
def _set_active(self, v, load=False):
"""
Setter method for active, mapped from YANG variable /mpls_state/lsp/secondary_path/active (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_active is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_active() directly.
YANG Description: lsp_sec_path_active
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="active", rest_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """active must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", rest_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__active = t
if hasattr(self, '_set'):
self._set()
def _unset_active(self):
self.__active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", rest_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_committed(self):
"""
Getter method for committed, mapped from YANG variable /mpls_state/lsp/secondary_path/committed (boolean)
YANG Description: lsp_sec_path_committed
"""
return self.__committed
def _set_committed(self, v, load=False):
"""
Setter method for committed, mapped from YANG variable /mpls_state/lsp/secondary_path/committed (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_committed is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_committed() directly.
YANG Description: lsp_sec_path_committed
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="committed", rest_name="committed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """committed must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="committed", rest_name="committed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__committed = t
if hasattr(self, '_set'):
self._set()
def _unset_committed(self):
self.__committed = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="committed", rest_name="committed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_is_new_instance(self):
    """
    Getter method for is_new_instance, mapped from YANG variable
    /mpls_state/lsp/secondary_path/is_new_instance (boolean).

    YANG Description: lsp_sec_path_is_new_instance
    """
    return self.__is_new_instance

  def _set_is_new_instance(self, v, load=False):
    """
    Setter method for is_new_instance, mapped from YANG variable
    /mpls_state/lsp/secondary_path/is_new_instance (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_is_new_instance is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_is_new_instance() directly.

    YANG Description: lsp_sec_path_is_new_instance
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="is-new-instance", rest_name="is-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """is_new_instance must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-new-instance", rest_name="is-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__is_new_instance = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_is_new_instance(self):
    # Reset is_new_instance to a fresh, empty YANGDynClass wrapper.
    self.__is_new_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-new-instance", rest_name="is-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_is_current_secondary(self):
    """
    Getter method for is_current_secondary, mapped from YANG variable
    /mpls_state/lsp/secondary_path/is_current_secondary (boolean).

    YANG Description: lsp_sec_path_is_current_secondary
    """
    return self.__is_current_secondary

  def _set_is_current_secondary(self, v, load=False):
    """
    Setter method for is_current_secondary, mapped from YANG variable
    /mpls_state/lsp/secondary_path/is_current_secondary (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_is_current_secondary is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_is_current_secondary() directly.

    YANG Description: lsp_sec_path_is_current_secondary
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="is-current-secondary", rest_name="is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """is_current_secondary must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-current-secondary", rest_name="is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__is_current_secondary = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_is_current_secondary(self):
    # Reset is_current_secondary to a fresh, empty YANGDynClass wrapper.
    self.__is_current_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-current-secondary", rest_name="is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_is_selected_secondary(self):
    """
    Getter method for is_selected_secondary, mapped from YANG variable
    /mpls_state/lsp/secondary_path/is_selected_secondary (boolean).

    YANG Description: lsp_sec_path_is_selected_secondary
    """
    return self.__is_selected_secondary

  def _set_is_selected_secondary(self, v, load=False):
    """
    Setter method for is_selected_secondary, mapped from YANG variable
    /mpls_state/lsp/secondary_path/is_selected_secondary (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_is_selected_secondary is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_is_selected_secondary() directly.

    YANG Description: lsp_sec_path_is_selected_secondary
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="is-selected-secondary", rest_name="is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """is_selected_secondary must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-selected-secondary", rest_name="is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__is_selected_secondary = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_is_selected_secondary(self):
    # Reset is_selected_secondary to a fresh, empty YANGDynClass wrapper.
    self.__is_selected_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-selected-secondary", rest_name="is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_config_adaptive(self):
    """
    Getter method for config_adaptive, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_adaptive (boolean).

    YANG Description: lsp_sec_path_config_adaptive
    """
    return self.__config_adaptive

  def _set_config_adaptive(self, v, load=False):
    """
    Setter method for config_adaptive, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_adaptive (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_adaptive is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_adaptive() directly.

    YANG Description: lsp_sec_path_config_adaptive
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-adaptive", rest_name="config-adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_adaptive must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-adaptive", rest_name="config-adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__config_adaptive = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_adaptive(self):
    # Reset config_adaptive to a fresh, empty YANGDynClass wrapper.
    self.__config_adaptive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-adaptive", rest_name="config-adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_config_reoptimize_timer_configured(self):
    """
    Getter method for config_reoptimize_timer_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_reoptimize_timer_configured (boolean).

    YANG Description: lsp_sec_path_config_reoptimize_timer_configured
    """
    return self.__config_reoptimize_timer_configured

  def _set_config_reoptimize_timer_configured(self, v, load=False):
    """
    Setter method for config_reoptimize_timer_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_reoptimize_timer_configured (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_reoptimize_timer_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_reoptimize_timer_configured() directly.

    YANG Description: lsp_sec_path_config_reoptimize_timer_configured
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-reoptimize-timer-configured", rest_name="config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_reoptimize_timer_configured must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-reoptimize-timer-configured", rest_name="config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__config_reoptimize_timer_configured = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_reoptimize_timer_configured(self):
    # Reset config_reoptimize_timer_configured to a fresh, empty wrapper.
    self.__config_reoptimize_timer_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-reoptimize-timer-configured", rest_name="config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_config_reoptimize_timer(self):
    """
    Getter method for config_reoptimize_timer, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_reoptimize_timer (uint32).

    YANG Description: lsp_sec_path_config_reoptimize_timer
    """
    return self.__config_reoptimize_timer

  def _set_config_reoptimize_timer(self, v, load=False):
    """
    Setter method for config_reoptimize_timer, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_reoptimize_timer (uint32).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_reoptimize_timer is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_reoptimize_timer() directly.

    YANG Description: lsp_sec_path_config_reoptimize_timer
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'uint32' leaf (range 0..4294967295); raises on
      # incompatible type or out-of-range value.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer", rest_name="config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_reoptimize_timer must be of a type compatible with uint32""",
        'defined-type': "uint32",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer", rest_name="config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
      })
    self.__config_reoptimize_timer = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_reoptimize_timer(self):
    # Reset config_reoptimize_timer to a fresh, empty YANGDynClass wrapper.
    self.__config_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer", rest_name="config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
  def _get_config_reoptimize_timer_count(self):
    """
    Getter method for config_reoptimize_timer_count, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_reoptimize_timer_count (uint32).

    YANG Description: lsp_sec_path_config_reoptimize_timer_count
    """
    return self.__config_reoptimize_timer_count

  def _set_config_reoptimize_timer_count(self, v, load=False):
    """
    Setter method for config_reoptimize_timer_count, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_reoptimize_timer_count (uint32).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_reoptimize_timer_count is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_reoptimize_timer_count() directly.

    YANG Description: lsp_sec_path_config_reoptimize_timer_count
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'uint32' leaf (range 0..4294967295); raises on
      # incompatible type or out-of-range value.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer-count", rest_name="config-reoptimize-timer-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_reoptimize_timer_count must be of a type compatible with uint32""",
        'defined-type': "uint32",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer-count", rest_name="config-reoptimize-timer-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
      })
    self.__config_reoptimize_timer_count = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_reoptimize_timer_count(self):
    # Reset config_reoptimize_timer_count to a fresh, empty wrapper.
    self.__config_reoptimize_timer_count = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer-count", rest_name="config-reoptimize-timer-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
  def _get_config_tspec_mtu_configured(self):
    """
    Getter method for config_tspec_mtu_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_tspec_mtu_configured (boolean).

    YANG Description: lsp_sec_path_config_tspec_mtu_configured
    """
    return self.__config_tspec_mtu_configured

  def _set_config_tspec_mtu_configured(self, v, load=False):
    """
    Setter method for config_tspec_mtu_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_tspec_mtu_configured (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_tspec_mtu_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_tspec_mtu_configured() directly.

    YANG Description: lsp_sec_path_config_tspec_mtu_configured
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tspec-mtu-configured", rest_name="config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_tspec_mtu_configured must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tspec-mtu-configured", rest_name="config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__config_tspec_mtu_configured = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_tspec_mtu_configured(self):
    # Reset config_tspec_mtu_configured to a fresh, empty wrapper.
    self.__config_tspec_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tspec-mtu-configured", rest_name="config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_config_tspec_mtu(self):
    """
    Getter method for config_tspec_mtu, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_tspec_mtu (uint32).

    YANG Description: lsp_sec_path_config_tspec_mtu
    """
    return self.__config_tspec_mtu

  def _set_config_tspec_mtu(self, v, load=False):
    """
    Setter method for config_tspec_mtu, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_tspec_mtu (uint32).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_tspec_mtu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_tspec_mtu() directly.

    YANG Description: lsp_sec_path_config_tspec_mtu
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'uint32' leaf (range 0..4294967295); raises on
      # incompatible type or out-of-range value.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-tspec-mtu", rest_name="config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_tspec_mtu must be of a type compatible with uint32""",
        'defined-type': "uint32",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-tspec-mtu", rest_name="config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
      })
    self.__config_tspec_mtu = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_tspec_mtu(self):
    # Reset config_tspec_mtu to a fresh, empty YANGDynClass wrapper.
    self.__config_tspec_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-tspec-mtu", rest_name="config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
  def _get_config_cos_configured(self):
    """
    Getter method for config_cos_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_cos_configured (boolean).

    YANG Description: lsp_sec_path_config_cos_configured
    """
    return self.__config_cos_configured

  def _set_config_cos_configured(self, v, load=False):
    """
    Setter method for config_cos_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_cos_configured (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_cos_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_cos_configured() directly.

    YANG Description: lsp_sec_path_config_cos_configured
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-cos-configured", rest_name="config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_cos_configured must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cos-configured", rest_name="config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__config_cos_configured = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_cos_configured(self):
    # Reset config_cos_configured to a fresh, empty YANGDynClass wrapper.
    self.__config_cos_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cos-configured", rest_name="config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_config_cos(self):
    """
    Getter method for config_cos, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_cos (uint32).

    YANG Description: lsp_sec_path_config_cos
    """
    return self.__config_cos

  def _set_config_cos(self, v, load=False):
    """
    Setter method for config_cos, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_cos (uint32).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_cos is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_cos() directly.

    YANG Description: lsp_sec_path_config_cos
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'uint32' leaf (range 0..4294967295); raises on
      # incompatible type or out-of-range value.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-cos", rest_name="config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_cos must be of a type compatible with uint32""",
        'defined-type': "uint32",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-cos", rest_name="config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
      })
    self.__config_cos = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_cos(self):
    # Reset config_cos to a fresh, empty YANGDynClass wrapper.
    self.__config_cos = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-cos", rest_name="config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
  def _get_config_mtu_configured(self):
    """
    Getter method for config_mtu_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_mtu_configured (boolean).

    YANG Description: lsp_sec_path_config_mtu_configured
    """
    return self.__config_mtu_configured

  def _set_config_mtu_configured(self, v, load=False):
    """
    Setter method for config_mtu_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_mtu_configured (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_mtu_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_mtu_configured() directly.

    YANG Description: lsp_sec_path_config_mtu_configured
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-mtu-configured", rest_name="config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_mtu_configured must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-mtu-configured", rest_name="config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__config_mtu_configured = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_mtu_configured(self):
    # Reset config_mtu_configured to a fresh, empty YANGDynClass wrapper.
    self.__config_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-mtu-configured", rest_name="config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
  def _get_config_mtu(self):
    """
    Getter method for config_mtu, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_mtu (uint32).

    YANG Description: lsp_sec_path_config_mtu
    """
    return self.__config_mtu

  def _set_config_mtu(self, v, load=False):
    """
    Setter method for config_mtu, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_mtu (uint32).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_mtu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_mtu() directly.

    YANG Description: lsp_sec_path_config_mtu
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'uint32' leaf (range 0..4294967295); raises on
      # incompatible type or out-of-range value.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-mtu", rest_name="config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_mtu must be of a type compatible with uint32""",
        'defined-type': "uint32",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-mtu", rest_name="config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
      })
    self.__config_mtu = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_mtu(self):
    # Reset config_mtu to a fresh, empty YANGDynClass wrapper.
    self.__config_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-mtu", rest_name="config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
  def _get_config_tie_breaking_configured(self):
    """
    Getter method for config_tie_breaking_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_tie_breaking_configured (boolean).

    YANG Description: lsp_sec_path_config_tie_breaking_configured
    """
    return self.__config_tie_breaking_configured

  def _set_config_tie_breaking_configured(self, v, load=False):
    """
    Setter method for config_tie_breaking_configured, mapped from YANG variable
    /mpls_state/lsp/secondary_path/config_tie_breaking_configured (boolean).

    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_tie_breaking_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_tie_breaking_configured() directly.

    YANG Description: lsp_sec_path_config_tie_breaking_configured
    """
    # Unwrap a previously-wrapped YANGDynClass value to its base type first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validate v as a YANG 'boolean' leaf; incompatible types raise here.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tie-breaking-configured", rest_name="config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config_tie_breaking_configured must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-breaking-configured", rest_name="config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
      })
    self.__config_tie_breaking_configured = t
    # Notify the containing object's change hook, when one is present.
    if hasattr(self, '_set'):
      self._set()

  def _unset_config_tie_breaking_configured(self):
    # Reset config_tie_breaking_configured to a fresh, empty wrapper.
    self.__config_tie_breaking_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-breaking-configured", rest_name="config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_tie_break_random(self):
"""
Getter method for config_tie_break_random, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_random (boolean)
YANG Description: lsp_sec_path_config_tie_break_random
"""
return self.__config_tie_break_random
  def _set_config_tie_break_random(self, v, load=False):
    """
    Setter method for config_tie_break_random, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_random (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_tie_break_random is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_tie_break_random() directly.
    YANG Description: lsp_sec_path_config_tie_break_random

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tie-break-random", rest_name="config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_tie_break_random must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-random", rest_name="config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_tie_break_random = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_tie_break_random(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_tie_break_random = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-random", rest_name="config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_tie_break_least_fill(self):
"""
Getter method for config_tie_break_least_fill, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_least_fill (boolean)
YANG Description: lsp_sec_path_config_tie_break_least_fill
"""
return self.__config_tie_break_least_fill
  def _set_config_tie_break_least_fill(self, v, load=False):
    """
    Setter method for config_tie_break_least_fill, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_least_fill (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_tie_break_least_fill is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_tie_break_least_fill() directly.
    YANG Description: lsp_sec_path_config_tie_break_least_fill

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tie-break-least-fill", rest_name="config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_tie_break_least_fill must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-least-fill", rest_name="config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_tie_break_least_fill = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_tie_break_least_fill(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_tie_break_least_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-least-fill", rest_name="config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_tie_break_most_fill(self):
"""
Getter method for config_tie_break_most_fill, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_most_fill (boolean)
YANG Description: lsp_sec_path_config_tie_break_most_fill
"""
return self.__config_tie_break_most_fill
  def _set_config_tie_break_most_fill(self, v, load=False):
    """
    Setter method for config_tie_break_most_fill, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_most_fill (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_tie_break_most_fill is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_tie_break_most_fill() directly.
    YANG Description: lsp_sec_path_config_tie_break_most_fill

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tie-break-most-fill", rest_name="config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_tie_break_most_fill must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-most-fill", rest_name="config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_tie_break_most_fill = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_tie_break_most_fill(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_tie_break_most_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-most-fill", rest_name="config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_cspf_disabled(self):
"""
Getter method for config_cspf_disabled, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cspf_disabled (boolean)
YANG Description: lsp_sec_path_config_cspf_disabled
"""
return self.__config_cspf_disabled
  def _set_config_cspf_disabled(self, v, load=False):
    """
    Setter method for config_cspf_disabled, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cspf_disabled (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_cspf_disabled is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_cspf_disabled() directly.
    YANG Description: lsp_sec_path_config_cspf_disabled

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-cspf-disabled", rest_name="config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_cspf_disabled must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cspf-disabled", rest_name="config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_cspf_disabled = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_cspf_disabled(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_cspf_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cspf-disabled", rest_name="config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_rro_disabled(self):
"""
Getter method for config_rro_disabled, mapped from YANG variable /mpls_state/lsp/secondary_path/config_rro_disabled (boolean)
YANG Description: lsp_sec_path_config_rro_disabled
"""
return self.__config_rro_disabled
  def _set_config_rro_disabled(self, v, load=False):
    """
    Setter method for config_rro_disabled, mapped from YANG variable /mpls_state/lsp/secondary_path/config_rro_disabled (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_rro_disabled is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_rro_disabled() directly.
    YANG Description: lsp_sec_path_config_rro_disabled

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-rro-disabled", rest_name="config-rro-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_rro_disabled must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-rro-disabled", rest_name="config-rro-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_rro_disabled = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_rro_disabled(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_rro_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-rro-disabled", rest_name="config-rro-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_hot_standby(self):
"""
Getter method for config_hot_standby, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hot_standby (boolean)
YANG Description: lsp_sec_path_config_hot_standby
"""
return self.__config_hot_standby
  def _set_config_hot_standby(self, v, load=False):
    """
    Setter method for config_hot_standby, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hot_standby (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_hot_standby is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_hot_standby() directly.
    YANG Description: lsp_sec_path_config_hot_standby

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-hot-standby", rest_name="config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_hot_standby must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hot-standby", rest_name="config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_hot_standby = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_hot_standby(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_hot_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hot-standby", rest_name="config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_pinned(self):
"""
Getter method for config_pinned, mapped from YANG variable /mpls_state/lsp/secondary_path/config_pinned (boolean)
YANG Description: lsp_sec_path_config_pinned
"""
return self.__config_pinned
  def _set_config_pinned(self, v, load=False):
    """
    Setter method for config_pinned, mapped from YANG variable /mpls_state/lsp/secondary_path/config_pinned (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_pinned is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_pinned() directly.
    YANG Description: lsp_sec_path_config_pinned

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-pinned", rest_name="config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_pinned must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-pinned", rest_name="config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_pinned = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_pinned(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_pinned = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-pinned", rest_name="config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_persistent(self):
"""
Getter method for config_persistent, mapped from YANG variable /mpls_state/lsp/secondary_path/config_persistent (boolean)
YANG Description: lsp_sec_path_config_persistent
"""
return self.__config_persistent
  def _set_config_persistent(self, v, load=False):
    """
    Setter method for config_persistent, mapped from YANG variable /mpls_state/lsp/secondary_path/config_persistent (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_persistent is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_persistent() directly.
    YANG Description: lsp_sec_path_config_persistent

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-persistent", rest_name="config-persistent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_persistent must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-persistent", rest_name="config-persistent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_persistent = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_persistent(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_persistent = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-persistent", rest_name="config-persistent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_soft_prempt(self):
"""
Getter method for config_soft_prempt, mapped from YANG variable /mpls_state/lsp/secondary_path/config_soft_prempt (boolean)
YANG Description: lsp_sec_path_config_soft_prempt
"""
return self.__config_soft_prempt
  def _set_config_soft_prempt(self, v, load=False):
    """
    Setter method for config_soft_prempt, mapped from YANG variable /mpls_state/lsp/secondary_path/config_soft_prempt (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_soft_prempt is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_soft_prempt() directly.
    YANG Description: lsp_sec_path_config_soft_prempt

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-soft-prempt", rest_name="config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_soft_prempt must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-soft-prempt", rest_name="config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_soft_prempt = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_soft_prempt(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_soft_prempt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-soft-prempt", rest_name="config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_priority_configured(self):
"""
Getter method for config_priority_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_priority_configured (boolean)
YANG Description: lsp_sec_path_config_priority_configured
"""
return self.__config_priority_configured
  def _set_config_priority_configured(self, v, load=False):
    """
    Setter method for config_priority_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_priority_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_priority_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_priority_configured() directly.
    YANG Description: lsp_sec_path_config_priority_configured

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-priority-configured", rest_name="config-priority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_priority_configured must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-priority-configured", rest_name="config-priority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_priority_configured = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_priority_configured(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_priority_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-priority-configured", rest_name="config-priority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_setup_prority(self):
"""
Getter method for config_setup_prority, mapped from YANG variable /mpls_state/lsp/secondary_path/config_setup_prority (uint8)
YANG Description: lsp_sec_path_config_setup_prority
"""
return self.__config_setup_prority
  def _set_config_setup_prority(self, v, load=False):
    """
    Setter method for config_setup_prority, mapped from YANG variable /mpls_state/lsp/secondary_path/config_setup_prority (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_setup_prority is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_setup_prority() directly.
    YANG Description: lsp_sec_path_config_setup_prority

    :param v: value to apply; must be compatible with YANG type uint8 (0..255)
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the uint8 range restriction is enforced.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-setup-prority", rest_name="config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_setup_prority must be of a type compatible with uint8""",
          'defined-type': "uint8",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-setup-prority", rest_name="config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
        })
    self.__config_setup_prority = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_setup_prority(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_setup_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-setup-prority", rest_name="config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_config_holding_prority(self):
"""
Getter method for config_holding_prority, mapped from YANG variable /mpls_state/lsp/secondary_path/config_holding_prority (uint8)
YANG Description: lsp_sec_path_config_holding_prority
"""
return self.__config_holding_prority
  def _set_config_holding_prority(self, v, load=False):
    """
    Setter method for config_holding_prority, mapped from YANG variable /mpls_state/lsp/secondary_path/config_holding_prority (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_holding_prority is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_holding_prority() directly.
    YANG Description: lsp_sec_path_config_holding_prority

    :param v: value to apply; must be compatible with YANG type uint8 (0..255)
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the uint8 range restriction is enforced.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-holding-prority", rest_name="config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_holding_prority must be of a type compatible with uint8""",
          'defined-type': "uint8",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-holding-prority", rest_name="config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
        })
    self.__config_holding_prority = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_holding_prority(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_holding_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-holding-prority", rest_name="config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_config_hop_limit_configured(self):
"""
Getter method for config_hop_limit_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hop_limit_configured (boolean)
YANG Description: lsp_sec_path_config_hop_limit_configured
"""
return self.__config_hop_limit_configured
  def _set_config_hop_limit_configured(self, v, load=False):
    """
    Setter method for config_hop_limit_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hop_limit_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_hop_limit_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_hop_limit_configured() directly.
    YANG Description: lsp_sec_path_config_hop_limit_configured

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-hop-limit-configured", rest_name="config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_hop_limit_configured must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hop-limit-configured", rest_name="config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_hop_limit_configured = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_hop_limit_configured(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_hop_limit_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hop-limit-configured", rest_name="config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_hop_limit(self):
"""
Getter method for config_hop_limit, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hop_limit (uint8)
YANG Description: lsp_sec_path_config_hop_limit
"""
return self.__config_hop_limit
  def _set_config_hop_limit(self, v, load=False):
    """
    Setter method for config_hop_limit, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hop_limit (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_hop_limit is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_hop_limit() directly.
    YANG Description: lsp_sec_path_config_hop_limit

    :param v: value to apply; must be compatible with YANG type uint8 (0..255)
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the uint8 range restriction is enforced.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-hop-limit", rest_name="config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_hop_limit must be of a type compatible with uint8""",
          'defined-type': "uint8",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-hop-limit", rest_name="config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
        })
    self.__config_hop_limit = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
  def _unset_config_hop_limit(self):
    # Reset the leaf to a fresh, default-constructed YANGDynClass wrapper,
    # discarding any previously set value.
    self.__config_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-hop-limit", rest_name="config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_config_traffic_eng_rate_configured(self):
"""
Getter method for config_traffic_eng_rate_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_rate_configured (boolean)
YANG Description: lsp_sec_path_config_traffic_eng_rate_configured
"""
return self.__config_traffic_eng_rate_configured
  def _set_config_traffic_eng_rate_configured(self, v, load=False):
    """
    Setter method for config_traffic_eng_rate_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_rate_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_traffic_eng_rate_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_traffic_eng_rate_configured() directly.
    YANG Description: lsp_sec_path_config_traffic_eng_rate_configured

    :param v: value to apply; must be compatible with YANG type boolean
    :param load: accepted for API compatibility; not referenced in this body
    :raises ValueError: if v cannot be wrapped as the leaf's YANG type
    """
    if hasattr(v, "_utype"):
      # v is already a YANGDynClass wrapper; unwrap it to its base type so it
      # can be re-validated and re-wrapped below.
      v = v._utype(v)
    try:
      # Re-wrap in YANGDynClass so the leaf's YANG type restrictions are enforced.
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-traffic-eng-rate-configured", rest_name="config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config_traffic_eng_rate_configured must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-traffic-eng-rate-configured", rest_name="config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_traffic_eng_rate_configured = t
    if hasattr(self, '_set'):
      # Notify the containing object that one of its children changed.
      self._set()
def _unset_config_traffic_eng_rate_configured(self):
self.__config_traffic_eng_rate_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-traffic-eng-rate-configured", rest_name="config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_traffic_eng_mean_rate(self):
    """
    Getter method for config_traffic_eng_mean_rate, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_mean_rate (uint32)

    YANG Description: lsp_sec_path_config_traffic_eng_mean_rate
    """
    return self.__config_traffic_eng_mean_rate

def _set_config_traffic_eng_mean_rate(self, v, load=False):
    """
    Setter method for config_traffic_eng_mean_rate, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_mean_rate (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_traffic_eng_mean_rate is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_traffic_eng_mean_rate() directly.

    YANG Description: lsp_sec_path_config_traffic_eng_mean_rate
    """
    if hasattr(v, "_utype"):
        # Convert via the value's declared _utype hook when present.
        v = v._utype(v)
    try:
        # Coerce v into the generated uint32 leaf type (range 0..4294967295).
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-mean-rate", rest_name="config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """config_traffic_eng_mean_rate must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-mean-rate", rest_name="config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
        })
    self.__config_traffic_eng_mean_rate = t
    if hasattr(self, '_set'):
        self._set()

def _unset_config_traffic_eng_mean_rate(self):
    # Reset the leaf to a fresh, value-less instance of its generated type.
    self.__config_traffic_eng_mean_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-mean-rate", rest_name="config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_traffic_eng_max_rate(self):
    """
    Getter method for config_traffic_eng_max_rate, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_max_rate (uint32)

    YANG Description: lsp_sec_path_config_traffic_eng_max_rate
    """
    return self.__config_traffic_eng_max_rate

def _set_config_traffic_eng_max_rate(self, v, load=False):
    """
    Setter method for config_traffic_eng_max_rate, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_max_rate (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_traffic_eng_max_rate is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_traffic_eng_max_rate() directly.

    YANG Description: lsp_sec_path_config_traffic_eng_max_rate
    """
    if hasattr(v, "_utype"):
        # Convert via the value's declared _utype hook when present.
        v = v._utype(v)
    try:
        # Coerce v into the generated uint32 leaf type (range 0..4294967295).
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-rate", rest_name="config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """config_traffic_eng_max_rate must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-rate", rest_name="config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
        })
    self.__config_traffic_eng_max_rate = t
    if hasattr(self, '_set'):
        self._set()

def _unset_config_traffic_eng_max_rate(self):
    # Reset the leaf to a fresh, value-less instance of its generated type.
    self.__config_traffic_eng_max_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-rate", rest_name="config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_traffic_eng_max_burst(self):
    """
    Getter method for config_traffic_eng_max_burst, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_max_burst (uint32)

    YANG Description: lsp_sec_path_config_traffic_eng_max_burst
    """
    return self.__config_traffic_eng_max_burst

def _set_config_traffic_eng_max_burst(self, v, load=False):
    """
    Setter method for config_traffic_eng_max_burst, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_max_burst (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_traffic_eng_max_burst is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_traffic_eng_max_burst() directly.

    YANG Description: lsp_sec_path_config_traffic_eng_max_burst
    """
    if hasattr(v, "_utype"):
        # Convert via the value's declared _utype hook when present.
        v = v._utype(v)
    try:
        # Coerce v into the generated uint32 leaf type (range 0..4294967295).
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-burst", rest_name="config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """config_traffic_eng_max_burst must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-burst", rest_name="config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
        })
    self.__config_traffic_eng_max_burst = t
    if hasattr(self, '_set'):
        self._set()

def _unset_config_traffic_eng_max_burst(self):
    # Reset the leaf to a fresh, value-less instance of its generated type.
    self.__config_traffic_eng_max_burst = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-burst", rest_name="config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_abw_configured(self):
    """
    Getter method for config_abw_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_abw_configured (boolean)

    YANG Description: lsp_sec_path_config_abw_configured
    """
    return self.__config_abw_configured

def _set_config_abw_configured(self, v, load=False):
    """
    Setter method for config_abw_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_abw_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_abw_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_abw_configured() directly.

    YANG Description: lsp_sec_path_config_abw_configured
    """
    if hasattr(v, "_utype"):
        # Convert via the value's declared _utype hook when present.
        v = v._utype(v)
    try:
        # Coerce v into the generated boolean leaf type; incompatible values raise below.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-abw-configured", rest_name="config-abw-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """config_abw_configured must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-abw-configured", rest_name="config-abw-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_abw_configured = t
    if hasattr(self, '_set'):
        self._set()

def _unset_config_abw_configured(self):
    # Reset the leaf to a fresh, value-less instance of its generated type.
    self.__config_abw_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-abw-configured", rest_name="config-abw-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_cspf_computation_mode(self):
    """
    Getter method for config_cspf_computation_mode, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cspf_computation_mode (lsp-cspf-computation-mode)

    YANG Description: lsp sec path configured cspf computation mode
    """
    return self.__config_cspf_computation_mode

def _set_config_cspf_computation_mode(self, v, load=False):
    """
    Setter method for config_cspf_computation_mode, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cspf_computation_mode (lsp-cspf-computation-mode)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_cspf_computation_mode is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_cspf_computation_mode() directly.

    YANG Description: lsp sec path configured cspf computation mode
    """
    if hasattr(v, "_utype"):
        # Convert via the value's declared _utype hook when present.
        v = v._utype(v)
    try:
        # Coerce v into the generated enumeration type: only the listed
        # cspf-computation-mode-* keys are accepted.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'cspf-computation-mode-default': {'value': 1}, u'cspf-computation-mode-use-bypass-metric': {'value': 2}, u'cspf-computation-mode-use-igp-metric-global': {'value': 7}, u'cspf-computation-mode-use-igp-metric': {'value': 5}, u'cspf-computation-mode-use-te-metric': {'value': 4}, u'cspf-computation-mode-use-bypass-liberal': {'value': 3}, u'cspf-computation-mode-use-te-metric-global': {'value': 6}},), is_leaf=True, yang_name="config-cspf-computation-mode", rest_name="config-cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='lsp-cspf-computation-mode', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """config_cspf_computation_mode must be of a type compatible with lsp-cspf-computation-mode""",
            'defined-type': "brocade-mpls-operational:lsp-cspf-computation-mode",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'cspf-computation-mode-default': {'value': 1}, u'cspf-computation-mode-use-bypass-metric': {'value': 2}, u'cspf-computation-mode-use-igp-metric-global': {'value': 7}, u'cspf-computation-mode-use-igp-metric': {'value': 5}, u'cspf-computation-mode-use-te-metric': {'value': 4}, u'cspf-computation-mode-use-bypass-liberal': {'value': 3}, u'cspf-computation-mode-use-te-metric-global': {'value': 6}},), is_leaf=True, yang_name="config-cspf-computation-mode", rest_name="config-cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='lsp-cspf-computation-mode', is_config=False)""",
        })
    self.__config_cspf_computation_mode = t
    if hasattr(self, '_set'):
        self._set()

def _unset_config_cspf_computation_mode(self):
    # Reset the leaf to a fresh, value-less instance of its generated type.
    self.__config_cspf_computation_mode = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'cspf-computation-mode-default': {'value': 1}, u'cspf-computation-mode-use-bypass-metric': {'value': 2}, u'cspf-computation-mode-use-igp-metric-global': {'value': 7}, u'cspf-computation-mode-use-igp-metric': {'value': 5}, u'cspf-computation-mode-use-te-metric': {'value': 4}, u'cspf-computation-mode-use-bypass-liberal': {'value': 3}, u'cspf-computation-mode-use-te-metric-global': {'value': 6}},), is_leaf=True, yang_name="config-cspf-computation-mode", rest_name="config-cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='lsp-cspf-computation-mode', is_config=False)
def _get_config_admin_group_configured(self):
    """
    Getter method for config_admin_group_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_admin_group_configured (boolean)

    YANG Description: lsp_sec_path_config_admin_group_configured
    """
    return self.__config_admin_group_configured

def _set_config_admin_group_configured(self, v, load=False):
    """
    Setter method for config_admin_group_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_admin_group_configured (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_admin_group_configured is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_admin_group_configured() directly.

    YANG Description: lsp_sec_path_config_admin_group_configured
    """
    if hasattr(v, "_utype"):
        # Convert via the value's declared _utype hook when present.
        v = v._utype(v)
    try:
        # Coerce v into the generated boolean leaf type; incompatible values raise below.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-admin-group-configured", rest_name="config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """config_admin_group_configured must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-admin-group-configured", rest_name="config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__config_admin_group_configured = t
    if hasattr(self, '_set'):
        self._set()

def _unset_config_admin_group_configured(self):
    # Reset the leaf to a fresh, value-less instance of its generated type.
    self.__config_admin_group_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-admin-group-configured", rest_name="config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_admin_group_lists(self):
    """
    Getter method for admin_group_lists, mapped from YANG variable /mpls_state/lsp/secondary_path/admin_group_lists (list)
    """
    return self.__admin_group_lists

def _set_admin_group_lists(self, v, load=False):
    """
    Setter method for admin_group_lists, mapped from YANG variable /mpls_state/lsp/secondary_path/admin_group_lists (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_admin_group_lists is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_admin_group_lists() directly.
    """
    if hasattr(v, "_utype"):
        # Convert via the value's declared _utype hook when present.
        v = v._utype(v)
    try:
        # Coerce v into the generated YANG list type keyed by 'list-type'.
        t = YANGDynClass(v,base=YANGListType("list_type",admin_group_lists.admin_group_lists, yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='list-type', extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}), is_container='list', yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """admin_group_lists must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("list_type",admin_group_lists.admin_group_lists, yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='list-type', extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}), is_container='list', yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)""",
        })
    self.__admin_group_lists = t
    if hasattr(self, '_set'):
        self._set()

def _unset_admin_group_lists(self):
    # Reset the list node to a fresh, empty instance of its generated type.
    self.__admin_group_lists = YANGDynClass(base=YANGListType("list_type",admin_group_lists.admin_group_lists, yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='list-type', extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}), is_container='list', yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
def _get_auto_bandwidth(self):
    """
    Getter method for auto_bandwidth, mapped from YANG variable /mpls_state/lsp/secondary_path/auto_bandwidth (container)
    """
    return self.__auto_bandwidth

def _set_auto_bandwidth(self, v, load=False):
    """
    Setter method for auto_bandwidth, mapped from YANG variable /mpls_state/lsp/secondary_path/auto_bandwidth (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_auto_bandwidth is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_auto_bandwidth() directly.
    """
    if hasattr(v, "_utype"):
        # Convert via the value's declared _utype hook when present.
        v = v._utype(v)
    try:
        # Coerce v into the generated non-presence container type.
        t = YANGDynClass(v,base=auto_bandwidth.auto_bandwidth, is_container='container', presence=False, yang_name="auto-bandwidth", rest_name="auto-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-auto-bandwidth-config-auto-bandwidth-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """auto_bandwidth must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=auto_bandwidth.auto_bandwidth, is_container='container', presence=False, yang_name="auto-bandwidth", rest_name="auto-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-auto-bandwidth-config-auto-bandwidth-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)""",
        })
    self.__auto_bandwidth = t
    if hasattr(self, '_set'):
        self._set()

def _unset_auto_bandwidth(self):
    # Reset the container node to a fresh, empty instance of its generated type.
    self.__auto_bandwidth = YANGDynClass(base=auto_bandwidth.auto_bandwidth, is_container='container', presence=False, yang_name="auto-bandwidth", rest_name="auto-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-auto-bandwidth-config-auto-bandwidth-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
# Each node is exposed as a read-only property wrapping its getter only;
# the accessors above all build their values with is_config=False, so no
# public setters are published.
path_name = __builtin__.property(_get_path_name)
instance_id = __builtin__.property(_get_instance_id)
path_up = __builtin__.property(_get_path_up)
active = __builtin__.property(_get_active)
committed = __builtin__.property(_get_committed)
is_new_instance = __builtin__.property(_get_is_new_instance)
is_current_secondary = __builtin__.property(_get_is_current_secondary)
is_selected_secondary = __builtin__.property(_get_is_selected_secondary)
config_adaptive = __builtin__.property(_get_config_adaptive)
config_reoptimize_timer_configured = __builtin__.property(_get_config_reoptimize_timer_configured)
config_reoptimize_timer = __builtin__.property(_get_config_reoptimize_timer)
config_reoptimize_timer_count = __builtin__.property(_get_config_reoptimize_timer_count)
config_tspec_mtu_configured = __builtin__.property(_get_config_tspec_mtu_configured)
config_tspec_mtu = __builtin__.property(_get_config_tspec_mtu)
config_cos_configured = __builtin__.property(_get_config_cos_configured)
config_cos = __builtin__.property(_get_config_cos)
config_mtu_configured = __builtin__.property(_get_config_mtu_configured)
config_mtu = __builtin__.property(_get_config_mtu)
config_tie_breaking_configured = __builtin__.property(_get_config_tie_breaking_configured)
config_tie_break_random = __builtin__.property(_get_config_tie_break_random)
config_tie_break_least_fill = __builtin__.property(_get_config_tie_break_least_fill)
config_tie_break_most_fill = __builtin__.property(_get_config_tie_break_most_fill)
config_cspf_disabled = __builtin__.property(_get_config_cspf_disabled)
config_rro_disabled = __builtin__.property(_get_config_rro_disabled)
config_hot_standby = __builtin__.property(_get_config_hot_standby)
config_pinned = __builtin__.property(_get_config_pinned)
config_persistent = __builtin__.property(_get_config_persistent)
config_soft_prempt = __builtin__.property(_get_config_soft_prempt)
config_priority_configured = __builtin__.property(_get_config_priority_configured)
config_setup_prority = __builtin__.property(_get_config_setup_prority)
config_holding_prority = __builtin__.property(_get_config_holding_prority)
config_hop_limit_configured = __builtin__.property(_get_config_hop_limit_configured)
config_hop_limit = __builtin__.property(_get_config_hop_limit)
config_traffic_eng_rate_configured = __builtin__.property(_get_config_traffic_eng_rate_configured)
config_traffic_eng_mean_rate = __builtin__.property(_get_config_traffic_eng_mean_rate)
config_traffic_eng_max_rate = __builtin__.property(_get_config_traffic_eng_max_rate)
config_traffic_eng_max_burst = __builtin__.property(_get_config_traffic_eng_max_burst)
config_abw_configured = __builtin__.property(_get_config_abw_configured)
config_cspf_computation_mode = __builtin__.property(_get_config_cspf_computation_mode)
config_admin_group_configured = __builtin__.property(_get_config_admin_group_configured)
admin_group_lists = __builtin__.property(_get_admin_group_lists)
auto_bandwidth = __builtin__.property(_get_auto_bandwidth)

# YANG element name -> property mapping; NOTE(review): presumably consumed
# by the pyangbind runtime helpers — confirm before renaming any entry.
_pyangbind_elements = {'path_name': path_name, 'instance_id': instance_id, 'path_up': path_up, 'active': active, 'committed': committed, 'is_new_instance': is_new_instance, 'is_current_secondary': is_current_secondary, 'is_selected_secondary': is_selected_secondary, 'config_adaptive': config_adaptive, 'config_reoptimize_timer_configured': config_reoptimize_timer_configured, 'config_reoptimize_timer': config_reoptimize_timer, 'config_reoptimize_timer_count': config_reoptimize_timer_count, 'config_tspec_mtu_configured': config_tspec_mtu_configured, 'config_tspec_mtu': config_tspec_mtu, 'config_cos_configured': config_cos_configured, 'config_cos': config_cos, 'config_mtu_configured': config_mtu_configured, 'config_mtu': config_mtu, 'config_tie_breaking_configured': config_tie_breaking_configured, 'config_tie_break_random': config_tie_break_random, 'config_tie_break_least_fill': config_tie_break_least_fill, 'config_tie_break_most_fill': config_tie_break_most_fill, 'config_cspf_disabled': config_cspf_disabled, 'config_rro_disabled': config_rro_disabled, 'config_hot_standby': config_hot_standby, 'config_pinned': config_pinned, 'config_persistent': config_persistent, 'config_soft_prempt': config_soft_prempt, 'config_priority_configured': config_priority_configured, 'config_setup_prority': config_setup_prority, 'config_holding_prority': config_holding_prority, 'config_hop_limit_configured': config_hop_limit_configured, 'config_hop_limit': config_hop_limit, 'config_traffic_eng_rate_configured': config_traffic_eng_rate_configured, 'config_traffic_eng_mean_rate': config_traffic_eng_mean_rate, 'config_traffic_eng_max_rate': config_traffic_eng_max_rate, 'config_traffic_eng_max_burst': config_traffic_eng_max_burst, 'config_abw_configured': config_abw_configured, 'config_cspf_computation_mode': config_cspf_computation_mode, 'config_admin_group_configured': config_admin_group_configured, 'admin_group_lists': admin_group_lists, 'auto_bandwidth': auto_bandwidth, }
| 75.973441
| 1,982
| 0.766221
| 131,145
| 0.996649
| 0
| 0
| 0
| 0
| 0
| 0
| 71,803
| 0.545674
|
1a11560f409eb43a0ed24b3b54e89719dbd21b76
| 171
|
py
|
Python
|
Theseus/Tests/__init__.py
|
amias-iohk/theseus
|
88d9294721e3bbbb756b983f55df6d669e632da4
|
[
"MIT"
] | 4
|
2018-08-08T07:11:29.000Z
|
2018-11-08T02:43:11.000Z
|
Theseus/Tests/__init__.py
|
amias-iohk/theseus
|
88d9294721e3bbbb756b983f55df6d669e632da4
|
[
"MIT"
] | null | null | null |
Theseus/Tests/__init__.py
|
amias-iohk/theseus
|
88d9294721e3bbbb756b983f55df6d669e632da4
|
[
"MIT"
] | 3
|
2018-10-18T13:42:24.000Z
|
2021-01-20T15:21:25.000Z
|
# Package initializer: flattens the Cardano, Daedalus and Common test
# helper modules into this package's namespace via wildcard re-exports.
__author__ = 'Amias Channer <amias.channer@iohk.io> for IOHK'
__doc__ = 'Daedalus Testing functions'

from .Cardano import *
from .Daedalus import *
from .Common import *
| 24.428571
| 61
| 0.754386
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 76
| 0.444444
|
1a118f7d8b03da075a37997cfb06c80ceb08fc58
| 907
|
py
|
Python
|
mobula/operators/Multiply.py
|
wkcn/mobula
|
4eec938d6477776f5f2d68bcf41de83fb8da5195
|
[
"MIT"
] | 47
|
2017-07-15T02:13:18.000Z
|
2022-01-01T09:37:59.000Z
|
mobula/operators/Multiply.py
|
wkcn/mobula
|
4eec938d6477776f5f2d68bcf41de83fb8da5195
|
[
"MIT"
] | 3
|
2018-06-22T13:55:12.000Z
|
2020-01-29T01:41:13.000Z
|
mobula/operators/Multiply.py
|
wkcn/mobula
|
4eec938d6477776f5f2d68bcf41de83fb8da5195
|
[
"MIT"
] | 8
|
2017-09-03T12:42:54.000Z
|
2020-09-27T03:38:59.000Z
|
from .Layer import *
class Multiply(Layer):
    """Element-wise product layer: Y = X[0] * X[1].

    Requires exactly two inputs; gradients follow the product rule.
    """

    def __init__(self, models, *args, **kwargs):
        self.check_inputs(models, 2)
        Layer.__init__(self, models, *args, **kwargs)

    def reshape(self):
        # The output buffer mirrors the first input's shape.
        self.Y = np.zeros(self.X[0].shape)

    def forward(self):
        lhs = self.X[0]
        rhs = self.X[1]
        self.Y = np.multiply(lhs, rhs)

    def backward(self):
        # Product rule: d/dX0 = dY * X1 and d/dX1 = dY * X0.
        grad_out = self.dY
        self.dX = [np.multiply(grad_out, self.X[1]),
                   np.multiply(grad_out, self.X[0])]
class MultiplyConstant(Layer):
    """Scale a single input by the fixed factor kwargs['constant']."""

    def __init__(self, model, *args, **kwargs):
        self.check_inputs(model, 1)
        Layer.__init__(self, model, *args, **kwargs)
        # Required keyword argument: the scaling factor (KeyError if absent).
        self.constant = kwargs["constant"]

    def reshape(self):
        self.Y = np.zeros(self.X.shape)

    def forward(self):
        factor = self.constant
        self.Y = self.X * factor

    def backward(self):
        # Gradient of a constant scale is the same constant scale.
        factor = self.constant
        self.dX = self.dY * factor
# NOTE(review): OP_L / OP_R appear to register MultiplyConstant as the op
# used when a constant operand appears on the left/right side of a
# multiplication — confirm against Layer's operator-dispatch machinery.
Multiply.OP_L = MultiplyConstant
Multiply.OP_R = MultiplyConstant
| 32.392857
| 84
| 0.624035
| 814
| 0.897464
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 0.011025
|
1a1196d66c0c37b1c2d9a2fa6bdb80334a47691b
| 581
|
py
|
Python
|
project/settings_deploy.py
|
djstein/vue-django-webpack
|
d072e116d45800064b3972decbc6ec493801ea5b
|
[
"MIT"
] | 43
|
2017-02-24T17:59:27.000Z
|
2020-02-04T16:49:38.000Z
|
project/settings_deploy.py
|
djstein/vue-django-webpack
|
d072e116d45800064b3972decbc6ec493801ea5b
|
[
"MIT"
] | 6
|
2017-01-10T01:49:03.000Z
|
2017-10-03T06:12:59.000Z
|
project/settings_deploy.py
|
djstein/vue-django-webpack
|
d072e116d45800064b3972decbc6ec493801ea5b
|
[
"MIT"
] | 13
|
2017-05-18T20:00:24.000Z
|
2021-01-22T06:32:45.000Z
|
from project.settings import INSTALLED_APPS, ALLOWED_HOSTS, BASE_DIR
import os
# Deployment overrides layered on top of the base project.settings.

# Enable the webpack bundle integration and the local app.
INSTALLED_APPS.extend(['webpack_loader', 'app'])

# NOTE(review): '*' accepts any Host header — only safe behind a trusted
# proxy; confirm before exposing this deployment directly.
ALLOWED_HOSTS.append('*')

# Serve static assets straight from the built Vue bundle
# (no collected STATIC_ROOT is configured here).
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'app', 'vueapp', 'dist', 'static'),
]

WEBPACK_LOADER = {
    'DEFAULT': {
        'BUNDLE_DIR_NAME': 'static/vueapp/',
        'STATS_FILE': os.path.join(BASE_DIR, 'app', 'vueapp', 'webpack-stats.json'),
    },
}

INTERNAL_IPS = (
    '0.0.0.0',
    '127.0.0.1',
)
| 22.346154
| 83
| 0.652324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 241
| 0.414802
|
1a124f44649021a9482a062fee582fc8ecf4209e
| 268
|
py
|
Python
|
dockerfiles/igv/igv.py
|
leipzig/gatk-sv
|
96566cbbaf0f8f9c8452517b38eea1e5dd6ed33a
|
[
"BSD-3-Clause"
] | 76
|
2020-06-18T21:31:43.000Z
|
2022-03-02T18:42:58.000Z
|
dockerfiles/igv/igv.py
|
iamh2o/gatk-sv
|
bf3704bd1d705339577530e267cd4d1b2f77a17f
|
[
"BSD-3-Clause"
] | 195
|
2020-06-22T15:12:28.000Z
|
2022-03-28T18:06:46.000Z
|
dockerfiles/igv/igv.py
|
iamh2o/gatk-sv
|
bf3704bd1d705339577530e267cd4d1b2f77a17f
|
[
"BSD-3-Clause"
] | 39
|
2020-07-03T06:47:18.000Z
|
2022-03-03T03:47:25.000Z
|
import sys

# CLI contract: exactly one positional argument, the variants file path.
# (The unpacking raises ValueError if the argument count differs.)
[_, varfile] = sys.argv

# Fixed output names; NOTE(review): presumably consumed by code later in
# this script (the file continues beyond this excerpt) — confirm.
plotdir = "plots"
igvfile = "igv.txt"
igvsh = "igv.sh"
with open(varfile, 'r') as f:
for line in f:
dat = line.split('\t')
chr = dat[0]
start = dat[1]
end = dat[2]
data = dat[3].split(',')
| 20.615385
| 32
| 0.511194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 34
| 0.126866
|
1a12b43b837e725bb85bfe8e14b6c166c2be8e99
| 691
|
py
|
Python
|
model/sample/adg.py
|
sdy99/PowerAI
|
ef40bacddbad72322e3e423417ae13d478d56a6d
|
[
"MIT"
] | 7
|
2020-04-11T03:28:50.000Z
|
2021-03-29T14:53:36.000Z
|
model/sample/adg.py
|
sdy99/PowerAI
|
ef40bacddbad72322e3e423417ae13d478d56a6d
|
[
"MIT"
] | null | null | null |
model/sample/adg.py
|
sdy99/PowerAI
|
ef40bacddbad72322e3e423417ae13d478d56a6d
|
[
"MIT"
] | 5
|
2020-04-11T03:28:52.000Z
|
2021-11-27T05:23:12.000Z
|
# coding: gbk
"""
@author: sdy
@email: sdy@epri.sgcc.com.cn
Abstract distribution and generation class
"""
class ADG(object):
    """Abstract distribution and generation (ADG) strategy.

    Concrete subclasses assess the current sample distribution and generate
    new samples inside ``work_path`` using the sample format ``fmt``.
    """

    def __init__(self, work_path, fmt):
        # Directory the strategy operates in and the sample file format.
        self.work_path = work_path
        self.fmt = fmt
        # Feature description is filled in later by subclasses / callers.
        self.features = None
        # Default operating mode covers every sample.
        self.mode = 'all'

    def distribution_assess(self):
        """Assess the current sample distribution (subclass hook)."""
        raise NotImplementedError

    def generate_all(self):
        """Generate the complete sample set (subclass hook)."""
        raise NotImplementedError

    def choose_samples(self, size):
        """Select ``size`` samples to generate (subclass hook)."""
        raise NotImplementedError

    def generate_one(self, power, idx, out_path):
        """Generate a single sample into ``out_path`` (subclass hook)."""
        raise NotImplementedError

    def remove_samples(self):
        """Remove previously generated samples (subclass hook)."""
        raise NotImplementedError

    def done(self):
        """Finalize the generation run (subclass hook)."""
        raise NotImplementedError
| 20.323529
| 49
| 0.662808
| 580
| 0.839363
| 0
| 0
| 0
| 0
| 0
| 0
| 111
| 0.160637
|
1a135f93a11c5cc15bf96c7f89491d4b0c295264
| 903
|
py
|
Python
|
src/validatesigner.py
|
harryttd/remote-signer
|
a1af4e58b1d6628b09166368362d05d6e876e466
|
[
"MIT"
] | null | null | null |
src/validatesigner.py
|
harryttd/remote-signer
|
a1af4e58b1d6628b09166368362d05d6e876e466
|
[
"MIT"
] | null | null | null |
src/validatesigner.py
|
harryttd/remote-signer
|
a1af4e58b1d6628b09166368362d05d6e876e466
|
[
"MIT"
] | null | null | null |
#
# The ValidateSigner applies a ChainRatchet to the signature request
# and then passes it down to a signer. In order to do this, it must
# parse the request and to obtain the level and round to pass to the
# ratchet code.
import logging
from src.sigreq import SignatureReq
class ValidateSigner:
    """Signer wrapper that applies the high-watermark ratchet to every
    request before delegating the actual signing to a subordinate signer.

    The request is parsed (via SignatureReq) to obtain the block level and
    round that the ratchet validates.
    """

    def __init__(self, config, ratchet=None, subsigner=None):
        # Key table and node address come straight from the config mapping.
        self.keys = config['keys']
        self.ratchet = ratchet
        self.subsigner = subsigner
        self.node_addr = config['node_addr']

    def sign(self, handle, sigreq):
        """Ratchet-check the request's (level, round), then sign it."""
        request_kind = f"{sigreq.get_type()}_{sigreq.get_chainid()}"
        logging.debug(
            f"About to sign {sigreq.get_payload()} "
            + f"with key handle {handle}"
        )
        block_level = sigreq.get_level()
        block_round = sigreq.get_round()
        # Raises (inside the ratchet) when the request would double-sign.
        self.ratchet.check(request_kind, block_level, block_round)
        return self.subsigner.sign(handle, sigreq)
| 30.1
| 68
| 0.663344
| 622
| 0.688815
| 0
| 0
| 0
| 0
| 0
| 0
| 349
| 0.386489
|
1a146284d92debe9f0fcbd843d3eb7e8ae94afbb
| 25,766
|
py
|
Python
|
CODE/web_server/server/audio_processing/DataLoader.py
|
andrewbartels1/Marine-Mammal-Acoustics
|
9f833d97ac26ecd51b4c4e276a815ab9d2b67bb6
|
[
"MIT"
] | 1
|
2022-03-29T12:24:11.000Z
|
2022-03-29T12:24:11.000Z
|
CODE/web_server/server/audio_processing/DataLoader.py
|
andrewbartels1/Marine-Mammal-Acoustics
|
9f833d97ac26ecd51b4c4e276a815ab9d2b67bb6
|
[
"MIT"
] | null | null | null |
CODE/web_server/server/audio_processing/DataLoader.py
|
andrewbartels1/Marine-Mammal-Acoustics
|
9f833d97ac26ecd51b4c4e276a815ab9d2b67bb6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 1 17:48:01 2021
@author: bartelsaa
"""
# boiler plate stuff
import re, os, io
from glob import glob
import pandas as pd
import sys
import pytz
from models import Sensor
from datetime import datetime, timezone
import pathlib
from data_schema import data_schema
from maad.rois import (select_rois, create_mask)
from maad.features import (centroid_features)
from maad.sound import (load, resample, spectrogram, remove_background, median_equalizer,
remove_background_morpho, remove_background_along_axis,
sharpness, spectral_snr, trim, write, smooth)
from maad.util import (power2dB, plot2d, dB2power,format_features, overlay_rois,
overlay_centroid, crop_image)
import base64
# setup django settings and configs
import django
import argparse
# Setup django env and add to models
sys.path.append("/app")
os.environ['DJANGO_SETTINGS_MODULE'] = 'pams.settings'
django.setup()
from django.utils.timezone import make_aware
from django import db
print(db.connections.databases)
print("DB NAME ")
from pams.models.audio_clip import AudioClip
class DataLoader(object):
def __init__(self, data_folder, output_folder, max_rows_to_send=None,
file_type=None, min_roi_pixel_area_to_omit=None,
max_roi_pixel_area_to_omit = None,
time_around_feature=5,
verbose=True,
command="demo"):
self.data_folder = data_folder
self.output_folder = output_folder
self.file_type = file_type if file_type is not None else ".flac"
self.max_rows_to_send = max_rows_to_send
self.min_roi_pixel_area_to_omit = min_roi_pixel_area_to_omit if min_roi_pixel_area_to_omit else 50
self.max_roi_pixel_area_to_omit = max_roi_pixel_area_to_omit
self.time_around_feature = time_around_feature
self.verbose = verbose
self.command = command
print(os.path.join(self.data_folder,"*" + self.file_type))
self.raw_audio_list = glob(os.path.join(self.data_folder,"*" + self.file_type))
print("found {} {} files to process!".format(len(self.raw_audio_list), self.file_type))
print(self.raw_audio_list)
file_type = os.path.splitext(self.raw_audio_list[0])[1]
print(file_type)
print(file_type)
if file_type == ".flac":
self.file_type = file_type
elif file_type == ".wav":
self.file_type = file_type
else:
raise TypeError("file type must be flac or wav!")
print("found file type {}".format(self.file_type))
def __str__(self):
return f"Processing files: {self.raw_audio_list}"
    def get_files(self, data_folder, file_type):
        # NOTE(review): both parameters are ignored -- the instance attributes
        # are used instead -- and they are concatenated with no separator or
        # wildcard (e.g. "/data" + ".flac"), so this matches at most one exact
        # path. Confirm intended behaviour against callers.
        return glob(self.data_folder + self.file_type, recursive=True)
def _parse_path(self, path):
"""
formats:
AMAR504.4.20180501T210951Z.wav
SanctSound_GR03_02_671666216_190502113002.flac
"""
time_str = re.split("[._]", path)[-2]
format = "%y%m%d%H%M%S" if "_" in path else "%Y%m%dT%H%M%SZ"
# not implemented, for troubleshooting/visualization, run the make spec function and collect the variables running around in there
def display_results(self, Sxx_power, X5, Sxx_db_noNoise_smooth_finale,
df_centroid, freq_sample_in_hz, ext, df_rois,
display=True, plot_chip=False):
# if display is true
print("working on plotting items to the to_process folder! Check there for any interesting chips found")
if display:
# Visualize
Sxx_db = power2dB(Sxx_power)
plot2d(power2dB(Sxx_power), title="raw")
plot2d(X5, title="Smoothed as itll get")
plot2d(Sxx_db_noNoise_smooth_finale,
title="Smoothed as itll get Final")
ax0, fig0 = overlay_rois(
create_mask(
im=Sxx_db_noNoise_smooth_finale,
mode_bin="relative",
bin_std=8,
bin_per=0.5,
verbose=True,
display=display,
),
df_rois,
)
# ax1, fig = plot2d(Sxx_db)
ax1, fig1 = overlay_rois(dB2power(Sxx_power), df_centroid)
ax1.scatter(df_centroid.centroid_x, df_centroid.centroid_y)
ax1.set_title("centroid overlays")
for i in range(len(df_centroid)):
write("/app/to_process/audio_snippet_test_{}.wav".format(i),
freq_sample_in_hz,
df_centroid.audio_clip[i],
)
if plot_chip:
# for viewing spec chips
for i in range(len(df_centroid)):
ax, fig = plot2d(
df_centroid["spec_cropped"][i], **{
"extent": [
df_centroid["tn_crop"][i].min(),
df_centroid["tn_crop"][i].max(),
ext[2],
ext[3],
],
"title":
"centroid {i}, freq={freq}, time={time}".format(
i=i,
freq=df_centroid.centroid_f[i],
time=df_centroid.centroid_t[i],
),
})
ax.scatter(df_centroid.centroid_t[i], df_centroid.centroid_f[i])
fig.savefig(
"/app/to_process/centroid_{i}_freq_{freq}_time_{time}.png".format(
i=i,
freq=df_centroid.centroid_f[i],
time=df_centroid.centroid_t[i]),
dpi=1000,
)
fig1.savefig("/app/to_process/centroid_overlay.png", dpi=1000)
# with open("centroid_overlay.png", "rb") as imageFile:
# byteArrayImage = base64.b64encode(imageFile.read())
# print("this is byteArrayImage {byteArrayImage}")
# return byteArrayImage
# this does all the algorithm stuff
def make_spec(self, wav_file,
display=False,
min_roi_pixel_area_to_omit=None,
max_roi_pixel_area_to_omit=None,
time_around_feature=5,
verbose=False):
audio_signal, freq_sample_in_hz = load(
wav_file, display=display) # replace with wav_file
# downsample to what everythgin else is at
# audio_signal = resample(audio_signal, freq_sample_in_hz, _get_sample_rate(wav_file))
# Make spectrogram
Sxx_power, time_vector, frequency_vector, ext = spectrogram(
audio_signal, self._get_sample_rate())
# take out some noise
Sxx_power_noNoise = median_equalizer(Sxx_power)
# First we remove the stationary background in order to increase the contrast [1]
# Then we convert the spectrogram into dB
Sxx_db_noNoise = power2dB(Sxx_power_noNoise)
Sxx_db_noNoise_smooth = smooth(Sxx_db_noNoise, std=0.25)
# do some removing along the axis and smooth to get better shapes
X4, noise_profile4 = remove_background_along_axis(Sxx_db_noNoise_smooth,
axis=1,
N=50)
X5, noise_profile5 = remove_background_along_axis(X4,
mode="median",
axis=0)
Sxx_db_noNoise_smooth_finale = smooth(
X5, std=2) # heavy handed smoothing on large areas are ok for now
# create regions of interest and downsample to ones that are of a reasonable size (configurable)
mask = create_mask(
im=Sxx_db_noNoise_smooth_finale,
mode_bin="relative",
bin_std=8,
bin_per=0.5,
verbose=False,
display=display)
im_rois, df_rois = select_rois(mask,
display=display,
min_roi=self.min_roi_pixel_area_to_omit,
max_roi=self.max_roi_pixel_area_to_omit,
verbose=self.verbose)
# format, use this for cropping
df_rois = format_features(df_rois, time_vector, frequency_vector)
# get the final dataframe of features and format them properly
df_centroid = centroid_features(Sxx_db_noNoise_smooth_finale, df_rois,
im_rois)
df_centroid = format_features(df_centroid, time_vector, frequency_vector)
# Filter for some attributes
df_centroid = df_centroid[(df_centroid.centroid_t > 1) &
(df_centroid.centroid_t < time_vector.max() - 1)]
df_centroid = df_centroid.sort_values(by=["area_tf", "centroid_t"],
ignore_index=True,
ascending=(False, False))
# crop the spec around the centroid position
if len(df_centroid) > 1:
df_centroid["spec_cropped"] = df_centroid.apply(
lambda row: self._crop_spec(
row,
Sxx_db_noNoise_smooth_finale,
time_vector,
frequency_vector,
time_around_feature,
),
axis=1,
)
# separate spec and time array for chip
df_centroid[['spec_cropped','tn_crop']] = pd.DataFrame(df_centroid.spec_cropped.values.tolist(), index= df_centroid.index)
# set audio array
df_centroid["audio_clip"] = df_centroid.apply(
lambda row: self._crop_audio(row, audio_signal, self._get_sample_rate()), axis=1)
# get start and end time of chip
df_centroid['chip_start_seconds'] = pd.to_timedelta(df_centroid.apply( lambda row: self._get_start_min_time_crop(row), axis=1), 's')
df_centroid['chip_end_seconds'] = pd.to_timedelta(df_centroid.apply( lambda row: self._get_end_max_time_crop(row), axis=1), 's')
df_centroid['audio'] = 'placeholder'
self.display_results(Sxx_power, X5, Sxx_db_noNoise_smooth_finale,
df_centroid, freq_sample_in_hz, ext, df_rois,
display=True, plot_chip=False)
df_centroid = df_centroid.drop(['tn_crop', 'spec_cropped','labelID'], axis=1)
else:
df_centroid = None
return df_centroid
def flac_to_wav(self, flac_file, remove_flac=False):
wav_file = os.path.splitext(flac_file)[0] + ".wav"
# flac = AudioSegment.from_file(flac_file, format='flac')
# stream = io.BytesIO()
# flac.export(stream, format='wav')
os.system("ffmpeg -nostats -loglevel panic -hide_banner -i {inputfile} {output} -y".format(inputfile=flac_file,
output=wav_file))
if remove_flac:
os.remove("{flac_file}".format(flac_file=flac_file))
print("removing {}".format(flac_file))
return wav_file
    def wipe_table(self, AudioClip):
        # Destructive: delete every AudioClip record before a fresh run.
        AudioClip.objects.all().delete()
        print("WIPING TABLE!")
# Using the Maad package to load and clean spec
def _crop_spec(self, row, Sxx_db_noNoise_smooth_finale, time_vector, frequency_vector,
time_around_feature):
crop_end = row.centroid_t + time_around_feature
crop_start = row.centroid_t - time_around_feature
if crop_start < time_vector.min():
crop_start = time_vector.min()
if crop_end > time_vector.max():
crop_end = time_vector.max()
S_xx_crop, tn_new, _ = crop_image(
Sxx_db_noNoise_smooth_finale,
time_vector,
frequency_vector,
fcrop=(frequency_vector.min(), frequency_vector.max()),
tcrop=(crop_start, crop_end),
)
return (S_xx_crop, tn_new)
    def _crop_audio(self, row, audio_signal, freq_sample_in_hz):
        # Trim the raw audio down to the time span covered by the row's
        # cropped spectrogram time axis (maad.sound.trim).
        return trim(audio_signal,
                    freq_sample_in_hz,
                    min_t=row["tn_crop"].min(),
                    max_t=row["tn_crop"].max())
    def _get_start_min_time_crop(self, row):
        # Chip start time (s) = earliest point on the cropped time axis.
        return row.tn_crop.min()

    def _get_end_max_time_crop(self, row):
        # Chip end time (s) = latest point on the cropped time axis.
        return row.tn_crop.max()
def _get_sample_rate(self):
# need to downsmaple!!
sample_freqs = {"adeon": 8000, "SanctSound": 48000}
if "AMAR" in self.wav_file:
sample_freq = sample_freqs["adeon"]
elif "Sant" in self.wav_file:
sample_freq = sample_freqs["SanctSound"]
return sample_freq
def _get_meta_data(self, wav_file):
return glob(os.path.join(os.path.dirname(os.path.dirname(wav_file)), 'metadata', "*.json"))
def _parse_path(self, path):
"""
formats:
AMAR504.4.20180501T210951Z.wav
SanctSound_GR03_02_671666216_190502113002.flac
"""
time_str = re.split("[._]", path)[-2]
format = "%y%m%d%H%M%S" if "_" in path else "%Y%m%dT%H%M%SZ"
return datetime.strptime(time_str, format)
def _make_chip_id_path(self, temp_last_row, output_folder, row_id):
# Make dir and set to audio_path column
temp_last_row.audio_path = os.path.join(output_folder, str(row_id), "")
clip_path = pathlib.Path(temp_last_row.audio_path)
clip_path.mkdir(parents=True, exist_ok=True)
return clip_path
def _to_django(self, row, AudioClip, output_folder, wav_file, AlwaysDownSampleTo=8000):
# convert to dict
row_dict = row.to_dict()
raw_audio_file = row_dict.pop("audio_clip")
# could probably do this with a wild card AudioClip(**row_dict) but I want readable
audio = AudioClip(label=row_dict['label'],
min_y=row_dict['min_y'],
min_x=row_dict['min_x'],
max_y=row_dict['max_y'],
max_x=row_dict['max_x'],
min_f=row_dict['min_f'],
min_t=row_dict['min_t'],
max_f=row_dict['max_f'],
max_t=row_dict['max_t'],
centroid_y=row_dict['centroid_y'],
centroid_x=row_dict['centroid_x'],
duration_x=row_dict['duration_x'],
bandwidth_y=row_dict['bandwidth_y'],
area_xy=row_dict['area_xy'],
centroid_f=row_dict['centroid_f'],
centroid_t=row_dict['centroid_t'],
duration_t=row_dict['duration_t'],
bandwidth_f=row_dict['bandwidth_f'],
area_tf=row_dict['area_tf'],
audio_path=row_dict['audio'],
chip_start_seconds=make_aware(row_dict['start_time']), # make_aware() gives it the django UTC setting time zone
chip_end_seconds=make_aware(row_dict['end_time']),
sensor_platform=row_dict['sensor_platform'],
lat=row_dict['lat'],
long=row_dict['long'])
# save the record and get the id
audio.save()
row_id = audio.id
# get the row back to update audio path
temp_last_row = AudioClip.objects.get(id=row_id)
output_path = self._make_chip_id_path(self, temp_last_row, output_folder, row_id)
print("SAVING AUDIO TO PATH {} AS {}".format(output_path, os.path.join(temp_last_row.audio_path, 'chip.wav')))
temp_last_row.save()
# actually write chip to files
print(len(raw_audio_file))
# actually write chip to files
write(os.path.join(temp_last_row.audio_path, 'chip.wav'), AlwaysDownSampleTo, raw_audio_file) # "chip.wav"
file_size = os.path.getsize(os.path.join(temp_last_row.audio_path, 'chip.wav'))
print("File Size is :", file_size, "bytes")
def do_loop(self):
if self.command =="demo":
self.verbose = True
self.wav_file = self.flac_to_wav(self.raw_audio_list[0], remove_flac=False) #make this a stream no leaving a bunch of wav files around
try:
sensor = Sensor(self._get_meta_data(self.raw_audio_list[0])[0])
except:
print("couldnt get metadata, moving on!")
pass
df_centroid = self.make_spec(self.wav_file,
display=True,
min_roi_pixel_area_to_omit=self.min_roi_pixel_area_to_omit, # NOTE: these are in the area^2 of the image (not ideal)
max_roi_pixel_area_to_omit=self.max_roi_pixel_area_to_omit,
time_around_feature=self.time_around_feature)
print(df_centroid)
# give it a shot putting it into the db
try:
if df_centroid is not None:
if self.verbose:
print(self.wav_file)
print("found {} number of features!".format(len(df_centroid)))
print("features found {}".format(df_centroid))
start_time = self._parse_path(os.path.split( self.wav_file)[-1])
df_centroid['start_time'], df_centroid['end_time'] = [start_time + df_centroid['chip_start_seconds'],start_time + df_centroid['chip_end_seconds']]
df_centroid['sensor_platform'], df_centroid['lat'], df_centroid['long'] = [sensor.name, sensor.lat, sensor.long]
# clean up
df_centroid = df_centroid.drop(['chip_start_seconds', 'chip_end_seconds'], axis=1)
df_centroid = df_centroid.astype(data_schema)
# limit values
df_centroid = df_centroid.sort_values('area_xy',ascending = False).head(self.max_rows_to_send)
# the audio_raw column needs to be saved as preprocessed_data/id/chip.wav
df_centroid.apply(lambda row: self._to_django(row, AudioClip, self.output_folder, self.wav_file), axis=1)
print("removing {}".format(self.wav_file))
os.remove(self.wav_file)
except:
print('missing something, maybe no metadata? cant push to sqlite database')
elif self.command =="process-data":
for idx, i in enumerate(sorted(self.raw_audio_list, reverse=True)):
if self.verbose:
print("processing {} of {}".format(idx, len(self.raw_audio_list)))
print(i)
print(self._get_meta_data(i))
try:
sensor = Sensor(self._get_meta_data(i)[0])
except:
print("couldnt get metadata, moving on!")
self.wav_file = self.flac_to_wav(i, remove_flac=False) #make this a stream no leaving a bunch of wav files around
df_centroid = self.make_spec(self.wav_file,
display=True,
min_roi_pixel_area_to_omit=self.min_roi_pixel_area_to_omit, # NOTE: these are in the area^2 of the image (not ideal)
max_roi_pixel_area_to_omit=self.max_roi_pixel_area_to_omit,
time_around_feature=self.time_around_feature)
# Calc start time
if df_centroid is not None:
if self.verbose:
print(self.wav_file)
print("found {} number of features!".format(len(df_centroid)))
print("features found {}".format(df_centroid))
start_time = self._parse_path(os.path.split( self.wav_file)[-1])
df_centroid['start_time'], df_centroid['end_time'] = [start_time + df_centroid['chip_start_seconds'],start_time + df_centroid['chip_end_seconds']]
df_centroid['sensor_platform'], df_centroid['lat'], df_centroid['long'] = [sensor.name, sensor.lat, sensor.long]
# clean up
df_centroid = df_centroid.drop(['chip_start_seconds', 'chip_end_seconds'], axis=1)
df_centroid = df_centroid.astype(data_schema)
# limit values
df_centroid = df_centroid.sort_values('area_xy',ascending = False).head(self.max_rows_to_send)
# the audio_raw column needs to be saved as preprocessed_data/id/chip.wav
df_centroid.apply(lambda row: self._to_django(row, AudioClip, self.output_folder, self.wav_file), axis=1)
print("removing {}".format(self.wav_file))
os.remove(self.wav_file)
def dir_path(string):
    """argparse type-checker: return ``string`` unchanged when it names an
    existing directory, otherwise raise NotADirectoryError."""
    if not os.path.isdir(string):
        raise NotADirectoryError(string)
    return string
def main():
print("FILES")
parser = argparse.ArgumentParser(description="sample command: python DataLoader.py -i 5 -d 0.85 simplified_pagerank network.tsv")
parser.add_argument("command", help="Sub-command to execute. Can be demo or process-data.")
parser.add_argument("--data_folder", type=dir_path, default='/app/to_process',help="folder where the flac files lives")
parser.add_argument("--output_dir", type=str, default='/data/processed_data',help="folder where wav chips will be dumped")
parser.add_argument("-mrts", "--max_rows_to_send", dest="max_rows_to_send",
help="specify the number (rows/chips of data) of features to send for each audio file, by default sorts largest to smallest areas, Default: 10",
default=10, type=int)
parser.add_argument("-minrp", "--min_roi_pixel_area_to_omit", dest="min_roi_pixel_area_to_omit",
help="specify the min feater size to keep (pixels^2), Default: 50",
default=50, type=int)
parser.add_argument("-maxrp", "--max_roi_pixel_area_to_omit", dest="max_roi_pixel_area_to_omit",
help="specify the max feater size to keep (pixels^2), Default: None (no limit)",
default=None)
parser.add_argument("-taf", "--time_around_feature", dest="time_around_feature",
help="specify the damping factor for pagerank. Default: 5 sec before & after",
default=5, type=int)
parser.add_argument('-v', '--verbose', default=True, action=argparse.BooleanOptionalAction)
print(os.system("ls to_process"))
args = parser.parse_args()
output_dir = pathlib.Path(args.output_dir).mkdir(parents=True, exist_ok=True)
if args.command == "process-data":
data_loader = DataLoader(data_folder=args.data_folder, output_folder=output_dir,
min_roi_pixel_area_to_omit=args.min_roi_pixel_area_to_omit,
max_roi_pixel_area_to_omit=args.max_roi_pixel_area_to_omit,
time_around_feature=args.time_around_feature,
verbose=args.verbose, command=args.command)
data_loader.do_loop()
elif args.command == "demo":
data_loader = DataLoader(data_folder=args.data_folder, output_folder=args.output_dir,
min_roi_pixel_area_to_omit=args.min_roi_pixel_area_to_omit,
max_roi_pixel_area_to_omit=args.max_roi_pixel_area_to_omit,
time_around_feature=args.time_around_feature,
verbose=args.verbose, command=args.command)
data_loader.do_loop()
else:
sys.exit("Incorrect command")
# (data_folder, output_folder, max_rows_to_send=None,
# file_type=None, min_roi_pixel_area_to_omit=None,
# max_roi_pixel_area_to_omit = None,
# time_around_feature=5)
# data_loader.setup_django()
# print("django setup, starting to crunch data now")
if __name__ == "__main__":
main()
| 46.175627
| 168
| 0.557595
| 21,204
| 0.822945
| 0
| 0
| 0
| 0
| 0
| 0
| 5,893
| 0.228712
|
1a150533d8cad7a2aba7a53cd1cb833f76eb2499
| 3,078
|
py
|
Python
|
tests/lib/test_otping.py
|
reputage/py-didery
|
2d54a9e39fb01a81d4d6f7814ca7a611a7418a47
|
[
"Apache-2.0"
] | null | null | null |
tests/lib/test_otping.py
|
reputage/py-didery
|
2d54a9e39fb01a81d4d6f7814ca7a611a7418a47
|
[
"Apache-2.0"
] | 15
|
2018-05-24T23:30:21.000Z
|
2018-05-25T17:39:51.000Z
|
tests/lib/test_otping.py
|
reputage/py-didery
|
2d54a9e39fb01a81d4d6f7814ca7a611a7418a47
|
[
"Apache-2.0"
] | null | null | null |
import pytest
try:
import simplejson as json
except ImportError:
import json
from ioflo.aio.http import Valet
# import didery.routing
from diderypy.lib import generating as gen
from diderypy.lib import otping as otp
# Module-level fixtures shared by every test: a fresh keypair/DID and an
# OTP blob payload signed with it. Tests expect two didery servers running
# locally on ports 8080 and 8000.
vk, sk, did = gen.keyGen()
otpData = {
    "id": did,
    "blob": "AeYbsHot0pmdWAcgTo5sD8iAuSQAfnH5U6wiIGpVNJQQoYKBYrPPxAoIc1i5SHCIDS8KFFgf8i0tDq8XGizaCgo9yjuKHHNJZFi0QD9K"
            "6Vpt6fP0XgXlj8z_4D-7s3CcYmuoWAh6NVtYaf_GWw_2sCrHBAA2mAEsml3thLmu50Dw"
}
url1, url2 = "http://localhost:8080/blob", "http://localhost:8000/blob"
urls = ["http://localhost:8080", "http://localhost:8000"]
def testPostOtpBlob():
result = otp.postOtpBlob(otpData, sk, urls)
assert result[url1].status == 201
assert result[url2].status == 201
def testPostOtpBlobNoUrls():
with pytest.raises(ValueError) as ex:
otp.postOtpBlob(otpData, sk, None)
def testPostOtpBlobEmptyUrls():
with pytest.raises(ValueError) as ex:
otp.postOtpBlob(otpData, sk, [])
def testPostOtpBlobNoSk():
with pytest.raises(ValueError) as ex:
otp.postOtpBlob(otpData, None, urls)
def testPostOtpBlobEmptySk():
with pytest.raises(ValueError) as ex:
otp.postOtpBlob(otpData, "", urls)
def testGetOtpBlob():
data, result = otp.getOtpBlob(did, urls)
assert data['otp_data'] == otpData
def testGetOtpBlobNoUrls():
with pytest.raises(ValueError) as ex:
otp.getOtpBlob(did, None)
def testGetOtpBlobEmptyUrls():
with pytest.raises(ValueError) as ex:
otp.getOtpBlob(did, [])
def testPutOtpBlob():
result = otp.putOtpBlob(otpData, sk, urls)
assert result[url1+"/"+otpData["id"]].status == 200
assert result[url1+"/"+otpData["id"]].response.body == otpData
assert result[url2+"/"+otpData["id"]].status == 200
assert result[url2+"/"+otpData["id"]].response.body == otpData
def testPutOtpBlobNoUrls():
with pytest.raises(ValueError) as ex:
otp.putOtpBlob(otpData, sk, None)
def testPutOtpBlobEmptyUrls():
with pytest.raises(ValueError) as ex:
otp.putOtpBlob(otpData, sk, [])
def testPutOtpBlobNoSk():
with pytest.raises(ValueError) as ex:
otp.putOtpBlob(otpData, None, urls)
def testPutOtpBlobEmptySk():
with pytest.raises(ValueError) as ex:
otp.putOtpBlob(otpData, "", urls)
def testRemoveOtpBlob():
result = otp.removeOtpBlob(did, sk, urls)
assert result[url1+"/"+did].status == 200
assert result[url1+"/"+did].response.body == otpData
assert result[url2+"/"+did].status == 200
assert result[url2+"/"+did].response.body == otpData
def testRemoveOtpBlobNoUrls():
with pytest.raises(ValueError) as ex:
otp.removeOtpBlob(did, sk, None)
def testRemoveOtpBlobEmptyUrls():
with pytest.raises(ValueError) as ex:
otp.removeOtpBlob(did, sk, [])
def testRemoveOtpBlobNoSk():
with pytest.raises(ValueError) as ex:
otp.removeOtpBlob(did, None, urls)
def testRemoveOtpBlobEmptySk():
with pytest.raises(ValueError) as ex:
otp.removeOtpBlob(did, "", urls)
| 24.624
| 118
| 0.692982
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 367
| 0.119233
|
1a16d3dc97a2e27b58e28cd919840f007fc7b43f
| 3,722
|
py
|
Python
|
src/DCGMM/measuring/Logging.py
|
anon-scientist/dcgmm
|
1d2d96d1d9811c387ee11d462ff0a3819a66e137
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
src/DCGMM/measuring/Logging.py
|
anon-scientist/dcgmm
|
1d2d96d1d9811c387ee11d462ff0a3819a66e137
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
src/DCGMM/measuring/Logging.py
|
anon-scientist/dcgmm
|
1d2d96d1d9811c387ee11d462ff0a3819a66e137
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import time
import json
from collections import defaultdict
from DCGMM.utils import log
from DCGMM.parsers import Kwarg_Parser
import numpy as np
class Logging(object):
    """Structured and uniform experiment logging, serialized to one JSON
    file per experiment id under ``results_dir``."""

    def __init__(self, **kwargs):
        ''' structured and uniform logging (json format) '''
        parser = Kwarg_Parser(**kwargs)
        self.exp_id = parser.add_argument('--exp_id', type=str, default='0', help='unique experiment id (for experiment evaluation)')
        self.tmp_dir = parser.add_argument('--tmp_dir', type=str, default='.', help='directory for output files (for experiment evaluation)')
        self.model_type = parser.add_argument('--model_type', type=str, default='Unknown', help='class to load form module "model"')
        self.results_dir = parser.add_argument('--results_dir', type=str, default='./results', help='set the default directory to search for dataset files')
        self.output_file = os.path.join(self.results_dir, f'{self.exp_id}_log.json')
        self.log = {
            'parameters': dict(),             # <name>: <value>, ...
            'eval'      : defaultdict(list),  # <metric>_<taskname>_<layer>: [<task>, <iteration>, <value>], ... # task -1=DAll, 0=DNow, x=Dx
            'created'   : time.asctime(),     # <timestamp>
        }
        # Record every constructor kwarg that is JSON-serializable.
        for key, value in kwargs.items():
            self.add_parameter(key, value)

    def _is_jsonable(self, k, v):
        """Return True when both key and value can be stored as JSON."""
        def _serializable(obj, name):
            try:
                json.dumps(obj)
            except Exception as ex:
                log.debug(f'could not serialize {name} {k}: {v} because {ex}')
                return False
            # NOTE: deliberately inspects the *value* even while validating
            # the key -- an empty list is rejected because it "could" later
            # be filled with values that are not serializable.
            return not (isinstance(v, list) and len(v) == 0)
        return _serializable(k, 'key') and _serializable(v, 'value')

    def add_parameter(self, k, v):
        """Store one experiment parameter (skipped when not serializable)."""
        if not self._is_jsonable(k, v): return
        self.log['parameters'][k] = v

    def add_eval(self, k, v):
        """Append one evaluation record under key ``k`` (skipped when not serializable)."""
        if not self._is_jsonable(k, v): return
        self.log['eval'][k].append(v)

    def add_eval_name_value(self,
                            metricname='nometric',
                            taskname='notask',
                            layername='nolayer',
                            task=-2,
                            iteration=-1,
                            value=-1,
                            ):
        """Log one metric value as ``<metric>_<task>_<layer> -> [task, iteration, value]``."""
        key = f'{metricname}_{taskname}_{layername}'
        record = [task, iteration, value]
        try:
            self.add_eval(key, record)
        except Exception as ex:
            print(metricname, taskname, layername, task, iteration, value, ex)

    def add_eval_combination(self, keys, values):
        ''' create a key string of keys and value list of values '''
        key = '_'.join(keys)
        # numpy floats are not JSON-serializable; substitute python floats.
        cleaned = [float(v) if isinstance(v, np.floating) else v for v in values]
        self.add_eval(key, cleaned)

    def write_to_file(self):
        """Dump the accumulated log dict to ``self.output_file`` as JSON."""
        log.info(f'write logging to {self.output_file}')
        with open(self.output_file, 'w') as out:
            json.dump(self.log, out)
        # json.load(open(self.output_file, 'r')) # test load file
| 39.595745
| 166
| 0.610962
| 2,976
| 0.79957
| 0
| 0
| 0
| 0
| 0
| 0
| 1,492
| 0.40086
|
1a16e50ba3f5373ee622624d05fe6164d2927423
| 1,863
|
py
|
Python
|
demos/demo_pyqtgraph_threadsafe_static.py
|
Dennis-van-Gils/python-dvg-pyqtgraph-threadsafe
|
c766cef85c60195ecfdeacc6b62f16fd1b90dcf0
|
[
"MIT"
] | null | null | null |
demos/demo_pyqtgraph_threadsafe_static.py
|
Dennis-van-Gils/python-dvg-pyqtgraph-threadsafe
|
c766cef85c60195ecfdeacc6b62f16fd1b90dcf0
|
[
"MIT"
] | 1
|
2020-10-24T05:18:48.000Z
|
2020-10-24T11:37:09.000Z
|
demos/demo_pyqtgraph_threadsafe_static.py
|
Dennis-van-Gils/python-dvg-pyqtgraph-threadsafe
|
c766cef85c60195ecfdeacc6b62f16fd1b90dcf0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import numpy as np
from PyQt5 import QtWidgets as QtWid
import pyqtgraph as pg
from dvg_pyqtgraph_threadsafe import PlotCurve
USE_OPENGL = True
if USE_OPENGL:
print("OpenGL acceleration: Enabled")
pg.setConfigOptions(useOpenGL=True)
pg.setConfigOptions(antialias=True)
pg.setConfigOptions(enableExperimental=True)
# ------------------------------------------------------------------------------
# MainWindow
# ------------------------------------------------------------------------------
class MainWindow(QtWid.QWidget):
    """Single-plot demo window showing a thread-safe PlotCurve drawn from
    static data (including a NaN sample)."""

    def __init__(self, parent=None, **kwargs):
        super().__init__(parent, **kwargs)

        self.setGeometry(350, 50, 800, 660)
        self.setWindowTitle("Demo: dvg_pyqtgraph_threadsafe")

        # GraphicsLayoutWidget
        self.gw = pg.GraphicsLayoutWidget()

        self.plot_1 = self.gw.addPlot()
        self.plot_1.showGrid(x=1, y=1)
        self.plot_1.setRange(
            xRange=[0, 5], yRange=[0, 4], disableAutoRange=True,
        )

        # Thread-safe wrapper around a pyqtgraph curve (yellow pen, 3 px).
        self.tscurve = PlotCurve(
            linked_curve=self.plot_1.plot(
                pen=pg.mkPen(color=[255, 255, 0], width=3)
            ),
        )

        # Static demo data; the NaN at index 2 leaves a gap in the line.
        x = np.array([0, 1, 2, 3, 4])
        y = np.array([0, 1, np.nan, 3, 3])
        # x = np.array([np.nan] * 5)
        # y = np.array([np.nan] * 5)

        self.tscurve.setData(x, y)
        self.tscurve.update()

        # Round up full window
        hbox = QtWid.QHBoxLayout(self)
        hbox.addWidget(self.gw, 1)
# ------------------------------------------------------------------------------
# Main
# ------------------------------------------------------------------------------
if __name__ == "__main__":
app = QtWid.QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_())
| 27.397059
| 80
| 0.500805
| 980
| 0.526033
| 0
| 0
| 0
| 0
| 0
| 0
| 559
| 0.300054
|
1a17b1be2074f64913108dde8915d54ffd44bd53
| 1,889
|
py
|
Python
|
froide/team/services.py
|
manonthemat/froide
|
698c49935eaf2e922f3c9f6a46af0fd545ccbbbb
|
[
"MIT"
] | null | null | null |
froide/team/services.py
|
manonthemat/froide
|
698c49935eaf2e922f3c9f6a46af0fd545ccbbbb
|
[
"MIT"
] | null | null | null |
froide/team/services.py
|
manonthemat/froide
|
698c49935eaf2e922f3c9f6a46af0fd545ccbbbb
|
[
"MIT"
] | null | null | null |
import hashlib
import hmac
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.crypto import constant_time_compare
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from froide.helper.email_sending import send_mail
from .models import Team
def can_use_team(user):
    # Anonymous users can never use teams; authenticated users qualify when
    # they already belong to a team or hold the explicit permission.
    if not user.is_authenticated:
        return False
    in_team = Team.objects.get_for_user(user).exists()
    return in_team or user.has_perm('team.can_use_teams')
class TeamService(object):
    """Invitation helpers for one team member: secret generation,
    verification, and invite e-mail delivery."""

    def __init__(self, member):
        self.member = member

    def generate_invite_secret(self):
        # HMAC-SHA256 over "<pk>.<email>" keyed with SECRET_KEY, so the
        # secret is invalidated if the member's e-mail address changes.
        to_sign = [str(self.member.pk), str(self.member.email)]
        return hmac.new(
            settings.SECRET_KEY.encode('utf-8'),
            ('.'.join(to_sign)).encode('utf-8'),
            digestmod=hashlib.sha256
        ).hexdigest()

    def check_invite_secret(self, secret):
        # Constant-time comparison prevents timing attacks on the secret.
        return constant_time_compare(
            secret,
            self.generate_invite_secret()
        )

    def send_team_invite(self, invited_by):
        # Build the absolute join URL carrying the member pk + HMAC secret,
        # then e-mail it to the invited member.
        secret = self.generate_invite_secret()
        url_kwargs = {
            "pk": self.member.pk,
            "secret": secret,
        }
        url = '%s%s' % (
            settings.SITE_URL,
            reverse('team-join', kwargs=url_kwargs),
        )
        message = render_to_string('team/emails/team_invite.txt',
                                   {'url': url,
                                    'name': invited_by.get_full_name(),
                                    'site_name': settings.SITE_NAME,
                                    'site_url': settings.SITE_URL
                                    })
        # Translators: Mail subject
        send_mail(
            str(_("Team invite from %(name)s") % {
                "name": invited_by.get_full_name()
            }),
            message,
            self.member.email,
            priority=True
        )
| 29.061538
| 65
| 0.599788
| 1,360
| 0.719958
| 0
| 0
| 0
| 0
| 0
| 0
| 187
| 0.098994
|
1a183c6499418e4965e990ea2623cf57e6ec50c1
| 340
|
py
|
Python
|
turtle/snowflake.py
|
yunzhang599/Python3_Package_Examples
|
3e479925f3f6818bf35e46123f720839acf075eb
|
[
"MIT"
] | 1
|
2019-11-16T05:06:01.000Z
|
2019-11-16T05:06:01.000Z
|
turtle/snowflake.py
|
yunzhang599/Python3_Package_Examples
|
3e479925f3f6818bf35e46123f720839acf075eb
|
[
"MIT"
] | null | null | null |
turtle/snowflake.py
|
yunzhang599/Python3_Package_Examples
|
3e479925f3f6818bf35e46123f720839acf075eb
|
[
"MIT"
] | null | null | null |
from turtle import forward, left, right, width, color, clearscreen
# Reset the canvas and configure a light blue, 3-pixel-wide pen.
clearscreen()
color("lightblue")
width(3)
# Draw a six-armed snowflake: each iteration traces one arm (a spoke
# with side branches, each segment retraced back), then turns so the
# next arm starts at a new angle.
for i in range(6):
    forward(50)
    left(60)
    forward(25)
    left(180)
    forward(25)
    left(60)
    forward(25)
    left(180)
    forward(25)
    right(120)
    forward(25)
    left(180)
    forward(75)
    left(120)
| 14.782609
| 66
| 0.608824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 11
| 0.032353
|
1a1935f678ac36846905eb87be171d453fa2af35
| 1,185
|
py
|
Python
|
pyechonest/config.py
|
gleitz/automaticdj
|
3880c175bc09c17ed9f71ba9902e348a00bb64ef
|
[
"MIT"
] | 14
|
2015-06-19T22:00:41.000Z
|
2021-03-14T07:41:38.000Z
|
pyechonest/config.py
|
gleitz/automaticdj
|
3880c175bc09c17ed9f71ba9902e348a00bb64ef
|
[
"MIT"
] | null | null | null |
pyechonest/config.py
|
gleitz/automaticdj
|
3880c175bc09c17ed9f71ba9902e348a00bb64ef
|
[
"MIT"
] | 2
|
2015-07-19T10:51:23.000Z
|
2019-04-10T14:46:23.000Z
|
#!/usr/bin/env python
# encoding: utf-8
"""
Copyright (c) 2010 The Echo Nest. All rights reserved.
Created by Tyler Williams on 2010-04-25.
Global configuration variables for accessing the Echo Nest web API.
"""
__version__ = "4.2.8"
import os
# Pick up the Echo Nest API key from the environment when it is set;
# defaults to None when unset (same behavior as the previous
# explicit membership test, but in one idiomatic lookup).
ECHO_NEST_API_KEY = os.environ.get('ECHO_NEST_API_KEY')
# --- Echo Nest service endpoint configuration -------------------------
API_HOST = 'developer.echonest.com'
API_SELECTOR = 'api'
"Locations for the Analyze API calls."
API_VERSION = 'v4'
"Version of api to use... only 4 for now"
# User agent sent with every HTTP request made by this library.
HTTP_USER_AGENT = 'PyEchonest'
"""
You may change this to be a user agent string of your
own choosing.
"""
MP3_BITRATE = 128
"""
Default bitrate for MP3 output. Conventionally an
integer divisible by 32kbits/sec.
"""
CACHE = True
"""
You may change this to False to prevent local caching
of API results.
"""
TRACE_API_CALLS = True
"""
If true, API calls will be traced to the console
"""
CALL_TIMEOUT = 10
"""
The API call timeout in seconds.
"""
CODEGEN_BINARY_OVERRIDE = None
"""
Location of your codegen binary. If not given, we will guess codegen.platform-architecture on your system path, e.g. codegen.Darwin, codegen.Linux-i386
"""
| 19.42623
| 151
| 0.729958
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 878
| 0.740928
|
1a19acc97f7f0626e13396cbd8314c6fbb0fd66e
| 18,836
|
py
|
Python
|
norbert/__init__.py
|
AppleHolic/norbert
|
cceaa24bce625bcba3146198271a20e4c265f2c8
|
[
"MIT"
] | 142
|
2019-03-19T18:36:28.000Z
|
2022-03-22T21:28:25.000Z
|
norbert/__init__.py
|
AppleHolic/norbert
|
cceaa24bce625bcba3146198271a20e4c265f2c8
|
[
"MIT"
] | 15
|
2019-03-07T15:54:31.000Z
|
2022-03-04T15:13:21.000Z
|
norbert/__init__.py
|
AppleHolic/norbert
|
cceaa24bce625bcba3146198271a20e4c265f2c8
|
[
"MIT"
] | 27
|
2018-10-28T14:13:34.000Z
|
2021-09-13T12:12:41.000Z
|
import numpy as np
import itertools
from .contrib import compress_filter, smooth, residual_model
from .contrib import reduce_interferences
def expectation_maximization(y, x, iterations=2, verbose=0, eps=None):
    r"""Expectation maximization algorithm, for refining source separation
    estimates.
    This algorithm allows to make source separation results better by
    enforcing multichannel consistency for the estimates. This usually means
    a better perceptual quality in terms of spatial artifacts.
    The implementation follows the details presented in [1]_, taking
    inspiration from the original EM algorithm proposed in [2]_ and its
    weighted refinement proposed in [3]_, [4]_.
    It works by iteratively:
    * Re-estimate source parameters (power spectral densities and spatial
    covariance matrices) through :func:`get_local_gaussian_model`.
    * Separate again the mixture with the new parameters by first computing
    the new modelled mixture covariance matrices with :func:`get_mix_model`,
    prepare the Wiener filters through :func:`wiener_gain` and apply them
    with :func:`apply_filter``.
    References
    ----------
    .. [1] S. Uhlich and M. Porcu and F. Giron and M. Enenkl and T. Kemp and
    N. Takahashi and Y. Mitsufuji, "Improving music source separation based
    on deep neural networks through data augmentation and network
    blending." 2017 IEEE International Conference on Acoustics, Speech
    and Signal Processing (ICASSP). IEEE, 2017.
    .. [2] N.Q. Duong and E. Vincent and R.Gribonval. "Under-determined
    reverberant audio source separation using a full-rank spatial
    covariance model." IEEE Transactions on Audio, Speech, and Language
    Processing 18.7 (2010): 1830-1840.
    .. [3] A. Nugraha and A. Liutkus and E. Vincent. "Multichannel audio source
    separation with deep neural networks." IEEE/ACM Transactions on Audio,
    Speech, and Language Processing 24.9 (2016): 1652-1664.
    .. [4] A. Nugraha and A. Liutkus and E. Vincent. "Multichannel music
    separation with deep neural networks." 2016 24th European Signal
    Processing Conference (EUSIPCO). IEEE, 2016.
    .. [5] A. Liutkus and R. Badeau and G. Richard "Kernel additive models for
    source separation." IEEE Transactions on Signal Processing
    62.16 (2014): 4298-4310.
    Parameters
    ----------
    y: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_sources)]
    initial estimates for the sources
    x: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
    complex STFT of the mixture signal
    iterations: int [scalar]
    number of iterations for the EM algorithm.
    verbose: boolean
    display some information if True
    eps: float or None [scalar]
    The epsilon value to use for regularization and filters.
    If None, the default will use the epsilon of np.real(x) dtype.
    Returns
    -------
    y: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_sources)]
    estimated sources after iterations
    v: np.ndarray [shape=(nb_frames, nb_bins, nb_sources)]
    estimated power spectral densities
    R: np.ndarray [shape=(nb_bins, nb_channels, nb_channels, nb_sources)]
    estimated spatial covariance matrices
    Note
    -----
    * You need an initial estimate for the sources to apply this
    algorithm. This is precisely what the :func:`wiener` function does.
    * This algorithm *is not* an implementation of the "exact" EM
    proposed in [1]_. In particular, it does compute the posterior
    covariance matrices the same (exact) way. Instead, it uses the
    simplified approximate scheme initially proposed in [5]_ and further
    refined in [3]_, [4]_, that boils down to just take the empirical
    covariance of the recent source estimates, followed by a weighted
    average for the update of the spatial covariance matrix. It has been
    empirically demonstrated that this simplified algorithm is more
    robust for music separation.
    Warning
    -------
    It is *very* important to make sure `x.dtype` is `np.complex`
    if you want double precision, because this function will **not**
    do such conversion for you from `np.complex64`, in case you want the
    smaller RAM usage on purpose.
    It is usually always better in terms of quality to have double
    precision, by e.g. calling :func:`expectation_maximization`
    with ``x.astype(np.complex)``.
    This is notably needed if you let common deep learning frameworks like
    PyTorch or TensorFlow do the STFT, because this usually happens in
    single precision.
    """
    # to avoid dividing by zero
    if eps is None:
        eps = np.finfo(np.real(x[0]).dtype).eps
    # dimensions
    (nb_frames, nb_bins, nb_channels) = x.shape
    nb_sources = y.shape[-1]
    # allocate the spatial covariance matrices and PSD
    R = np.zeros((nb_bins, nb_channels, nb_channels, nb_sources), x.dtype)
    v = np.zeros((nb_frames, nb_bins, nb_sources))
    if verbose:
        print('Number of iterations: ', iterations)
    # diagonal loading (scaled by sqrt(eps)) added to every mixture
    # covariance below, so that the matrix inversion stays well-conditioned
    regularization = np.sqrt(eps) * (
        np.tile(np.eye(nb_channels, dtype=np.complex64),
                (1, nb_bins, 1, 1)))
    for it in range(iterations):
        # constructing the mixture covariance matrix. Doing it with a loop
        # to avoid storing anytime in RAM the whole 6D tensor
        if verbose:
            print('EM, iteration %d' % (it+1))
        for j in range(nb_sources):
            # update the spectrogram model for source j
            v[..., j], R[..., j] = get_local_gaussian_model(
                y[..., j],
                eps)
        # frame-by-frame separation pass with the refreshed models
        for t in range(nb_frames):
            Cxx = get_mix_model(v[None, t, ...], R)
            Cxx += regularization
            inv_Cxx = _invert(Cxx, eps)
            # separate the sources
            for j in range(nb_sources):
                W_j = wiener_gain(v[None, t, ..., j], R[..., j], inv_Cxx)
                y[t, ..., j] = apply_filter(x[None, t, ...], W_j)[0]
    return y, v, R
def wiener(v, x, iterations=1, use_softmask=True, eps=None):
    """Wiener-based separation for multichannel audio.
    The method uses the (possibly multichannel) spectrograms `v` of the
    sources to separate the (complex) Short Term Fourier Transform `x` of the
    mix. Separation is done in a sequential way by:
    * Getting an initial estimate. This can be done in two ways: either by
    directly using the spectrograms with the mixture phase, or
    by using :func:`softmask`.
    * Refinining these initial estimates through a call to
    :func:`expectation_maximization`.
    This implementation also allows to specify the epsilon value used for
    regularization. It is based on [1]_, [2]_, [3]_, [4]_.
    References
    ----------
    .. [1] S. Uhlich and M. Porcu and F. Giron and M. Enenkl and T. Kemp and
    N. Takahashi and Y. Mitsufuji, "Improving music source separation based
    on deep neural networks through data augmentation and network
    blending." 2017 IEEE International Conference on Acoustics, Speech
    and Signal Processing (ICASSP). IEEE, 2017.
    .. [2] A. Nugraha and A. Liutkus and E. Vincent. "Multichannel audio source
    separation with deep neural networks." IEEE/ACM Transactions on Audio,
    Speech, and Language Processing 24.9 (2016): 1652-1664.
    .. [3] A. Nugraha and A. Liutkus and E. Vincent. "Multichannel music
    separation with deep neural networks." 2016 24th European Signal
    Processing Conference (EUSIPCO). IEEE, 2016.
    .. [4] A. Liutkus and R. Badeau and G. Richard "Kernel additive models for
    source separation." IEEE Transactions on Signal Processing
    62.16 (2014): 4298-4310.
    Parameters
    ----------
    v: np.ndarray [shape=(nb_frames, nb_bins, {1,nb_channels}, nb_sources)]
    spectrograms of the sources. This is a nonnegative tensor that is
    usually the output of the actual separation method of the user. The
    spectrograms may be mono, but they need to be 4-dimensional in all
    cases.
    x: np.ndarray [complex, shape=(nb_frames, nb_bins, nb_channels)]
    STFT of the mixture signal.
    iterations: int [scalar]
    number of iterations for the EM algorithm
    use_softmask: boolean
    * if `False`, then the mixture phase will directly be used with the
    spectrogram as initial estimates.
    * if `True`, a softmasking strategy will be used as described in
    :func:`softmask`.
    eps: {None, float}
    Epsilon value to use for computing the separations. This is used
    whenever division with a model energy is performed, i.e. when
    softmasking and when iterating the EM.
    It can be understood as the energy of the additional white noise
    that is taken out when separating.
    If `None`, the default value is taken as `np.finfo(np.real(x[0])).eps`.
    Returns
    -------
    y: np.ndarray
    [complex, shape=(nb_frames, nb_bins, nb_channels, nb_sources)]
    STFT of estimated sources
    Note
    ----
    * Be careful that you need *magnitude spectrogram estimates* for the
    case `softmask==False`.
    * We recommand to use `softmask=False` only if your spectrogram model is
    pretty good, e.g. when the output of a deep neural net. In the case
    it is not so great, opt for an initial softmasking strategy.
    * The epsilon value will have a huge impact on performance. If it's large,
    only the parts of the signal with a significant energy will be kept in
    the sources. This epsilon then directly controls the energy of the
    reconstruction error.
    * The input mixture `x` is not modified: scaling for numerical
    stability is done on a copy.
    Warning
    -------
    As in :func:`expectation_maximization`, we recommend converting the
    mixture `x` to double precision `np.complex` *before* calling
    :func:`wiener`.
    """
    if use_softmask:
        y = softmask(v, x, eps=eps)
    else:
        y = v * np.exp(1j*np.angle(x[..., None]))
    if not iterations:
        return y
    # we need to refine the estimates. Scales down the estimates for
    # numerical stability.
    # Fix: the original did `x /= max_abs`, silently rescaling the
    # caller's array in place; scale a copy instead so the input is
    # left untouched (the returned value is identical).
    max_abs = max(1, np.abs(x).max()/10.)
    x_scaled = x / max_abs
    y = expectation_maximization(y/max_abs, x_scaled, iterations, eps=eps)[0]
    return y*max_abs
def softmask(v, x, logit=None, eps=None):
    """Separate a mixture with per-bin ratio masks.

    For every time-frequency bin, each source's mask is its spectrogram
    value divided by the summed energy of all sources, optionally
    sharpened with a logit compression. The mask is then applied to the
    mixture STFT. Multichannel correlations are not taken into account.

    Parameters
    ----------
    v: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_sources)]
        spectrograms of the sources
    x: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        mixture signal
    logit: {None, float between 0 and 1}
        threshold for the optional logit compression: masks above it are
        pushed toward 1, masks below toward 0
    eps: {None, float}
        regularizer added to the denominator; defaults to the machine
        epsilon of x's real dtype

    Returns
    -------
    ndarray, shape=(nb_frames, nb_bins, nb_channels, nb_sources)
        estimated sources
    """
    # default regularizer: machine epsilon of the mixture's real dtype
    regularizer = np.finfo(np.real(x[0]).dtype).eps if eps is None else eps
    energy_total = np.sum(v, axis=-1, keepdims=True)
    mask = v / (regularizer + energy_total.astype(x.dtype))
    if logit is not None:
        mask = compress_filter(mask, regularizer,
                               thresh=logit, multichannel=False)
    return mask * x[..., None]
def _invert(M, eps):
"""
Invert matrices, with special fast handling of the 1x1 and 2x2 cases.
Will generate errors if the matrices are singular: user must handle this
through his own regularization schemes.
Parameters
----------
M: np.ndarray [shape=(..., nb_channels, nb_channels)]
matrices to invert: must be square along the last two dimensions
eps: [scalar]
regularization parameter to use _only in the case of matrices
bigger than 2x2
Returns
-------
invM: np.ndarray, [shape=M.shape]
inverses of M
"""
nb_channels = M.shape[-1]
if nb_channels == 1:
# scalar case
invM = 1.0/(M+eps)
elif nb_channels == 2:
# two channels case: analytical expression
det = (
M[..., 0, 0]*M[..., 1, 1] -
M[..., 0, 1]*M[..., 1, 0])
invDet = 1.0/(det)
invM = np.empty_like(M)
invM[..., 0, 0] = invDet*M[..., 1, 1]
invM[..., 1, 0] = -invDet*M[..., 1, 0]
invM[..., 0, 1] = -invDet*M[..., 0, 1]
invM[..., 1, 1] = invDet*M[..., 0, 0]
else:
# general case : no use of analytical expression (slow!)
invM = np.linalg.pinv(M, eps)
return invM
def wiener_gain(v_j, R_j, inv_Cxx):
    """Compute the multichannel Wiener gain for one source.

    The returned matrices, applied to the mixture (e.g. through
    :func:`apply_filter`), yield the posterior mean of the source under
    the local Gaussian model: G = v_j * R_j * inv_Cxx.

    Parameters
    ----------
    v_j: np.ndarray [shape=(nb_frames, nb_bins)]
        power spectral density of the target source
    R_j: np.ndarray [shape=(nb_bins, nb_channels, nb_channels)]
        spatial covariance matrix of the target source
    inv_Cxx: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
        inverse of the mixture covariance matrices

    Returns
    -------
    G: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
        Wiener filtering matrices for this source
    """
    nb_channels = R_j.shape[1]
    gain = np.zeros_like(inv_Cxx)
    # explicit matrix product R_j @ inv_Cxx, accumulated entry by entry
    for row in range(nb_channels):
        for col in range(nb_channels):
            for k in range(nb_channels):
                gain[..., row, col] += (
                    R_j[None, :, row, k] * inv_Cxx[..., k, col])
    # scale by the source PSD
    gain *= v_j[..., None, None]
    return gain
def apply_filter(x, W):
    """Apply filtering matrices to the mixture.

    This is a per-bin matrix multiplication:
    y[..., c] = sum_i W[..., c, i] * x[..., i].

    Parameters
    ----------
    x: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        STFT of the signal on which to apply the filter
    W: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
        filtering matrices, e.g. from :func:`wiener_gain`

    Returns
    -------
    y_hat: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        filtered signal
    """
    # start from a complex zero so real inputs still produce complex output
    filtered = 0 + 0j
    for channel in range(W.shape[-1]):
        filtered = filtered + W[..., channel] * x[..., channel, None]
    return filtered
def get_mix_model(v, R):
    """Model covariance of the mixture under local Gaussian models.

    The mixture covariance is simply the sum over sources of
    v[..., j] * R[..., j].

    Parameters
    ----------
    v: np.ndarray [shape=(nb_frames, nb_bins, nb_sources)]
        power spectral densities of the sources
    R: np.ndarray [shape=(nb_bins, nb_channels, nb_channels, nb_sources)]
        spatial covariance matrices of the sources

    Returns
    -------
    Cxx: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
        covariance matrix of the mixture
    """
    nb_frames, nb_bins, nb_sources = v.shape
    nb_channels = R.shape[1]
    # accumulate in R's dtype, one source at a time, to bound memory use
    mix_cov = np.zeros(
        (nb_frames, nb_bins, nb_channels, nb_channels), R.dtype)
    for source in range(nb_sources):
        mix_cov += v[..., source, None, None] * R[None, ..., source]
    return mix_cov
def _covariance(y_j):
"""
Compute the empirical covariance for a source.
Parameters
----------
y_j: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)].
complex stft of the source.
Returns
-------
Cj: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
just y_j * conj(y_j.T): empirical covariance for each TF bin.
"""
(nb_frames, nb_bins, nb_channels) = y_j.shape
Cj = np.zeros((nb_frames, nb_bins, nb_channels, nb_channels),
y_j.dtype)
for (i1, i2) in itertools.product(*(range(nb_channels),)*2):
Cj[..., i1, i2] += y_j[..., i1] * np.conj(y_j[..., i2])
return Cj
def get_local_gaussian_model(y_j, eps=1.):
    r"""Estimate the local Gaussian model of one source.

    First computes the power spectral density (channel-averaged squared
    magnitude), then a PSD-weighted average of the per-frame empirical
    covariances, yielding the spatial covariance matrix.

    Parameters
    ----------
    y_j: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        complex STFT of the source
    eps: float [scalar]
        regularization term added to the weight denominator

    Returns
    -------
    v_j: np.ndarray [shape=(nb_frames, nb_bins)]
        power spectral density of the source
    R_j: np.ndarray [shape=(nb_bins, nb_channels, nb_channels)]
        spatial covariance matrix of the source
    """
    # PSD: squared magnitude averaged over channels
    v_j = np.mean(np.abs(y_j) ** 2, axis=2)
    # accumulate covariances and weights frame by frame to limit RAM use
    spatial_cov = 0
    total_weight = eps
    for frame in range(y_j.shape[0]):
        spatial_cov = spatial_cov + _covariance(y_j[None, frame, ...])
        total_weight = total_weight + v_j[None, frame, ...]
    spatial_cov = spatial_cov / total_weight[..., None, None]
    return v_j, spatial_cov
| 36.223077
| 79
| 0.646369
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 14,777
| 0.784508
|
1a1ab30134ffb46a2768f3d6e4b82bd7fcbd06d6
| 9,170
|
py
|
Python
|
postr/twitter_postr.py
|
dbgrigsby/Postr
|
c374648134123f857babb65aff161a4c3c470502
|
[
"MIT"
] | 3
|
2018-10-09T17:02:05.000Z
|
2022-03-21T08:58:49.000Z
|
postr/twitter_postr.py
|
dbgrigsby/Postr
|
c374648134123f857babb65aff161a4c3c470502
|
[
"MIT"
] | 11
|
2018-09-26T05:33:30.000Z
|
2019-04-06T04:06:51.000Z
|
postr/twitter_postr.py
|
dbgrigsby/Postr
|
c374648134123f857babb65aff161a4c3c470502
|
[
"MIT"
] | 3
|
2018-12-20T18:35:25.000Z
|
2022-03-21T08:58:54.000Z
|
import csv
import datetime
import json
import re
import os
import time
from typing import List
import matplotlib
import matplotlib.pyplot as plt
from tweepy import OAuthHandler
from tweepy import Stream
from tweepy.api import API
from tweepy.streaming import StreamListener
from tweepy.cursor import Cursor
from textblob import TextBlob
from .api_interface import ApiInterface
from .twitter.twitter_key import TwitterKey
from .twitter.twitter_info import TwitterInfo
from .twitter.twitter_bio import TwitterBio
matplotlib.use('TkAgg')
# Precision to truncate on a datetime object, down to the minute
DATETIME_MILLISECOND_PRECISION = 23
# Precision to truncate scores when plotting twitter stream scores
SCORE_PRECISION = 5
class TwitterStreamer():
    """
    Class for streaming and processing live tweets
    """
    def __init__(self, keys: TwitterKey, graphfile: str) -> None:
        """ Holds API keys for twitter access """
        # keys: OAuth credentials; graphfile: CSV path where the listener
        # appends (tweet text, timestamp) rows for later graphing.
        self.keys = keys
        self.graphfile = graphfile
    def stream_tweets(self, hashtags: List[str], output_filename: str, auth: OAuthHandler) -> None:
        """ Finds realtime tweets given a list of hashtags to look for.
        Writes results to an output file"""
        listener = StdOutListener(output_filename, self.graphfile)
        stream = Stream(auth, listener)
        # This line filter Twitter Streams to capture data by the keywords:
        stream.filter(track=hashtags)
class StdOutListener(StreamListener):
    """ A basic listener for real time hashtags """
    def __init__(self, filename: str, graphfile: str) -> None:
        """Constructor for the realtime streaming, writes results to the filename output file"""
        # filename: raw tweet-text dump; graphfile: CSV of (text, time)
        # rows consumed later by the sentiment-graphing step.
        self.fetched_tweets_filename = filename
        self.graphfile = graphfile
        self.counter = 0
        super().__init__()
    def on_data(self, raw_data: str) -> bool:
        """Writes a tweet and all associated info that was streamed to an output file """
        try:
            # Returning False stops the stream: cap collection at 10 tweets.
            if self.counter == 10:
                return False
            print('found tweet #%d' % self.counter)
            with open(self.fetched_tweets_filename, 'a') as tf:
                j = json.loads(raw_data)
                tf.write(j['text'])
            with open(self.graphfile, 'a') as gf:
                writer = csv.writer(gf)
                writer.writerow([j['text'], datetime.datetime.now()])
            self.counter += 1
            return True
        except BaseException as e:
            # Keep the stream alive even when a single payload fails to parse.
            print('Error on data %s' % str(e))
            return True
    @staticmethod
    def on_error(status_code: int) -> None:
        """Print an error if the hashtag streaming fails for any reason.
        I can't seem to trigger this function. It probably only gets
        called if the twitter website itself is down. """
        print(status_code)
class Twitter(ApiInterface):
    """Twitter backend: posting, follower queries, and real-time
    hashtag streaming with sentiment graphing."""
    def __init__(self) -> None:
        """ Store easy access for keys """
        self.keys = TwitterKey()
        """ Store pointer for OAuth access """
        auth = OAuthHandler(self.keys.consumer_pub, self.keys.consumer_sec)
        auth.set_access_token(self.keys.access_pub, self.keys.access_sec)
        self.auth = auth
        self.api = API(auth)
        """ Store easy access for twitter info operations """
        self.info = TwitterInfo(self.api)
        self.bio = TwitterBio(self.api)
        """ Contains info for real-time graphing """
        # streamfile: raw tweet text; graphfile: (text, time) CSV;
        # blobfile: (time, sentiment score) CSV derived from graphfile.
        self.streamfile = os.path.join('postr', 'twitter', 'twitter_stream.txt')
        self.graphfile = os.path.join('postr', 'twitter', 'twitter_graphing.csv')
        self.blobfile = os.path.join('postr', 'twitter', 'twitter_blob.csv')
    def post_text(self, text: str) -> bool:
        """ Posts a tweet containing text """
        # Returns True on success, False on any API failure.
        try:
            self.api.update_status(status=text)
            return True
        except BaseException as e:
            print(e)
            return False
    # pylint: disable=no-self-use, unused-argument
    def post_video(self, url: str, text: str) -> bool:
        """ Not applicable """
        return False
    def post_photo(self, url: str, text: str) -> bool:
        """ Posts a tweet with text and a picture """
        try:
            self.api.update_with_media(filename=url, status=text)
            return True
        except BaseException as e:
            print(e)
            return False
    def get_user_followers(self, text: str) -> List[str]:
        """ Gets user followers, note: this is rate limited """
        my_followers = []
        i = 0
        # Use the cursor module for pagination
        for follower in Cursor(self.api.followers, screen_name=text).items():
            my_followers.append(follower.screen_name)
            i += 1
            # Simple rate limit for requests
            if i >= 100:
                i = 0
                time.sleep(1)
        return my_followers
    def remove_post(self, post_id: str) -> bool:
        """ Removes a tweet given its ID """
        try:
            self.api.destroy_status(post_id)
            return True
        except BaseException as e:
            print(e)
            return False
    def stream_tweets(self, hashtags: List[str], output_filename: str) -> None:
        """ Streams tweets from a hashtag and writes data into an output file """
        # Re-initializes the graph CSV header before each streaming session.
        self.setup_csv()
        twitter_streamer = TwitterStreamer(self.keys, self.graphfile)
        twitter_streamer.stream_tweets(hashtags, output_filename, self.auth)
        print('done streaming')
    def setup_csv(self) -> None:
        """ Initializes a csv file for time series graphing """
        csvData = ['Tweet', 'Time']
        with open(self.graphfile, 'w') as csvFile:
            writer = csv.writer(csvFile)
            writer.writerow(csvData)
        csvFile.close()
    # pylint: disable=no-self-use, unused-argument
    def get_user_likes(self) -> int:
        """ Not applicable, see helper methods in TwitterInfo class"""
        return -1
    def read_csv_col(self, colNum: int, filename: str) -> List[str]:
        """ Reads a specific column by index in the graph csv"""
        col = []
        with open(filename, 'r') as rf:
            reader = csv.reader(rf, delimiter=',')
            for row in reader:
                col.append(str(row[colNum]))
        return col[1::]  # Ignore the csv header
    def analyzeSentiment(self) -> None:
        """ Converts a real-time tweet content into a positivity score"""
        # Reads (text, time) rows from graphfile and writes
        # (time, score) rows to blobfile.
        with open(self.blobfile, 'w') as bf:
            writer = csv.writer(bf)
            graph_data = zip(
                self.read_csv_col(0, self.graphfile),
                self.read_csv_col(1, self.graphfile),
            )
            for pair in graph_data:
                # Strip non-alphabetic characters before scoring.
                text = str(re.sub(r'[^a-zA-Z ]+', '', pair[0]))
                score = Twitter.polarity(text)
                writer.writerow([pair[1], score])
            bf.close()
    @staticmethod
    def polarity(text: str) -> float:
        """ Returns the polarity of text. Made into a separate
        method to provide easy modification if needed in the future """
        return float(TextBlob(text).sentiment.polarity)
    def stream_and_graph(self, hashtags: List[str]) -> None:
        """ Streams tweets in real time, then graphs their sentiment """
        self.stream_tweets(hashtags, self.streamfile)
        self.analyzeSentiment()
        self.graph_blob()
    def graph_blob(self) -> None:
        """ Graphs a blob file for twitter sentiment """
        dates = self.read_csv_col(0, self.blobfile)
        # Truncate the datetime object to the minute precision
        dates = [d[:DATETIME_MILLISECOND_PRECISION] for d in dates]
        # Truncate off scores past a precision for easy viewing on the plot
        scores = list(map(lambda x: x[:SCORE_PRECISION], self.read_csv_col(1, self.blobfile)))
        plt.plot(
            dates,
            scores,
        )
        plt.ylabel('Positivity Score')
        plt.xlabel('Time')
        # beautify the x-labels
        plt.gcf().autofmt_xdate()
        plt.show()
    def update_bio(self, message: str) -> None:
        """ Sets an authenticated user's bio to a specified message """
        self.api.update_profile(description=message)
def examples() -> None:
    """ Runs through major use cases """
    # NOTE: this performs live API calls — it posts a real tweet and
    # edits the authenticated account's profile.
    t = Twitter()
    # text and picture posting
    t.post_text('sample API text')
    # t.post_photo('enter path here', 'sample API text'), put a valid path here to use
    # Get/Set info about the authenticated user
    print(t.bio.username())
    print(t.bio.bio())
    t.update_bio('sample API bio')
    t.bio.update_name('Postr Project')
    # Get info about the authenticated user's tweets
    twt = t.info.last_tweet()  # Returns a Status object. Let's use it.
    # All methods for a Status object:: https://gist.github.com/dev-techmoe/ef676cdd03ac47ac503e856282077bf2
    print(twt.text)
    print(twt.retweet_count)
    print(twt.favorite_count)
    # Let's stream some hashtags and graph them in real time
    t.stream_and_graph(['Politics', 'News', 'School'])
# Run the live demo walkthrough when executed as a script.
if __name__ == '__main__':
    examples()
| 33.589744
| 108
| 0.619738
| 7,545
| 0.822792
| 0
| 0
| 536
| 0.058451
| 0
| 0
| 2,899
| 0.31614
|
1a1ae330b5d97b072b7de9905431059440f2b93a
| 2,149
|
py
|
Python
|
Contents/Libraries/Shared/subliminal_patch/providers/legendastv.py
|
Acidburn0zzz/Sub-Zero.bundle
|
eb3a0d52fde281773ba5109fad9801ede9c938ba
|
[
"MIT"
] | 1
|
2018-02-01T18:00:59.000Z
|
2018-02-01T18:00:59.000Z
|
Contents/Libraries/Shared/subliminal_patch/providers/legendastv.py
|
Acidburn0zzz/Sub-Zero.bundle
|
eb3a0d52fde281773ba5109fad9801ede9c938ba
|
[
"MIT"
] | null | null | null |
Contents/Libraries/Shared/subliminal_patch/providers/legendastv.py
|
Acidburn0zzz/Sub-Zero.bundle
|
eb3a0d52fde281773ba5109fad9801ede9c938ba
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import logging
from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, \
LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize
logger = logging.getLogger(__name__)
class LegendasTVSubtitle(_LegendasTVSubtitle):
    """Patched LegendasTV subtitle that keeps archive metadata but drops
    the archive payload, and exposes release/page info."""
    def __init__(self, language, type, title, year, imdb_id, season, archive, name):
        super(LegendasTVSubtitle, self).__init__(language, type, title, year, imdb_id, season, archive, name)
        # NOTE(review): the archive payload is cleared here (and again
        # after download) — presumably to keep the object small and
        # picklable; confirm against the provider's caching behavior.
        self.archive.content = None
        self.release_info = archive.name
        self.page_link = archive.link
    def make_picklable(self):
        # Clear the raw archive bytes so the instance can be pickled safely.
        self.archive.content = None
        return self
    def get_matches(self, video, hearing_impaired=False):
        """Return the set of attributes on which this subtitle matches *video*."""
        matches = set()
        # episode
        if isinstance(video, Episode) and self.type == 'episode':
            # series
            if video.series and sanitize(self.title) == sanitize(video.series):
                matches.add('series')
            # year
            # NOTE(review): 'and' binds tighter than 'or', so this matches
            # when (original series AND no subtitle year) OR (years equal);
            # confirm this grouping is intended.
            if video.original_series and self.year is None or video.year and video.year == self.year:
                matches.add('year')
            # imdb_id
            if video.series_imdb_id and self.imdb_id == video.series_imdb_id:
                matches.add('series_imdb_id')
        # movie
        elif isinstance(video, Movie) and self.type == 'movie':
            # title
            if video.title and sanitize(self.title) == sanitize(video.title):
                matches.add('title')
            # year
            if video.year and self.year == video.year:
                matches.add('year')
            # imdb_id
            if video.imdb_id and self.imdb_id == video.imdb_id:
                matches.add('imdb_id')
        # name
        matches |= guess_matches(video, guessit(self.name, {'type': self.type, 'single_value': True}))
        return matches
class LegendasTVProvider(_LegendasTVProvider):
    """Patched provider that emits the patched subtitle class above."""
    subtitle_class = LegendasTVSubtitle
    def download_subtitle(self, subtitle):
        # Delegate the actual download, then drop the raw archive bytes.
        # NOTE(review): presumably the content is no longer needed once the
        # subtitle text has been extracted upstream — confirm.
        super(LegendasTVProvider, self).download_subtitle(subtitle)
        subtitle.archive.content = None
| 33.578125
| 109
| 0.635179
| 1,889
| 0.879013
| 0
| 0
| 0
| 0
| 0
| 0
| 169
| 0.078641
|
1a1b2daed4ebe5ea602e637a406c3b3e1a5fa4ac
| 339
|
py
|
Python
|
takeyourmeds/groups/groups_billing/plans.py
|
takeyourmeds/takeyourmeds-web
|
edf24188f26948902cfb69793b4d5aa3cf8b6dea
|
[
"MIT"
] | 11
|
2015-06-01T16:31:42.000Z
|
2022-03-01T01:20:58.000Z
|
takeyourmeds/groups/groups_billing/plans.py
|
takeyourmeds/takeyourmeds-web
|
edf24188f26948902cfb69793b4d5aa3cf8b6dea
|
[
"MIT"
] | 111
|
2015-07-20T13:23:16.000Z
|
2017-09-08T08:17:10.000Z
|
takeyourmeds/groups/groups_billing/plans.py
|
takeyourmeds/takeyourmeds-web
|
edf24188f26948902cfb69793b4d5aa3cf8b6dea
|
[
"MIT"
] | 6
|
2015-07-15T08:08:12.000Z
|
2018-06-23T00:13:13.000Z
|
"""
This file must be kept up-to-date with Stripe, especially the slugs:
https://manage.stripe.com/plans
"""
# Global registry of all known plans, keyed by slug; populated as a side
# effect of Plan.__init__.
PLANS = {}
class Plan(object):
    """A billing plan whose slug must mirror the plan defined in Stripe.

    Instantiating a Plan registers it in the module-level PLANS dict
    under its slug.
    """
    def __init__(self, value, slug, display):
        self.value = value  # numeric value for this plan
        self.slug = slug  # must match the Stripe plan slug
        self.display = display  # human-readable plan name
        PLANS[slug] = self
FREE = Plan(1, 'free', "Free plan")
| 18.833333
| 68
| 0.613569
| 176
| 0.519174
| 0
| 0
| 0
| 0
| 0
| 0
| 128
| 0.377581
|
1a1bc945602cc44132190cba60d0afbf196c8e4e
| 34,864
|
py
|
Python
|
footprint/socialnetwork/views.py
|
hairleng/Footprint
|
3c5ab2743584bcdf19161972f4a7e7581ba9d1ee
|
[
"MIT"
] | null | null | null |
footprint/socialnetwork/views.py
|
hairleng/Footprint
|
3c5ab2743584bcdf19161972f4a7e7581ba9d1ee
|
[
"MIT"
] | null | null | null |
footprint/socialnetwork/views.py
|
hairleng/Footprint
|
3c5ab2743584bcdf19161972f4a7e7581ba9d1ee
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect, get_object_or_404
import json
from django.http import HttpResponse, Http404
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from django.utils import timezone
# Create your views here.
from socialnetwork.forms import *
from socialnetwork.models import *
from socialnetwork.forms import ProfileForm, UpdateProfileForm
from socialnetwork.models import Profile
from allauth.account.views import SignupView, LoginView
from .models import User
import requests
from notifications.signals import notify
from notifications.models import Notification
import datetime
class MySignupView(SignupView):
    # allauth signup view with a project-specific template.
    # NOTE(review): this uses 'login.html' while MyLoginView uses
    # 'register.html' -- the two look swapped; confirm this is intentional.
    template_name = 'templates/login.html'
class MyLoginView(LoginView):
    # allauth login view with a project-specific template.
    # NOTE(review): this uses 'register.html' while MySignupView uses
    # 'login.html' -- the two look swapped; confirm this is intentional.
    template_name = 'templates/register.html'
@login_required
def user_profile(request):
    """Render the logged-in user's own editable profile page."""
    # Ensure a Profile row exists before looking it up (new accounts may not
    # have one yet).
    Profile.objects.get_or_create(user=request.user)
    profile = get_object_or_404(Profile, user=request.user)
    context = {
        'form': ProfileForm(),
        'userform': UpdateProfileForm(),
        'p': profile,
        'following': profile.following.all(),
    }
    return render(request, 'user_profile.html', context)
def get_following_list(user_profile):
    """Return the usernames of everyone *user_profile* follows.

    This is a plain helper, not a view: it receives a ``Profile`` instance,
    not an ``HttpRequest``.  The previous ``@login_required`` decorator was
    removed because it would have treated ``user_profile`` as the request
    object and broken any call to this function.
    """
    return [following.user.username for following in user_profile.following.all()]
@login_required
def get_photo(request, id):
    """Serve a profile picture by Profile primary key."""
    profile = get_object_or_404(Profile, id=id)
    # Form validation normally guarantees a picture, but guard against rows
    # whose file reference is empty.
    if not profile.picture:
        raise Http404
    return HttpResponse(profile.picture, content_type=profile.content_type)
@login_required
def get_profile(request, username):
    """Render a profile page for *username*.

    Shows the editable own-profile page when the requested user is the
    requester, otherwise a read-only page that includes follow status.
    """
    context = {}
    # Make sure a profile exists for the requesting (possibly new) user.
    Profile.objects.get_or_create(user=request.user)
    request_user = get_object_or_404(Profile, user=request.user)

    try:
        profile_by_give_username = get_object_or_404(
            Profile, user__username=username)
    except Http404:
        # Narrowed from a bare ``except``: get_object_or_404 raises Http404;
        # swallowing every exception here would hide real errors.
        context["message"] = "User does not exist."
        return render(request, 'error.html', context)

    context['p'] = profile_by_give_username
    logs_of_profile = Log.objects.filter(
        user_id=profile_by_give_username.user.id)
    following_list = profile_by_give_username.following.all()
    follower_list = profile_by_give_username.follower.all()

    context['following'] = following_list
    context['followers'] = follower_list
    context['bookmarked_logs'] = profile_by_give_username.bookmarked_logs.all()
    context['logs_created_by_user'] = logs_of_profile
    context['num_followers'] = len(follower_list)
    context['num_followings'] = len(following_list)
    context['num_logs'] = len(logs_of_profile)

    if (request.user.username == profile_by_give_username.user.username
            and request.user.email == profile_by_give_username.user.email):
        return render(request, 'user_profile.html', context)

    # Someone else's profile: expose whether the requester already follows them.
    context['following_status'] = (
        profile_by_give_username in request_user.following.all())
    return render(request, 'other_profile.html', context)
@login_required
def home(request):
    """Render the map home page with the user's own log pins and everyone
    else's publicly visible pins, serialized as JSON."""

    def _pin(log):
        # [lat, lng, placeID, picture path, log id] -- the shape the template expects.
        return [log.location.lat, log.location.lng,
                log.location.placeID, str(log.picture), log.id]

    own = [_pin(log) for log in Log.objects.filter(user_id=request.user.id)]
    others = [_pin(log)
              for log in Log.objects.exclude(user_id=request.user.id)
              if log.visibility]

    context = {
        "self_geoinfo": json.dumps(own),
        "other_geoinfo": json.dumps(others),
    }
    return render(request, 'home.html', context)
@login_required
def filter_date(request):
    """Return logs created within the POSTed [start_date, end_date] range
    (inclusive of the whole end date) as serialized JSON.

    Dates arrive as '%Y-%m-%d' strings; malformed dates render an error page.
    """
    if request.method != 'POST':
        return render(request, 'error.html',
                      {'message': "You must use a POST request for this operation"})
    if 'start_date' not in request.POST or 'end_date' not in request.POST:
        return render(request, 'error.html',
                      {'message': "Some critical data is missing! Please try again."})
    try:
        start_date = datetime.datetime.strptime(
            request.POST['start_date'], '%Y-%m-%d').date()
        end_date = datetime.datetime.strptime(request.POST['end_date'], '%Y-%m-%d')
        # Push the end bound one day forward so the whole end date is included.
        end_date = (end_date + datetime.timedelta(days=1)).date()
        if start_date > end_date:
            return render(request, 'error.html',
                          {'message': "Start date must be earlier than end date"})
        filter_logs = Log.objects.filter(
            creation_time__range=(start_date, end_date))
    except ValueError:
        # Malformed date strings (the unused ``as ve`` binding was dropped).
        return render(request, 'error.html', {'message': "ValueError"})
    return HttpResponse(serialize_log(filter_logs, request),
                        content_type='application/json')
@login_required
def filtered_stream(request):
    """Render the date-filtered stream page (log data is fetched via AJAX)."""
    return render(request, 'filtered_stream.html', {})
@login_required
def get_one_log(request, log_id):
    """Serialize a single log as JSON (404 when it does not exist)."""
    log = get_object_or_404(Log, id=log_id)
    return HttpResponse(serialize_log([log], request),
                        content_type='application/json')
@login_required
def get_user_logs(request, user_id):
    """Serialize every log created by *user_id* as JSON."""
    logs_by_user = Log.objects.filter(user__id=user_id)
    return HttpResponse(serialize_log(logs_by_user, request),
                        content_type='application/json')
@login_required
def get_logs(request):
    """Serialize every log in the system as JSON for the stream pages."""
    return HttpResponse(serialize_log(Log.objects.all(), request),
                        content_type='application/json')
@login_required
def get_bookmark_logs(request):
    """Serialize the requesting user's bookmarked logs as JSON."""
    profile = get_object_or_404(Profile, user=request.user)
    bookmarks = profile.bookmarked_logs.all()
    return HttpResponse(serialize_log(bookmarks, request),
                        content_type='application/json')
def serialize_log(logs, request):
    """Serialize *logs* into the JSON structure the stream pages consume.

    Each entry carries the creator's info, follow/bookmark/like state relative
    to the requesting user, and the log's comments.
    """
    request_user_profile = get_object_or_404(Profile, user=request.user)
    following_list = request_user_profile.following.all()
    bookmark_list = request_user_profile.bookmarked_logs.all()

    all_logs = []
    for log in logs:
        log_creator = log.user
        # Creators may predate the Profile model; create one on the fly.
        creator_profile, _ = Profile.objects.get_or_create(user=log.user)

        is_self = creator_profile == request_user_profile
        follow_status = creator_profile in following_list
        bookmarked = log in bookmark_list
        liked = request_user_profile in log.liked_users.all()
        num_likes = len(log.liked_users.all())

        # Query only this log's comments.  The previous version iterated
        # Comment.objects.all() for every log, i.e. O(logs x all comments).
        comments = []
        for comment_item in Comment.objects.filter(of_log=log):
            commentor_profile = get_object_or_404(Profile, user=comment_item.created_by)
            comments.append({
                'comment_id': comment_item.id,
                'text': comment_item.comment_content,
                'date': comment_item.created_at.isoformat(),
                'comment_profile_pic': str(commentor_profile.picture),
                'username': comment_item.created_by.username,
                'user_fn': comment_item.created_by.first_name,
                'user_ln': comment_item.created_by.last_name,
            })

        all_logs.append({
            'user_id': log_creator.id,
            'already_followed': follow_status,
            'log_id': log.id,
            'username': log_creator.username,
            'profile_pic': str(creator_profile.picture),
            'log_title': log.log_title,
            'log_text': log.log_text,
            'log_location': log.location.location_name,
            'date': log.creation_time.isoformat(),
            'log_pic': str(log.picture),
            'bookmark_status': bookmarked,
            'num_likes': num_likes,
            'already_liked': liked,
            'comments': comments,
            'is_self': is_self,
            'visibility': log.visibility,
        })
    return json.dumps(all_logs)
@login_required
def add_profile(request):
    """Update the requesting user's account fields and profile.

    Two forms are processed in sequence: UpdateProfileForm (first/last name
    and username on the User) and ProfileForm (bio and picture on the
    Profile).  When a form fails validation, a best-effort fallback applies
    whichever raw POST fields are present instead of rejecting the request.
    """
    context = {}
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    user_form = UpdateProfileForm(request.POST)
    if not user_form.is_valid():
        # Fallback path: apply only the fields that were actually posted.
        if 'first_name' in request.POST and request.POST['first_name']:
            request.user.first_name = request.POST['first_name']
        if 'last_name' in request.POST and request.POST['last_name']:
            request.user.last_name = request.POST['last_name']
        if 'username' in request.POST and request.POST['username']:
            # Reject a username already taken by a different user.
            num_users_with_username = User.objects.filter(username=request.POST['username']).count()
            if num_users_with_username > 0 and request.POST['username'] != request.user.username:
                context['message'] = 'Username already exists.'
                return render(request, 'error.html', context)
            request.user.username = request.POST['username']
        request.user.save()
    else:
        # Valid form: all three account fields are required to be present.
        request.user.first_name = request.POST['first_name']
        request.user.last_name = request.POST['last_name']
        num_users_with_username = User.objects.filter(username=request.POST['username']).count()
        if num_users_with_username > 0 and request.POST['username'] != request.user.username:
            context['message'] = 'Username already exists.'
            return render(request, 'error.html', context)
        request.user.username = request.POST['username']
        request.user.save()
    new_item = Profile.objects.get(user=request.user)
    form = ProfileForm(request.POST, request.FILES, instance=new_item)
    if not form.is_valid():  # check the two profile fields (bio, picture)
        # context['form'] = form
        # Fallback path for the profile form, mirroring the account fallback.
        if 'bio' in request.POST and request.POST['bio']:
            new_item.bio = request.POST['bio']
        if 'picture' in form.cleaned_data:
            new_item.picture = form.cleaned_data['picture']
            new_item.content_type = form.cleaned_data['picture'].content_type
        # else:
        #     context["message"] = "Image setting failed. You must upload an image."
        #     return render(request, 'error.html', context)
        new_item.save()
        context['p'] = new_item
        return get_profile(request, request.user.username)
    else:
        # Must copy content_type into a new model field because the model
        # FileField will not store this in the database. (The uploaded file
        # is actually a different object than what's return from a DB read.)
        new_item.pic = form.cleaned_data['picture']
        new_item.bio = form.cleaned_data['bio']
        new_item.content_type = form.cleaned_data['picture'].content_type
        new_item.save()
        context['message'] = 'Item #{0} saved.'.format(new_item.id)
        context['p'] = new_item
        return get_profile(request, request.user.username)
@login_required
def follow(request, id):
    """Follow the profile with primary key *id*, then re-render their page."""
    context = {}
    try:
        other_user = get_object_or_404(Profile, id=id)
    except Http404:
        # Narrowed from a bare ``except``: get_object_or_404 raises Http404.
        context["message"] = "The user profile you are trying to follow doesn't exist."
        return render(request, 'error.html', context)

    current_user = request.user
    # Keep both sides of the following/follower relation in sync.
    current_user.profile.following.add(other_user)
    current_user.save()
    other_user.follower.add(current_user.profile)
    other_user.save()

    context['following_status'] = True
    context['p'] = other_user
    return get_profile(request, other_user.user.username)
@login_required
def ajax_follow(request):
    """AJAX endpoint: follow the profile given by POST 'user_id', then
    return the refreshed log stream as JSON."""
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not request.POST.get('user_id'):
        return render(request, 'error.html', {'message': "The user you are trying to follow should not have empty ID."})

    user_id = request.POST['user_id']
    if user_id.isnumeric():
        target = get_object_or_404(Profile, user_id=user_id)
        me = get_object_or_404(Profile, user=request.user)
        # Users cannot follow themselves, and re-following is a no-op.
        if me != target and target not in me.following.all():
            me.following.add(target)
            me.save()
            target.follower.add(me)
            target.save()
    return get_logs(request)
@login_required
def ajax_unfollow(request):
    """AJAX endpoint: unfollow the profile given by POST 'user_id', then
    return the refreshed log stream as JSON."""
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not request.POST.get('user_id'):
        return render(request, 'error.html', {'message': "The user you are trying to follow should not have empty ID."})

    user_id = request.POST['user_id']
    if user_id.isnumeric():
        target = get_object_or_404(Profile, user_id=user_id)
        me = get_object_or_404(Profile, user=request.user)
        # Only unfollow someone we actually follow (and never ourselves).
        if me != target and target in me.following.all():
            me.following.remove(target)
            me.save()
            target.follower.remove(me)
            target.save()
    return get_logs(request)
@login_required
def unfollow(request, id):
    """Unfollow the profile with primary key *id*, then re-render their page."""
    context = {}
    try:
        other_user = get_object_or_404(Profile, id=id)
    except Http404:
        # Narrowed from a bare ``except``: get_object_or_404 raises Http404.
        context["message"] = "The user profile you are trying to unfollow doesn't exist."
        return render(request, 'error.html', context)

    current_user = request.user
    # Keep both sides of the following/follower relation in sync.
    current_user.profile.following.remove(other_user)
    current_user.save()
    other_user.follower.remove(current_user.profile)
    other_user.save()

    context['following_status'] = False
    context['p'] = other_user
    return get_profile(request, other_user.user.username)
@login_required
def add_comment(request):
    """Attach a comment to a log, notify the log's owner, and return the
    refreshed stream JSON."""
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not 'comment_text' in request.POST or not request.POST['comment_text']:
        return render(request, 'error.html', {'message': "You comment should not be empty."})
    if not 'log_id' in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "Comment needs to be made on a log."})
    logid = request.POST['log_id']
    if logid.isnumeric():
        belong_to_log = Log.objects.get(id=logid)
        new_comment = Comment(comment_content=request.POST['comment_text'],
                              created_by=request.user,
                              created_at=timezone.now(),
                              of_log=belong_to_log)
        new_comment.save()
        notify.send(sender=request.user, recipient=belong_to_log.user,
                    verb='your log: <i>{}</i> has a new reply from <strong>{}</strong>: "{}"'.format(
                        belong_to_log.log_title,
                        new_comment.created_by.username,
                        new_comment.comment_content),
                    description="Comment",
                    target=belong_to_log)
        return get_logs(request)
    else:
        # Previously only the literal 'xxxx' was handled here, so any other
        # non-numeric id made the view return None (a 500).  Treat every
        # non-numeric id as a tampered form.
        return render(request, 'error.html', {'message': "Please dont' make changes to comment field name"})
@login_required
def log_editor(request):
    """Open the log editor for a POSTed map coordinate.

    Validates the 'latLng' JSON payload, resolves (or creates) the matching
    Location row, and renders an empty editor together with stats about the
    location (how many logs and distinct authors exist there).
    """
    context = {}
    if request.method == 'POST':
        if 'latLng' not in request.POST or not request.POST['latLng']:
            context['message'] = "Some critical data is missing! Please try again."
            return render(request, 'error.html', context)
        try:
            latLng = json.loads(request.POST['latLng'])
        except:
            context['message'] = "Some critical data is missing! Please try again."
            return render(request, 'error.html', context)
        if 'lat' not in latLng or 'lng' not in latLng:
            context['message'] = "Some critical data is missing! Please try again."
            return render(request, 'error.html', context)
        try:
            # Both coordinates must be parseable as floats.
            float(latLng['lat'])
            float(latLng['lng'])
        except ValueError:
            context['message'] = "Some critical data is wrong! Please try again."
            return render(request, 'error.html', context)
        try:
            location = Location.objects.get(lat=float(latLng['lat']), lng=float(latLng['lng']))
            context['location_name'] = location.location_name
            context['placeID'] = location.placeID
        except Location.DoesNotExist:
            # First log at this coordinate: create the Location row, preferring
            # the name/placeID the client supplied, else reverse-geocoding.
            if 'location_name' in request.POST and 'placeID' in request.POST:
                context['location_name'] = request.POST['location_name']
                context['placeID'] = request.POST['placeID']
            else:
                context['location_name'] = getLocationNameFromLatLng(latLng)
                # Synthesized placeID: concatenation of the raw coordinates.
                context['placeID'] = str(latLng['lat']) + str(latLng['lng'])
            location = Location(
                placeID=context['placeID'],
                location_name=context['location_name'],
                lat=float(latLng['lat']),
                lng=float(latLng['lng']))
            location.save()
        # Editor defaults for a brand-new log (log_id 0 means "not saved yet").
        context['log_id'] = 0
        context['log_title'] = ''
        context['log_text'] = ''
        context['visibility'] = True
        # Stats for this location: total logs and distinct authors.
        logs = Log.objects.filter(location=location)
        context['log_num'] = len(logs)
        user_set = set()
        for log in logs:
            user_set.add(log.user)
        context['user_num'] = len(user_set)
        return render(request, 'log_editor.html', context)
    else:
        context['message'] = "The page you try to visit only accepts POST request."
        return render(request, 'error.html', context)
@login_required
def edit_log(request, log_id):
    """Render the log editor pre-filled with an existing log's data.

    Only the log's author may edit it.
    """
    context = {}
    log = get_object_or_404(Log, id=log_id)
    if log.user != request.user:
        context['message'] = "You cannot edit other user's log."
        return render(request, 'error.html', context)

    context['log_id'] = log.id
    context['log_title'] = log.log_title
    context['log_text'] = log.log_text
    context['visibility'] = log.visibility
    context['placeID'] = log.location.placeID
    context['location_name'] = log.location.location_name

    # Stats for the location sidebar.  The loop variable no longer shadows
    # ``log`` (the entry being edited), which the previous version clobbered.
    logs_here = Log.objects.filter(location=log.location)
    context['log_num'] = len(logs_here)
    context['user_num'] = len({entry.user for entry in logs_here})
    return render(request, 'log_editor.html', context)
@login_required
def add_log(request, log_id):
    """Create a new log (when *log_id* matches nothing) or update an existing
    one.

    POST-only.  On validation failure the editor is re-rendered with the
    user's input and per-field error messages; on success the user is
    redirected to the home map.
    """
    context = {}
    if request.method == 'POST':
        form = EditorForm(request.POST, request.FILES)
        # The target location must already exist (log_editor creates it).
        try:
            location = Location.objects.get(placeID=request.POST['placeID'])
        except Location.DoesNotExist:
            location = None
        if not location:
            context['message'] = 'Location not found.'
            return render(request, 'error.html', context)
        # Only the author may edit an existing log.
        try:
            log = Log.objects.get(id=log_id)
            if log.user.id != request.user.id:
                context['message'] = "You cannot edit other user's log."
                return render(request, 'error.html', context)
        except Log.DoesNotExist:
            log = None
        if not form.is_valid():
            # Build human-readable messages mirroring the form's constraints.
            error_messages = []
            if 'log_title' in request.POST and len(request.POST['log_title']) > 200:
                error_messages.append("Log title exceeds max length (200).")
            if 'log_text' in request.POST and len(request.POST['log_text']) > 20000000:
                # NOTE(review): limit is 20000000 but the message says 20000 --
                # one of the two looks wrong; confirm the intended maximum.
                error_messages.append("Log text exceeds max length (20000).")
            if 'picture' not in form.cleaned_data and 'picture' in request.FILES:
                if not hasattr(request.FILES['picture'], 'content_type'):
                    error_messages.append('You must upload a picture.')
                elif not request.FILES['picture'].content_type or not request.FILES['picture'].content_type.startswith(
                        'image'):
                    error_messages.append('File type is not image.')
                elif request.FILES['picture'].size > 2500000:
                    error_messages.append('Cover image exceeds max size (2500000).')
            # Re-populate the editor with whatever the user submitted.
            context['log_id'] = log_id
            if 'log_title' in request.POST:
                context['log_title'] = request.POST['log_title']
            else:
                context['log_title'] = ''
            if 'log_text' in request.POST:
                context['log_text'] = request.POST['log_text']
            else:
                context['log_text'] = ''
            # A posted 'visibility' field flips the stored flag to False.
            if 'visibility' in request.POST:
                context['visibility'] = False
            else:
                context['visibility'] = True
            context['placeID'] = form.cleaned_data['placeID']
            context['location_name'] = location.location_name
            context['error_messages'] = error_messages
            return render(request, 'log_editor.html', context)
        try:
            # Update path: a log with this id exists and belongs to the user.
            log = Log.objects.get(id=log_id)
            log.log_title = form.cleaned_data['log_title']
            log.log_text = form.cleaned_data['log_text']
            if form.cleaned_data['picture']:
                log.picture = form.cleaned_data['picture']
                log.content_type = form.cleaned_data['picture'].content_type
            if 'visibility' in request.POST:
                log.visibility = False
            else:
                log.visibility = True
            log.save()
        except Log.DoesNotExist:
            # Create path: no log with this id yet.
            new_log = Log(log_title=form.cleaned_data['log_title'],
                          log_text=form.cleaned_data['log_text'],
                          user=request.user,
                          location=location)
            if form.cleaned_data['picture']:
                new_log.picture = form.cleaned_data['picture']
                new_log.content_type = form.cleaned_data['picture'].content_type
            if 'visibility' in request.POST:
                new_log.visibility = False
            else:
                new_log.visibility = True
            new_log.save()
        return redirect(reverse('home'))
    else:
        context['message'] = "The page you try to visit only accepts POST request."
        return render(request, 'error.html', context)
@login_required
def get_picture(request, log_id):
    """Serve a log's cover image by log id."""
    entry = get_object_or_404(Log, id=log_id)
    # Form validation normally requires a picture, but the file may have been
    # deleted out-of-band, leaving a dangling DB reference.
    if not entry.picture:
        raise Http404
    return HttpResponse(entry.picture, content_type=entry.content_type)
@login_required
def log_display(request):
    """Render the list of logs pinned at a POSTed lat/lng coordinate.

    Shows every public log at that location plus the requester's own private
    ones.
    """
    if request.method != 'POST':
        context = {}
        context['message'] = "The page you try to visit only accepts POST request."
        return render(request, 'error.html', context)

    context = {}
    if 'latLng' not in request.POST or not request.POST['latLng']:
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    try:
        latLng = json.loads(request.POST['latLng'])
    except ValueError:
        # Malformed JSON payload (narrowed from a bare ``except``).
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    if 'lat' not in latLng or 'lng' not in latLng:
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    try:
        float(latLng['lat'])
        float(latLng['lng'])
    except ValueError:
        context['message'] = "Some critical data is wrong! Please try again."
        return render(request, 'error.html', context)

    # (The previous version re-parsed request.POST['latLng'] here; the value
    # is already bound to ``latLng``.)
    location = Location.objects.filter(
        lat=float(latLng['lat']), lng=float(latLng['lng']))[0]
    logs_to_display = list(Log.objects.filter(location=location, visibility=True))
    logs_to_display.extend(
        Log.objects.filter(location=location, user=request.user, visibility=False))
    context['logs'] = logs_to_display

    # Stats for the header: total logs and distinct authors at this spot.
    logs = Log.objects.filter(location=location)
    context['log_num'] = len(logs)
    context['user_num'] = len({log.user for log in logs})
    return render(request, 'log_display.html', context)
def getLocationNameFromLatLng(latLng):
    """Reverse-geocode a ``{'lat': ..., 'lng': ...}`` dict via the Google
    Geocoding API and return the formatted address of the nearest match.

    Response format reference:
    https://maps.googleapis.com/maps/api/geocode/json?latlng=40.714224,-73.961452
    """
    URL = "https://maps.googleapis.com/maps/api/geocode/json"
    # SECURITY: this API key is hard-coded in source (and therefore committed
    # to version control).  It should be revoked and loaded from settings or
    # an environment variable instead.
    PARAMS = {'latlng': "{},{}".format(latLng['lat'], latLng['lng']),
              'key': 'AIzaSyBAzuMuqCtP0j8Yd7hJ6CG5jdei-Y4Pdlw'
              }
    # Send the GET request and decode the JSON payload.
    r = requests.get(url=URL, params=PARAMS)
    data = r.json()
    # The first result is the nearest match.  (The unused extraction of its
    # lat/lng that used to be here was removed.)
    return data['results'][0]['formatted_address']
@login_required
def travel_stream(request):
    """Render the main travel stream page."""
    # Make sure the user profile exists before accessing the stream page.
    request_user = Profile.objects.get_or_create(user=request.user)
    return render(request, 'travel_stream.html', {})
@login_required
def bookmark_stream(request):
    """Render the bookmarked-logs stream page."""
    # Make sure the user profile exists before accessing the stream page.
    request_user = Profile.objects.get_or_create(user=request.user)
    return render(request, 'bookmark_stream.html', {})
@login_required
def show_all_user_stream(request, user_id):
    """Render the stream page scoped to a single user's logs."""
    # Make sure the user profile exists before accessing the stream page.
    request_user = Profile.objects.get_or_create(user=request.user)
    return render(request, 'user_stream.html', {'user_id': user_id})
@login_required
def one_log(request, log_id):
    """Render the single-log page (the log itself is fetched via AJAX)."""
    try:
        # Existence check only; the template loads the log data itself.
        get_object_or_404(Log, id=log_id)
    except Http404:
        # Narrowed from a bare ``except``: get_object_or_404 raises Http404.
        context = {}
        context["message"] = "The log you are trying to display doesn't exist"
        return render(request, 'error.html', context)
    return render(request, 'one_log.html', {'log_id': log_id})
@login_required
def my_notifications(request):
    """Render the user's notifications page."""
    context = {}
    return render(request, 'my_notifications.html', context)
# Add this log to the user's bookmarked collection.
@login_required
def add_bookmark(request):
    """Bookmark a log for the requesting user and return the refreshed
    stream JSON."""
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not 'log_id' in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "The log you are trying to bookmark shall not be empty."})
    logid = request.POST['log_id']
    if logid.isnumeric():
        log_trying_to_bookmark = Log.objects.get(id=logid)
        request_user = get_object_or_404(Profile, user=request.user)
        if log_trying_to_bookmark in request_user.bookmarked_logs.all():
            return render(request, 'error.html', {'message': "Log is already bookmarked, please check your collection"})
        request_user.bookmarked_logs.add(log_trying_to_bookmark)
        request_user.save()
        return get_logs(request)
    else:
        # Previously only the literal 'xxxx' was handled here, so any other
        # non-numeric id made the view return None (a 500).
        return render(request, 'error.html', {'message': "Please dont' make changes to comment field name"})
# Remove this log from the user's bookmarked collection.
@login_required
def remove_bookmark(request):
    """Drop a log from the requesting user's bookmarks and return the
    refreshed stream JSON."""
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not request.POST.get('log_id'):
        return render(request, 'error.html', {'message': "The log you are trying to bookmark shall not be empty."})
    logid = request.POST['log_id']
    if not logid.isnumeric():
        return render(request, 'error.html', {'message':
                                              "Please dont' make changes to comment field name"})
    target = Log.objects.get(id=logid)
    me = get_object_or_404(Profile, user=request.user)
    if target not in me.bookmarked_logs.all():
        return render(request, 'error.html', {'message': "You can not remove a collection that is not bookmarked."})
    me.bookmarked_logs.remove(target)
    me.save()
    return get_logs(request)
# Like this log: add the requesting user to its liked_users.
@login_required
def like_log(request):
    """Like a log, notify its owner, and return the refreshed stream JSON."""
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not 'log_id' in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "The log you are trying to like shall not be empty."})
    logid = request.POST['log_id']
    if logid.isnumeric():
        log_trying_to_like = Log.objects.get(id=logid)
        request_user = get_object_or_404(Profile, user=request.user)
        logs_liked_users = log_trying_to_like.liked_users
        if request_user in logs_liked_users.all():
            return render(request, 'error.html', {'message': "You already liked this Log"})
        logs_liked_users.add(request_user)
        log_trying_to_like.save()
        notify.send(sender=request.user, recipient=log_trying_to_like.user,
                    verb='Wow! Your log: <i>{}</i> is liked by <strong>{}</strong>.'.format(
                        log_trying_to_like.log_title,
                        request.user.username),
                    description="Like",
                    target=log_trying_to_like)
        return get_logs(request)
    else:
        # Previously only the literal 'xxxx' was handled here, so any other
        # non-numeric id made the view return None (a 500).
        return render(request, 'error.html', {'message':
                                              "Please dont' make changes to comment field name"})
# Unlike this log: remove the requesting user from its liked_users.
@login_required
def unlike_log(request):
    """Remove the requesting user's like from a log and return the refreshed
    stream JSON."""
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not 'log_id' in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "The log you are trying to like shall not be empty."})
    logid = request.POST['log_id']
    if logid.isnumeric():
        log_trying_to_unlike = Log.objects.get(id=logid)
        request_user = get_object_or_404(Profile, user=request.user)
        if request_user not in log_trying_to_unlike.liked_users.all():
            return render(request, 'error.html', {'message':
                                                  "You can't unlike this Log since it's not liked."})
        log_trying_to_unlike.liked_users.remove(request_user)
        log_trying_to_unlike.save()
        return get_logs(request)
    else:
        # Previously only the literal 'xxxx' was handled here, so any other
        # non-numeric id made the view return None (a 500).
        return render(request, 'error.html', {'message':
                                              "Please dont' make changes to comment field name"})
@login_required
def mark_as_read_action(request):
    """Mark a notification as read, then redirect to the related log page."""
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not request.POST.get('notification_id') or not request.POST.get('log_id'):
        return render(request, 'error.html', {'message': "Some critical data is missing. Please try again!"})
    notification_id = request.POST['notification_id']
    try:
        log_id = int(request.POST['log_id'])
    except ValueError:
        return render(request, 'error.html', {'message': "Some critical data is wrong. Please try again!"})
    notification = get_object_or_404(Notification, id=notification_id)
    # Marking an already-read notification again is a no-op.
    if notification.unread:
        notification.unread = False
        notification.save()
    return redirect(reverse('one_log', kwargs={'log_id': log_id}))
@login_required
def about(request):
    """Render the static About page."""
    return render(request, 'about.html', {})
| 40.776608
| 184
| 0.645336
| 149
| 0.004273
| 0
| 0
| 30,146
| 0.864476
| 0
| 0
| 8,810
| 0.252638
|
1a1cb891a28d6f1130bc984167bf2fda46be3fe3
| 24,095
|
py
|
Python
|
Application/datasources/datapod_backup/utils.py
|
GraphicalDot/datapod-backend-layer
|
ab38a5b0e969cd0d762e9d7720ab89174c333c37
|
[
"Apache-2.0"
] | null | null | null |
Application/datasources/datapod_backup/utils.py
|
GraphicalDot/datapod-backend-layer
|
ab38a5b0e969cd0d762e9d7720ab89174c333c37
|
[
"Apache-2.0"
] | null | null | null |
Application/datasources/datapod_backup/utils.py
|
GraphicalDot/datapod-backend-layer
|
ab38a5b0e969cd0d762e9d7720ab89174c333c37
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABC,abstractmethod
import os
import binascii
import subprocess
import time
import datetime
import platform
import tempfile
import requests
import json
import aiohttp
from sanic import response
from asyncinit import asyncinit
from errors_module.errors import MnemonicRequiredError
from errors_module.errors import APIBadRequest, PathDoesntExists
from loguru import logger
from .variables import DATASOURCE_NAME
from .db_calls import get_credentials, update_percentage
import subprocess
import shutil
import humanize
import aiomisc
##imported from another major module
from ..datapod_users.variables import DATASOURCE_NAME as USER_DATASOURCE_NAME
import boto3
from Crypto.Cipher import AES # pycryptodome
from Crypto import Random
import struct
def get_size(bucket, path):
    """Return the total size in bytes of every S3 object under *path*
    (prefix match) in *bucket*."""
    objects = boto3.resource('s3').Bucket(bucket).objects.filter(Prefix=path)
    return sum(obj.size for obj in objects)
class cd:
    """Context manager for changing the current working directory.

    The previous directory is restored on exit, even if the body raises.
    """
    def __init__(self, newPath):
        # Expand ``~`` so callers can pass user-relative paths.
        self.newPath = os.path.expanduser(newPath)

    def __enter__(self):
        self.savedPath = os.getcwd()
        os.chdir(self.newPath)
        return self  # allow ``with cd(p) as ctx`` (previously returned None)

    def __exit__(self, etype, value, traceback):
        os.chdir(self.savedPath)
def dir_size(dirpath):
    """Return the human-readable disk usage of *dirpath* as reported by
    ``du -sh`` (e.g. ``'4.0K'``)."""
    output = subprocess.check_output(['du', '-sh', dirpath])
    return output.split()[0].decode('utf-8')
class Backup(object):
    def __init__(self, config, full_backup):
        """Prepare a backup run.

        config: application configuration; exposes the *_PATH attributes read
            below (it is also used dict-style elsewhere in this class).
        full_backup: flag requesting a full rather than incremental backup.
        """
        self.config = config
        self.full_backup = full_backup
        self.userdata_path = self.config.USERDATA_PATH
        ## file which keeps track of the data that was backed up last time
        self.user_index_dir = self.config.USER_INDEX_DIR
        ## subdirectories of the userdata path holding the raw, parsed and
        ## database copies of the user's data
        self.parsed_data_path = self.config.PARSED_DATA_PATH
        #self.raw_data_path = os.path.join(self.config.RAW_DATA_PATH, "facebook")
        self.raw_data_path = self.config.RAW_DATA_PATH
        self.db_path = self.config.DB_PATH
        self.backup_path = self.config.BACKUP_PATH
        # Progress indicator (1-100) reported over SSE during the run.
        self.percentage = 1
    async def send_sse_message(self, message):
        """Push a progress update over SSE and persist the current percentage.

        NOTE(review): ``self.config`` is read attribute-style in __init__ but
        subscripted here -- the config object apparently supports both access
        styles; confirm.
        """
        res = {"message": message, "percentage": self.percentage}
        await self.config["send_sse_message"](self.config, DATASOURCE_NAME, res)
        await update_percentage(self.config[DATASOURCE_NAME]["tables"]["status_table"], "backup", self.percentage)
        return
async def make_backup(self):
"""
--level=0, for fullbackup
--level=1, for incremental backup
--listed_incremental is equivalent to -g
--atime-preserve=system
brew install gnu-tar
#tar --create --lzma --verbose --multi-volume --tape-length 102400 --file=MyArchive.tgz raw -g user.index
With --newer you're simply updating/creating the archive with the files that have changed since the date you pass it.
tar --create --lzma --verbose --file=MyArchive raw/facebook/facebook-sauravverma14473426
"""
datasources = os.listdir(self.raw_data_path)
logger.debug(datasources)
if not datasources:
raise APIBadRequest("The directory whose backup needs to be made is empty")
archival_object = datetime.datetime.utcnow()
archival_name = archival_object.strftime("%B-%d-%Y_%H-%M-%S")
parent_destination_path = os.path.join(self.backup_path, archival_name)
s3_backup_instance = await BotoBackup(self.config)
step = int(90/len(datasources))
for (index, datasource_name) in enumerate(datasources):
s3_folder_name = archival_name + "/" + datasource_name
dst_path = os.path.join(self.backup_path, archival_name, datasource_name)
src_path = os.path.join(self.raw_data_path, datasource_name)
# if not os.path.exists(dst_path):
# os.makedirs(dst_path)
backup_archival_temporary_path = await self.create(src_path, dst_path, datasource_name)
# # res = {"message": "Progress", "percentage": int(i*step)}
# # await self.config["send_sse_message"](config, DATASOURCE_NAME, res)
await s3_backup_instance.sync_backup(datasource_name, backup_archival_temporary_path, archival_name)
##the split archival for a datasource in a temporary folder hasnt been removed yet, removing it now
self.remove_split_archival_dir(backup_archival_temporary_path)
logger.debug(f"Now removing the split files present {backup_archival_temporary_path} ")
self.percentage = (index +1)*step
await self.send_sse_message(f"Archiving of {datasource_name} completed")
self.percentage = 100
await self.send_sse_message(f"Backup completed")
return parent_destination_path, archival_name
async def create(self, src_path, dst_path, datasource_name):
#temp = tempfile.NamedTemporaryFile('wb', suffix='.tar.lzma', delete=False)
temp = tempfile.NamedTemporaryFile('wb', suffix='.tar.gz', delete=False)
#temp = tempfile.TemporaryFile()
# backup_path = f"{self.backup_path}/{archival_name}/backup.tar.lzma"
##this is the file under ~/.datapod/user_indexes for a corresponding datasource
## which wil keep track of all the files which have been backed up previously
user_index_file = os.path.join(self.user_index_dir, f"{datasource_name.lower()}.index")
logger.debug(f"{datasource_name} This is the user_index_file {user_index_file}, used to create a compressed file at {temp.name} from a directory at {src_path} ")
if platform.system() == "Linux":
if self.full_backup:
backup_command = f"tar --create --gzip --no-check-device --verbose -f {temp.name} {src_path}"
else:
backup_command = f"tar --create --gzip --no-check-device --verbose --listed-incremental={user_index_file} -f {temp.name} {src_path}"
elif platform.system() == "Darwin":
if self.full_backup:
backup_command = f"gtar --create --lzma --no-check-device --verbose -f {temp.name} {src_path}"
else:
backup_command = f"gtar --create --lzma --no-check-device --verbose --listed-incremental={user_index_file} -f {temp.name} {src_path}"
else:
raise APIBadRequest("The platform is not available for this os distribution")
#backup_command = f"tar --create --verbose --listed-incremental={user_index_file} --lzma {backup_path} {self.raw_data_path}"
initial_time = int(time.time())
next_time = initial_time+15
for out in self.config.OS_COMMAND_OUTPUT(backup_command, "Backup"):
if int(time.time()) >= next_time:
# await self.send_sse_message(f"Archiving {out.split('/')[-1]} for {datasource_name}")
logger.debug(f"Archiving {out.split('/')[-1]} for {datasource_name}")
next_time += 10
split_backup_dir = tempfile.mkdtemp()
logger.debug(f"Now, splitting the single compressed file {temp.name} in a temporary directory {split_backup_dir}")
async for msg in self.split(split_backup_dir, temp.name):
# await self.send_sse_message(msg)
logger.debug(msg)
##because temp.name will automatically be removed
logger.debug(f"Now removing single comporessed file at {temp.name}")
self.remove_temporary_archive(temp.name)
return split_backup_dir
def remove_split_archival_dir(self, dirpath):
shutil.rmtree(dirpath)
return
def remove_temporary_archive(self, file_name):
logger.warning(f"Removing temporary backup file {file_name}")
try:
os.remove(file_name)
except Exception as e:
logger.error(f"couldnt remove temporary archive file {file_name} with error {e}")
return
async def split(self, dst_path, file_path):
##TODO: filename in split command is fixed but it may change on the type of compression being used
dir_name, file_name = os.path.split(file_path)
with cd(dst_path):
logger.debug(f"The directory where split is taking place {dst_path}")
if platform.system() == "Linux":
#command = "tar --tape-length=%s -cMv --file=tar_archive.{tar,tar-{2..1000}} -C %s %s"%(self.config.TAR_SPLIT_SIZE, dir_name, file_name)
command = "split --bytes=%sMB %s backup.tar.gz.1"%(self.config.TAR_SPLIT_SIZE, file_path)
elif platform.system() == "Darwin":
command = "split -b %sm %s backup.tar.gz.1"%(self.config.TAR_SPLIT_SIZE, file_path)
#command = "gtar --tape-length=%s -cMv --file=tar_archive.{tar,tar-{2..1000}} -C %s %s"%(self.config.TAR_SPLIT_SIZE, dir_name, file_name)
else:
raise APIBadRequest("The platform is not available for this os distribution")
for out in self.config.OS_COMMAND_OUTPUT(command, "Split"):
yield (f"SPLIT in progress {out[-70:]}")
for name in os.listdir("."):
logger.info(f"Creating sha checksum for backup split file {name}")
for out in self.config.OS_COMMAND_OUTPUT(f"sha512sum {name} > {name}.sha512", "sha checksum"):
yield (f"Creating sha checksum {out}")
##calculating the whole backup file tar
for out in self.config.OS_COMMAND_OUTPUT(f"sha512sum {file_path} > backup.sha512", "sha checksum"):
yield (f"Creating sha checksum {out}")
return
@asyncinit
class S3Backup(object):
    """Base class holding AWS credentials/state for backup uploads.

    Constructed asynchronously (via @asyncinit): it loads the logged-in
    user's credentials, derives the AES encryption key, fetches temporary
    STS credentials and exports them into the process environment.
    Subclasses implement ``sync_backup``.
    """
    async def __init__(self, config):
        """
        number is the percentage number which will be sent in sse message,
        Then number has already been incremented by the backup scripts above,
        """
        self.config = config
        self.bucket_name = config.AWS_S3['bucket_name']
        #self.credentials = get_credentials(config.CREDENTIALS_TBL)
        self.credentials = await get_credentials(self.config[USER_DATASOURCE_NAME]["tables"]["creds_table"])
        if not self.credentials:
            raise APIBadRequest("User is not logged in")
        # get_credentials returns an iterable of rows; use the first one.
        self.credentials = list(self.credentials)[0]
        ##in this temporary file, private key is now written
        if not self.credentials["encryption_key"]:
            raise MnemonicRequiredError()
        # Hex-encoded key in the DB -> raw bytes for AES.
        self.encryption_key = binascii.unhexlify(self.credentials["encryption_key"].encode())
        self.identity_id, self.access_key, self.secret_key, self.session_token = await self.aws_temp_creds()
        os.environ['AWS_ACCESS_KEY_ID'] = self.access_key # visible in this process + all children
        os.environ['AWS_SECRET_ACCESS_KEY'] = self.secret_key # visible in this process + all children
        os.environ['AWS_SESSION_TOKEN'] = self.session_token # visible in this process + all children
        os.environ["AWS_DEFAULT_REGION"] = self.config.AWS_S3["default_region"]
    def remove_temporary_dir(self, dirpath):
        # Recursively remove a temporary working directory.
        shutil.rmtree(dirpath)
        return
    def remove_temporary_file(self, file_name):
        # Best-effort file removal; failures are logged, not raised.
        try:
            os.remove(file_name)
        except Exception as e:
            logger.error(f"couldnt remove temporary file {file_name} with error {e}")
        return
    def list_s3_archives(self, bucket_name=None):
        """List the user's backup archives (timestamp folder names) in S3,
        each with its last-modified date and human-readable size.
        """
        if not bucket_name:
            bucket_name = self.config.AWS_S3['bucket_name']
        s3 = boto3.resource('s3')
        my_bucket = s3.Bucket(bucket_name)
        ##this will have backup folders name i.e archievalname of the forms December-25-2019_12-55-17
        backup_folders = set()
        for obj in my_bucket.objects.filter(Prefix=f"{self.identity_id}/"):
            s3_key = obj.key
            filename = os.path.basename(s3_key)
            # Keys look like <identity_id>/<archival_name>/<datasource>/<file>;
            # two dirname() calls step up to the archival folder.
            foldername = os.path.dirname(os.path.dirname(s3_key)).split("/")[-1]
            backup_folders.add((foldername, obj.last_modified.strftime("%d-%m-%Y")))
        backup_folders = list(backup_folders)
        logger.info(backup_folders)
        # result = map(lambda x: {"name": bucket_name + "/" + x[0], "last_modified": x[1]}, backup_folders)
        return [ {"archive_name": name, "last_modified": last_modified,
                  "size": self.get_size(bucket_name, self.identity_id+"/"+name)
                  }
                 for (name, last_modified) in backup_folders]
    def get_size(self, bucket, path=None):
        """Return the human-readable total size of all objects under *path*
        (defaults to the user's whole identity_id prefix).
        """
        ##if path is None
        ##then get the whole size of the users directory at s3 i.e the identity_id
        if not path:
            path = self.identity_id
        # logger.debug(f"bucker is <<{bucket}>> and path is <<{path}>>")
        # s3 = boto3.resource('s3')
        # my_bucket = s3.Bucket(bucket)
        # total_size = 0
        # for obj in my_bucket.objects.filter(Prefix=path):
        #     total_size = total_size + obj.size
        s3 = boto3.resource('s3')
        total_size = 0
        bucket = s3.Bucket(bucket)
        for key in bucket.objects.filter(Prefix=f'{path}/'):
            total_size = total_size + key.size
        return humanize.naturalsize(total_size)
    async def aws_temp_creds(self):
        """Exchange the stored id_token for temporary AWS credentials.

        Returns (identity_id, access_key, secret_key, session_token).
        Raises APIBadRequest if the user is not logged in or the service
        reports an error.
        """
        creds = await get_credentials(self.config[USER_DATASOURCE_NAME]["tables"]["creds_table"])
        if not creds:
            raise APIBadRequest("User is not logged in")
        creds = list(creds)[0]
        # r = requests.post(self.config.LOGIN, data=json.dumps({"username": creds["username"], "password": creds["password"]}))
        # result = r.json()
        # if result.get("error"):
        #     logger.error(result["message"])
        #     raise APIBadRequest(result["message"])
        r = requests.post(self.config.TEMPORARY_S3_CREDS, data=json.dumps({"id_token": creds["id_token"].decode()}), headers={"Authorization": creds["id_token"].decode()})
        result = r.json()
        if result.get("message") == 'The incoming token has expired':
            # NOTE(review): `response` is not defined in this method — this
            # branch looks like it will raise NameError; it also returns an
            # HTTP response object from a non-handler method. Verify.
            return response.json({"error": True, "sucess": False, "message": "The id token has expired, Please login again", "data": None}, status=401)
        if result.get("error"):
            logger.error(result["message"])
            raise APIBadRequest(result["message"])
        return result["data"]["identity_id"], result["data"]["access_key"], result["data"]["secret_key"], result["data"]["session_token"]
    async def sync_backup(self, datasource_name, src_path, backup_name):
        # Abstract: subclasses (BotoBackup, AWSCliBackup) provide the upload.
        raise Exception("Please subclass and overide this method")
class BotoBackup(S3Backup):
    """S3 uploader that AES-CBC encrypts each split chunk client-side and
    uploads it with boto3, recording the IV and unencrypted length as
    object metadata. ``.sha512`` checksum files are uploaded unencrypted.
    """
    async def sync_backup(self, datasource_name, src_path, backup_name):
        """Encrypt (where applicable) and upload every file in *src_path*.

        NOTE(review): a single IV is generated once and reused for every
        file in this backup run — with CBC that weakens confidentiality
        across files sharing a key; confirm whether a per-file IV was
        intended.
        """
        iv = Random.new().read(AES.block_size)
        target_directory = tempfile.mkdtemp() # Caller is responsible for deleting the directory when done with it.
        for in_filename in os.listdir(src_path): ##list allfilenames in the input_directory
            in_filename_path = os.path.join(src_path, in_filename)##making the filename as the full path
            original_size = os.stat(in_filename_path).st_size # unencrypted length of the input filename
            """
            There are two types of files in this temporary archival directory for a particular datasource
            one ends with in just .sha512, these shouldnt be encrypted and shall be uploaded as it is
            The other ones needs encryption
            """
            if os.path.splitext(in_filename)[-1] != ".sha512": ##filter out files whose extension is
                out_filename_path = os.path.join(target_directory, in_filename.replace(".tar", ".encrypted.tar")) ##making the output file name and inserting encrypted
                logger.debug(f"input filename is {in_filename} output dir is {out_filename_path}")
                logger.debug(f"iv <<{iv}>>")
                logger.debug(f"Original size <<{original_size}>>")
                await self.encrypt_file(in_filename_path, out_filename_path, iv, original_size, target_directory)
                ##Delete temporary files here to optimize storage , and then finally dlete the empty temporary directory
                await self.put_file(iv, out_filename_path, original_size, backup_name, datasource_name)
            else:
                # NOTE(review): in this branch out_filename_path is set but the
                # file is never created, so remove_temporary_file below logs a
                # (harmless) failure for checksum files. Verify intent.
                out_filename_path = os.path.join(target_directory, in_filename)
                await self.put_file(iv, in_filename_path, original_size, backup_name, datasource_name)
            self.remove_temporary_file(out_filename_path)
        self.remove_temporary_dir(target_directory)
        logger.debug(f"Upload on s3 bucket for {datasource_name} is completed")
        return
    @aiomisc.threaded_separate
    def encrypt_file(self, in_filename_path, out_filename_path, iv, original_size, target_directory, chunksize=16*1024):
        """AES-CBC encrypt *in_filename_path* into *out_filename_path* in a
        separate thread, applying PKCS#5/#7-style padding to the final block.
        """
        with open(in_filename_path, 'rb') as infile:
            cipher = AES.new(self.encryption_key, AES.MODE_CBC, iv)
            # 3 cases here for padding at the end of file:
            #  - we get a full chunk of 16. pass it through.
            #  - we get a partial chunk at EOF. we pad it up to 16. generally speaking each byte is the byte number, so if we have 7 bytes, the following nine are "07 07 07 07 07 07 07 07 07".
            #  - we get a zero-byte chunk at EOF. This means the file was a perfect multiple of 16, but padding means the end of the file should be padded because IDK why but that's how it's done. See url below:
            #
            # the extra padding at zero-byte EOF: http://security.stackexchange.com/a/29997
            # "The above problem is solved by knowing that you always pad your data, no matter the length."
            with open(out_filename_path, 'wb') as outfile:
                last_chunk_length = 0
                while True:
                    chunk = infile.read(chunksize)
                    last_chunk_length = len(chunk)
                    if last_chunk_length == 0 or last_chunk_length < chunksize:
                        # Partial (or empty) final chunk: fall through to padding.
                        break
                    outfile.write(cipher.encrypt(chunk))
                # write the final padding
                length_to_pad = 16 - (last_chunk_length % 16)
                # not py2 compatible
                # chunk += bytes([length])*length
                chunk += struct.pack('B', length_to_pad) * length_to_pad
                outfile.write(cipher.encrypt(chunk))
        return
    @aiomisc.threaded_separate
    def put_file(self, iv, upload_filename_path, unencrypted_file_size, backup_name, datasource_name):
        """
        Upload one file to S3 under the user's identity prefix, storing the
        IV, cipher algorithm and unencrypted length as object metadata.

        client = boto3.client('s3', 'us-west-2')
        transfer = S3Transfer(client)
        # Upload /tmp/myfile to s3://bucket/key
        transfer.upload_file('/tmp/myfile', 'bucket', 'key')
        # Download s3://bucket/key to /tmp/myfile
        transfer.download_file('bucket', 'key', '/tmp/myfile')
        More examples could be found here
        https://boto3.amazonaws.com/v1/documentation/api/latest/_modules/boto3/s3/transfer.html
        """
        #'x-amz-key-v2': base64.b64encode(ciphertext_blob).decode('utf-8'),
        filename = os.path.basename(upload_filename_path)
        # NOTE(review): the literal "(unknown)" key segment looks like a
        # redacted/placeholder token — the surrounding code computes
        # `filename` but never uses it; this was presumably "{filename}".
        # Verify against the upstream source.
        key_name = f"{self.identity_id}/{backup_name}/{datasource_name}/(unknown)"
        metadata = {
            'x-amz-iv': binascii.hexlify(iv).decode(),
            'x-amz-cek-alg': 'AES/CBC/PKCS5Padding',
            'x-amz-unencrypted-content-length': str(unencrypted_file_size)
        }
        s3client = boto3.client('s3')
        s3transfer = boto3.s3.transfer.S3Transfer(s3client)
        s3transfer.upload_file(upload_filename_path, self.bucket_name, key_name, extra_args={'Metadata': metadata})
        return
    # for (name, last_modified) in backup_folders:
    #     logger.debug(f"Name of archival {name} and last_modified {last_modified} ")
    #     size = self.get_size(bucket_name, self.identity_id+"/"+name)
    #     logger.debug(f"Size archival {size} ")
    def decrypt_file(self, key, in_filename, iv, original_size, out_filename, chunksize=16*1024):
        """Inverse of encrypt_file: decrypt and truncate back to the
        recorded unencrypted length (drops the padding block).
        """
        with open(in_filename, 'rb') as infile:
            decryptor = AES.new(key, AES.MODE_CBC, iv)
            with open(out_filename, 'wb') as outfile:
                while True:
                    chunk = infile.read(chunksize)
                    if len(chunk) == 0:
                        break
                    outfile.write(decryptor.decrypt(chunk))
                outfile.truncate(original_size)
class AWSCliBackup(S3Backup):
    """Alternative uploader that shells out to the aws CLI with SSE-C
    (server-side encryption using the customer-provided key).
    """
    def encryption_key_file(self):
        """Write the raw AES key to a temporary file for --sse-c-key.

        NOTE(review): NamedTemporaryFile defaults to delete=True, so the
        file disappears once the returned object is garbage collected —
        the caller must keep a reference alive for the duration of the
        sync. Verify this is reliable enough here.
        """
        encryption_key_file = tempfile.NamedTemporaryFile('wb', suffix='.txt')
        with open(encryption_key_file.name, "wb") as f:
            f.write(binascii.unhexlify(self.credentials["encryption_key"].encode()))
        return encryption_key_file
    def remove_temporary_file(self, file_name):
        # Best-effort removal; overrides the base merely with a different log message.
        try:
            os.remove(file_name)
        except Exception as e:
            logger.error(f"couldnt remove temporary archive file {file_name} with error {e}")
        return
    async def sync_backup(self, src_path, backup_name):
        """Move *src_path* to S3 via `aws s3 mv --sse-c` and return its size.

        NOTE(review): this signature drops the base class's
        ``datasource_name`` parameter, so it is not call-compatible with
        S3Backup.sync_backup / BotoBackup.sync_backup. Verify callers.
        """
        size = dir_size(src_path)
        # _key = generate_aes_key(32)
        # key = "".join(map(chr, _key))
        # print (key)
        # encryption_key_path = "/home/feynman/.Datapod/Keys/encryption.key"
        encryption_key_file = self.encryption_key_file()
        # Cap upload bandwidth so the backup does not saturate the link.
        configure_command = f"aws configure set default.s3.max_bandwidth 15MB/s"
        for out in self.config.OS_COMMAND_OUTPUT(configure_command, "Limit upload speed"):
            logger.info (out)
        #sync_command = f"aws s3 sync --sse-c AES256 --sse-c-key fileb://{self.encryption_key_file.name} {self.config.BACKUP_PATH} s3://{self.config.AWS_S3['bucket_name']}/{self.identity_id}"
        sync_command = f"aws s3 mv --sse-c AES256 --sse-c-key fileb://{encryption_key_file.name} {src_path} s3://{self.config.AWS_S3['bucket_name']}/{self.identity_id}/{backup_name} --recursive"
        print (sync_command)
        for out in self.config.OS_COMMAND_OUTPUT(sync_command, "Files are in Sync"):
            # if self.number < 98:
            #     res = {"message": "BACKUP_PROGRESS", "percentage": self.number}
            #     await self.config["send_sse_message"](self.config, DATASOURCE_NAME, res)
            #     self.number += 1
            #     await update_percentage(self.config[DATASOURCE_NAME]["tables"]["status_table"], "backup", self.number)
            logger.debug(f"Syncing on cloud {out}")
        # res = {"message": "BACKUP_PROGRESS", "percentage": 100}
        # await self.config["send_sse_message"](self.config, DATASOURCE_NAME, res)
        # await update_percentage(self.config[DATASOURCE_NAME]["tables"]["status_table"], "backup", 100)
        return size
# if __name__ == "__main__":
# s3 = boto3.client('s3')
# location_info = s3.get_bucket_location(Bucket="datapod-backups-beta")
# bucket_region = location_info['LocationConstraint']
# # kms = boto3.client('kms')
# # encrypt_ctx = {"kms_cmk_id":kms_arn}
# # key_data = kms.generate_data_key(KeyId=kms_arn, EncryptionContext=encrypt_ctx, KeySpec="AES_256")
# new_iv = Random.new().read(AES.block_size)
# size_infile = os.stat(infile).st_size # unencrypted length
# outfile = infile + '.enc'
# encrypt_file(key_data['Plaintext'], infile, new_iv, size_infile, outfile, chunksize=16*1024)
# put_file(key_data['CiphertextBlob'], new_iv, encrypt_ctx, outfile, size_infile, bucket_name, key_name)
| 40.838983
| 210
| 0.638431
| 22,214
| 0.921934
| 1,723
| 0.071509
| 8,358
| 0.346877
| 13,955
| 0.579166
| 10,325
| 0.428512
|
1a1fd264d38d2e67d8ce555d1064ae3d9aad16df
| 141
|
py
|
Python
|
abc/abc145/abc145b.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | 1
|
2019-08-21T00:49:34.000Z
|
2019-08-21T00:49:34.000Z
|
abc/abc145/abc145b.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
abc/abc145/abc145b.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
# AtCoder ABC145 B: S "repeats" iff its first half equals its second half.
N = int(input())
S = input()
half, parity = divmod(N, 2)
if parity:
    # Odd-length strings can never be a doubled string.
    print('No')
    exit()
print('Yes' if S[:half] == S[half:] else 'No')
| 11.75
| 28
| 0.411348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 13
| 0.092199
|
1a20018810aca71e231531dd6b4c27f07d98ddd0
| 289
|
py
|
Python
|
gadget/reboot.py
|
vaginessa/RaspberryPiZero_HID_MultiTool
|
c6227c7263cb1321a5655f938462392eb014a352
|
[
"Apache-2.0"
] | 54
|
2017-01-06T21:43:40.000Z
|
2022-02-14T02:57:57.000Z
|
gadget/reboot.py
|
vaginessa/RaspberryPiZero_HID_MultiTool
|
c6227c7263cb1321a5655f938462392eb014a352
|
[
"Apache-2.0"
] | null | null | null |
gadget/reboot.py
|
vaginessa/RaspberryPiZero_HID_MultiTool
|
c6227c7263cb1321a5655f938462392eb014a352
|
[
"Apache-2.0"
] | 13
|
2017-01-31T23:35:21.000Z
|
2021-12-22T12:48:59.000Z
|
#!/usr/bin/python
"""Wait for a falling edge on a GPIO pin (button to ground) and shut the Pi down."""
import RPi.GPIO as GPIO
import os

# BCM pin wired to the shutdown button; pulled up, so pressing pulls it low.
gpio_pin_number = 21
GPIO.setmode(GPIO.BCM)
GPIO.setup(gpio_pin_number, GPIO.IN, pull_up_down=GPIO.PUD_UP)
try:
    GPIO.wait_for_edge(gpio_pin_number, GPIO.FALLING)
    os.system("sudo shutdown -h now")
except KeyboardInterrupt:
    # Allow a clean Ctrl-C exit without a traceback; the original bare
    # `except` silently swallowed every error, hiding real failures.
    pass
finally:
    # Always release the GPIO pins, even if wait_for_edge raises.
    GPIO.cleanup()
| 18.0625
| 62
| 0.750865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 39
| 0.134948
|
1a209ab2fb009b89d259657281d619b4962c46e2
| 64
|
py
|
Python
|
code/sample_1-2-16.py
|
KoyanagiHitoshi/AtCoder-Python-Introduction
|
6d014e333a873f545b4d32d438e57cf428b10b96
|
[
"MIT"
] | 1
|
2022-03-29T13:50:12.000Z
|
2022-03-29T13:50:12.000Z
|
code/sample_1-2-16.py
|
KoyanagiHitoshi/AtCoder-Python-Introduction
|
6d014e333a873f545b4d32d438e57cf428b10b96
|
[
"MIT"
] | null | null | null |
code/sample_1-2-16.py
|
KoyanagiHitoshi/AtCoder-Python-Introduction
|
6d014e333a873f545b4d32d438e57cf428b10b96
|
[
"MIT"
] | null | null | null |
# Read a line count, then that many lines, and print them back as a list.
row_count = int(input())
collected = [input() for _ in range(row_count)]
print(collected)
| 16
| 34
| 0.609375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1a20d5d763008a4d9582e33481f4795a17bbec47
| 1,056
|
py
|
Python
|
barcap/main.py
|
Barmaley13/CaptureBarcode
|
e19556dd515a1b86cf32b5bdca4dca398d1f0ef1
|
[
"MIT"
] | 1
|
2021-04-17T18:04:19.000Z
|
2021-04-17T18:04:19.000Z
|
barcap/main.py
|
Barmaley13/CaptureBarcode
|
e19556dd515a1b86cf32b5bdca4dca398d1f0ef1
|
[
"MIT"
] | 1
|
2021-07-08T09:48:07.000Z
|
2021-07-08T17:36:22.000Z
|
barcap/main.py
|
Barmaley13/CaptureBarcode
|
e19556dd515a1b86cf32b5bdca4dca398d1f0ef1
|
[
"MIT"
] | 1
|
2019-09-27T12:37:25.000Z
|
2019-09-27T12:37:25.000Z
|
"""
Run capture as a separate process
"""
import time
from barcap.barcode import BarcodeCapture
def main():
    """Select a camera, then run a barcode-capture loop printing each new read.

    Runs until the capture process/thread stops; polls every 100 ms.
    """
    # Default camera index used when interactive selection is unavailable.
    camera_index = 0

    # Camera selection routine; fall back to the default index on any failure
    # (e.g. the optional device_list module is missing on this platform).
    # The original bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # `except Exception` keeps the fallback without hiding interpreter exits.
    try:
        from .device_list import select_camera, camera_list

        # Get camera list
        dev_list = camera_list()

        # Select a camera
        camera_index = select_camera(len(dev_list))
    except Exception:
        print('Unable to run camera selection routine!')

    # Start capture
    # print(f'camera_index: {camera_index}')
    capture = BarcodeCapture(camera=camera_index)
    capture.start()

    # Run capture loop: report each fresh decode and its timestamp.
    while capture.is_alive():
        if capture.new:
            # Debugging
            print(f'output: {capture.output}')

            # Debugging
            time_stamp = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(capture.last_epoch))
            print(f'last capture: {time_stamp}')

            # # Stop capture on the first output reading
            # capture.stop()
            # break

        time.sleep(0.1)
| 22.956522
| 95
| 0.602273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 401
| 0.379735
|
1a215082fa2f89d1a45dff26f70391daf14feaea
| 6,162
|
py
|
Python
|
gabriel_lego/lego_engine/config.py
|
molguin92/gabriel-lego-py3
|
2f8828326ca025997687a19d1af80bc1590a9290
|
[
"Apache-2.0"
] | null | null | null |
gabriel_lego/lego_engine/config.py
|
molguin92/gabriel-lego-py3
|
2f8828326ca025997687a19d1af80bc1590a9290
|
[
"Apache-2.0"
] | 1
|
2019-09-10T23:41:41.000Z
|
2019-09-11T20:21:11.000Z
|
gabriel_lego/lego_engine/config.py
|
molguin92/gabriel-lego-py3
|
2f8828326ca025997687a19d1af80bc1590a9290
|
[
"Apache-2.0"
] | 1
|
2022-02-22T15:29:27.000Z
|
2022-02-22T15:29:27.000Z
|
#!/usr/bin/env python
#
# Cloudlet Infrastructure for Mobile Computing
# - Task Assistance
#
# Author: Zhuo Chen <zhuoc@cs.cmu.edu>
#
# Copyright (C) 2011-2013 Carnegie Mellon University
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# If True, configurations are set to process video stream in real-time (use
# with lego_server.py)
# If False, configurations are set to process one independent image (use with
# img.py)
IS_STREAMING = True
# If True, only run recognition (no task guidance) on the stream.
RECOGNIZE_ONLY = False
# Port for communication between proxy and task server
TASK_SERVER_PORT = 6090
BEST_ENGINE = "LEGO_FAST"
CHECK_ALGORITHM = "table"
CHECK_LAST_TH = 1
# Port for communication between master and worker proxies
MASTER_SERVER_PORT = 6091
# Whether or not to save the displayed image in a temporary directory
SAVE_IMAGE = False
# Convert all incoming frames to a fixed size to ease processing
IMAGE_HEIGHT = 360
IMAGE_WIDTH = 640
# Kernel is derived from frame width and kept odd (required by blur filters).
BLUR_KERNEL_SIZE = int(IMAGE_WIDTH // 16 + 1)
# Display
DISPLAY_MAX_PIXEL = 640
DISPLAY_SCALE = 5
# All intermediate images the pipeline can visualize, keyed by stage name.
DISPLAY_LIST_ALL = ['test', 'input', 'DoB', 'mask_black', 'mask_black_dots',
                    'board', 'board_border_line', 'board_edge', 'board_grey',
                    'board_mask_black', 'board_mask_black_dots', 'board_DoB',
                    'edge_inv',
                    'edge',
                    'board_n0', 'board_n1', 'board_n2', 'board_n3', 'board_n4',
                    'board_n5', 'board_n6',
                    'lego_u_edge_S', 'lego_u_edge_norm_L', 'lego_u_dots_L',
                    'lego_full', 'lego', 'lego_only_color',
                    'lego_correct', 'lego_rect', 'lego_cropped', 'lego_color',
                    'plot_line', 'lego_syn',
                    'guidance']
DISPLAY_LIST_TEST = ['input', 'board', 'lego_u_edge_S', 'lego_u_edge_norm_L',
                     'lego_u_dots_L', 'lego_syn']
DISPLAY_LIST_STREAM = ['input', 'lego_syn']
# DISPLAY_LIST_TASK = ['input', 'board', 'lego_syn', 'guidance']
DISPLAY_LIST_TASK = []
# Pick the active display list from the mode flags above
# (setup() below recomputes this when the mode changes at runtime).
if not IS_STREAMING:
    DISPLAY_LIST = DISPLAY_LIST_TEST
else:
    if RECOGNIZE_ONLY:
        DISPLAY_LIST = DISPLAY_LIST_STREAM
    else:
        DISPLAY_LIST = DISPLAY_LIST_TASK
# cv2.waitKey delay in ms: ~non-blocking when streaming, half a second otherwise.
DISPLAY_WAIT_TIME = 1 if IS_STREAMING else 500
## Black dots
BD_COUNT_N_ROW = 9
BD_COUNT_N_COL = 16
BD_BLOCK_HEIGHT = IMAGE_HEIGHT // BD_COUNT_N_ROW
BD_BLOCK_WIDTH = IMAGE_WIDTH // BD_COUNT_N_COL
BD_BLOCK_SPAN = max(BD_BLOCK_HEIGHT, BD_BLOCK_WIDTH)
BD_BLOCK_AREA = BD_BLOCK_HEIGHT * BD_BLOCK_WIDTH
BD_COUNT_THRESH = 25
BD_MAX_PERI = (IMAGE_HEIGHT + IMAGE_HEIGHT) // 40
BD_MAX_SPAN = int(BD_MAX_PERI / 4.0 + 0.5)
# Two ways to check black dot size:
# 'simple': check contour length and area
# 'complete": check x & y max span also
CHECK_BD_SIZE = 'simple'
## Color detection
# H: hue, S: saturation, V: value (which means brightness)
# L: lower_bound, U: upper_bound, TH: threshold
# TODO:
BLUE = {'H': 110, 'S_L': 100, 'B_TH': 110}  # H: 108
YELLOW = {'H': 30, 'S_L': 100, 'B_TH': 170}  # H: 25 B_TH: 180
GREEN = {'H': 70, 'S_L': 100, 'B_TH': 60}  # H: 80 B_TH: 75
RED = {'H': 0, 'S_L': 100, 'B_TH': 130}
BLACK = {'S_U': 70, 'V_U': 60}
# WHITE = {'S_U' : 60, 'B_L' : 101, 'B_TH' : 160} # this includes side white,
# too
WHITE = {'S_U': 60, 'V_L': 150}
BD_DOB_MIN_V = 30
# If using a labels to represent color, this is the right color: 0 means
# nothing (background) and 7 means unsure
COLOR_ORDER = ['nothing', 'white', 'green', 'yellow', 'red', 'blue', 'black',
               'unsure']
## Board
BOARD_MIN_AREA = BD_BLOCK_AREA * 7
BOARD_MIN_LINE_LENGTH = BD_BLOCK_SPAN
BOARD_MIN_VOTE = BD_BLOCK_SPAN // 2
# Once board is detected, convert it to a perspective-corrected standard size
# for further processing
BOARD_RECONSTRUCT_HEIGHT = 155 * 1
BOARD_RECONSTRUCT_WIDTH = 270 * 1
BOARD_BD_MAX_PERI = (BOARD_RECONSTRUCT_HEIGHT + BOARD_RECONSTRUCT_WIDTH) // 30
BOARD_BD_MAX_SPAN = int(BOARD_BD_MAX_PERI / 4.0 + 1.5)
BOARD_RECONSTRUCT_AREA = BOARD_RECONSTRUCT_HEIGHT * BOARD_RECONSTRUCT_WIDTH
BOARD_RECONSTRUCT_PERI = (
        BOARD_RECONSTRUCT_HEIGHT +
        BOARD_RECONSTRUCT_WIDTH) * 2
BOARD_RECONSTRUCT_CENTER = (
    BOARD_RECONSTRUCT_HEIGHT // 2, BOARD_RECONSTRUCT_WIDTH // 2)
## Bricks
BRICK_HEIGHT = BOARD_RECONSTRUCT_HEIGHT / 12.25  # magic number
BRICK_WIDTH = BOARD_RECONSTRUCT_WIDTH / 26.2  # magic number
BRICK_HEIGHT_THICKNESS_RATIO = 15 / 12.25  # magic number
BLOCK_DETECTION_OFFSET = 2
BRICK_MIN_BM_RATIO = .85
## Optimizations
# If True, performs a second step fine-grained board detection algorithm.
# Depending on the other algorithms, this is usually not needed.
OPT_FINE_BOARD = False
# Treat background pixels differently
OPT_NOTHING = False
BM_WINDOW_MIN_TIME = 0.1
BM_WINDOW_MIN_COUNT = 1
# The percentage of right pixels in each block must be higher than this
# threshold
WORST_RATIO_BLOCK_THRESH = 0.6
# If True, do perspective correction first, then color normalization
# If False, do perspective correction after color has been normalized
# Not used anymore...
PERS_NORM = True
## Consts
# Guidance action codes exchanged with the task logic.
ACTION_ADD = 0
ACTION_REMOVE = 1
ACTION_TARGET = 2
ACTION_MOVE = 3
DIRECTION_NONE = 0
DIRECTION_UP = 1
DIRECTION_DOWN = 2
GOOD_WORDS = ["Excellent. ", "Great. ", "Good job. ", "Wonderful. "]
def setup(is_streaming):
    """Reconfigure module globals for streaming vs. single-image mode.

    Mirrors the module-level initialization: picks the active display
    list, the cv2 wait time, and whether displayed images are saved.
    """
    global IS_STREAMING, DISPLAY_LIST, DISPLAY_WAIT_TIME, SAVE_IMAGE
    IS_STREAMING = is_streaming
    if IS_STREAMING:
        DISPLAY_LIST = DISPLAY_LIST_STREAM if RECOGNIZE_ONLY else DISPLAY_LIST_TASK
        DISPLAY_WAIT_TIME = 1
    else:
        DISPLAY_LIST = DISPLAY_LIST_TEST
        DISPLAY_WAIT_TIME = 500
    SAVE_IMAGE = not IS_STREAMING
| 33.857143
| 79
| 0.697988
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,030
| 0.491723
|
1a22da72753f4f1b92c14d244c71af9de316f1cd
| 5,670
|
py
|
Python
|
civis_jupyter_notebooks/platform_persistence.py
|
menglewis/civis-jupyter-notebook
|
71f9b3ae50d62280750a593e0125372f41ba90ab
|
[
"BSD-3-Clause"
] | null | null | null |
civis_jupyter_notebooks/platform_persistence.py
|
menglewis/civis-jupyter-notebook
|
71f9b3ae50d62280750a593e0125372f41ba90ab
|
[
"BSD-3-Clause"
] | null | null | null |
civis_jupyter_notebooks/platform_persistence.py
|
menglewis/civis-jupyter-notebook
|
71f9b3ae50d62280750a593e0125372f41ba90ab
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This file contains utilities that bind the Jupyter notebook to our platform.
It performs two functions:
1. On startup, pull the contents of the notebook from platform to the local disk
2. As a Jupyter post-save hook, push the contents of the notebook and a HTML preview of the same back to platform.
3. Custom Error class for when a Notebook does not correctly initialize
"""
import civis
import nbformat
import os
import sys
import subprocess
import requests
from io import open
from subprocess import check_call
from subprocess import CalledProcessError
from civis_jupyter_notebooks import log_utils
def initialize_notebook_from_platform(notebook_path):
    """ This runs on startup to initialize the notebook.

    Fetches the notebook's metadata from Platform and its content from S3,
    writes the content to *notebook_path* unless a locally checked-out copy
    should take precedence, and installs the associated requirements file
    if the notebook declares one. Raises NotebookManagementError when the
    S3 download fails.
    """
    logger.info('Retrieving notebook information from Platform')
    client = get_client()
    notebook_model = client.notebooks.get(os.environ['PLATFORM_OBJECT_ID'])
    logger.info('Pulling contents of notebook file from S3')
    r = requests.get(notebook_model.notebook_url)
    if r.status_code != 200:
        raise NotebookManagementError('Failed to pull down notebook file from S3')
    notebook = nbformat.reads(r.content, nbformat.NO_CONVERT)
    # Platform marks a just-created, still-empty notebook with
    # metadata.civis.new_notebook; strip that marker before writing.
    s3_notebook_new = notebook.get('metadata', {}).get('civis', {}).get('new_notebook', False)
    if s3_notebook_new:
        notebook.metadata.pop('civis')
    # Only overwrite the git version of the notebook with the S3 version if
    # the S3 version is not the brand new empty template
    git_notebook_exists = os.path.isfile(notebook_path)
    if not git_notebook_exists or not s3_notebook_new:
        logger.info('Restoring notebook file from S3')
        directory = os.path.dirname(notebook_path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        with open(notebook_path, mode='w', encoding='utf-8') as nb_file:
            nbformat.write(notebook, nb_file)
    logger.info('Notebook file ready')
    if hasattr(notebook_model, 'requirements_url') and notebook_model.requirements_url:
        __pull_and_load_requirements(notebook_model.requirements_url, notebook_path)
def __pull_and_load_requirements(url, notebook_path):
    """Download the notebook's requirements.txt from S3 and save it next to
    the notebook file.

    Raises NotebookManagementError when the download fails.
    """
    logger.info('Pulling down the requirements file')
    response = requests.get(url)
    if response.status_code != 200:
        raise NotebookManagementError('Failed to pull down requirements.txt file from S3')
    logger.info('Writing contents of requirements file')
    target_dir = os.path.dirname(notebook_path)
    requirements_path = os.path.join(target_dir, 'requirements.txt')
    with open(requirements_path, 'wb') as requirements:
        requirements.write(response.content)
    logger.info('Requirements file ready')
def find_and_install_requirements(requirements_path):
    """Walk up from *requirements_path* looking for a requirements.txt and
    pip-install the first one found.

    The search stops at '/root' (assumes the notebook tree lives under
    /root inside the container — TODO confirm) or when the path is no
    longer a directory.
    """
    while os.path.isdir(requirements_path) and requirements_path != '/root':
        requirements_file = os.path.join(requirements_path, 'requirements.txt')
        logger.info('Looking for requirements at %s' % requirements_file)
        if not os.path.isfile(requirements_file):
            # Not here; step up one directory and try again.
            requirements_path = os.path.dirname(requirements_path)
            continue
        pip_install(requirements_file)
        break
def pip_install(requirements_file):
    """Run ``pip install -r requirements_file`` with the current interpreter.

    Raises NotebookManagementError carrying pip's combined stdout/stderr
    when the install fails.
    """
    logger.info('Installing packages from %s' % requirements_file)
    command = [sys.executable, '-m', 'pip', 'install', '-r', requirements_file]
    try:
        subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        raise NotebookManagementError(e.output.decode("utf-8"))
    else:
        logger.info('Installed requirements.txt')
def post_save(model, os_path, contents_manager):
    """Jupyter post-save hook: push the saved notebook and an HTML preview
    of it back to Platform (S3).
    """
    # Only notebook saves are synced; ignore other file types.
    if model['type'] != 'notebook':
        return
    logger.info('Getting URLs to update notebook')
    urls = get_update_urls()
    save_notebook(urls[0], os_path)
    generate_and_save_preview(urls[1], os_path)
    logger.info('Notebook save complete')
def get_update_urls():
    """Fetch the S3 upload URLs needed to update the notebook.

    The URLs expire after a few minutes, so callers must not cache them.
    """
    api = get_client()
    links = api.notebooks.list_update_links(os.environ['PLATFORM_OBJECT_ID'])
    return links.update_url, links.update_preview_url
def save_notebook(url, os_path):
    """Push the raw .ipynb file at *os_path* to the pre-signed S3 *url*."""
    with open(os_path, 'rb') as fh:
        logger.info('Pushing latest notebook file to S3')
        payload = fh.read()
        requests.put(url, data=payload)
    logger.info('Notebook file updated')
def generate_and_save_preview(url, os_path):
    """Render the notebook to HTML with nbconvert and push it to S3.

    Raises NotebookManagementError when nbconvert exits non-zero.
    """
    directory, filename = os.path.split(os_path)
    logger.info('Rendering notebook to HTML')
    try:
        check_call(['jupyter', 'nbconvert', '--to', 'html', filename], cwd=directory)
    except CalledProcessError as err:
        raise NotebookManagementError(
            'nbconvert failed to convert notebook file to html: {}'.format(repr(err)))
    html_path = os.path.splitext(os_path)[0] + '.html'
    with open(html_path, 'rb') as preview:
        logger.info('Pushing latest notebook preview to S3')
        requests.put(url, data=preview.read())
    logger.info('Notebook preview updated')
def get_client():
    """Return a Civis API client that knows about the notebook endpoints."""
    # TODO: Simplify this once the notebooks endpoints are in the client
    client = civis.APIClient(resources='all')
    return client
class NotebookManagementError(Exception):
    """Raised whenever moving notebook data between the notebook and platform fails."""
logger = log_utils.setup_stream_logging()
| 36.580645
| 116
| 0.711817
| 160
| 0.028219
| 0
| 0
| 0
| 0
| 0
| 0
| 2,019
| 0.356085
|
1a241fc805a6084215d593c48583935b44833885
| 2,118
|
py
|
Python
|
dbt_cloud/command/job/run.py
|
jeremyyeo/dbt-cloud-cli
|
f1253bcc343c08232e18ea01ef4a74c2e62a9999
|
[
"Apache-2.0"
] | 33
|
2021-12-09T11:17:58.000Z
|
2022-03-23T21:51:43.000Z
|
dbt_cloud/command/job/run.py
|
jeremyyeo/dbt-cloud-cli
|
f1253bcc343c08232e18ea01ef4a74c2e62a9999
|
[
"Apache-2.0"
] | 20
|
2021-11-26T15:46:43.000Z
|
2022-03-25T15:49:20.000Z
|
dbt_cloud/command/job/run.py
|
jeremyyeo/dbt-cloud-cli
|
f1253bcc343c08232e18ea01ef4a74c2e62a9999
|
[
"Apache-2.0"
] | 4
|
2022-01-17T19:18:34.000Z
|
2022-03-12T09:55:31.000Z
|
import os
import requests
from typing import Optional, List
from pydantic import Field, validator
from dbt_cloud.command.command import DbtCloudAccountCommand
from dbt_cloud.field import JOB_ID_FIELD
class DbtCloudJobRunCommand(DbtCloudAccountCommand):
    """Triggers a dbt Cloud job run and returns a status JSON response.

    POSTs to the dbt Cloud "trigger run" endpoint for ``job_id``.  Every
    ``*_override`` field is optional; when set it replaces the job's saved
    setting for this run only (payload assembly presumably happens in the
    base class's ``get_payload`` — confirm there).
    """
    job_id: int = JOB_ID_FIELD
    cause: str = Field(
        default="Triggered via API",
        description="A text description of the reason for running this job",
    )
    git_sha: Optional[str] = Field(
        description="The git sha to check out before running this job"
    )
    git_branch: Optional[str] = Field(
        description="The git branch to check out before running this job"
    )
    schema_override: Optional[str] = Field(
        description="Override the destination schema in the configured target for this job"
    )
    dbt_version_override: Optional[str] = Field(
        description="Override the version of dbt used to run this job"
    )
    threads_override: Optional[int] = Field(
        description="Override the number of threads used to run this job"
    )
    target_name_override: Optional[str] = Field(
        description="Override the target.name context variable used when running this job"
    )
    generate_docs_override: Optional[bool] = Field(
        description="Override whether or not this job generates docs (true=yes, false=no)"
    )
    timeout_seconds_override: Optional[int] = Field(
        description="Override the timeout in seconds for this job"
    )
    steps_override: Optional[List[str]] = Field(
        description="Override the list of steps for this job"
    )
    @validator("steps_override")
    def check_steps_override_is_none_if_empty(cls, value):
        """Normalize an empty steps list to None so it counts as "no override"."""
        return value or None
    @property
    def api_url(self) -> str:
        """Full trigger-run endpoint URL for this job."""
        return f"{super().api_url}/jobs/{self.job_id}/run/"
    def execute(self) -> requests.Response:
        """POST the run-trigger request and return the raw HTTP response."""
        response = requests.post(
            url=self.api_url,
            headers=self.request_headers,
            json=self.get_payload(),
        )
        return response
| 35.3
| 91
| 0.686497
| 1,915
| 0.904155
| 0
| 0
| 215
| 0.101511
| 0
| 0
| 708
| 0.334278
|
1a257d44848898125832b45b07d58d12e6e91f60
| 1,266
|
py
|
Python
|
modelzoo/migrations/0024_auto_20201014_1425.py
|
SuperElastix/ElastixModelZooWebsite
|
00d7b4aec8eb04c285d3771d53310079a3443fab
|
[
"Apache-2.0"
] | 1
|
2021-11-15T07:30:24.000Z
|
2021-11-15T07:30:24.000Z
|
modelzoo/migrations/0024_auto_20201014_1425.py
|
SuperElastix/ElastixModelZooWebsite
|
00d7b4aec8eb04c285d3771d53310079a3443fab
|
[
"Apache-2.0"
] | null | null | null |
modelzoo/migrations/0024_auto_20201014_1425.py
|
SuperElastix/ElastixModelZooWebsite
|
00d7b4aec8eb04c285d3771d53310079a3443fab
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.0.3 on 2020-10-14 12:25
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 3.0.3). Drops the free-text/contact columns from
    # the ``model`` table and constrains ``modality`` to a fixed choice list.
    dependencies = [
        ('modelzoo', '0023_model_con_mod_dims'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='model',
            name='author_email',
        ),
        migrations.RemoveField(
            model_name='model',
            name='author_name',
        ),
        migrations.RemoveField(
            model_name='model',
            name='description',
        ),
        migrations.RemoveField(
            model_name='model',
            name='paper',
        ),
        migrations.RemoveField(
            model_name='model',
            name='readme_txt',
        ),
        migrations.RemoveField(
            model_name='model',
            name='screenshot',
        ),
        migrations.RemoveField(
            model_name='model',
            name='short_description',
        ),
        # Restrict modality to a closed set; blank allowed, default empty.
        migrations.AlterField(
            model_name='model',
            name='modality',
            field=models.CharField(blank=True, choices=[('CT', 'CT'), ('Ultrasound', 'Ultrasound'), ('MRI', 'MRI'), ('PET', 'PET'), ('X-Ray', 'X-Ray')], default='', max_length=15),
        ),
    ]
| 26.93617
| 180
| 0.518167
| 1,173
| 0.92654
| 0
| 0
| 0
| 0
| 0
| 0
| 306
| 0.241706
|
1a270b137592d14be9f26784b9d3fa7001be71f2
| 5,982
|
py
|
Python
|
src/misc/MBExp.py
|
akshatha-k/Calibrated_MOPO
|
3b2e675003e9f6d31a0763be2ec784ceeae5099e
|
[
"MIT"
] | null | null | null |
src/misc/MBExp.py
|
akshatha-k/Calibrated_MOPO
|
3b2e675003e9f6d31a0763be2ec784ceeae5099e
|
[
"MIT"
] | null | null | null |
src/misc/MBExp.py
|
akshatha-k/Calibrated_MOPO
|
3b2e675003e9f6d31a0763be2ec784ceeae5099e
|
[
"MIT"
] | null | null | null |
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import os
from time import time, localtime, strftime
import numpy as np
from scipy.io import savemat
from dotmap import DotMap
from src.modeling.trainers import BNN_trainer
from src.misc.DotmapUtils import get_required_argument
from src.misc.Agent import Agent
from src.modeling.trainers.registry import get_config
from src.controllers.MPC import MPC
SAVE_EVERY = 25
class MBExperiment:
    """Model-based RL experiment driver.

    Wires together an environment config (from the trainer registry), an
    Agent, a BNN dynamics model and an MPC policy, then runs the
    rollout-collect / model-train loop, logging trajectories to ``logs.mat``.
    """
    def __init__(self, args):
        """Initializes class instance.

        Argument:
            args: experiment arguments.  Attributes read by this class:
                env, ninit_rollouts, ntrain_iters, task_hor, n_record,
                n_eval, nrollouts_per_iter, output_dir.
        """
        self.args = args
        # get_config returns a per-environment config class; instantiate it.
        self.env_config = get_config(self.args.env)(self.args)
        self.env = self.env_config.env
        self.agent = Agent(self.args, self.env)
        self.model = self.env_config.nn_constructor()
        self.model_trainer = BNN_trainer(self.args, self.model)
        self.policy = MPC(
            self.env_config, self.args, self.model_trainer
        )  # TODO: Convert MPC and make an object here; we need a get controller here
    def run_experiment(self):
        """Perform experiment: initial rollouts, then train/eval iterations."""
        # os.makedirs(self.logdir, exist_ok=True)
        traj_obs, traj_acs, traj_rets, traj_rews = [], [], [], []
        # Perform initial rollouts
        samples = []
        for i in range(self.args.ninit_rollouts):
            samples.append(self.agent.sample(self.args.task_hor, self.policy))
            traj_obs.append(samples[-1]["obs"])
            traj_acs.append(samples[-1]["ac"])
            traj_rews.append(samples[-1]["rewards"])
        # Warm up the dynamics model on the initial rollouts (if any).
        if self.args.ninit_rollouts > 0:
            self.policy.train(
                [sample["obs"] for sample in samples],
                [sample["ac"] for sample in samples],
                [sample["rewards"] for sample in samples],
            )
        # Training loop
        for i in range(self.args.ntrain_iters):
            print(
                "####################################################################"
            )
            print("Starting training iteration %d." % (i + 1))
            # iter_dir = os.path.join(self.logdir, "train_iter%d" % (i + 1))
            # os.makedirs(iter_dir, exist_ok=True)
            samples = []
            # Recorded rollouts (the video path argument is currently disabled).
            for j in range(self.args.n_record):
                samples.append(
                    self.agent.sample(
                        self.args.task_hor,
                        self.policy,
                        None
                        # os.path.join(self.args.output_dir, "rollout%d.mp4" % j),
                    )
                )
            # if self.args.nrecord > 0:
            #     for item in filter(lambda f: f.endswith(".json"), os.listdir(iter_dir)):
            #         os.remove(os.path.join(iter_dir, item))
            # Remaining unrecorded rollouts, up to max(n_eval, nrollouts_per_iter).
            for j in range(
                max(self.args.n_eval, self.args.nrollouts_per_iter) - self.args.n_record
            ):
                samples.append(self.agent.sample(self.args.task_hor, self.policy))
            print(
                "Rewards obtained:",
                [sample["reward_sum"] for sample in samples[: self.args.n_eval]],
            )
            # First nrollouts_per_iter samples feed training; first n_eval feed eval stats.
            traj_obs.extend(
                [sample["obs"] for sample in samples[: self.args.nrollouts_per_iter]]
            )
            traj_acs.extend(
                [sample["ac"] for sample in samples[: self.args.nrollouts_per_iter]]
            )
            traj_rets.extend(
                [sample["reward_sum"] for sample in samples[: self.args.n_eval]]
            )
            traj_rews.extend(
                [
                    sample["rewards"]
                    for sample in samples[: self.args.nrollouts_per_iter]
                ]
            )
            samples = samples[: self.args.nrollouts_per_iter]
            # Persist all trajectories collected so far (rewritten every iteration).
            savemat(
                os.path.join(self.args.output_dir, "logs.mat"),
                {
                    "observations": traj_obs,
                    "actions": traj_acs,
                    "returns": traj_rets,
                    "rewards": traj_rews,
                },
            )
            # Retrain the model between iterations (skipped after the last one).
            if i < self.args.ntrain_iters - 1:
                self.policy.train(
                    [sample["obs"] for sample in samples],
                    [sample["ac"] for sample in samples],
                    [sample["rewards"] for sample in samples],
                )
        # Delete iteration directory if not used
        if len(os.listdir(self.args.output_dir)) == 0:
            os.rmdir(self.args.output_dir)
| 39.615894
| 99
| 0.531595
| 5,499
| 0.919258
| 0
| 0
| 0
| 0
| 0
| 0
| 2,299
| 0.38432
|
1a286a917af5eacc1b12d3158f1106f90974b451
| 252
|
py
|
Python
|
lightnn/base/__init__.py
|
tongluocq/lightnn
|
602b0742d1141efc73a7146c930c5ea9eb994d37
|
[
"Apache-2.0"
] | 131
|
2017-04-05T06:03:25.000Z
|
2021-05-20T03:05:36.000Z
|
ch4/lightnn/lightnn/base/__init__.py
|
helloqorld/book-of-qna-code
|
54950478fb28d15cd73dae4dc39f3cd783721e08
|
[
"Apache-2.0"
] | 27
|
2018-11-26T07:39:25.000Z
|
2022-02-09T23:44:53.000Z
|
ch4/lightnn/lightnn/base/__init__.py
|
helloqorld/book-of-qna-code
|
54950478fb28d15cd73dae4dc39f3cd783721e08
|
[
"Apache-2.0"
] | 62
|
2018-11-26T07:44:02.000Z
|
2022-01-13T08:31:00.000Z
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .activations import *
from .losses import *
from .initializers import *
from .optimizers import *
| 19.384615
| 38
| 0.797619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 35
| 0.138889
|
1a288c60c996fe5119fab7f4db833d65180f5556
| 3,273
|
py
|
Python
|
tests/vvadd/Q.py
|
sa2257/llvm-runtime-pass
|
6f2c92141465e7df56f9720ab7753826663d799f
|
[
"MIT"
] | null | null | null |
tests/vvadd/Q.py
|
sa2257/llvm-runtime-pass
|
6f2c92141465e7df56f9720ab7753826663d799f
|
[
"MIT"
] | null | null | null |
tests/vvadd/Q.py
|
sa2257/llvm-runtime-pass
|
6f2c92141465e7df56f9720ab7753826663d799f
|
[
"MIT"
] | null | null | null |
import time
class processingElement:
    """A two-input ALU-like processing element with a fixed 1-tick latency."""

    # Lazily-dispatched binary operations.  The original built a dict of
    # *already computed* results on every call, which (a) evaluated every
    # operation eagerly and (b) compared values with ``is``/``is not``
    # (e.g. ``ins[1] is not 0``, ``ins[0] is ins[1]``), which silently
    # gives wrong answers for ints outside CPython's small-int cache.
    _OPS = {
        'add': lambda a, b: a + b,
        'mul': lambda a, b: a * b,
        'div': lambda a, b: a / b if b != 0 else 0,
        'sub': lambda a, b: a - b,
        'gt': lambda a, b: a > b,
        'lt': lambda a, b: a < b,
        'ge': lambda a, b: a >= b,
        'eq': lambda a, b: a == b,
        'and': lambda a, b: a and b,
        'or': lambda a, b: a or b,
    }

    def __init__(self, name, op, flex, regs, ports, bdwth, route, ctrl, branch):
        self.name = name
        self.op = op
        self.tick = 1
        self.ins = [0, 0]
        self.outs = [None, None]
        self.flex = flex
        self.regs = regs
        self.ports = ports
        self.bdwth = bdwth
        self.route = route
        self.ctrl = ctrl
        self.branch = branch
        self.validate()

    def validate(self):
        """Warn when the element needs more ports than it was given."""
        if (len(self.ins) + len(self.outs) > self.ports):
            print(self.name + " needs more ports than provided!")

    def update_ticks(self, ticks):
        """Advance the global tick count by this element's latency."""
        return ticks + self.tick

    def operate(self, ins, ticks):
        """Apply ``self.op`` to the two inputs.

        Returns ``(result, new_ticks)``; the result is None when either
        input is missing or the op name is unknown.  Division by zero
        yields 0 (a deliberate hardware-style saturation, kept as-is).
        """
        self.ins = ins
        if self.ins[0] is None or self.ins[1] is None:
            return None, self.update_ticks(ticks)
        operation = self._OPS.get(self.op)
        self.outs = operation(self.ins[0], self.ins[1]) if operation else None
        return self.outs, self.update_ticks(ticks)
class unitQ:
    """Routing unit: moves one value from a register or input wire to an
    optional output slot, marking the ready flag for that slot."""

    def __init__(self, name, regT, regIdx, inNodeT, inWire, outT, outIdx, ports):
        self.name = name
        self.tick = 1
        self.regT = regT
        self.inNT = inNodeT
        self.outT = outT
        #self.outNT = outNodeT
        self.ridx = regIdx
        self.inW = inWire
        self.oidx = outIdx
        #self.outW = outWire
        self.ports = ports
        self.validate()

    def validate(self):
        """Print a diagnostic for each inconsistent configuration."""
        if self.inW is not None and self.inW > self.ports:
            print(self.name + " accessing non existent port!")
        if self.regT and self.ridx is None:
            print(self.name + " register is not set!")
        if self.outT and self.oidx is None:
            print(self.name + " out memory is not updated!")
        if not self.regT and self.inNT and self.inW > 3:
            print(self.name + " input is not a node!")
        if not self.regT and self.inW is None:
            print(self.name + " input not set!")

    def update_ticks(self, ticks):
        """Advance the tick count by this unit's latency."""
        return ticks + self.tick

    def route(self, ins, inputq, outputq, readyq, ticks):
        """Select the routed value and optionally publish it to outputq."""
        if self.regT:
            value = inputq[self.ridx]
        elif self.inW is not None:
            value = ins[self.inW]
        else:
            value = None
        if self.outT:
            outputq[self.oidx] = value
            readyq[self.oidx] = value is not None
        self.outs = value
        return self.outs, outputq, readyq, self.update_ticks(ticks)
class sliceQ:
    """Placeholder slice unit; validation and routing are not implemented yet."""

    def __init__(self, name):
        self.name = name
        self.validate()
        self.route()

    def validate(self):
        # Intentionally a no-op for now.
        pass

    def route(self):
        # Intentionally a no-op for now.
        pass
| 32.405941
| 84
| 0.513902
| 3,253
| 0.993889
| 0
| 0
| 0
| 0
| 0
| 0
| 245
| 0.074855
|
1a2b8e3fe78c16eea9e7fc19485f21ac4d60d622
| 22
|
py
|
Python
|
middleman/api/application/__init__.py
|
scooterman/middleman
|
c765c9157cce02574e7191608dacd573156e333b
|
[
"Xnet",
"X11"
] | 5
|
2020-03-19T07:19:49.000Z
|
2021-09-29T06:33:47.000Z
|
trends/__init__.py
|
victorkifer/SocialMediaTopTrends
|
32098f1621059700d9ca6437a988956ebe1d319a
|
[
"MIT"
] | 22
|
2015-09-20T14:00:16.000Z
|
2021-06-10T20:08:25.000Z
|
trends/__init__.py
|
victorkifer/SocialMediaTopTrends
|
32098f1621059700d9ca6437a988956ebe1d319a
|
[
"MIT"
] | 6
|
2015-12-14T21:05:01.000Z
|
2019-11-02T19:35:24.000Z
|
__author__ = 'victor'
| 11
| 21
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 0.363636
|
1a2c8ecde415f77d6438cc4d119dd253cc4b947d
| 799
|
py
|
Python
|
2009/plotting_data_monitor/_distrib.py
|
mikiec84/code-for-blog
|
79b2264f9a808eb14f624cb3c5ae7624038c043a
|
[
"Unlicense"
] | 1,199
|
2015-01-06T14:09:37.000Z
|
2022-03-29T19:39:51.000Z
|
2009/plotting_data_monitor/_distrib.py
|
mikiec84/code-for-blog
|
79b2264f9a808eb14f624cb3c5ae7624038c043a
|
[
"Unlicense"
] | 25
|
2016-07-29T15:44:01.000Z
|
2021-11-19T16:21:01.000Z
|
2009/plotting_data_monitor/_distrib.py
|
mikiec84/code-for-blog
|
79b2264f9a808eb14f624cb3c5ae7624038c043a
|
[
"Unlicense"
] | 912
|
2015-01-04T00:39:50.000Z
|
2022-03-29T06:50:22.000Z
|
# Packaging helper: bundles plotting_data_monitor.pyw plus its pure-Python
# dependencies into ./distrib using eblib's LibCollect.
from eblib import libcollect
# Create a LibCollect object
lc = libcollect.LibCollect()
# Prepare arguments for do_collect
#
# Path to the script (can be absolute or relative)
scriptname = 'plotting_data_monitor.pyw'
# Ask the resulting distribution to be placed in
# directory distrib
targetdir = 'distrib'
# Specify which libraries to exclude from the
# distribution (because you know they're installed
# on the target machine)
excludes = ["PyQt4",
            "numpy",
            "serial",
            "pywin",
            "win32api",
            "win32com"]
# This does the actual work
# See the documentation of LibCollect for more options
#
lc.do_collect( scriptname,
               targetdir,
               excludes,
               verbose=True)
| 24.212121
| 55
| 0.624531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 479
| 0.599499
|
1a2d28dea2bb837c0dd72c1aefaedfb353d8cc72
| 3,751
|
py
|
Python
|
tests/conftest.py
|
cread/aws-parallelcluster-node
|
1f3bcd32f216d246d89e0e175be8027c923ae8ec
|
[
"Apache-2.0"
] | 33
|
2018-11-14T14:54:47.000Z
|
2022-03-22T23:47:51.000Z
|
tests/conftest.py
|
cread/aws-parallelcluster-node
|
1f3bcd32f216d246d89e0e175be8027c923ae8ec
|
[
"Apache-2.0"
] | 180
|
2019-02-21T09:33:10.000Z
|
2022-03-31T08:01:28.000Z
|
tests/conftest.py
|
cread/aws-parallelcluster-node
|
1f3bcd32f216d246d89e0e175be8027c923ae8ec
|
[
"Apache-2.0"
] | 35
|
2019-02-06T13:36:18.000Z
|
2022-03-01T12:54:05.000Z
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
# with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
import boto3
import pytest
from botocore.stub import Stubber
@pytest.fixture()
def test_datadir(request, datadir):
    """
    Inject the datadir with resources for the specific test function.

    If the test function is declared in a class then datadir is
    ClassName/FunctionName, otherwise it is only FunctionName.
    """
    function_name = request.function.__name__
    if request.cls:
        return datadir / "{0}/{1}".format(request.cls.__name__, function_name)
    return datadir / function_name
@pytest.fixture()
def boto3_stubber(mocker, boto3_stubber_path):
    """
    Create a function to easily mock boto3 clients.
    To mock a boto3 service simply pass the name of the service to mock and
    the mocked requests, where mocked_requests is an object containing the method to mock,
    the response to return and the expected params for the boto3 method that gets called.
    The function makes use of botocore.Stubber to mock the boto3 API calls.
    Multiple boto3 services can be mocked as part of the same test.
    :param boto3_stubber_path is the path of the boto3 import to mock. (e.g. pcluster.config.validators.boto3)
    """
    # Hide this fixture's frames from pytest tracebacks.
    __tracebackhide__ = True
    created_stubbers = []
    mocked_clients = {}
    mocked_client_factory = mocker.patch(boto3_stubber_path, autospec=True)
    # use **kwargs to skip parameters passed to the boto3.client other than the "service"
    # e.g. boto3.client("ec2", region_name=region, ...) --> x = ec2
    mocked_client_factory.client.side_effect = lambda x, **kwargs: mocked_clients[x]
    def _boto3_stubber(service, mocked_requests):
        # Build a real client, then intercept its API calls with a Stubber.
        client = boto3.client(service)
        stubber = Stubber(client)
        # Save a ref to the stubber so that we can deactivate it at the end of the test.
        created_stubbers.append(stubber)
        # Attach mocked requests to the Stubber and activate it.
        if not isinstance(mocked_requests, list):
            mocked_requests = [mocked_requests]
        for mocked_request in mocked_requests:
            if mocked_request.generate_error:
                stubber.add_client_error(
                    mocked_request.method,
                    service_message=mocked_request.response,
                    expected_params=mocked_request.expected_params,
                    service_error_code=mocked_request.error_code,
                )
            else:
                stubber.add_response(
                    mocked_request.method, mocked_request.response, expected_params=mocked_request.expected_params
                )
        stubber.activate()
        # Add stubber to the collection of mocked clients. This allows to mock multiple clients.
        # Mocking twice the same client will replace the previous one.
        mocked_clients[service] = client
        return client
    # yield allows to return the value and then continue the execution when the test is over.
    # Used for resources cleanup.
    yield _boto3_stubber
    # Assert that all mocked requests were consumed and deactivate all stubbers.
    for stubber in created_stubbers:
        stubber.assert_no_pending_responses()
        stubber.deactivate()
| 41.21978
| 119
| 0.702479
| 0
| 0
| 2,633
| 0.701946
| 3,130
| 0.834444
| 0
| 0
| 1,967
| 0.524393
|
1a2d480359b08490e3beec01917db1a8d876c6dd
| 7,263
|
py
|
Python
|
scripts/fastrfaa.py
|
Facenapalm/NapalmBot
|
ce775a270f374e626bcabc313676e4e2f9dbb843
|
[
"MIT"
] | 4
|
2016-05-14T17:42:03.000Z
|
2018-09-24T18:43:03.000Z
|
scripts/fastrfaa.py
|
Facenapalm/NapalmBot
|
ce775a270f374e626bcabc313676e4e2f9dbb843
|
[
"MIT"
] | null | null | null |
scripts/fastrfaa.py
|
Facenapalm/NapalmBot
|
ce775a270f374e626bcabc313676e4e2f9dbb843
|
[
"MIT"
] | 1
|
2021-05-08T15:45:30.000Z
|
2021-05-08T15:45:30.000Z
|
"""
Maintainer script for ruwiki's administrator attention requests table
([[:ru:ВП:ЗКАБ]]).
Log file is used for saving "администратор" field in deleted requests.
Usage:
python fastrfaa.py [logfile]
"""
import re
import sys
from datetime import datetime
import pywikibot
# Matches one request section: header line plus its <onlyinclude> template body.
REGEXP = re.compile(r"""
    (?P<indent>\n*)
    ==[ ]*(?P<header>.*?)[ ]*==\s+
    (?P<section>
        <onlyinclude>\s*
        (?P<template>
            (?:[^<]|<(?!/?onlyinclude))*?
        )
        \s*</onlyinclude>
    )
""", re.I | re.VERBOSE)
CONFIGURATION = {
    # nickname or "*" for any: [done delay, undone delay, period of moving to rfaa]
    "*": [24, 3 * 24, 7 * 24]
}
# Timestamp format used throughout the wiki templates (YYYYMMDDhhmmss, UTC).
TIME_FORMAT = "%Y%m%d%H%M%S"
UTCNOW = datetime.utcnow()
UTCNOWSTR = UTCNOW.strftime(TIME_FORMAT)
# Mutable run-wide accumulators, updated by the re.sub callbacks below.
MOVED_TEXT = ""
CORRECTED_COUNT = 0
DELETED_DONE_COUNT = 0
DELETED_UNDONE_COUNT = 0
MOVED_COUNT = 0
# Optional append-mode log of "admin/timestamp" for archived requests;
# kept open for the whole process lifetime (closed implicitly at exit).
if len(sys.argv) > 1:
    LOGFILE = open(sys.argv[1], "a", encoding="utf-8")
else:
    LOGFILE = None
def load_configuration(config_text):
    """Load configuration and set individual delays.

    Each non-comment line has the form ``user/done/undone/move`` where the
    three numeric fields are delays in hours; malformed lines are skipped.
    """
    for line in config_text.split("\n"):
        # Skip comment lines and the <pre>/</pre> wrapper.
        if re.match(r"^(#|</?pre>)", line):
            continue
        parsed = [value.strip() for value in line.split("/")]
        if len(parsed) != 4:
            continue
        # Only the int() conversion can realistically fail here; the
        # original bare ``except:`` silently swallowed *every* exception
        # (including KeyboardInterrupt/SystemExit), which hides real bugs.
        try:
            CONFIGURATION[parsed[0]] = [int(value) for value in parsed[1:]]
        except ValueError:
            continue
def get_delays(user="*"):
    """Return [done, undone, move] delays for *user*, falling back to "*"."""
    return CONFIGURATION.get(user, CONFIGURATION["*"])
def minor_fixes(text):
    """Fix some minor errors before processing the page."""
    # Drop headers of sections that turned out to be empty.
    empty_section = re.compile(r"^==.*?==\n+(==.*?==)$", re.M)
    return empty_section.sub("\\1", text)
def correct_request(match):
    """Fix some errors, for example, update header if it doesn't match the content.

    Used as a re.sub callback over REGEXP matches; returns the (possibly
    rewritten) section text and bumps the global CORRECTED_COUNT when a
    fix was applied.
    """
    # initialization
    corrected = False
    indent = match.group("indent")
    header = match.group("header")
    section = match.group("section")
    # missing timestamp fix: append the current UTC timestamp to an
    # "администратор" field that has a name but no /timestamp suffix.
    (section, flag) = re.subn(
        r"(\|\s*администратор\s*=[^/\n]*[^/\s][^/\n]*)\n",
        "\\1/" + UTCNOWSTR + "\n",
        section)
    if flag > 0:
        corrected = True
    # wrong header fix: the header must equal "<question>/<author timestamp>".
    question = re.search(r"\|\s*вопрос\s*=(.*)", section)
    timestamp = re.search(r"\|\s*автор\s*=[^/\n]+/\s*(\d{14})", section)
    if question is None or timestamp is None:
        # request is completely broken
        return match.group(0)
    correct_header = question.group(1).strip() + "/" + timestamp.group(1)
    if header != correct_header:
        corrected = True
        header = correct_header
    # finalization
    if corrected:
        global CORRECTED_COUNT
        CORRECTED_COUNT += 1
        return "{}== {} ==\n{}".format(indent, header, section)
    else:
        return match.group(0)
def move_old_request(template):
    """Append *template* to MOVED_TEXT, formatted for the non-fast rfaa page."""
    global MOVED_TEXT
    global MOVED_COUNT
    question = re.search(r"\|\s*вопрос\s*=(.*)", template).group(1).strip()
    parts = question.split("/")
    # "question/header" form keeps the header part; otherwise use the question.
    header = parts[1] if len(parts) == 2 else parts[0]
    body = re.sub(r"(ЗКА:Быстрый запрос)", "subst:\\1", template)
    MOVED_TEXT += "== {} (с ЗКАБ) ==\n".format(header)
    MOVED_TEXT += body
    MOVED_TEXT += "\n* {{block-small|Перенесено со страницы быстрых запросов ботом," \
                  + " поскольку запрос не был выполнен в течение 7 дней. ~~~~}}"
    MOVED_TEXT += "\n\n"
    MOVED_COUNT += 1
def delete_old_request(match):
    """Process one table row and delete it if it's neccessary.

    re.sub callback: returns "" to archive (delete) an expired request,
    or the original text to keep it.  Updates the DELETED_* / MOVED_*
    globals and writes the closing admin to LOGFILE when archiving.
    """
    template = match.group("template")
    status = re.search(r"\|\s*статус\s*=\s*([+-])", template)
    author = re.search(r"\|\s*автор\s*=([^/\n]+)/\s*(\d{14})", template)
    admin = re.search(r"\|\s*администратор\s*=([^/\n]+)/\s*(\d{14})", template)
    extract_name = lambda m: m.group(1).strip()
    extract_date = lambda m: datetime.strptime(m.group(2), TIME_FORMAT)
    # A negative delay disables the check entirely.
    check_delay = lambda date, delay: delay >= 0 and (UTCNOW - date).total_seconds() >= delay * 60 * 60
    if author is None:
        delays = get_delays()
    else:
        delays = get_delays(extract_name(author))
    if admin is None:
        # request is still open
        if author is not None:
            if check_delay(extract_date(author), delays[2]):
                # very old request that should be moved to rfaa
                move_old_request(template)
                return ""
    else:
        # request is closed
        if status is None:
            done = True
        else:
            done = status.group(1) == "+"
        # Done and undone requests have independent archiving delays.
        if done:
            delay = delays[0]
        else:
            delay = delays[1]
        if check_delay(extract_date(admin), delay):
            # archiving
            if done:
                global DELETED_DONE_COUNT
                DELETED_DONE_COUNT += 1
            else:
                global DELETED_UNDONE_COUNT
                DELETED_UNDONE_COUNT += 1
            if LOGFILE:
                LOGFILE.write("{}/{}\n".format(extract_name(admin), admin.group(2)))
            return ""
    return match.group(0)
def form_comment():
    """Analyze global counters and form the edit summary (Russian plurals)."""
    def plural(num, word):
        # Russian adjective ending: "-ый" for 1/21/31... except 11.
        suffix = "ый" if num % 10 == 1 and num % 100 != 11 else "ых"
        return word + suffix

    def plural_phrase(num, word):
        return str(num) + " " + plural(num, word)

    deleted_parts = []
    if DELETED_DONE_COUNT > 0:
        deleted_parts.append(plural_phrase(DELETED_DONE_COUNT, "выполненн"))
    if DELETED_UNDONE_COUNT > 0:
        deleted_parts.append(plural_phrase(DELETED_UNDONE_COUNT, "невыполненн"))
    if MOVED_COUNT > 0:
        deleted_parts.append(plural_phrase(MOVED_COUNT, "перенесённ"))
    deleted = ", ".join(deleted_parts)
    corrected = str(CORRECTED_COUNT) if CORRECTED_COUNT else ""
    if corrected and deleted:
        return "Исправление ошибочных ({}), удаление старых запросов ({}).".format(corrected, deleted)
    if corrected:
        return "Исправление ошибочных запросов ({}).".format(corrected)
    if deleted:
        return "Удаление старых запросов ({}).".format(deleted)
    return ""
def main():
    """Main script function.

    Loads the on-wiki configuration, fixes/cleans the fast-requests page,
    moves stale requests to the main rfaa page, and saves both pages.
    """
    site = pywikibot.Site()
    config = pywikibot.Page(site, "Википедия:Запросы к администраторам/Быстрые/Конфигурация")
    if config.exists():
        load_configuration(config.text)
    fast = pywikibot.Page(site, "Википедия:Запросы к администраторам/Быстрые")
    ftext = fast.text
    ftext = minor_fixes(ftext)
    # Two passes: first normalize sections, then drop/move expired ones.
    ftext = REGEXP.sub(correct_request, ftext)
    ftext = REGEXP.sub(delete_old_request, ftext)
    if MOVED_TEXT != "":
        rfaa = pywikibot.Page(site, "Википедия:Запросы к администраторам")
        rtext = rfaa.text
        # Insert the moved requests before the first section header.
        insert = rtext.find("==")
        if insert == -1:
            insert = len(rtext)
        rtext = rtext[:insert] + MOVED_TEXT + rtext[insert:]
        rfaa.text = rtext
        rfaa.save("Перенос залежавшихся быстрых запросов.", minor=False)
    comment = form_comment()
    # Save only when something actually changed (empty comment == no-op).
    if comment:
        fast.text = ftext
        fast.save(comment)
if __name__ == "__main__":
main()
| 31.171674
| 103
| 0.592317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,591
| 0.335665
|
a7de746c56c67620e56b1437e51a6c5e5965554a
| 1,102
|
py
|
Python
|
rssfly/tests/common.py
|
lidavidm/rssfly
|
1cfb893a249e4095412b966a1bf50fc3de7744e7
|
[
"Apache-2.0"
] | 1
|
2021-02-14T03:44:35.000Z
|
2021-02-14T03:44:35.000Z
|
rssfly/tests/common.py
|
lidavidm/rssfly
|
1cfb893a249e4095412b966a1bf50fc3de7744e7
|
[
"Apache-2.0"
] | 6
|
2021-07-15T13:03:19.000Z
|
2022-03-26T14:14:14.000Z
|
rssfly/tests/common.py
|
lidavidm/rssfly
|
1cfb893a249e4095412b966a1bf50fc3de7744e7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 David Li
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from pathlib import Path
from typing import Dict
class FakeContext:
    """Test double for the fetch context: serves canned bodies by URL."""

    def __init__(self, urls: Dict[str, str]):
        self._urls = urls

    def get_text(self, url, **kwargs):
        # TODO: raise proper error
        return self._urls[url]

    def get_bytes(self, url, **kwargs):
        # TODO: raise proper error
        return self._urls[url]
def get_test_data(path: str) -> bytes:
    """Read and return the raw bytes of a test-data file.

    The file is resolved relative to the ``RSSFLY_TEST_DATA_ROOT``
    environment variable (defaulting to the current directory).
    Note: the original annotation said ``-> str``, but the file is opened
    in binary mode, so the return value is bytes.
    """
    root = Path(os.environ.get("RSSFLY_TEST_DATA_ROOT", ".")) / path
    with root.open("rb") as f:
        return f.read()
| 29.783784
| 74
| 0.696915
| 303
| 0.274955
| 0
| 0
| 0
| 0
| 0
| 0
| 642
| 0.582577
|
a7dfe24c47f27180a9478cedac00f9ebde2a0811
| 16,502
|
py
|
Python
|
pybind/slxos/v16r_1_00b/protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import mep
class cfm_ma_sub_commands(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /protocol/cfm/domain-name/ma-name/cfm-ma-sub-commands. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__ccm_interval','__mip_policy','__mep',)
_yang_name = 'cfm-ma-sub-commands'
_rest_name = ''
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__ccm_interval = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'100-ms': {'value': 3}, u'10-seconds': {'value': 5}, u'1-second': {'value': 4}, u'3-ms': {'value': 1}, u'10-ms': {'value': 2}},), default=unicode("10-seconds"), is_leaf=True, yang_name="ccm-interval", rest_name="ccm-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'set CCM interval', u'cli-full-no': None, u'callpoint': u'setDot1agCcmInterval'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='ccm-interval-type', is_config=True)
self.__mip_policy = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)
self.__mep = YANGDynClass(base=YANGListType("mep_id",mep.mep, yang_name="mep", rest_name="mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mep-id', extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}), is_container='list', yang_name="mep", rest_name="mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
  # Return this node's YANG schema path as a list of path components.
  if hasattr(self, "_parent"):
    # Attached node: extend the parent's path with this node's YANG name.
    return self._parent._path()+[self._yang_name]
  else:
    # Detached node: fall back to the statically known absolute schema path.
    return [u'protocol', u'cfm', u'domain-name', u'ma-name', u'cfm-ma-sub-commands']
def _rest_path(self):
  # Return the REST URI path of this node as a list of path components.
  if hasattr(self, "_parent"):
    if self._rest_name:
      # Node contributes its own REST name to the parent's path.
      return self._parent._rest_path()+[self._rest_name]
    else:
      # Node is transparent in REST paths (no rest name of its own).
      return self._parent._rest_path()
  else:
    # Detached node: statically known REST path prefix.
    return [u'protocol', u'cfm', u'domain-name', u'ma-name']
def _get_ccm_interval(self):
  """
  Getter method for ccm_interval, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/ccm_interval (ccm-interval-type)
  """
  # Return the YANGDynClass wrapper holding the current CCM interval value.
  return self.__ccm_interval
def _set_ccm_interval(self, v, load=False):
  """
  Setter method for ccm_interval, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/ccm_interval (ccm-interval-type)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_ccm_interval is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_ccm_interval() directly.
  """
  # Unwrap proxy values that expose their underlying type via _utype.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Re-wrap the value; YANGDynClass validates it against the restricted
    # enumeration of allowed interval strings (3-ms .. 10-seconds).
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'100-ms': {'value': 3}, u'10-seconds': {'value': 5}, u'1-second': {'value': 4}, u'3-ms': {'value': 1}, u'10-ms': {'value': 2}},), default=unicode("10-seconds"), is_leaf=True, yang_name="ccm-interval", rest_name="ccm-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'set CCM interval', u'cli-full-no': None, u'callpoint': u'setDot1agCcmInterval'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='ccm-interval-type', is_config=True)
  except (TypeError, ValueError):
    # Surface a structured error describing the expected YANG type.
    raise ValueError({
      'error-string': """ccm_interval must be of a type compatible with ccm-interval-type""",
      'defined-type': "brocade-dot1ag:ccm-interval-type",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'100-ms': {'value': 3}, u'10-seconds': {'value': 5}, u'1-second': {'value': 4}, u'3-ms': {'value': 1}, u'10-ms': {'value': 2}},), default=unicode("10-seconds"), is_leaf=True, yang_name="ccm-interval", rest_name="ccm-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'set CCM interval', u'cli-full-no': None, u'callpoint': u'setDot1agCcmInterval'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='ccm-interval-type', is_config=True)""",
      })
  self.__ccm_interval = t
  # Notify change-tracking containers, if the hook is registered.
  if hasattr(self, '_set'):
    self._set()
def _unset_ccm_interval(self):
  # Reset the leaf to a fresh wrapper carrying its generated default ("10-seconds").
  self.__ccm_interval = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'100-ms': {'value': 3}, u'10-seconds': {'value': 5}, u'1-second': {'value': 4}, u'3-ms': {'value': 1}, u'10-ms': {'value': 2}},), default=unicode("10-seconds"), is_leaf=True, yang_name="ccm-interval", rest_name="ccm-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'set CCM interval', u'cli-full-no': None, u'callpoint': u'setDot1agCcmInterval'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='ccm-interval-type', is_config=True)
def _get_mip_policy(self):
  """
  Getter method for mip_policy, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mip_policy (mip-policy-type)
  """
  # Return the YANGDynClass wrapper holding the current MIP policy value.
  return self.__mip_policy
def _set_mip_policy(self, v, load=False):
  """
  Setter method for mip_policy, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mip_policy (mip-policy-type)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_mip_policy is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_mip_policy() directly.
  """
  # Unwrap proxy values that expose their underlying type via _utype.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Validate against the restricted enumeration {default, explicit}.
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)
  except (TypeError, ValueError):
    # Surface a structured error describing the expected YANG type.
    raise ValueError({
      'error-string': """mip_policy must be of a type compatible with mip-policy-type""",
      'defined-type': "brocade-dot1ag:mip-policy-type",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)""",
      })
  self.__mip_policy = t
  # Notify change-tracking containers, if the hook is registered.
  if hasattr(self, '_set'):
    self._set()
def _unset_mip_policy(self):
  # Reset the leaf to a fresh, unset wrapper (no default is defined for mip-policy).
  self.__mip_policy = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)
def _get_mep(self):
  """
  Getter method for mep, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep (list)
  """
  # Return the YANGListType wrapper holding the configured MEP entries.
  return self.__mep
def _set_mep(self, v, load=False):
  """
  Setter method for mep, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep (list)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_mep is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_mep() directly.
  """
  # Unwrap proxy values that expose their underlying type via _utype.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Validate as a YANG list of mep entries keyed by mep-id.
    t = YANGDynClass(v,base=YANGListType("mep_id",mep.mep, yang_name="mep", rest_name="mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mep-id', extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}), is_container='list', yang_name="mep", rest_name="mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)
  except (TypeError, ValueError):
    # Surface a structured error describing the expected YANG type.
    raise ValueError({
      'error-string': """mep must be of a type compatible with list""",
      'defined-type': "list",
      'generated-type': """YANGDynClass(base=YANGListType("mep_id",mep.mep, yang_name="mep", rest_name="mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mep-id', extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}), is_container='list', yang_name="mep", rest_name="mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)""",
      })
  self.__mep = t
  # Notify change-tracking containers, if the hook is registered.
  if hasattr(self, '_set'):
    self._set()
def _unset_mep(self):
  # Reset the list to a fresh, empty YANGListType wrapper.
  self.__mep = YANGDynClass(base=YANGListType("mep_id",mep.mep, yang_name="mep", rest_name="mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mep-id', extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}), is_container='list', yang_name="mep", rest_name="mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)
# Public pyangbind accessors: each property pairs the generated getter and
# setter so YANG leaves/lists can be read and assigned as plain attributes.
ccm_interval = __builtin__.property(_get_ccm_interval, _set_ccm_interval)
mip_policy = __builtin__.property(_get_mip_policy, _set_mip_policy)
mep = __builtin__.property(_get_mep, _set_mep)
# Element registry consumed by the generated constructor when copying
# attributes from another compatible object.
_pyangbind_elements = {'ccm_interval': ccm_interval, 'mip_policy': mip_policy, 'mep': mep, }
| 85.061856
| 1,055
| 0.690038
| 16,097
| 0.975458
| 0
| 0
| 0
| 0
| 0
| 0
| 8,650
| 0.524179
|
a7e04b7806a0a0c1a8e2d03be13546c94ed6e271
| 3,253
|
py
|
Python
|
scripts/generate.py
|
maruina/diagrams
|
8a9012fa24e2987b49672bae0abf16585fed440a
|
[
"MIT"
] | null | null | null |
scripts/generate.py
|
maruina/diagrams
|
8a9012fa24e2987b49672bae0abf16585fed440a
|
[
"MIT"
] | null | null | null |
scripts/generate.py
|
maruina/diagrams
|
8a9012fa24e2987b49672bae0abf16585fed440a
|
[
"MIT"
] | null | null | null |
import os
import sys
from typing import Iterable
from jinja2 import Environment, FileSystemLoader, Template
import config as cfg
from . import app_root_dir, doc_root_dir, resource_dir, template_dir
_usage = "Usage: generate.py <onprem|aws|gcp|azure|k8s|alibabacloud|oci|programming|saas>"
def load_tmpl(tmpl: str) -> Template:
    """Load Jinja template *tmpl* from the template directory with custom filters installed."""
    environment = Environment(loader=FileSystemLoader(template_dir()))
    # Register the provider-aware capitalization filter used by the templates.
    environment.filters["up_or_title"] = up_or_title
    return environment.get_template(tmpl)
def up_or_title(pvd: str, s: str) -> str:
    """Capitalize *s* per provider rules: force-upper, special title, or plain title."""
    if s in cfg.UPPER_WORDS.get(pvd, ()):
        return s.upper()
    special_titles = cfg.TITLE_WORDS.get(pvd, {})
    if s in special_titles:
        return special_titles[s]
    return s.title()
def gen_classes(pvd: str, typ: str, paths: Iterable[str]) -> str:
    """Generate all service node classes based on resources paths with class templates.

    :param pvd: provider key (e.g. "aws", "gcp")
    :param typ: resource category under the provider
    :param paths: icon file names used to derive class names
    :returns: rendered module source code as a string
    """
    tmpl = load_tmpl(cfg.TMPL_MODULE)

    # TODO: extract the gen class metas for sharing
    # TODO: independent function for generating all pvd/typ/paths pairs
    def _gen_class_meta(path: str) -> dict:
        # Derive a CamelCase class name from the icon's hyphenated base name.
        base = os.path.splitext(path)[0]
        name = "".join([up_or_title(pvd, s) for s in base.split("-")])
        return {"name": name, "icon": path}

    metas = map(_gen_class_meta, paths)
    # Consistency fix: use dict.get (as gen_apidoc does) instead of a
    # membership test followed by a second lookup.
    aliases = cfg.ALIASES[pvd].get(typ, {})
    return tmpl.render(pvd=pvd, typ=typ, metas=metas, aliases=aliases)
def gen_apidoc(pvd: str, typ_paths: dict) -> str:
    """Render the API documentation page for provider *pvd* from its resource paths."""
    tmpl = load_tmpl(cfg.TMPL_APIDOC)

    # TODO: remove
    def _gen_class_name(path: str) -> str:
        # CamelCase class name derived from the hyphenated file base name.
        base, _ = os.path.splitext(path)
        return "".join(up_or_title(pvd, part) for part in base.split("-"))

    typ_classes = {}
    for typ, paths in sorted(typ_paths.items()):
        entries = []
        for cls_name in map(_gen_class_name, paths):
            alias = cfg.ALIASES[pvd].get(typ, {}).get(cls_name)
            entries.append({"name": cls_name, "alias": alias})
        typ_classes[typ] = entries
    return tmpl.render(pvd=pvd, typ_classes=typ_classes)
def make_module(pvd: str, typ: str, classes: str) -> None:
    """Create a module file"""
    target = os.path.join(app_root_dir(pvd), f"{typ}.py")
    with open(target, "w+") as fp:
        fp.write(classes)
def make_apidoc(pvd: str, content: str) -> None:
    """Create an api documentation file"""
    target = os.path.join(doc_root_dir(), f"{pvd}.md")
    with open(target, "w+") as fp:
        fp.write(content)
def generate(pvd: str) -> None:
    """Generates a service node classes."""
    typ_paths = {}
    for root, _, files in os.walk(resource_dir(pvd)):
        files.sort()
        # Keep only PNG resources, excluding the "rounded" icon variants.
        pngs = [f for f in files if f.endswith(".png")]
        paths = [f for f in pngs if "rounded" not in f]
        # Skip the top-root directory.
        typ = os.path.basename(root)
        if typ == pvd:
            continue
        make_module(pvd, typ, gen_classes(pvd, typ, paths))
        typ_paths[typ] = paths
    # Build API documentation
    make_apidoc(pvd, gen_apidoc(pvd, typ_paths))
if __name__ == "__main__":
    # Validate the CLI argument up front: the original crashed with IndexError
    # when no argument was given and exited silently on an unknown provider;
    # the module-level _usage string was defined but never used.
    if len(sys.argv) < 2 or sys.argv[1] not in cfg.PROVIDERS:
        sys.exit(_usage)
    generate(sys.argv[1])
| 31.582524
| 90
| 0.640639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 601
| 0.184753
|
a7e08ada30043433441727fc2f8b4036acae9399
| 5,023
|
py
|
Python
|
src/GUI/menuTypes.py
|
Vidhu007/Cloud-Encryption
|
ec9ccd76a71e98740d937b34a7734f821448fae0
|
[
"MIT"
] | 7
|
2021-05-10T13:30:51.000Z
|
2022-03-20T17:49:59.000Z
|
src/GUI/menuTypes.py
|
Vidhu007/Cloud-Encryption
|
ec9ccd76a71e98740d937b34a7734f821448fae0
|
[
"MIT"
] | null | null | null |
src/GUI/menuTypes.py
|
Vidhu007/Cloud-Encryption
|
ec9ccd76a71e98740d937b34a7734f821448fae0
|
[
"MIT"
] | 8
|
2019-04-05T10:40:49.000Z
|
2022-03-20T06:00:43.000Z
|
import getpass
import sys

import encryption
import googleDriveAPI
import users

u= users
e= encryption
g= googleDriveAPI
# Function to generate menu for privileged (admin) user
def privilegeMenu():
    """Interactive management console for privileged (admin) users.

    Offers encrypted upload/download plus user management and key
    generation. Loops until the admin logs out, exits, or an error occurs.
    """
    try:
        while True:
            # Menu system used to navigate the management console
            user_In = input("|U - Upload file | D - Download file | S - User settings | L - Log out | E - Exit|\n").lower()
            if user_In == "s":
                while True:
                    # Cases for creating or deleting users and creating a new key
                    user_In = input("|A - Add User | D - Delete User | N - Generate New Key | E - Exit to menu|\n").lower()
                    if user_In == "a":
                        createLogin = input("Create login name: ")
                        # getpass hides the password while it is typed
                        # (requires the `getpass` import at the top of the file).
                        createPassword = getpass.getpass("Create password: ")
                        u.newUser(createLogin, createPassword)
                    elif user_In == "d":
                        userName = input("\nEnter username you wish to delete: ")
                        u.deleteUser(userName)
                    elif user_In == "n":
                        # BUG FIX: the input is lower-cased above, so the original
                        # comparison against "N" could never match.
                        e.keyGen()
                        break
                    elif user_In == "e":
                        break
                    else:
                        print("Wrong input try again\n")
            elif user_In == "u":
                while True:
                    # Upload file screen
                    print("\nEnsure that files you wish to upload are in the 'Files' folder.\nEnter m to return to main menu.\n")
                    user_In = input("Enter file name: ")
                    if user_In == "m":
                        break
                    else:
                        # Encrypt locally before uploading to the cloud.
                        e.encrypt(user_In, e.keyRead())
                        g.uploadFile(user_In)
            elif user_In == "e":
                print("Exiting.")
                sys.exit()
            elif user_In == "l":
                # Logging out of admin account and setting admin bool to false
                u.privilege = False
                print("Logging out.")
                break
            elif user_In == "d":
                # Download file screen
                while True:
                    user_In = input("\n|S - Search for file | D - Download File | E - Exit to menu|\n").lower()
                    if user_In == "s":
                        # Incase you forgot the file name you can double check here
                        user_In = input("Enter file name: ")
                        g.searchFile(user_In)
                    elif user_In == "d":
                        # download file via file name, then decrypt it locally
                        user_In = input("Enter file name: ")
                        fileID = g.fileID(user_In)
                        g.downloadFile(fileID, user_In)
                        e.decrypt(user_In, e.keyRead())
                    elif user_In == "e":
                        break
                    else:
                        print("Wrong input try again\n")
            else:
                print("Wrong input try again\n")
    except Exception as err:
        # Previously all errors were silently swallowed, which hid real bugs
        # (e.g. the missing getpass import); report them instead.
        print(f"Unexpected error: {err}")
# Function which generates menu for standard non-privileged users
def standardMenu():
    """Interactive console for standard users: upload/download only (no settings)."""
    try:
        while True:
            choice = input("|U - Upload file | D - Download file | L - Log Out | E - Exit|\n").lower()
            if choice == "u":
                # Upload loop: encrypt locally, then push to the cloud.
                while True:
                    print("\nEnsure that files you wish to upload are in the 'Files' folder.\nEnter m to return to main menu.\n")
                    filename = input("Enter file name: ")
                    if filename == "m":
                        break
                    e.encrypt(filename, e.keyRead())
                    g.uploadFile(filename)
            elif choice == "e":
                print("Exiting.")
                sys.exit()
            elif choice == "l":
                print("Logging out.")
                break
            elif choice == "d":
                # Download loop: search or fetch-and-decrypt by name.
                while True:
                    sub_choice = input("\n|S - Search for file | D - Download File | E - Exit to menu|\n").lower()
                    if sub_choice == "s":
                        g.searchFile(input("Enter file name: "))
                    elif sub_choice == "d":
                        target = input("Enter file name: ")
                        g.downloadFile(g.fileID(target), target)
                        e.decrypt(target, e.keyRead())
                    elif sub_choice == "e":
                        break
                    else:
                        print("Wrong input try again\n")
            else:
                print("Wrong input try again\n")
    except Exception:
        # Best-effort console: unexpected errors are swallowed, matching the
        # original behavior.
        pass
| 38.937984
| 129
| 0.428827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,452
| 0.28907
|
a7e0cc5a6d14c321badeeffabba58fb153ebc18b
| 657
|
py
|
Python
|
eap_backend/eap_api/migrations/0005_alter_eapuser_is_active.py
|
alan-turing-institute/AssurancePlatform
|
1aa34b544990f981a289f6d21a832657ad19742e
|
[
"MIT"
] | 5
|
2021-09-28T15:02:21.000Z
|
2022-03-23T14:37:51.000Z
|
eap_backend/eap_api/migrations/0005_alter_eapuser_is_active.py
|
alan-turing-institute/AssurancePlatform
|
1aa34b544990f981a289f6d21a832657ad19742e
|
[
"MIT"
] | 69
|
2021-09-28T14:21:24.000Z
|
2022-03-31T17:12:19.000Z
|
eap_backend/eap_api/migrations/0005_alter_eapuser_is_active.py
|
alan-turing-institute/AssurancePlatform
|
1aa34b544990f981a289f6d21a832657ad19742e
|
[
"MIT"
] | 1
|
2021-09-28T15:11:00.000Z
|
2021-09-28T15:11:00.000Z
|
# Generated by Django 3.2.8 on 2022-05-31 10:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: re-declares EAPUser.is_active with Django's
    # standard help_text/verbose_name wording. The field remains a boolean
    # defaulting to True.

    # Must be applied after the previous auto-generated migration.
    dependencies = [
        ("eap_api", "0004_auto_20220531_0935"),
    ]

    operations = [
        migrations.AlterField(
            model_name="eapuser",
            name="is_active",
            field=models.BooleanField(
                default=True,
                help_text=(
                    "Designates whether this user should be treated as active. "
                    "Unselect this instead of deleting accounts."
                ),
                verbose_name="active",
            ),
        ),
    ]
| 25.269231
| 81
| 0.531202
| 564
| 0.858447
| 0
| 0
| 0
| 0
| 0
| 0
| 215
| 0.327245
|
a7e26aa446e86411030f396561a3b8cb6f32b961
| 465
|
py
|
Python
|
lucid_torch/transforms/monochrome/TFMSMonochromeTo.py
|
HealthML/lucid-torch
|
627700a83b5b2690cd8f95010b5ed439204102f4
|
[
"MIT"
] | 1
|
2021-08-20T07:38:09.000Z
|
2021-08-20T07:38:09.000Z
|
lucid_torch/transforms/monochrome/TFMSMonochromeTo.py
|
HealthML/lucid-torch
|
627700a83b5b2690cd8f95010b5ed439204102f4
|
[
"MIT"
] | 5
|
2021-03-19T15:50:42.000Z
|
2022-03-12T00:53:17.000Z
|
lucid_torch/transforms/monochrome/TFMSMonochromeTo.py
|
HealthML/lucid-torch
|
627700a83b5b2690cd8f95010b5ed439204102f4
|
[
"MIT"
] | null | null | null |
import torch
class TFMSMonochromeTo(torch.nn.Module):
    """Transform that expands a single-channel tensor to *num_dimensions* channels.

    The expansion uses ``Tensor.expand`` and therefore broadcasts without
    copying data; the input is expected to have a size-1 channel dimension
    at index 1 (e.g. shape ``(N, 1, H, W)``).
    """

    def __init__(self, num_dimensions: int = 3):
        """:param num_dimensions: target channel count; must be an int >= 2.

        :raises TypeError: if *num_dimensions* is not an int.
        :raises ValueError: if *num_dimensions* is less than 2.
        """
        super(TFMSMonochromeTo, self).__init__()
        # Improvement: the original raised these exceptions without messages.
        if not isinstance(num_dimensions, int):
            raise TypeError(
                f"num_dimensions must be an int, got {type(num_dimensions).__name__}"
            )
        elif num_dimensions < 2:
            raise ValueError(f"num_dimensions must be >= 2, got {num_dimensions}")
        self.num_dimensions = num_dimensions

    def forward(self, data: torch.Tensor):
        """Return *data* with its channel dimension broadcast to num_dimensions."""
        return data.expand(data.shape[0], self.num_dimensions, *data.shape[2:])
| 31
| 79
| 0.658065
| 449
| 0.965591
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
a7e2b57529b0723b4ab18b73801cd2816d8025dd
| 1,027
|
py
|
Python
|
python/paddle/v2/framework/tests/test_modified_huber_loss_op.py
|
AI-books/Paddle
|
5b5f4f514047975ac09ec42b31e46dabf235e7dd
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/v2/framework/tests/test_modified_huber_loss_op.py
|
AI-books/Paddle
|
5b5f4f514047975ac09ec42b31e46dabf235e7dd
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/v2/framework/tests/test_modified_huber_loss_op.py
|
AI-books/Paddle
|
5b5f4f514047975ac09ec42b31e46dabf235e7dd
|
[
"Apache-2.0"
] | 1
|
2020-06-04T04:27:15.000Z
|
2020-06-04T04:27:15.000Z
|
import unittest
import numpy as np
from op_test import OpTest
def modified_huber_loss_forward(val):
    """Scalar modified Huber loss: linear below -1, quadratic in [-1, 1), zero at >= 1."""
    if val >= 1:
        return 0
    if val >= -1:
        margin = 1 - val
        return margin * margin
    return -4 * val
class TestModifiedHuberLossOp(OpTest):
    # Operator test: compares the framework's modified_huber_loss op against
    # the NumPy reference implementation defined above.

    def setUp(self):
        self.op_type = 'modified_huber_loss'
        samples_num = 32
        # X: prediction scores in [-1, 1); Y: binary labels in {0, 1}.
        self.inputs = {
            'X': np.random.uniform(-1, 1., (samples_num, 1)).astype('float32'),
            'Y': np.random.choice([0, 1], samples_num).reshape((samples_num, 1))
        }
        # Margin term y' * x, with labels remapped from {0, 1} to {-1, +1}.
        product_res = self.inputs['X'] * (2 * self.inputs['Y'] - 1)
        # Apply the scalar reference loss elementwise.
        loss = np.vectorize(modified_huber_loss_forward)(product_res)
        self.outputs = {
            'IntermediateVal': product_res,
            'Out': loss.reshape((samples_num, 1))
        }

    def test_check_output(self):
        # Forward result must match the reference outputs built in setUp.
        self.check_output()

    def test_check_grad(self):
        # Numeric-vs-analytic gradient check with respect to X only.
        self.check_grad(['X'], 'Out', max_relative_error=0.005)
if __name__ == '__main__':
unittest.main()
| 25.675
| 80
| 0.590068
| 750
| 0.730282
| 0
| 0
| 0
| 0
| 0
| 0
| 82
| 0.079844
|
a7e54d153065f4c3487a12be2e95a69fef30b9f1
| 5,626
|
py
|
Python
|
bots.py
|
FatherUsarox/generadorqr
|
a56cc91c1b9320c03f3579c5a4d7d21f71b42f17
|
[
"MIT"
] | null | null | null |
bots.py
|
FatherUsarox/generadorqr
|
a56cc91c1b9320c03f3579c5a4d7d21f71b42f17
|
[
"MIT"
] | null | null | null |
bots.py
|
FatherUsarox/generadorqr
|
a56cc91c1b9320c03f3579c5a4d7d21f71b42f17
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# pylint: disable=C0116,W0613
# This program is dedicated to the public domain under the CC0 license.
"""
First, a few callback functions are defined. Then, those functions are passed to
the Dispatcher and registered at their respective places.
Then, the bot is started and runs until we press Ctrl-C on the command line.
Usage:
Example of a bot-user conversation using ConversationHandler.
Send /start to initiate the conversation.
Press Ctrl-C on the command line or send a signal to the process to stop the
bot.
"""
import logging
from telegram import ReplyKeyboardMarkup, ReplyKeyboardRemove, Update
from telegram.ext import (
Updater,
CommandHandler,
MessageHandler,
Filters,
ConversationHandler,
CallbackContext,
)
# Enable logging
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO
)
logger = logging.getLogger(__name__)
GENDER, PHOTO, LOCATION, BIO = range(4)
def start(update: Update, context: CallbackContext) -> int:
    """Entry point of the conversation: greet the user and ask for their gender."""
    keyboard = ReplyKeyboardMarkup(
        [['Boy', 'Girl', 'Other']],
        one_time_keyboard=True,
        input_field_placeholder='Boy or Girl?',
    )
    update.message.reply_text(
        'Hi! My name is Professor Bot. I will hold a conversation with you. '
        'Send /cancel to stop talking to me.\n\n'
        'Are you a boy or a girl?',
        reply_markup=keyboard,
    )
    return GENDER
def gender(update: Update, context: CallbackContext) -> int:
    """Log the chosen gender and prompt for a photo."""
    sender = update.message.from_user
    logger.info("Gender of %s: %s", sender.first_name, update.message.text)
    prompt = (
        'I see! Please send me a photo of yourself, '
        'so I know what you look like, or send /skip if you don\'t want to.'
    )
    # Remove the custom keyboard now that the choice has been made.
    update.message.reply_text(prompt, reply_markup=ReplyKeyboardRemove())
    return PHOTO
def photo(update: Update, context: CallbackContext) -> int:
    """Save the user's photo to disk and prompt for a location."""
    sender = update.message.from_user
    # The last element of message.photo is the highest-resolution variant.
    update.message.photo[-1].get_file().download('user_photo.jpg')
    logger.info("Photo of %s: %s", sender.first_name, 'user_photo.jpg')
    update.message.reply_text(
        'Gorgeous! Now, send me your location please, or send /skip if you don\'t want to.'
    )
    return LOCATION
def skip_photo(update: Update, context: CallbackContext) -> int:
    """Handle /skip for the photo step and move on to the location question."""
    logger.info("User %s did not send a photo.", update.message.from_user.first_name)
    update.message.reply_text(
        'I bet you look great! Now, send me your location please, or send /skip.'
    )
    return LOCATION
def location(update: Update, context: CallbackContext) -> int:
    """Log the user's coordinates and ask for a short bio."""
    sender = update.message.from_user
    coords = update.message.location
    logger.info(
        "Location of %s: %f / %f", sender.first_name, coords.latitude, coords.longitude
    )
    update.message.reply_text(
        'Maybe I can visit you sometime! At last, tell me something about yourself.'
    )
    return BIO
def skip_location(update: Update, context: CallbackContext) -> int:
    """Handle /skip for the location step and ask for a bio instead."""
    logger.info("User %s did not send a location.", update.message.from_user.first_name)
    update.message.reply_text(
        'You seem a bit paranoid! At last, tell me something about yourself.'
    )
    return BIO
def bio(update: Update, context: CallbackContext) -> int:
    """Log the bio text and end the conversation."""
    sender = update.message.from_user
    logger.info("Bio of %s: %s", sender.first_name, update.message.text)
    update.message.reply_text('Thank you! I hope we can talk again some day.')
    return ConversationHandler.END
def cancel(update: Update, context: CallbackContext) -> int:
    """Handle /cancel: say goodbye, drop the keyboard, and end the conversation."""
    logger.info("User %s canceled the conversation.", update.message.from_user.first_name)
    update.message.reply_text(
        'Bye! I hope we can talk again some day.', reply_markup=ReplyKeyboardRemove()
    )
    return ConversationHandler.END
def main() -> None:
    """Run the bot."""
    # Create the Updater and pass it your bot's token.
    # NOTE(review): "TOKEN" is a placeholder — replace with a real bot token
    # before deploying.
    updater = Updater("TOKEN")

    # Get the dispatcher to register handlers
    dispatcher = updater.dispatcher

    # Add conversation handler with the states GENDER, PHOTO, LOCATION and BIO.
    # Each state maps to the handlers that may fire while the conversation is
    # in that state; every handler returns the next state (or END).
    conv_handler = ConversationHandler(
        entry_points=[CommandHandler('start', start)],
        states={
            GENDER: [MessageHandler(Filters.regex('^(Boy|Girl|Other)$'), gender)],
            PHOTO: [MessageHandler(Filters.photo, photo), CommandHandler('skip', skip_photo)],
            LOCATION: [
                MessageHandler(Filters.location, location),
                CommandHandler('skip', skip_location),
            ],
            # Any non-command text is accepted as the bio.
            BIO: [MessageHandler(Filters.text & ~Filters.command, bio)],
        },
        fallbacks=[CommandHandler('cancel', cancel)],
    )

    dispatcher.add_handler(conv_handler)

    # Start the Bot
    updater.start_polling()

    # Run the bot until you press Ctrl-C or the process receives SIGINT,
    # SIGTERM or SIGABRT. This should be used most of the time, since
    # start_polling() is non-blocking and will stop the bot gracefully.
    updater.idle()
if __name__ == '__main__':
main()
| 32.900585
| 99
| 0.682545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,386
| 0.424102
|
a7e702d2867f4402c54e0d45a5281d763c846bf9
| 746
|
py
|
Python
|
src/tests/TestQuadratureRule.py
|
WaveBlocks/WaveBlocks
|
2af3730dcf27e54006ec602e696b4d4df25459d8
|
[
"BSD-3-Clause"
] | null | null | null |
src/tests/TestQuadratureRule.py
|
WaveBlocks/WaveBlocks
|
2af3730dcf27e54006ec602e696b4d4df25459d8
|
[
"BSD-3-Clause"
] | null | null | null |
src/tests/TestQuadratureRule.py
|
WaveBlocks/WaveBlocks
|
2af3730dcf27e54006ec602e696b4d4df25459d8
|
[
"BSD-3-Clause"
] | null | null | null |
"""The WaveBlocks Project
Plot some quadrature rules.
@author: R. Bourquin
@copyright: Copyright (C) 2010, 2011 R. Bourquin
@license: Modified BSD License
"""
from numpy import squeeze
from matplotlib.pyplot import *
from WaveBlocks import GaussHermiteQR
# Quadrature orders to visualize.
tests = (2, 3, 4, 7, 32, 64, 128)

for I in tests:
    Q = GaussHermiteQR(I)
    print(Q)

    # Nodes and weights; squeeze to 1-D arrays for plotting.
    N = Q.get_nodes()
    N = squeeze(N)
    W = Q.get_weights()
    W = squeeze(W)

    # Stem plot of weight against node position, saved per quadrature order.
    fig = figure()
    ax = fig.gca()
    ax.stem(N, W)
    ax.set_xlabel(r"$\gamma_i$")
    ax.set_ylabel(r"$\omega_i$")
    ax.set_title(r"Gauss-Hermite quadrature with $"+str(Q.get_number_nodes())+r"$ nodes")
    fig.savefig("qr_order_"+str(Q.get_order())+".png")
| 18.65
| 89
| 0.61126
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 247
| 0.331099
|
a7e7986c81b7eb2a012589680dc9149ce7e709a3
| 7,141
|
py
|
Python
|
neural_network.py
|
lee-winchester/deep-neural-network
|
8f7c012e864a6bf9a3257d8cd08e3b3488243b19
|
[
"MIT"
] | null | null | null |
neural_network.py
|
lee-winchester/deep-neural-network
|
8f7c012e864a6bf9a3257d8cd08e3b3488243b19
|
[
"MIT"
] | null | null | null |
neural_network.py
|
lee-winchester/deep-neural-network
|
8f7c012e864a6bf9a3257d8cd08e3b3488243b19
|
[
"MIT"
] | null | null | null |
import os
import cv2
import numpy as np
import matplotlib.pyplot as plt
import scipy
ROWS = 64
COLS = 64
CHANNELS = 3
TRAIN_DIR = 'Train_data/'
TEST_DIR = 'Test_data/'
train_images = [TRAIN_DIR+i for i in os.listdir(TRAIN_DIR)]
test_images = [TEST_DIR+i for i in os.listdir(TEST_DIR)]
def read_image(file_path):
    """Load the image at *file_path* (BGR) and resize it to ROWS x COLS."""
    raw = cv2.imread(file_path, cv2.IMREAD_COLOR)
    return cv2.resize(raw, (ROWS, COLS), interpolation=cv2.INTER_CUBIC)
def prepare_data(images):
    """Build (X, y) from image paths: X is (m, ROWS, COLS, CHANNELS) uint8; y[0,i] is 1 for 'dog' files, else 0."""
    m = len(images)
    X = np.zeros((m, ROWS, COLS, CHANNELS), dtype=np.uint8)
    y = np.zeros((1, m))
    for i, image_file in enumerate(images):
        X[i, :] = read_image(image_file)
        lowered = image_file.lower()
        # Label from the file name; anything else stays at the zero default.
        if 'dog' in lowered:
            y[0, i] = 1
        elif 'cat' in lowered:
            y[0, i] = 0
    return X, y
def sigmoid(z):
    """Elementwise logistic function 1 / (1 + exp(-z))."""
    return 1 / (1 + np.exp(-z))
# Load and label the raw image tensors from disk.
train_set_x, train_set_y = prepare_data(train_images)
test_set_x, test_set_y = prepare_data(test_images)
# Flatten each image into a column vector: shape (ROWS*COLS*CHANNELS, m).
train_set_x_flatten = train_set_x.reshape(train_set_x.shape[0], ROWS*COLS*CHANNELS).T
test_set_x_flatten = test_set_x.reshape(test_set_x.shape[0], -1).T
# Scale pixel values from [0, 255] into [0, 1].
train_set_x = train_set_x_flatten/255
test_set_x = test_set_x_flatten/255
#train_set_x_flatten shape: (12288, 6002)
#train_set_y shape: (1, 6002)
def initialize_parameters(input_layer, hidden_layer, output_layer):
    """Initialize a 2-layer network: small random weights, zero biases.

    Returns a dict with keys "W1", "b1", "W2", "b2".
    """
    # Small random weights break symmetry between hidden units;
    # biases can safely start at zero.
    W1 = np.random.randn(hidden_layer, input_layer) * 0.01
    b1 = np.zeros((hidden_layer, 1))
    W2 = np.random.randn(output_layer, hidden_layer) * 0.01
    b2 = np.zeros((output_layer, 1))
    return {"W1": W1, "b1": b1, "W2": W2, "b2": b2}
def forward_propagation(X, parameters):
    """Forward pass: tanh hidden layer, sigmoid output.

    Returns (A2, cache) where cache holds the intermediates needed by
    backward_propagation.
    """
    W1, b1 = parameters["W1"], parameters["b1"]
    W2, b2 = parameters["W2"], parameters["b2"]

    Z1 = np.dot(W1, X) + b1
    A1 = np.tanh(Z1)
    Z2 = np.dot(W2, A1) + b2
    # Logistic output (the sigmoid is inlined so this block is self-contained).
    A2 = 1 / (1 + np.exp(-Z2))

    cache = {"Z1": Z1, "A1": A1, "Z2": Z2, "A2": A2}
    return A2, cache
def compute_cost(A2, Y, parameters):
    """Cross-entropy cost of predictions A2 against labels Y, averaged over the batch.

    Note: *parameters* is unused; it is kept for signature compatibility.
    """
    m = Y.shape[1]
    # Elementwise log-likelihood of the correct label.
    logprobs = Y * np.log(A2) + (1 - Y) * np.log(1 - A2)
    cost = -1 / m * np.sum(logprobs)
    # Squeeze any singleton dimensions, e.g. [[51]] -> 51.
    return np.squeeze(cost)
def backward_propagation(parameters, cache, X, Y):
    """Backward pass for the 2-layer tanh/sigmoid network.

    Returns a dict of gradients: dW1, db1, dW2, db2.
    """
    m = X.shape[1]
    W1 = parameters["W1"]  # looked up for parity with the forward pass; not needed below
    W2 = parameters["W2"]
    A1, A2 = cache["A1"], cache["A2"]

    # Output layer: cross-entropy + sigmoid derivative collapses to A2 - Y.
    dZ2 = A2 - Y
    dW2 = np.dot(dZ2, A1.T) / m
    db2 = np.sum(dZ2, axis=1, keepdims=True) / m
    # Hidden layer: tanh'(z) = 1 - tanh(z)^2 = 1 - A1^2.
    dZ1 = np.dot(W2.T, dZ2) * (1 - np.power(A1, 2))
    dW1 = np.dot(dZ1, X.T) / m
    db1 = np.sum(dZ1, axis=1, keepdims=True) / m

    return {"dW1": dW1, "db1": db1, "dW2": dW2, "db2": db2}
def update_parameters(parameters, grads, learning_rate = 0.1):
    """One gradient-descent step: theta <- theta - learning_rate * d(theta).

    Returns a new parameter dict; the input dict is not mutated.
    """
    updated = {}
    # Pair each parameter with its gradient key and apply the update rule.
    for name, grad_name in (("W1", "dW1"), ("b1", "db1"), ("W2", "dW2"), ("b2", "db2")):
        updated[name] = parameters[name] - learning_rate * grads[grad_name]
    return updated
def predict(parameters, X):
    """Binary predictions (1.0 / 0.0) by thresholding the network output at 0.5."""
    A2, _ = forward_propagation(X, parameters)
    # Vectorized threshold; float dtype matches the original zeros() array.
    return (A2 > 0.5).astype(float)
def nn_model(X_train, Y_train, X_test, Y_test, n_h, num_iterations = 1000, learning_rate = 0.05, print_cost=False):
    """Train a one-hidden-layer network with batch gradient descent.

    X_* are (features, examples) arrays; Y_* are (1, examples) binary labels.
    Prints train/test accuracy and returns the trained parameter dict
    augmented with "costs" (sampled every 100 iterations) and "n_h".
    """
    n_x = X_train.shape[0]
    n_y = Y_train.shape[0]
    parameters = initialize_parameters(n_x, n_h, n_y)
    # (The original retrieved W1/b1/W2/b2 here but never used them; removed.)
    costs = []
    for i in range(0, num_iterations):
        # Forward pass, cost, backward pass, parameter update.
        A2, cache = forward_propagation(X_train, parameters)
        cost = compute_cost(A2, Y_train, parameters)
        grads = backward_propagation(parameters, cache, X_train, Y_train)
        parameters = update_parameters(parameters, grads, learning_rate)
        # Print the cost every 200 iterations
        if print_cost and i % 200 == 0:
            print ("Cost after iteration %i: %f" %(i, cost))
        # Record the cost every 100 iterations for plotting
        if i % 100 == 0:
            costs.append(cost)
    # Predict test/train set examples and report accuracy.
    Y_prediction_test = predict(parameters, X_test)
    Y_prediction_train = predict(parameters, X_train)
    print("train accuracy: {} %".format(100 - np.mean(np.abs(Y_prediction_train - Y_train)) * 100))
    print("test accuracy: {} %".format(100 - np.mean(np.abs(Y_prediction_test - Y_test)) * 100))
    parameters.update({"costs": costs, "n_h": n_h})
    return parameters
#nn_model(train_set_x, train_set_y, test_set_x, test_set_y, n_h = 10, num_iterations = 3000, learning_rate = 0.05, print_cost = True)
# Train one model per candidate hidden-layer size and keep each result.
hidden_layer = [10, 50, 100, 200, 400]
models = {}
for i in hidden_layer:
    print ("hidden layer is: ",i)
    models[i] = nn_model(train_set_x, train_set_y, test_set_x, test_set_y, n_h = i, num_iterations = 10000, learning_rate = 0.05, print_cost = True)
    print ("-------------------------------------------------------")
# Plot the recorded cost curves, one line per hidden-layer size.
for i in hidden_layer:
    plt.plot(np.squeeze(models[i]["costs"]), label= str(models[i]["n_h"]))
plt.ylabel('cost')
plt.xlabel('iterations (hundreds)')
legend = plt.legend(loc='upper center', shadow=True)
frame = legend.get_frame()
frame.set_facecolor('0.90')
plt.show()
| 30.780172
| 148
| 0.618261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,990
| 0.278672
|
a7e7c360d245af4066c15b6cc4582b7c3939eb5b
| 798
|
py
|
Python
|
main.py
|
UstymHanyk/NearbyMovies
|
b54995463a30a130f9023c63d6549e734c45251c
|
[
"MIT"
] | 1
|
2021-02-15T20:20:06.000Z
|
2021-02-15T20:20:06.000Z
|
main.py
|
UstymHanyk/NearbyMovies
|
b54995463a30a130f9023c63d6549e734c45251c
|
[
"MIT"
] | null | null | null |
main.py
|
UstymHanyk/NearbyMovies
|
b54995463a30a130f9023c63d6549e734c45251c
|
[
"MIT"
] | null | null | null |
"""
A module for generating a map with 10 nearest movies
"""
from data_reader import read_data, select_year
from locations_finder import coord_finder, find_nearest_movies
from map_generator import generate_map
def start():
    """Prompt for a year and a location, then generate map.html with the
    10 nearest movie shooting locations for that year."""
    year = int(input("Please enter a year you would like to have a map for:"))
    # float, not int: latitude/longitude are normally fractional ("49.84, 24.03"),
    # and int() would raise ValueError on such input.
    user_location = tuple(float(loc) for loc in input("Please enter your location (format: lat, long):").split(','))
    movie_data = read_data("smaller_locations.list")
    n_year_movies = select_year(movie_data, year)
    all_movie_locations = coord_finder(n_year_movies)
    nearest_movie_locations = find_nearest_movies(all_movie_locations, user_location)
    generate_map(nearest_movie_locations, user_location)
    return "Open map.html to enjoy the map."
if __name__=="__main__":
start()
| 42
| 114
| 0.766917
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 234
| 0.293233
|
a7e7f170ca97fac3a1e811a7610e759c8f771e42
| 9,003
|
py
|
Python
|
wbtools/lib/nlp/entity_extraction/ntt_extractor.py
|
WormBase/wbtools
|
70d07109182706b2a6cc333ef7a17dcd293cc3f3
|
[
"MIT"
] | 1
|
2021-02-17T06:54:13.000Z
|
2021-02-17T06:54:13.000Z
|
wbtools/lib/nlp/entity_extraction/ntt_extractor.py
|
WormBase/wbtools
|
70d07109182706b2a6cc333ef7a17dcd293cc3f3
|
[
"MIT"
] | 1
|
2021-04-28T20:58:57.000Z
|
2021-04-28T20:58:57.000Z
|
wbtools/lib/nlp/entity_extraction/ntt_extractor.py
|
WormBase/wbtools
|
70d07109182706b2a6cc333ef7a17dcd293cc3f3
|
[
"MIT"
] | 1
|
2021-03-31T17:23:37.000Z
|
2021-03-31T17:23:37.000Z
|
import math
import re
from typing import List, Dict
from wbtools.db.generic import WBGenericDBManager
from wbtools.lib.nlp.common import EntityType
from wbtools.lib.nlp.literature_index.abstract_index import AbstractLiteratureIndex
# Full allele/variation token: designation prefix, optional separators and
# suffixes, and bracketed qualifiers.  NOTE(review): the `[A-z]` class also
# matches punctuation between 'Z' and 'a' — confirm this is intentional.
ALL_VAR_REGEX = r'({designations}|m|p|It)(_)?([A-z]+)?([0-9]+)([a-zA-Z]{{1,4}}[0-9]*)?(\[[0-9]+\])?([a-zA-Z]{{1,4}}' \
                r'[0-9]*)?(\[.+\])?'
# Possibly-new variation: designation + number with an optional modifier
# suffix, delimited by parentheses/whitespace/brackets.
NEW_VAR_REGEX = r'[\(\s]({designations}|m|p)([0-9]+)((?:{designations}|m|p|ts|gf|lf|d|sd|am|cs)[0-9]+)?[\)\s\[]'
# Strain token: designation + number delimited by punctuation or whitespace.
STRAIN_REGEX = r'[\(\s,\.:;\'\"]({designations})([0-9]+)[\)\s\,\.:;\'\"]'
# Boundary characters accepted immediately before/after a keyword match.
OPENING_REGEX_STR = "[\\.\\n\\t\\'\\/\\(\\)\\[\\]\\{\\}:;\\,\\!\\?> ]"
CLOSING_REGEX_STR = "[\\.\\n\\t\\'\\/\\(\\)\\[\\]\\{\\}:;\\,\\!\\?> ]"
# Per-entity-type (opening, closing) regex fragments used to wrap joined name lists.
OPENING_CLOSING_REGEXES = {
    EntityType.VARIATION: [r'[\(\s](', r')[\)\s\[]'],
    EntityType.STRAIN: [r'[\(\s,\.:;\'\"](', r')[\)\s,\.:;\'\"]']
}
class NttExtractor:
    """Extract entity mentions (variations, strains, species) from free text
    using regex patterns and curated name lists from a WormBase DB manager."""

    def __init__(self, db_manager: WBGenericDBManager = None):
        self.db_manager = db_manager
        # Lazily-populated cache of curated entity names, keyed by EntityType.
        self.curated_entities = {}
        for entity_type in EntityType:
            self.curated_entities[entity_type] = None
        # Bake the DB-provided designation prefixes into the regex templates.
        allele_designations = self.db_manager.get_allele_designations()
        new_var_regex = NEW_VAR_REGEX.format(designations="|".join(allele_designations))
        strain_regex = STRAIN_REGEX.format(designations="|".join(self.db_manager.get_strain_designations()))
        self.entity_type_regex_map = {
            EntityType.VARIATION: new_var_regex,
            EntityType.STRAIN: strain_regex
        }

    def get_curated_entities(self, entity_type: EntityType, exclude_id_used_as_name: bool = True):
        """Return curated entity names for *entity_type*, fetching from the DB
        on first use and caching the result."""
        if not self.curated_entities[entity_type]:
            self.curated_entities[entity_type] = self.db_manager.get_curated_entities(
                entity_type=entity_type, exclude_id_used_as_name=exclude_id_used_as_name)
        return self.curated_entities[entity_type]

    @staticmethod
    def match_entities_regex(text, regex):
        """Return all matches of *regex* in *text* (padded with spaces so the
        boundary classes can match at string ends), capture groups re-joined."""
        res = re.findall(regex, " " + text + " ")
        return ["".join(entity_arr) for entity_arr in res]

    @staticmethod
    def count_keyword_matches_regex(keyword, text, case_sensitive: bool = True,
                                    match_uppercase: bool = False) -> int:
        """Count boundary-delimited occurrences of *keyword* in *text*.

        When *match_uppercase* is set (and the keyword is not already all
        upper-case), occurrences of the upper-cased keyword are also counted.
        """
        keyword = keyword if case_sensitive else keyword.upper()
        text = text if case_sensitive else text.upper()
        match_uppercase = False if keyword.upper() == keyword else match_uppercase
        if keyword in text or match_uppercase and keyword.upper() in text:
            try:
                match_count = len(re.findall(OPENING_REGEX_STR + re.escape(keyword) + CLOSING_REGEX_STR, text))
                if match_uppercase:
                    match_count += len(re.findall(OPENING_REGEX_STR + re.escape(keyword.upper()) +
                                                  CLOSING_REGEX_STR, text))
                return match_count
            except:
                # NOTE(review): bare except silently hides regex errors; consider narrowing.
                pass
        return 0

    @staticmethod
    def is_entity_meaningful(entity_keywords: List[str], text, lit_index: AbstractLiteratureIndex,
                             match_uppercase: bool = False, min_num_occurrences: int = 1,
                             tfidf_threshold: float = 0.0) -> bool:
        """Whether the keywords occur at least *min_num_occurrences* times and,
        if a positive *tfidf_threshold* is given, score above it."""
        min_num_occurrences = 1 if min_num_occurrences < 1 else min_num_occurrences
        raw_count = sum(NttExtractor.count_keyword_matches_regex(keyword=keyword, text=text,
                                                                 match_uppercase=match_uppercase) for
                        keyword in entity_keywords)
        return True if raw_count >= min_num_occurrences and (
            tfidf_threshold <= 0 or 0 < tfidf_threshold < NttExtractor.tfidf(entity_keywords=entity_keywords,
                                                                             raw_count=raw_count,
                                                                             lit_index=lit_index)) else False

    @staticmethod
    def tfidf(entity_keywords: List[str], raw_count, lit_index: AbstractLiteratureIndex) -> float:
        """tf-idf score: in-document count times log inverse document frequency."""
        doc_counter = sum(lit_index.count_matching_documents(keyword) for keyword in entity_keywords)
        # The 0.5 fallback avoids division by zero when no document matches.
        idf = math.log(float(lit_index.num_documents()) / (doc_counter if doc_counter > 0 else 0.5))
        return raw_count * idf

    @staticmethod
    def extract_meaningful_entities_by_keywords(keywords: List[str], text: str,
                                                lit_index: AbstractLiteratureIndex = None,
                                                match_uppercase: bool = False, min_matches: int = 1,
                                                tfidf_threshold: float = 0.0,
                                                blacklist: List[str] = None) -> List[str]:
        """Filter *keywords* down to those that are meaningful mentions in *text*."""
        blacklist = set(blacklist) if blacklist else set()
        return [keyword for keyword in set(keywords) if keyword not in blacklist and
                NttExtractor.is_entity_meaningful(
                    entity_keywords=[keyword], text=text, match_uppercase=match_uppercase, min_num_occurrences=min_matches,
                    tfidf_threshold=tfidf_threshold, lit_index=lit_index)]

    def extract_species_regex(self, text: str, taxon_id_name_map: Dict[str, List[str]] = None,
                              blacklist: List[str] = None,
                              whitelist: List[str] = None, min_matches: int = 1, tfidf_threshold: float = 0.0,
                              lit_index: AbstractLiteratureIndex = None):
        """Return species names (first regex of each taxon, backslashes stripped)
        that are meaningfully mentioned in *text*, honoring white/black lists."""
        blacklist = set(blacklist) if blacklist else set()
        whitelist = set(whitelist) if whitelist else set()
        if taxon_id_name_map is None:
            taxon_id_name_map = self.db_manager.get_taxon_id_names_map()
        return [regex_list[0].replace("\\", "") for taxon_id, regex_list in taxon_id_name_map.items() if
                taxon_id not in blacklist and (taxon_id in whitelist or
                                               NttExtractor.is_entity_meaningful(entity_keywords=regex_list, text=text,
                                                                                 match_uppercase=False,
                                                                                 lit_index=lit_index,
                                                                                 min_num_occurrences=min_matches,
                                                                                 tfidf_threshold=tfidf_threshold))]

    @staticmethod
    def get_entity_ids_from_names(entity_names: List[str], entity_name_id_map: Dict[str, str]):
        """Map extracted names to unique (id, name) pairs via *entity_name_id_map*."""
        return list(set([(entity_name_id_map[entity_name], entity_name) for entity_name in entity_names]))

    def extract_all_entities_by_type(self, text: str, entity_type: EntityType, include_new: bool = True,
                                     match_curated: bool = False, exclude_curated: bool = False,
                                     match_entities: List[str] = None, exclude_entities: List[str] = None,
                                     exclude_id_used_as_name: bool = True):
        """
        extract entities mentioned in text

        Args:
            text (str): the input text
            entity_type (EntityType): the type of entities to extract
            include_new (bool): whether to include possibly new entities not yet in the curation database
            match_curated (bool): whether to extract curated entities obtained from the provided DB manager
            exclude_curated (bool): whether to remove curated entities obtained from the provided DB manager from the
                extracted ones
            match_entities (List[str]): match the provided entities
            exclude_entities (List[str]): exclude the provided entities from the results
            exclude_id_used_as_name (bool): do not extract entity ids when used as names in the DB
        Returns:
            list: the list of entities extracted from text
        """
        entities = set()
        if include_new:
            entities.update(NttExtractor.match_entities_regex(text, self.entity_type_regex_map[entity_type]))
        if match_curated:
            entities.update(NttExtractor.match_entities_regex(
                text, OPENING_CLOSING_REGEXES[entity_type][0] + '|'.join(self.db_manager.get_curated_entities(
                    entity_type=entity_type, exclude_id_used_as_name=exclude_id_used_as_name)) +
                OPENING_CLOSING_REGEXES[entity_type][1]))
        if exclude_curated:
            entities -= set(self.get_curated_entities(entity_type=entity_type, exclude_id_used_as_name=exclude_id_used_as_name))
        if match_entities:
            entities.update(NttExtractor.match_entities_regex(
                text, OPENING_CLOSING_REGEXES[entity_type][0] + '|'.join(match_entities) +
                OPENING_CLOSING_REGEXES[entity_type][1]))
        if exclude_entities:
            entities -= set(exclude_entities)
        return sorted(list(entities))
| 56.26875
| 128
| 0.59258
| 8,125
| 0.902477
| 0
| 0
| 3,432
| 0.381206
| 0
| 0
| 1,359
| 0.15095
|
a7e832d5a07f4dde801acb1a916d9e7763b10c42
| 3,228
|
py
|
Python
|
tests/feature_extractors/test_bitteli.py
|
yamathcy/motif
|
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
|
[
"MIT"
] | 21
|
2016-08-22T22:00:49.000Z
|
2020-03-29T04:15:19.000Z
|
tests/feature_extractors/test_bitteli.py
|
yamathcy/motif
|
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
|
[
"MIT"
] | 22
|
2016-08-28T01:07:08.000Z
|
2018-02-07T14:38:26.000Z
|
tests/feature_extractors/test_bitteli.py
|
yamathcy/motif
|
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
|
[
"MIT"
] | 3
|
2017-01-12T10:04:27.000Z
|
2022-01-06T13:25:48.000Z
|
"""Test motif.features.bitteli
"""
import unittest
import numpy as np
from motif.feature_extractors import bitteli
def array_equal(array1, array2):
    """True when the two arrays match elementwise within an absolute tolerance of 1e-7."""
    elementwise_close = np.isclose(array1, array2, atol=1e-7)
    return np.all(elementwise_close)
class TestBitteliFeatures(unittest.TestCase):
    """Unit tests for motif's BitteliFeatures extractor: default hyperparameters,
    the feature vector on a constant-pitch contour, and the feature-name list."""

    def setUp(self):
        self.ftr = bitteli.BitteliFeatures()

    def test_ref_hz(self):
        expected = 55.0
        actual = self.ftr.ref_hz
        self.assertEqual(expected, actual)

    def test_poly_degree(self):
        expected = 5
        actual = self.ftr.poly_degree
        self.assertEqual(expected, actual)

    def test_min_freq(self):
        expected = 3
        actual = self.ftr.min_freq
        self.assertEqual(expected, actual)

    def test_max_freq(self):
        expected = 30
        actual = self.ftr.max_freq
        self.assertEqual(expected, actual)

    def test_freq_step(self):
        expected = 0.1
        actual = self.ftr.freq_step
        self.assertEqual(expected, actual)

    def test_vibrato_threshold(self):
        expected = 0.25
        actual = self.ftr.vibrato_threshold
        self.assertEqual(expected, actual)

    def test_get_feature_vector(self):
        # A flat 440 Hz contour at constant 0.5 salience: no vibrato, no slope.
        times = np.linspace(0, 1, 2000)
        freqs_hz = 440.0 * np.ones((2000, ))
        salience = 0.5 * np.ones((2000, ))
        sample_rate = 2000
        actual = self.ftr.get_feature_vector(
            times, freqs_hz, salience, sample_rate
        )
        expected = np.array([
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            3600.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0,
            0.0, 0.5, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0
        ])
        self.assertTrue(array_equal(expected, actual))
        # The vector length must agree with the advertised feature names.
        self.assertEqual(len(actual), len(self.ftr.feature_names))

    def test_get_feature_names(self):
        expected = [
            'vibrato rate',
            'vibrato extent',
            'vibrato coverage',
            'vibrato coverage - beginning',
            'vibrato coverage - middle',
            'vibrato coverage - end',
            '0th polynomial coeff - freq',
            '1st polynomial coeff - freq',
            '2nd polynomial coeff - freq',
            '3rd polynomial coeff - freq',
            '4th polynomial coeff - freq',
            '5th polynomial coeff - freq',
            'polynomial fit residual - freq',
            'overall model fit residual - freq',
            '0th polynomial coeff - salience',
            '1st polynomial coeff - salience',
            '2nd polynomial coeff - salience',
            '3rd polynomial coeff - salience',
            '4th polynomial coeff - salience',
            '5th polynomial coeff - salience',
            'polynomial fit residual - salience',
            'duration',
            'pitch stddev (cents)',
            'pitch range (cents)',
            'pitch average variation',
            'salience stdev',
            'salience range',
            'salience average variation'
        ]
        actual = self.ftr.feature_names
        self.assertEqual(expected, actual)

    def test_get_id(self):
        expected = 'bitteli'
        actual = self.ftr.get_id()
        self.assertEqual(expected, actual)
| 30.742857
| 66
| 0.565675
| 3,017
| 0.934634
| 0
| 0
| 0
| 0
| 0
| 0
| 785
| 0.243185
|
a7e93039a39687c72a48499f5a446f6400a42bad
| 412
|
py
|
Python
|
data-structures/trees/node/node.py
|
b-ritter/python-notes
|
e08e466458b8a2987c0abe42674da4066c763e74
|
[
"MIT"
] | 1
|
2017-05-04T18:48:45.000Z
|
2017-05-04T18:48:45.000Z
|
data-structures/trees/node/node.py
|
b-ritter/python-notes
|
e08e466458b8a2987c0abe42674da4066c763e74
|
[
"MIT"
] | null | null | null |
data-structures/trees/node/node.py
|
b-ritter/python-notes
|
e08e466458b8a2987c0abe42674da4066c763e74
|
[
"MIT"
] | null | null | null |
class Node():
    """A simple tree node holding a value, a parent link and ordered children."""

    def __init__(self, value=None):
        self.children = []   # child Node instances, in insertion order
        self.parent = None   # set when this node is attached via add_child
        self.value = value

    def add_child(self, node):
        """Attach *node* as a child of this node.

        Raises ValueError if *node* is not a Node instance.
        """
        # isinstance is robust where the original type(...).__name__ comparison
        # was not: it accepts subclasses and rejects unrelated classes that
        # merely happen to be named 'Node'.
        if isinstance(node, Node):
            node.parent = self
            self.children.append(node)
        else:
            raise ValueError

    def get_parent(self):
        """Return the parent's value, or 'root' when this node has no parent."""
        return self.parent.value if self.parent else 'root'
| 27.466667
| 59
| 0.553398
| 412
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 12
| 0.029126
|
a7e94b54f09da5dbd4544a51eda95b42dbf4bd2e
| 3,436
|
py
|
Python
|
qf_lib/backtesting/events/time_event/regular_date_time_rule.py
|
webclinic017/qf-lib
|
96463876719bba8a76c8269cef76addf3a2d836d
|
[
"Apache-2.0"
] | 198
|
2019-08-16T15:09:23.000Z
|
2022-03-30T12:44:00.000Z
|
qf_lib/backtesting/events/time_event/regular_date_time_rule.py
|
webclinic017/qf-lib
|
96463876719bba8a76c8269cef76addf3a2d836d
|
[
"Apache-2.0"
] | 13
|
2021-01-07T10:15:19.000Z
|
2022-03-29T13:01:47.000Z
|
qf_lib/backtesting/events/time_event/regular_date_time_rule.py
|
webclinic017/qf-lib
|
96463876719bba8a76c8269cef76addf3a2d836d
|
[
"Apache-2.0"
] | 29
|
2019-08-16T15:21:28.000Z
|
2022-02-23T09:53:49.000Z
|
# Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from qf_lib.common.utils.dateutils.relative_delta import RelativeDelta
class RegularDateTimeRule(object):
    """
    RegularDateTimeRule is a helper class for TimeEvents. It has a convenience method for calculating
    next trigger time for events which occur on certain date/time on regular basis (e.g. each day at 9:30,
    each first day of a month, etc.).
    """

    def __init__(self, year: int = None, month: int = None, day: int = None, weekday: int = None, hour: int = None,
                 minute: int = None, second: int = None, microsecond: int = None):
        self.trigger_time = RelativeDelta(
            year=year, month=month, day=day, weekday=weekday, hour=hour, minute=minute,
            second=second, microsecond=microsecond
        )

    def next_trigger_time(self, now: datetime) -> datetime:
        """Return the first trigger time strictly after *now*."""
        candidate = now + self.trigger_time
        if candidate > now:
            return candidate
        # The candidate is in the past (or exactly now): push it one full
        # period forward.
        return self._get_next_trigger_time_after(candidate)

    def _get_next_trigger_time_after(self, start_time: datetime):
        """Advance *start_time* by one period of the rule.

        The period is one unit of the next-more-significant field than the most
        significant field that was specified (month set -> +1 year, day set ->
        +1 month, etc.).
        """
        if self.trigger_time.year is not None:
            # No next occurrence exists: there will never be the same year again.
            raise ArithmeticError(
                "Cannot get next occurrence of the event with `year` specified "
                "(there will never be the same year again)."
            )
        # Ordered from most to least significant; the first field that is set
        # determines the shift.
        periods = (
            ("month", RelativeDelta(years=1)),
            ("day", RelativeDelta(months=1)),
            ("weekday", RelativeDelta(weeks=1)),
            ("hour", RelativeDelta(days=1)),
            ("minute", RelativeDelta(hours=1)),
            ("second", RelativeDelta(minutes=1)),
            ("microsecond", RelativeDelta(seconds=1)),
        )
        time_adjustment = None
        for attribute, shift in periods:
            if getattr(self.trigger_time, attribute) is not None:
                time_adjustment = shift
                break
        return start_time + time_adjustment
| 45.813333
| 116
| 0.680151
| 2,669
| 0.776323
| 0
| 0
| 0
| 0
| 0
| 0
| 1,494
| 0.434555
|
a7ea14ccf7f41c0614b8f95c605b3bd30018a21b
| 2,643
|
py
|
Python
|
example_project/blog/migrations/0001_initial.py
|
allran/djangorestframework-appapi
|
5e843b70910ccd55d787096ee08eb85315c80000
|
[
"BSD-2-Clause"
] | 4
|
2019-10-15T06:47:29.000Z
|
2019-11-11T13:16:15.000Z
|
example_project/blog/migrations/0001_initial.py
|
allran/djangorestframework-appapi
|
5e843b70910ccd55d787096ee08eb85315c80000
|
[
"BSD-2-Clause"
] | null | null | null |
example_project/blog/migrations/0001_initial.py
|
allran/djangorestframework-appapi
|
5e843b70910ccd55d787096ee08eb85315c80000
|
[
"BSD-2-Clause"
] | null | null | null |
# Generated by Django 2.2.6 on 2019-10-16 02:53
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial schema for the blog app: Author, Blog and the
    # UserFavorite join table.  Alter via new migrations, not by editing this file.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=254)),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='Blog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('title', models.CharField(blank=True, max_length=255, null=True, verbose_name='title')),
                ('content', models.TextField(blank=True, null=True)),
                # Nullable FK so blogs survive author deletion (SET_NULL).
                ('author', models.ForeignKey(blank=True, help_text='作者id', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='author', to='blog.Author', verbose_name='作者')),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='UserFavorite',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('blog', models.ForeignKey(blank=True, help_text='博客id', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='blog', to='blog.Blog', verbose_name='博客')),
                ('user', models.ForeignKey(help_text='收藏人id', null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='用户')),
            ],
            options={
                'verbose_name': '用户收藏',
                'verbose_name_plural': '用户收藏',
                'ordering': ['id'],
                # A user may favorite each blog at most once.
                'unique_together': {('user', 'blog')},
            },
        ),
    ]
| 43.327869
| 193
| 0.573212
| 2,526
| 0.940782
| 0
| 0
| 0
| 0
| 0
| 0
| 459
| 0.17095
|
a7eb2348cfa9a172a906c37500f1917164aff9ba
| 515
|
py
|
Python
|
hamming/hamming.py
|
olepunchy/exercism-python-solutions
|
7710e49ec0188510d50a22928cdb951063ad1a44
|
[
"BSD-3-Clause"
] | 1
|
2021-12-20T11:29:35.000Z
|
2021-12-20T11:29:35.000Z
|
hamming/hamming.py
|
olepunchy/exercism-python-solutions
|
7710e49ec0188510d50a22928cdb951063ad1a44
|
[
"BSD-3-Clause"
] | null | null | null |
hamming/hamming.py
|
olepunchy/exercism-python-solutions
|
7710e49ec0188510d50a22928cdb951063ad1a44
|
[
"BSD-3-Clause"
] | null | null | null |
"""Hamming Distance from Exercism"""
def distance(strand_a, strand_b):
    """Determine the hamming distance between two RNA strings

    param: str strand_a
    param: str strand_b
    return: int calculation of the hamming distance between strand_a and strand_b
    raises: ValueError if the strands differ in length
    """
    if len(strand_a) != len(strand_b):
        raise ValueError("Strands must be of equal length.")
    # zip pairs corresponding bases; summing the boolean mismatches replaces
    # the original index-based loop (and avoids shadowing the function name).
    return sum(a != b for a, b in zip(strand_a, strand_b))
| 25.75
| 81
| 0.660194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 265
| 0.514563
|
a7ec26521d5754d63393dc5921008ed61eb700b3
| 1,384
|
py
|
Python
|
python/scopePractice.py
|
5x5x5x5/Back2Basics
|
4cd4117c6fdcb064b6cd62fde63be92347950526
|
[
"Unlicense"
] | null | null | null |
python/scopePractice.py
|
5x5x5x5/Back2Basics
|
4cd4117c6fdcb064b6cd62fde63be92347950526
|
[
"Unlicense"
] | 1
|
2016-02-14T00:09:48.000Z
|
2016-02-14T00:10:05.000Z
|
python/scopePractice.py
|
5x5x5x5/Back2Basics
|
4cd4117c6fdcb064b6cd62fde63be92347950526
|
[
"Unlicense"
] | null | null | null |
#def spam():
# eggs = 31337
#spam()
#print(eggs)
"""
def spam():
eggs = 98
bacon()
print(eggs)
def bacon():
ham = 101
eggs = 0
spam()
"""
"""
# Global variables can be read from local scope.
def spam():
print(eggs)
eggs = 42
spam()
print(eggs)
"""
"""
# Local and global variables with the same name.
def spam():
eggs = 'spam local'
print(eggs) # prints 'spam local'
def bacon():
eggs = 'bacon local'
print(eggs) # prints 'bacon local'
spam()
print(eggs) # prints 'bacon local'
eggs = 'global'
bacon()
print(eggs) # prints 'global'
"""
"""
# the global statement
def spam():
global eggs
eggs = 'spam'
eggs = 'it don\'t matter'
spam()
print(eggs)
"""
"""
def spam():
global eggs
eggs = 'spam' # this is the global
def bacon():
eggs = 'bacon' # this is a local
def ham():
print(eggs) # this is the global
eggs = 42 # this is global
spam()
print(eggs)
"""
# Python will not fall back to using the global eggs variable
def spam():
    # NOTE(review): indentation was lost in this copy of the lesson; in the
    # canonical example print(eggs) precedes any assignment to eggs, which is
    # what triggers the UnboundLocalError described in the comment below.
    eggs = 'wha??'
    print(eggs)  # ERROR!
    eggs = 'spam local'

eggs = 'global'
spam()
# This error happens because Python sees that there is an assignment statement for eggs in the spam() function and therefore considers eggs to be local. Because print(eggs) is executed before eggs is assigned anything, the local variable eggs doesn't exist.
| 16.674699
| 257
| 0.621387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,297
| 0.937139
|
a7ee3074b0e06212ba87fbc858e47dc0897b2f73
| 3,114
|
py
|
Python
|
utils/csv_generator.py
|
stegmaierj/CellSynthesis
|
de2c90ed668b7f57b960896473df3d56636eca82
|
[
"Apache-2.0"
] | 1
|
2021-07-21T21:40:32.000Z
|
2021-07-21T21:40:32.000Z
|
utils/csv_generator.py
|
stegmaierj/CellSynthesis
|
de2c90ed668b7f57b960896473df3d56636eca82
|
[
"Apache-2.0"
] | null | null | null |
utils/csv_generator.py
|
stegmaierj/CellSynthesis
|
de2c90ed668b7f57b960896473df3d56636eca82
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
# 3D Image Data Synthesis.
# Copyright (C) 2021 D. Eschweiler, M. Rethwisch, M. Jarchow, S. Koppers, J. Stegmaier
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Please refer to the documentation for more information about the software
# as well as for installation instructions.
#
"""
import os
import glob
import csv
import numpy as np
def get_files(folders, data_root='', descriptor='', filetype='tif'):
    """Collect files matching *descriptor* and *filetype* from several folders.

    Each returned path is the folder joined with the bare file name (i.e.
    relative to *data_root*).
    """
    collected = []
    for folder in folders:
        pattern = os.path.join(data_root, folder, '*' + descriptor + '*.' + filetype)
        for match in glob.glob(pattern):
            collected.append(os.path.join(folder, os.path.split(match)[-1]))
    return collected
def read_csv(list_path, data_root=''):
    """Read a ';'-separated file list, prefixing every entry with *data_root*.

    Rows that are empty (or contain only empty cells) are skipped.
    Returns a list of rows, each a list of joined paths.
    """
    rows = []
    with open(list_path, 'r') as handle:
        for row in csv.reader(handle, delimiter=';'):
            # Skip blank lines and rows made up entirely of empty cells.
            if not row or not any(len(cell) for cell in row):
                continue
            rows.append([os.path.join(data_root, cell) for cell in row])
    return rows
def create_csv(data_list, save_path='list_folder/experiment_name', test_split=0.2, val_split=0.1, shuffle=False):
    """Split *data_list* into train/val/test and write each non-empty split to
    '<save_path>_{train,val,test}.csv' as ';'-separated rows.

    *test_split* is a fraction of all files; *val_split* is a fraction of the
    remainder.  With *shuffle* the list is permuted in place first.
    """
    if shuffle:
        np.random.shuffle(data_list)

    # Sizes of each split.
    num_files = len(data_list)
    num_test_files = int(test_split * num_files)
    num_val_files = int((num_files - num_test_files) * val_split)
    num_train_files = num_files - num_test_files - num_val_files

    file_idx = np.arange(num_files)

    def _write_split(indices, suffix):
        # One CSV row per selected data entry.
        with open(save_path + suffix, 'w') as handle:
            writer = csv.writer(handle, delimiter=';')
            for idx in indices:
                writer.writerow(data_list[idx])

    test_idx = []
    if num_test_files > 0:
        test_idx = sorted(np.random.choice(file_idx, size=num_test_files, replace=False))
        _write_split(test_idx, '_test.csv')

    val_idx = []
    if num_val_files > 0:
        val_idx = sorted(np.random.choice(list(set(file_idx) - set(test_idx)), size=num_val_files, replace=False))
        _write_split(val_idx, '_val.csv')

    if num_train_files > 0:
        train_idx = sorted(list(set(file_idx) - set(test_idx) - set(val_idx)))
        _write_split(train_idx, '_train.csv')
| 33.847826
| 113
| 0.633269
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 991
| 0.31824
|
a7f02ff728dc30360284b4e08bfb0d211597ed3b
| 1,003
|
py
|
Python
|
test/test_VersionUpdaterWindow.py
|
jmarrec/IDFVersionUpdater2
|
0420732141e41bdc06c85f1372d82f0843f8cebf
|
[
"BSD-3-Clause"
] | null | null | null |
test/test_VersionUpdaterWindow.py
|
jmarrec/IDFVersionUpdater2
|
0420732141e41bdc06c85f1372d82f0843f8cebf
|
[
"BSD-3-Clause"
] | null | null | null |
test/test_VersionUpdaterWindow.py
|
jmarrec/IDFVersionUpdater2
|
0420732141e41bdc06c85f1372d82f0843f8cebf
|
[
"BSD-3-Clause"
] | 2
|
2020-09-25T08:02:39.000Z
|
2021-08-18T08:30:31.000Z
|
import os
import sys
import tempfile
import unittest
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'IDFVersionUpdater'))
from VersionUpdaterWindow import VersionUpdaterWindow
class TestGetIDFVersion(unittest.TestCase):
    """Tests for VersionUpdaterWindow.get_idf_version on minimal IDF files."""

    def setUp(self):
        # Fresh temporary file path per test; each test writes its own contents.
        self.idf_name = tempfile.mktemp()

    def test_good_version_number(self):
        # A well-formed Version object parses to a float major.minor.
        with open(self.idf_name, 'w') as f:
            f.write("Version,8.5.0;")
        version = VersionUpdaterWindow.get_idf_version(self.idf_name)
        self.assertEqual(version, 8.5)

    def test_bad_version_number(self):
        # Non-numeric version components raise ValueError.
        with open(self.idf_name, 'w') as f:
            f.write("Version,x.y.z;")
        with self.assertRaises(ValueError):
            VersionUpdaterWindow.get_idf_version(self.idf_name)

    def test_missing_version_number(self):
        # A file without a Version object yields None.
        with open(self.idf_name, 'w') as f:
            f.write("x,y;")
        version = VersionUpdaterWindow.get_idf_version(self.idf_name)
        self.assertIsNone(version)
| 31.34375
| 101
| 0.682951
| 789
| 0.78664
| 0
| 0
| 0
| 0
| 0
| 0
| 70
| 0.069791
|
a7f04cab3ce9aa87269ec6d3083f5676dec9b76a
| 421
|
py
|
Python
|
Algorithm/Mathematical/453. Minimum Moves to Equal Array Elements.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
Algorithm/Mathematical/453. Minimum Moves to Equal Array Elements.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
Algorithm/Mathematical/453. Minimum Moves to Equal Array Elements.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
# https://leetcode.com/problems/minimum-moves-to-equal-array-elements/
# Explanation: https://leetcode.com/problems/minimum-moves-to-equal-array-elements/discuss/93817/It-is-a-math-question
# Source: https://leetcode.com/problems/minimum-moves-to-equal-array-elements/discuss/272994/Python-Greedy-Sum-Min*Len
class Solution:
    def minMoves(self, nums: List[int]) -> int:
        """Minimum moves to equalize when each move increments n-1 elements.

        Incrementing n-1 elements is equivalent to decrementing one element,
        so the answer is the total amount by which elements exceed the minimum.
        """
        floor = min(nums)
        return sum(value - floor for value in nums)
| 60.142857
| 118
| 0.752969
| 110
| 0.261283
| 0
| 0
| 0
| 0
| 0
| 0
| 306
| 0.726841
|
a7f28bd0d14c90ec88699bf98d0a9fe7b8320366
| 583
|
py
|
Python
|
tests/test_decisions.py/test_binary_decision.py
|
evanofslack/pyminion
|
0d0bfc6d8e84e9f33e617c7d01b6edb649166290
|
[
"MIT"
] | 5
|
2021-12-17T20:34:55.000Z
|
2022-01-24T15:18:05.000Z
|
tests/test_decisions.py/test_binary_decision.py
|
evanofslack/pyminion
|
0d0bfc6d8e84e9f33e617c7d01b6edb649166290
|
[
"MIT"
] | 31
|
2021-10-29T21:05:00.000Z
|
2022-03-22T03:27:14.000Z
|
tests/test_decisions.py/test_binary_decision.py
|
evanofslack/pyminion
|
0d0bfc6d8e84e9f33e617c7d01b6edb649166290
|
[
"MIT"
] | 1
|
2021-12-23T18:32:47.000Z
|
2021-12-23T18:32:47.000Z
|
from pyminion.decisions import binary_decision
def test_yes_input(monkeypatch):
    """Answering "y" must resolve the binary decision to True."""
    answer_yes = lambda _: "y"
    monkeypatch.setattr("builtins.input", answer_yes)
    assert binary_decision(prompt="test") is True
def test_no_input(monkeypatch):
    """Answering "n" must resolve the binary decision to False."""
    answer_no = lambda _: "n"
    monkeypatch.setattr("builtins.input", answer_no)
    assert binary_decision(prompt="test") is False
def test_invalid_input(monkeypatch):
    """An empty answer is neither yes nor no and must raise."""
    from pyminion.exceptions import InvalidBinaryInput
    import pytest
    answer_empty = lambda _: ""
    monkeypatch.setattr("builtins.input", answer_empty)
    with pytest.raises(InvalidBinaryInput):
        binary_decision(prompt="test")
| 27.761905
| 56
| 0.749571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 74
| 0.12693
|
a7f2b60426d4a9b0bbe027e12dcdd2ac3143d158
| 1,658
|
py
|
Python
|
commands/limit.py
|
nstra111/autovc
|
e73e1fea7b566721c3dce3ca6f587472e7ee9d1b
|
[
"MIT"
] | 177
|
2020-02-02T18:03:46.000Z
|
2022-03-17T06:18:43.000Z
|
commands/limit.py
|
zigsphere/Auto-Voice-Channels
|
6ae901728580bef4246737a6f1b9f10763badd3e
|
[
"MIT"
] | 82
|
2020-02-02T17:43:18.000Z
|
2022-03-24T20:34:55.000Z
|
commands/limit.py
|
zigsphere/Auto-Voice-Channels
|
6ae901728580bef4246737a6f1b9f10763badd3e
|
[
"MIT"
] | 165
|
2019-02-17T20:15:20.000Z
|
2022-03-27T23:59:23.000Z
|
import utils
import functions as func
from commands.base import Cmd
# Help entries rendered by the bot's help command; the <PREFIX> and
# <COMMAND> placeholders are substituted by the framework before display.
help_text = [
    [
        (
            "Usage:",
            "<PREFIX><COMMAND>\n<PREFIX><COMMAND> `N`",
        ),
        (
            "Description:",
            "Use when already in a channel - Limit the number of users "
            "allowed in your channel to either the current number of users, "
            "or the specified number.\n\n"
            "Use *<PREFIX>un<COMMAND>* to remove the limit.",
        ),
        (
            "Example:",
            "<PREFIX><COMMAND> 4",
        ),
    ]
]
async def execute(ctx, params):
    """Set (or clear) the user limit of the caller's voice channel.

    With a numeric argument the limit is set to that number; without one
    it is locked to the channel's current occupancy.  Returns a
    (success, message) tuple as expected by the command framework.
    """
    author = ctx['message'].author
    guild = ctx['guild']
    settings = ctx['settings']
    vc = ctx['voice_channel']

    raw_limit = utils.strip_quotes(' '.join(params))
    if raw_limit:
        try:
            limit = abs(int(raw_limit))
        except ValueError:
            return False, "`{}` is not a number.".format(raw_limit)
    else:
        # No argument: lock the channel at its current member count.
        limit = len(vc.members)

    if limit > 99:
        return False, "The user limit cannot be higher than 99."

    await vc.edit(user_limit=limit)
    if limit != 0:
        log_msg = "👪 {} (`{}`) set the user limit of \"**{}**\" (`{}`) to {}".format(
            func.user_hash(author), author.id, func.esc_md(vc.name), vc.id, limit
        )
    else:
        log_msg = "👨👩👧👦 {} (`{}`) removed the user limit of \"**{}**\" (`{}`)".format(
            func.user_hash(author), author.id, func.esc_md(vc.name), vc.id
        )
    await func.server_log(guild, log_msg, 2, settings)
    return True, None
# Command descriptor picked up by the bot's command loader: no argument is
# required, the caller must be in a voice channel, and only the channel
# creator may run it.
command = Cmd(
    execute=execute,
    help_text=help_text,
    params_required=0,
    admin_required=False,
    voice_required=True,
    creator_only=True,
)
| 28.586207
| 115
| 0.571773
| 0
| 0
| 0
| 0
| 0
| 0
| 1,028
| 0.612269
| 550
| 0.327576
|
a7f2fd039004fefa20925e8a466b301e8532a1f0
| 11,548
|
py
|
Python
|
salt/tests/unit/modules/test_metalk8s_solutions.py
|
zarumaru/metalk8s
|
8c79a28c2bd28ca5b84e58ace5605cbe6183fc75
|
[
"Apache-2.0"
] | null | null | null |
salt/tests/unit/modules/test_metalk8s_solutions.py
|
zarumaru/metalk8s
|
8c79a28c2bd28ca5b84e58ace5605cbe6183fc75
|
[
"Apache-2.0"
] | null | null | null |
salt/tests/unit/modules/test_metalk8s_solutions.py
|
zarumaru/metalk8s
|
8c79a28c2bd28ca5b84e58ace5605cbe6183fc75
|
[
"Apache-2.0"
] | null | null | null |
import errno
import os.path
import yaml
from parameterized import param, parameterized
from salt.exceptions import CommandExecutionError
from salttesting.mixins import LoaderModuleMockMixin
from salttesting.unit import TestCase
from salttesting.mock import MagicMock, mock_open, patch
import metalk8s_solutions
from tests.unit import utils
# Test fixtures live in a YAML file next to the test sources; each top-level
# key maps to the parameterized cases of one test method below.
YAML_TESTS_FILE = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    "files", "test_metalk8s_solutions.yaml"
)
with open(YAML_TESTS_FILE) as fd:
    YAML_TESTS_CASES = yaml.safe_load(fd)
class Metalk8sSolutionsTestCase(TestCase, LoaderModuleMockMixin):
    """
    TestCase for `metalk8s_solutions` module
    """
    # Module under test; LoaderModuleMockMixin wires up its __salt__ dunder.
    loader_module = metalk8s_solutions
    def test_virtual_success(self):
        """
        Tests the return of `__virtual__` function, success
        """
        # __virtual__ succeeds when the base 'metalk8s' module is loadable.
        dict_patch = {'metalk8s.archive_info_from_iso': MagicMock()}
        with patch.dict(metalk8s_solutions.__salt__, dict_patch):
            self.assertEqual(
                metalk8s_solutions.__virtual__(), 'metalk8s_solutions'
            )
    def test_virtual_missing_metalk8s_module(self):
        """
        Tests the return of `__virtual__` function,
        when metalk8s module is missing
        """
        self.assertEqual(
            metalk8s_solutions.__virtual__(),
            (False, "Failed to load 'metalk8s' module.")
        )
    @utils.parameterized_from_cases(YAML_TESTS_CASES["read_config"])
    def test_read_config(self, create=False, config=None, result=None,
                         raises=False):
        """
        Tests the return of `read_config` function
        """
        open_mock = mock_open(read_data=config)
        if not config:
            # Simulate a missing config file.
            open_mock.side_effect = IOError(
                errno.ENOENT, "No such file or directory"
            )
        with patch("metalk8s_solutions.open", open_mock), \
                patch("metalk8s_solutions._write_config_file", MagicMock()):
            if raises:
                self.assertRaisesRegexp(
                    CommandExecutionError,
                    result,
                    metalk8s_solutions.read_config
                )
            else:
                if create:
                    self.assertEqual(
                        metalk8s_solutions.read_config(create),
                        result
                    )
                else:
                    self.assertEqual(
                        metalk8s_solutions.read_config(),
                        result
                    )
    @utils.parameterized_from_cases(YAML_TESTS_CASES["configure_archive"])
    def test_configure_archive(self, archive, removed=None, config=None,
                               result=None, raises=False):
        """
        Tests the return of `configure_archive` function
        """
        def _write_config_file_mock(new_config):
            if raises:
                raise CommandExecutionError(
                    "Failed to write Solutions config file"
                )
            # NOTE(review): this rebinds a local name only; the enclosing
            # `config` is not updated by this assignment — verify intent.
            config = new_config
        read_config_mock = MagicMock(return_value=config)
        write_config_file_mock = MagicMock(side_effect=_write_config_file_mock)
        with patch("metalk8s_solutions.read_config", read_config_mock), \
                patch("metalk8s_solutions._write_config_file",
                      write_config_file_mock):
            if raises:
                self.assertRaisesRegexp(
                    CommandExecutionError,
                    "Failed to write Solutions config file",
                    metalk8s_solutions.configure_archive,
                    archive
                )
            else:
                self.assertEqual(
                    metalk8s_solutions.configure_archive(
                        archive, removed=removed
                    ),
                    True
                )
                self.assertEqual(config, result)
    @utils.parameterized_from_cases(YAML_TESTS_CASES["activate_solution"])
    def test_activate_solution(self, solution, version=None, config=None,
                               result=None, available=None, raises=False):
        """
        Tests the return of `activate_solution` function
        """
        def _yaml_safe_dump_mock(data, _):
            if raises:
                raise Exception("Something bad happened! :/")
            # NOTE(review): local rebinding only (see configure_archive).
            config = data
        list_available_mock = MagicMock(return_value=available or {})
        read_config_mock = MagicMock(return_value=config)
        yaml_safe_dump_mock = MagicMock(side_effect=_yaml_safe_dump_mock)
        with patch("metalk8s_solutions.list_available", list_available_mock), \
                patch("metalk8s_solutions.read_config", read_config_mock), \
                patch("metalk8s_solutions.open", mock_open()), \
                patch("yaml.safe_dump", yaml_safe_dump_mock):
            if raises:
                self.assertRaisesRegexp(
                    CommandExecutionError,
                    result,
                    metalk8s_solutions.activate_solution,
                    solution,
                    version
                )
            else:
                if version:
                    self.assertEqual(
                        metalk8s_solutions.activate_solution(
                            solution, version
                        ),
                        True
                    )
                else:
                    self.assertEqual(
                        metalk8s_solutions.activate_solution(solution),
                        True
                    )
                self.assertEqual(config, result)
    @utils.parameterized_from_cases(YAML_TESTS_CASES["deactivate_solution"])
    def test_deactivate_solution(self, solution, config=None, raises=False,
                                 result=None):
        """
        Tests the return of `deactivate_solution` function
        """
        def _yaml_safe_dump_mock(data, _):
            if raises:
                raise Exception("Something bad happened! :/")
            # NOTE(review): local rebinding only (see configure_archive).
            config = data
        read_config_mock = MagicMock(return_value=config)
        yaml_safe_dump_mock = MagicMock(side_effect=_yaml_safe_dump_mock)
        with patch("metalk8s_solutions.read_config", read_config_mock), \
                patch("yaml.safe_dump", yaml_safe_dump_mock), \
                patch("metalk8s_solutions.open", mock_open()):
            if raises:
                self.assertRaisesRegexp(
                    CommandExecutionError,
                    "Failed to write Solutions config file",
                    metalk8s_solutions.deactivate_solution,
                    solution
                )
            else:
                self.assertEqual(
                    metalk8s_solutions.deactivate_solution(solution),
                    True
                )
                self.assertEqual(config, result)
    @utils.parameterized_from_cases(YAML_TESTS_CASES["list_solution_images"])
    def test_list_solution_images(self, images=None, result=None,
                                  raises=False):
        """
        Tests the return of `list_solution_images` function
        """
        mountpoint = '/srv/scality/my-solution/'
        image_dir_prefix_len = len(os.path.join(mountpoint, 'images'))
        if not images:
            images = {}
        def _get_image_name_and_version(path):
            # Split '<mountpoint>/images/<name>/<version>' into its parts;
            # version stays None when the path stops at the name level.
            version = None
            basename = path[image_dir_prefix_len:].lstrip('/')
            try:
                name, version = basename.split('/')
            except ValueError:
                name = basename
            return name, version
        def _path_isdir_mock(path):
            name, version = _get_image_name_and_version(path)
            return images and (not name or images[name]) and \
                (not version or images[name][version])
        def _listdir_mock(path):
            name, version = _get_image_name_and_version(path)
            if not name:
                return images.keys()
            return images[name].keys()
        path_isdir_mock = MagicMock(side_effect=_path_isdir_mock)
        listdir_mock = MagicMock(side_effect=_listdir_mock)
        with patch("os.path.isdir", path_isdir_mock), \
                patch("os.listdir", listdir_mock):
            if raises:
                self.assertRaisesRegexp(
                    CommandExecutionError,
                    result,
                    metalk8s_solutions.list_solution_images,
                    mountpoint
                )
            else:
                self.assertItemsEqual(
                    metalk8s_solutions.list_solution_images(mountpoint),
                    result
                )
    @utils.parameterized_from_cases(YAML_TESTS_CASES["read_solution_config"])
    def test_read_solution_config(self, config=None, result=None,
                                  raises=False):
        """
        Tests the return of `read_solution_config` function
        """
        # config is None <=> the solution ships no config file.
        path_isfile_mock = MagicMock(return_value=config is not None)
        list_solution_images_mock = MagicMock(return_value=[])
        fopen_mock = mock_open(read_data=config)
        read_solution_config_args = [
            '/srv/scality/my-solution', 'my-solution', '1.0.0'
        ]
        with patch("os.path.isfile", path_isfile_mock), \
                patch("salt.utils.files.fopen", fopen_mock), \
                patch("metalk8s_solutions.list_solution_images",
                      list_solution_images_mock):
            if raises:
                self.assertRaisesRegexp(
                    CommandExecutionError,
                    result,
                    metalk8s_solutions.read_solution_config,
                    *read_solution_config_args
                )
            else:
                self.assertEqual(
                    metalk8s_solutions.read_solution_config(
                        *read_solution_config_args
                    ),
                    result
                )
    @utils.parameterized_from_cases(YAML_TESTS_CASES["list_available"])
    def test_list_available(self, mountpoints=None, archive_infos=None,
                            result=None, raises=False):
        """
        Tests the return of `list_available` function
        """
        def _archive_info_from_tree(path):
            if archive_infos:
                return archive_infos
            raise Exception('Path has no "product.txt"')
        if not mountpoints:
            mountpoints = {}
        if not result:
            result = {}
        mount_active_mock = MagicMock(return_value=mountpoints)
        archive_info_from_tree_mock = MagicMock(
            side_effect=_archive_info_from_tree
        )
        read_solution_config_mock = MagicMock(return_value=None)
        salt_dict_patch = {
            'mount.active': mount_active_mock,
            'metalk8s.archive_info_from_tree': archive_info_from_tree_mock,
        }
        with patch.dict(metalk8s_solutions.__salt__, salt_dict_patch), \
                patch("metalk8s_solutions.read_solution_config",
                      read_solution_config_mock):
            if raises:
                self.assertRaisesRegexp(
                    Exception,
                    'Path has no "product.txt"',
                    metalk8s_solutions.list_available
                )
            else:
                self.assertEqual(
                    metalk8s_solutions.list_available(),
                    result
                )
| 36.660317
| 79
| 0.559144
| 10,996
| 0.9522
| 0
| 0
| 10,110
| 0.875476
| 0
| 0
| 1,850
| 0.160201
|
a7f5cbeb6c6ac6730e6541d991681e7c83554dd8
| 523
|
py
|
Python
|
fun.py
|
Krishna-Aaseri/Python_Logical_Questions
|
c0f025a56dbbf85426142adb423b25fa7b034adb
|
[
"MIT"
] | null | null | null |
fun.py
|
Krishna-Aaseri/Python_Logical_Questions
|
c0f025a56dbbf85426142adb423b25fa7b034adb
|
[
"MIT"
] | null | null | null |
fun.py
|
Krishna-Aaseri/Python_Logical_Questions
|
c0f025a56dbbf85426142adb423b25fa7b034adb
|
[
"MIT"
] | null | null | null |
#def add(num,num1):
# add1=num+num1
# print add1
#add(6,7)
#def welcome():
# print "python kaisa lagta h aapko"
# print "but please reply na kare aap"
#welcome()
user = int(raw_input("enter a number"))
i = 0
new = []
while i < (user):
user1 = int(raw_input("enter a number"))
new.append(user1)
i = i + 1
print new
print "**********************************************"
i = 0
new_list = []
while i < len(new):
if new[i]%2 == 0:
new_list.append(new)
else:
new_list.append(new)
i = i + 1
print new_list
| 13.763158
| 54
| 0.565966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 234
| 0.447419
|
a7f7aa50e11186fe4bb67eb3b4c81147ea13ad7a
| 29
|
py
|
Python
|
app.py
|
00MB/lottocoin
|
ebf27f5a02169d948e8633b1dc5d5ad37ee1bb4a
|
[
"MIT"
] | 2
|
2021-02-10T01:40:36.000Z
|
2021-02-10T01:41:22.000Z
|
app.py
|
00MB/lottocoin
|
ebf27f5a02169d948e8633b1dc5d5ad37ee1bb4a
|
[
"MIT"
] | null | null | null |
app.py
|
00MB/lottocoin
|
ebf27f5a02169d948e8633b1dc5d5ad37ee1bb4a
|
[
"MIT"
] | null | null | null |
from lottocoin import app
| 5.8
| 25
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
a7f8a1ad9e3a4405d58d74a78a4a7eac31d085da
| 30,883
|
py
|
Python
|
api/views_v2.py
|
GeRDI-Project/HarvesterControlCenter
|
ce161a31a6510ae28ffa68b8e0fd43c42060cb07
|
[
"Apache-2.0"
] | null | null | null |
api/views_v2.py
|
GeRDI-Project/HarvesterControlCenter
|
ce161a31a6510ae28ffa68b8e0fd43c42060cb07
|
[
"Apache-2.0"
] | 9
|
2020-01-07T12:40:26.000Z
|
2021-09-22T18:00:38.000Z
|
api/views_v2.py
|
GeRDI-Project/HarvesterControlCenter
|
ce161a31a6510ae28ffa68b8e0fd43c42060cb07
|
[
"Apache-2.0"
] | null | null | null |
"""
This is the views module which encapsulates the backend logic
which will be riggered via the corresponding path (url).
"""
import collections
import json
import logging
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.contrib.messages.views import SuccessMessageMixin
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.shortcuts import get_object_or_404, render
from django.urls import reverse
from django.views.generic import RedirectView
from django.views.generic.base import View
from django.views.generic.edit import FormMixin
from rest_framework import generics, permissions, status
from rest_framework.authentication import (BasicAuthentication,
TokenAuthentication)
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from api.constants import HCCJSONConstants as HCCJC
from api.forms import (HarvesterForm, SchedulerForm, UploadFileForm,
ValidateFileForm, create_config_fields,
create_config_form)
from api.harvester_api import InitHarvester
from api.mixins import AjaxableResponseMixin
from api.models import Harvester
from api.permissions import IsOwner
from api.serializers import HarvesterSerializer, UserSerializer
__author__ = "Jan Frömberg, Laura Höhle"
__copyright__ = "Copyright 2018, GeRDI Project"
__credits__ = ["Jan Frömberg"]
__license__ = "Apache 2.0"
__maintainer__ = "Jan Frömberg"
__email__ = "jan.froemberg@tu-dresden.de"
# Get an instance of a logger
LOGGER = logging.getLogger(__name__)
def index(request):
    """Redirect the bare index URL to the control-center GUI page.

    :param request: the request
    :return: a HttpResponse redirect
    """
    target = reverse('hcc_gui')
    return HttpResponseRedirect(target)
@login_required
def toggle_harvester(request, name):
    """Flip one harvester between enabled and disabled.

    :param request: the request
    :param name: name of the harvester
    :return: redirect to the main HCC page
    """
    harvester = get_object_or_404(Harvester, name=name)
    was_enabled = harvester.enabled
    if was_enabled:
        harvester.disable()
    else:
        harvester.enable()
    state = 'disabled' if was_enabled else 'enabled'
    LOGGER.info("%s %s.", harvester.name, state)
    messages.add_message(request, messages.INFO,
                         name + ' harvester {}.'.format(state))
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def toggle_harvesters(request, hnames):
    """Flip each harvester named in the '-'-separated list *hnames*.

    :param request: the request
    :param hnames: '-'-separated harvester names
    :return: redirect to the main HCC page
    """
    for name in hnames.split('-'):
        harvester = get_object_or_404(Harvester, name=name)
        was_enabled = harvester.enabled
        if was_enabled:
            harvester.disable()
        else:
            harvester.enable()
        state = 'disabled' if was_enabled else 'enabled'
        LOGGER.info("%s %s.", harvester.name, state)
        messages.add_message(request, messages.INFO,
                             name + ' harvester {}.'.format(state))
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def stop_harvester(request, name):
    """Ask the named harvester's API to stop its current harvest.

    :param request: the request
    :param name: name of the harvester
    :return: redirect to the main HCC page
    """
    harvester = get_object_or_404(Harvester, name=name)
    api = InitHarvester(harvester).get_harvester_api()
    result = api.stop_harvest()
    feedback = str(result.data[harvester.name])
    messages.add_message(request, messages.INFO, name + ': ' + feedback)
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def start_harvester(request, name):
    """Ask the named harvester's API to start a harvest.

    :param request: the request
    :param name: name of the harvester
    :return: redirect to the main HCC page
    """
    harvester = get_object_or_404(Harvester, name=name)
    api = InitHarvester(harvester).get_harvester_api()
    result = api.start_harvest()
    feedback = str(result.data[harvester.name])
    messages.add_message(request, messages.INFO, name + ': ' + feedback)
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def start_selected_harvesters(request, hnames):
    """Start a harvest on every harvester named in *hnames* ('-'-separated).

    :param request: the request
    :param hnames: '-'-separated harvester names
    :return: redirect to the main HCC page
    """
    for name in hnames.split('-'):
        harvester = get_object_or_404(Harvester, name=name)
        api = InitHarvester(harvester).get_harvester_api()
        result = api.start_harvest()
        messages.add_message(request, messages.INFO,
                             name + ': ' + str(result.data[harvester.name]))
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def reset_harvester(request, name):
    """Ask the named harvester's API to reset itself.

    :param request: the request
    :param name: name of the harvester
    :return: redirect to the main HCC page
    """
    harvester = get_object_or_404(Harvester, name=name)
    api = InitHarvester(harvester).get_harvester_api()
    result = api.reset_harvest()
    feedback = str(result.data[harvester.name])
    messages.add_message(request, messages.INFO, name + ': ' + feedback)
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def get_all_harvester_log(request):
    """Collect the log output of every enabled harvester and render it.

    :param request: the request
    :return: rendered harvester_logs.html with per-harvester log data
    """
    logs = {}
    for harvester in Harvester.objects.all():
        if not harvester.enabled:
            continue
        api = InitHarvester(harvester).get_harvester_api()
        result = api.harvester_log()
        logs[harvester.name] = result.data[harvester.name][HCCJC.LOGS]
    feedback = {HCCJC.LOG_DATA: logs}
    return render(request, "hcc/harvester_logs.html", feedback)
@login_required
def get_hcc_log(request):
    """Serve the control center's own debug logfile as an attachment.

    The file location comes from settings.LOGGING (./log/debug.log);
    newlines are replaced with <br> for in-browser display.
    """
    filename = settings.LOGGING['handlers']['filedebug']['filename']
    with open(filename, 'r') as logfile:
        body = logfile.read().replace('\n', '<br>')
    content = filename + "<br>" + body
    response = HttpResponse(content, content_type='text/plain')
    response['Content-Disposition'] = 'attachment; filename={0}'.format(
        filename)
    return response
@login_required
def get_harvester_progress(request, name):
    """Return the named harvester's progress as a JSON payload.

    :param request: the request
    :param name: name of the harvester
    :return: JsonResponse mirroring the harvester API's status code
    """
    harvester = get_object_or_404(Harvester, name=name)
    api = InitHarvester(harvester).get_harvester_api()
    result = api.harvester_progress()
    payload = {harvester.name: result.data[harvester.name]}
    return JsonResponse(payload, status=result.status_code)
@login_required
def harvester_status_history(request, name):
    """Return the status history of the named harvester as JSON."""
    harvester = get_object_or_404(Harvester, name=name)
    api = InitHarvester(harvester).get_harvester_api()
    result = api.status_history()
    return JsonResponse({"message": result.data})
@login_required
def start_all_harvesters(request):
    """Trigger a harvest on every enabled harvester.

    Per-harvester feedback (the HEALTH field when present, the raw
    response otherwise) is pushed through the messages framework.

    :param request: the request
    :return: redirect to the main HCC page
    """
    for harvester in Harvester.objects.all():
        if not harvester.enabled:
            continue
        api = InitHarvester(harvester).get_harvester_api()
        result = api.start_harvest()
        data = result.data[harvester.name]
        if HCCJC.HEALTH in data:
            msg = harvester.name + ': ' + data[HCCJC.HEALTH]
        else:
            msg = harvester.name + ': ' + str(data)
        messages.add_message(request, messages.INFO, msg)
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def abort_all_harvesters(request):
    """Stop the harvest on every enabled harvester.

    Per-harvester feedback (the HEALTH field when present, the raw
    response otherwise) is pushed through the messages framework.

    :param request: the request
    :return: redirect to the main HCC page
    """
    for harvester in Harvester.objects.all():
        if not harvester.enabled:
            continue
        api = InitHarvester(harvester).get_harvester_api()
        result = api.stop_harvest()
        data = result.data[harvester.name]
        if HCCJC.HEALTH in data:
            msg = harvester.name + ': ' + data[HCCJC.HEALTH]
        else:
            msg = harvester.name + ': ' + str(data)
        messages.add_message(request, messages.INFO, msg)
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def harvester_api_info(request, name):
    """Render the named harvester's API help text with HTML line breaks."""
    harvester = get_object_or_404(Harvester, name=name)
    api = InitHarvester(harvester).get_harvester_api()
    result = api.api_infotext()
    text = result.data[harvester.name].replace('\n', '<br>')
    return HttpResponse(text, content_type='text/plain')
def create_form(response, harvester_name):
    """
    Build a SchedulerForm for one harvester from its status response.

    If the response carries a crontab entry, a form prefixed with the
    harvester name is returned with the current schedule shown as the
    field placeholder.  Otherwise (no response at all, or no crontab in
    it) a default form initialised with '0 0 * * *' is returned.

    Cleanup: the original assigned an unused `jsonstr` in the crontab
    branch and duplicated the default-form construction in two branches.

    :param response: harvester status response (or None)
    :param harvester_name: harvester name, used as the form prefix
    :return: SchedulerForm(prefix=harvester_name)
    """
    if response:
        response_dict = response.data[harvester_name]
        if HCCJC.CRONTAB in response_dict:
            form = SchedulerForm(prefix=harvester_name)
            placeholder = response_dict[HCCJC.CRONTAB]
            # The API may deliver the crontab as a list; show its first
            # entry (a non-empty list) or leave the value as-is.
            if isinstance(placeholder, list) and len(placeholder) > 0:
                placeholder = placeholder[0]
            form.fields[HCCJC.POSTCRONTAB].widget.attrs.update(
                {'placeholder': placeholder})
            return form
    # No response, or no schedule configured yet: offer a daily default.
    return SchedulerForm(initial={HCCJC.POSTCRONTAB: '0 0 * * *'},
                         prefix=harvester_name)
def home(request):
    """
    Home entry point of Web-Application GUI.

    For authenticated users: polls the status of every enabled harvester,
    builds one SchedulerForm per harvester, aggregates document counters
    and renders the dashboard.  Anonymous users get an empty status page.
    """
    feedback = {}
    # init session variables:
    # theme (dark/light) with default light
    theme = request.session.get('theme', 'light')
    # viewtype (card/list/table) with default card
    viewtype = request.session.get('viewtype', 'card')
    # collapse status (visible/collapsed)
    collapse_status = {}
    collapse_status['toolbox'] = request.session.get('toolbox', 'collapsed')
    collapse_status['chart'] = request.session.get('chart', 'collapsed')
    collapse_status['disabled_harvs'] = request.session.get(
        'disabled_harvs', 'collapsed')
    collapse_status['enabled_harvs'] = request.session.get(
        'enabled_harvs', 'visible')
    # if user is logged in
    if request.user.is_authenticated:
        forms = {}
        response = None
        harvesters = Harvester.objects.all()
        num_harvesters = len(harvesters)
        num_enabled_harvesters = 0
        num_disabled_harvesters = 0
        # get status of each enabled harvester
        for harvester in harvesters:
            # TODO do that call at client side!!
            if harvester.enabled:
                num_enabled_harvesters += 1
                api = InitHarvester(harvester).get_harvester_api()
                response = api.harvester_status()
                forms[harvester.name] = create_form(response, harvester.name)
                if response:
                    feedback[harvester.name] = response.data[harvester.name]
                else:
                    # unreachable API: surface a warning tile in the GUI
                    feedback[harvester.name] = {}
                    feedback[harvester.name][HCCJC.GUI_STATUS] = HCCJC.WARNING
                    err = 'Error : no response object'
                    feedback[harvester.name][HCCJC.HEALTH] = err
            else:
                num_disabled_harvesters += 1
        # get total amount of docs
        sum_harvested = 0
        sum_max_docs = 0
        for harvester in feedback.values():
            if isinstance(harvester, dict):
                for (_k, _v) in harvester.items():
                    if _k == HCCJC.CACHED_DOCS:
                        sum_harvested += int(_v)
                    if _k == HCCJC.MAX_DOCUMENTS:
                        # 'N/A' marks harvesters without a reported maximum
                        if _v != 'N/A':
                            sum_max_docs += int(_v)
        feedback['sum_harvested'] = sum_harvested
        feedback['sum_maxdocs'] = sum_max_docs
        feedback['num_disabled_harvesters'] = num_disabled_harvesters
        feedback['num_enabled_harvesters'] = num_enabled_harvesters
        feedback['num_harvesters'] = num_harvesters
        msg = '{} enabled Harvesters with total amount \
        of harvested Items so far: {}'.format(num_enabled_harvesters,
                                              sum_harvested)
        messages.add_message(request, messages.INFO, msg)
        # init form
        if request.method == 'POST':
            form = SchedulerForm(request.POST)
            if form.is_valid():
                return HttpResponseRedirect(reverse('hcc_gui'))
        return render(
            request, 'hcc/index.html', {
                'harvesters': harvesters,
                'status': feedback,
                'forms': forms,
                'theme': theme,
                'viewtype': viewtype,
                'collapse_status': collapse_status
            })
    return render(request, 'hcc/index.html', {
        'status': feedback
    })
@login_required
def update_session(request):
    """
    Update whitelisted session variables from an AJAX POST request.

    Only keys listed in HCCJC.SESSION_KEYS count as successful updates;
    any other key is still stored but reported as a failure in the reply.

    Fix: 'status'/'message' were previously first assigned inside the
    loop, so a POST containing nothing but the CSRF token raised
    UnboundLocalError when building the JsonResponse.

    :param request: the request (must be an AJAX POST)
    :return: JsonResponse with 'status' and a human readable 'message'
    """
    if not request.is_ajax() or not request.method == 'POST':
        return JsonResponse({
            'status': 'failed', 'message': 'not a POST request or ajax call'
        })
    status = "ok"
    message = ""
    for key, value in request.POST.items():
        if key == "csrfmiddlewaretoken":
            continue
        elif key in HCCJC.SESSION_KEYS:
            request.session[key] = value
            status = "ok"
            message += 'Session variable {} was changed to {}.'.format(
                key, value)
        else:
            request.session[key] = value
            status = "failed"
            message += '{} is not a session variable.'.format(value)
    return JsonResponse({
        'status': status,
        'message': message
    })
@api_view(['POST'])
# @authentication_classes((TokenAuthentication, BasicAuthentication))
@permission_classes((IsAuthenticated, ))
def start_harvesters(request, format=None):
    """REST endpoint: start a harvest on every registered harvester."""
    feedback = {}
    for harvester in Harvester.objects.all():
        api = InitHarvester(harvester).get_harvester_api()
        result = api.start_harvest()
        feedback[harvester.name] = result.data[harvester.name]
    return Response(feedback, status=status.HTTP_200_OK)
@api_view(['POST'])
@permission_classes((IsAuthenticated, ))
def start_harvest(request, name, format=None):
    """REST endpoint: start a harvest on the named harvester."""
    harvester = Harvester.objects.get(name=name)
    return InitHarvester(harvester).get_harvester_api().start_harvest()
@api_view(['POST'])
@permission_classes((IsAuthenticated, ))
def stop_harvest(request, name, format=None):
    """REST endpoint: stop the harvest on the named harvester."""
    harvester = Harvester.objects.get(name=name)
    return InitHarvester(harvester).get_harvester_api().stop_harvest()
@api_view(['POST'])
@permission_classes((IsAuthenticated, ))
def stop_harvesters(request, format=None):
    """REST endpoint: stop the harvest on every registered harvester."""
    feedback = {}
    for harvester in Harvester.objects.all():
        api = InitHarvester(harvester).get_harvester_api()
        result = api.stop_harvest()
        feedback[harvester.name] = result.data[harvester.name]
    return Response(feedback, status=status.HTTP_200_OK)
@api_view(['GET'])
@permission_classes((IsAuthenticated, ))
def get_harvester_state(request, name, format=None):
    """REST endpoint: return the status of the named harvester."""
    harvester = get_object_or_404(Harvester, name=name)
    return InitHarvester(harvester).get_harvester_api().harvester_status()
@api_view(['GET'])
@permission_classes((IsAuthenticated, ))
def get_harvester_states(request, format=None):
    """REST endpoint: return the status of every registered harvester."""
    feedback = {}
    for harvester in Harvester.objects.all():
        api = InitHarvester(harvester).get_harvester_api()
        result = api.harvester_status()
        feedback[harvester.name] = result.data[harvester.name]
    return Response(feedback, status=status.HTTP_200_OK)
@login_required
def harvester_data_to_file(request):
    """Export name/notes/url/enabled of all harvesters as a JSON array."""
    fields = ('name', 'notes', 'url', 'enabled')
    data = list(Harvester.objects.values(*fields))
    return JsonResponse(data, safe=False)
@login_required
def upload_file(request):
    """
    Handle a POST upload of a JSON file containing harvester data.

    The file must hold a JSON array of objects, each with the keys
    'name', 'notes', 'url' and 'enabled'.  Existing harvesters are
    updated (their notes are preserved), duplicate URLs are skipped and
    name clashes are resolved by appending a numeric postfix.  Every
    outcome is reported via the messages framework, followed by a
    redirect to the main GUI page.

    Fixes: use .get() for the uploaded file so a POST without the field
    reports a warning instead of raising, and test with isinstance(...,
    dict) — the `collections.Mapping` alias was removed in Python 3.10,
    and json.load() only ever produces plain dicts for JSON objects.
    """
    data = {}
    f = request.FILES.get('upload_file')
    if f is None:
        messages.warning(request, 'Upload failed. No file was submitted.')
        return HttpResponseRedirect(reverse('hcc_gui'))
    # Check if file type is correct and get the content
    if f.content_type == 'application/json':
        try:
            content = json.load(f)
        except json.JSONDecodeError:
            message = (
                'Upload failed. '
                'File content was either wrong formatted or empty. '
                'Must be a JSON array of objects with harvester data.'
            )
            messages.warning(request, message)
            return HttpResponseRedirect(reverse('hcc_gui'))
    else:
        message = (
            'Upload failed. '
            'File type could not been handled. '
            'Must be a JSON file!'
        )
        messages.warning(request, message)
        return HttpResponseRedirect(reverse('hcc_gui'))
    required_keys = ('name', 'notes', 'url', 'enabled')
    for harvester_data in content:
        # 'content' should be a list of dictionaries
        if not isinstance(harvester_data, dict):
            message = (
                'Validation failed. '
                'File content could not been handled.'
                'Should be a list of dictionaries!'
            )
            messages.warning(request, message)
            return HttpResponseRedirect(reverse('hcc_gui'))
        # The json file should contain the required harvester data
        if not all(key in harvester_data for key in required_keys):
            message = (
                'Validation failed. '
                'Key missmatch! Required: name, notes, url, enabled'
            )
            messages.warning(request, message)
            return HttpResponseRedirect(reverse('hcc_gui'))
        data = harvester_data.copy()
        if Harvester.objects.filter(name=harvester_data['name']).exists():
            # Harvester already exists -> update harvester
            harvester = Harvester.objects.get(name=harvester_data['name'])
            data['notes'] = harvester.notes  # Notes should not be updated
            if ((harvester.url == harvester_data['url']
                 and harvester.enabled == harvester_data['enabled'])):
                continue
            elif not harvester.url == harvester_data['url']:
                if Harvester.objects.filter(
                        url=harvester_data['url']).exists():
                    # The url should be unique. Leave the existing harvester
                    # data and ignore the new one.
                    continue
                # Create new Harvester with new url
                harvester = Harvester(owner=request.user)
                counter = 1
                while True:
                    # Loop until the harvester name is not already used
                    postfix = '_{}'.format(counter)
                    temp_name = harvester_data['name'] + postfix
                    if not Harvester.objects.filter(name=temp_name).exists():
                        data['name'] = temp_name
                        break
                    counter += 1
        elif Harvester.objects.filter(url=harvester_data['url']).exists():
            # The url should be unique. Leave the existing harvester data
            # and ignore the new one
            continue
        else:
            # Create a new harvester
            harvester = Harvester(owner=request.user)
        form = ValidateFileForm(data, instance=harvester)
        if form.is_valid():
            form.save()
        else:
            message = (
                'Validation failed. '
                'Content data could not been saved.'
            )
            messages.warning(request, message)
            return HttpResponseRedirect(reverse('hcc_gui'))
    messages.success(request, 'Upload successful!')
    return HttpResponseRedirect(reverse('hcc_gui'))
@login_required
def upload_file_form(request):
    """Render the upload form whose POST is handled by upload_file."""
    context = {'uploadform': UploadFileForm()}
    return render(request, "hcc/file_upload_form.html", context)
class HarvesterCreateView(generics.ListCreateAPIView):
    """
    This class handles the GET and POST requests
    to create/register a new harvester
    to our Harvester Control Center REST-API.
    """
    # Both HTTP basic auth and DRF token auth are accepted here.
    authentication_classes = (BasicAuthentication, TokenAuthentication)
    queryset = Harvester.objects.all()
    serializer_class = HarvesterSerializer
    # Requires authentication; IsOwner restricts access to the owner's objects.
    permission_classes = (permissions.IsAuthenticated, IsOwner)
    def perform_create(self, serializer):
        """Save the post data when creating a new harvester."""
        # Attach the requesting user as the new harvester's owner.
        serializer.save(owner=self.request.user)
class HarvesterDetailsView(generics.RetrieveUpdateDestroyAPIView):
    """
    This class handles GET, PUT, PATCH and DELETE requests.
    To show, modify and delete an registered harvester.
    """
    authentication_classes = (BasicAuthentication, )
    # Harvesters are addressed by their unique name, not by numeric pk.
    lookup_field = 'name'
    queryset = Harvester.objects.all()
    serializer_class = HarvesterSerializer
    # Only the authenticated owner may modify or delete a harvester.
    permission_classes = (permissions.IsAuthenticated, IsOwner)
class UserView(generics.ListAPIView):
    """
    View to list the control center registered users.
    """
    # Read-only listing; authentication via HTTP basic auth.
    authentication_classes = (BasicAuthentication, )
    queryset = User.objects.all()
    serializer_class = UserSerializer
class UserDetailsView(generics.RetrieveAPIView):
    """
    View to retrieve a user instance.
    """
    # Read-only detail view; authentication via HTTP basic auth.
    authentication_classes = (BasicAuthentication, )
    queryset = User.objects.all()
    serializer_class = UserSerializer
class EditHarvesterView(LoginRequiredMixin, View,
                        AjaxableResponseMixin, FormMixin):
    """
    This class handles AJAX, GET, DELETE and POST requests
    to control the edit of the harvesters.

    GET renders the add/edit form for the modal; POST validates and saves
    the submitted data and answers with a JSON status message.
    """
    @staticmethod
    def get(request, *args, **kwargs):  # the form that is loaded into the modal
        """Return the harvester add/edit form.

        Without a ``name`` kwarg an empty form for a fresh harvester is
        rendered; otherwise the form is bound to the named harvester.
        """
        data = {}
        if "name" not in kwargs:
            harvester = Harvester(owner=request.user)
            data['template_title'] = 'Add Harvester'
        else:
            myname = kwargs['name']
            harvester = Harvester.objects.get(name=myname)
            data['template_title'] = "Edit Harvester - {}".format(myname)
            data['hname'] = myname
        data['form'] = HarvesterForm(instance=harvester)
        return render(request, "hcc/harvester_edit_form.html", data)

    def post(self, request, *args, **kwargs):  # the actual logic behind the form
        """Create or update a harvester from the submitted form data."""
        name = self.request.POST.get('name')
        if "name" not in kwargs:  # Add Harvester
            # check if the name is not already used
            if Harvester.objects.filter(name=name).exists():
                return JsonResponse(
                    {'message': 'A Harvester named {} already exists!'.format(name)})
            _h = Harvester(owner=self.request.user)
            action = 'initialised'
            myname = name
        else:  # Edit Harvester
            myname = kwargs['name']
            _h = Harvester.objects.get(name=myname)
            action = 'modified'
        form = HarvesterForm(self.request.POST, instance=_h)
        if form.is_valid():
            form.save()
            message = (
                "{} has been {} successfully!"
                " Please hold on while the page"
                " is reloading.".format(myname, action)
            )
            if action == 'initialised':
                LOGGER.info("new harvester created: {}".format(name))
            response = {'message': message, 'name': myname}
        else:
            # BUGFIX: fixed the grammar of the failure message
            # ("could not been" -> "could not be").
            message = (
                "{} could not be {}!"
                " Please hold on while the page"
                " is reloading.".format(myname, action)
            )
            response = {'message': message}
        return JsonResponse(response)
class ConfigHarvesterView(LoginRequiredMixin, View,
                          AjaxableResponseMixin, FormMixin):
    """
    This class handles GET, DELETE and POST requests
    to control the config of the harvesters.
    """
    @staticmethod
    def get(request, *args, **kwargs):
        # Fetch the current config from the harvester's API and render it
        # as an editable form; on API failure only the message is shown.
        myname = kwargs['name']
        data = {}
        harvester = get_object_or_404(Harvester, name=myname)
        api = InitHarvester(harvester).get_harvester_api()
        response = api.get_harvester_config_data()
        if response.status_code != status.HTTP_200_OK:
            data["message"] = response.data[harvester.name][HCCJC.HEALTH]
        else:
            form = create_config_form(
                response.data[harvester.name][HCCJC.HEALTH])
            data["form"] = form
        data["hname"] = myname
        return render(request, "hcc/harvester_config_form.html", data)
    def post(self, request, *args, **kwargs):
        # Diff the submitted form values against the harvester's current
        # config and push only the changed keys back to the API.
        myname = kwargs['name']
        harvester = get_object_or_404(Harvester, name=myname)
        api = InitHarvester(harvester).get_harvester_api()
        response = api.get_harvester_config_data()
        old_config_data = response.data[harvester.name][HCCJC.HEALTH]
        (fields, old_data) = create_config_fields(old_config_data)
        data = {}
        changes = {}  # before-after data
        config_changes = {}  # only after data to send to api
        for key in fields:
            # In the response all boolean fields are either set "on" if True
            # or None if false. -> convert it
            if self.request.POST.get(key) == "on":
                new_data = "true"
            elif self.request.POST.get(key) is None:
                new_data = "false"
            else:
                new_data = self.request.POST.get(key)
            if(old_data[key] != new_data):
                changes[key] = {"before": old_data[key], "after": new_data}
                config_changes[key] = new_data
        if len(changes) > 0:
            response = api.save_harvester_config_data(config_changes)
            data["changes"] = changes
        else:
            # Nothing changed: short-circuit without calling the API.
            return JsonResponse({
                "status": "unchanged",
                "message": "There have been no changes!"
            })
        message = response.data[harvester.name][HCCJC.HEALTH]["message"]
        data["message"] = message
        data["status"] = response.data[harvester.name][HCCJC.HEALTH]["status"]
        # The harvester may accept some parameters and reject others;
        # surface that mixed outcome as a distinct status.
        if ("Cannot change value" in message) and ("Set parameter" in message):
            data["status"] = "some issues"
        return JsonResponse(data)
class ScheduleHarvesterView(
        SuccessMessageMixin, RedirectView, AjaxableResponseMixin, FormMixin):
    """
    This class handles GET, DELETE and POST requests
    to control the scheduling of harvesters.
    """
    # BUGFIX: the named placeholder was missing its trailing conversion
    # character ("%(name)" is invalid %-formatting and raises ValueError
    # when SuccessMessageMixin renders the message).
    success_message = "Schedule for %(name)s was created successfully"
    @staticmethod
    def get(request, *args, **kwargs):
        # No page to render; scheduling is driven via POST/DELETE only.
        pass
    def post(self, request, *args, **kwargs):
        """Add a schedule from the posted crontab, or delete when absent."""
        myname = kwargs['name']
        harvester = get_object_or_404(Harvester, name=myname)
        api = InitHarvester(harvester).get_harvester_api()
        crontab = request.POST.get(harvester.name + "-" + HCCJC.POSTCRONTAB,
                                   False)
        if crontab:
            response = api.add_schedule(crontab)
        else:
            response = api.delete_schedule(crontab)
        return JsonResponse(response.data[harvester.name][HCCJC.HEALTH])
    def delete(self, request, *args, **kwargs):
        """Delete the schedule named in the JSON request body and redirect."""
        myname = kwargs['name']
        harvester = get_object_or_404(Harvester, name=myname)
        api = InitHarvester(harvester).get_harvester_api()
        data = json.loads(request.body)
        response = api.delete_schedule(data[HCCJC.POSTCRONTAB])
        messages.add_message(
            request, messages.INFO, harvester.name + ': '
            + response.data[harvester.name][HCCJC.HEALTH])
        return HttpResponseRedirect(reverse('hcc_gui'))
| 36.036173
| 96
| 0.628307
| 7,774
| 0.251692
| 0
| 0
| 17,346
| 0.561595
| 0
| 0
| 8,176
| 0.264707
|
a7f8c76db9c1ab40ada45ae9f5ec62c61c102d7a
| 8,151
|
py
|
Python
|
rin/modules/setu/lolicon.py
|
oralvi/rinyuuki
|
2b55a5a9f0ebbecbdba815e242450b248c8e727a
|
[
"MIT"
] | null | null | null |
rin/modules/setu/lolicon.py
|
oralvi/rinyuuki
|
2b55a5a9f0ebbecbdba815e242450b248c8e727a
|
[
"MIT"
] | null | null | null |
rin/modules/setu/lolicon.py
|
oralvi/rinyuuki
|
2b55a5a9f0ebbecbdba815e242450b248c8e727a
|
[
"MIT"
] | null | null | null |
import datetime
import io
import json
import os
import random
import traceback
import aiohttp
from PIL import Image
import rin
from rin import R
from .config import get_api_num, get_config, key_vaildable_query, set_key_invaild
# Time until which the lolicon API quota is exhausted (updated on HTTP 429).
quota_limit_time = datetime.datetime.now()
def generate_image_struct():
    """Return a fresh image record with every field set to its default."""
    image = {}
    image['id'] = 0
    image['url'] = ''
    image['title'] = ''
    image['author'] = ''
    image['tags'] = []
    image['r18'] = False
    image['data'] = None
    image['native'] = False
    return image
# Offline-mode metadata caches, keyed by pixiv id; filled by lolicon_init().
native_info = {}
native_r18_info = {}
def load_native_info(sub_dir):
    """Load locally cached image metadata from ``setu_mix/<sub_dir>``.

    Scans the resource directory for ``<pixiv_id>.json`` files and builds a
    mapping ``{pixiv_id: "tag1,tag2,...,title,author"}`` used for keyword
    search in offline mode.  Unreadable or malformed entries are skipped.
    """
    info = {}
    path = 'setu_mix/' + sub_dir
    res = R.img(path)
    if not os.path.exists(res.path):
        return info
    fnlist = os.listdir(res.path)
    for fn in fnlist:
        s = fn.split('.')
        # only accept files named "<digits>.json"
        if len(s) != 2 or s[1] != 'json' or not s[0].isdigit():
            continue
        uid = int(s[0])
        try:
            with open(res.path + '/' + fn, encoding='utf8') as f:
                d = json.load(f)
            # title and author take part in keyword matching as well
            d['tags'].append(d['title'])
            d['tags'].append(d['author'])
            info[uid] = ','.join(d['tags'])
        except Exception:
            # BUGFIX: a bare "except:" also swallowed SystemExit and
            # KeyboardInterrupt; only ordinary errors are ignored now.
            pass
    rin.logger.info(f'[INFO]read{len(info)}setu from{sub_dir}')
    return info
# Fetch a batch of random images from the lolicon API.
async def query_setu(r18=0, keyword=None):
    """Query the lolicon API for up to 10 images.

    r18: 0 = safe, 1 = r18, 2 = mixed (forwarded to the API as-is).
    keyword: optional search keyword.
    Returns a list of image structs; empty on any error.  On HTTP 429 the
    current API key is marked invalid until the reported quota reset time.
    """
    global quota_limit_time
    image_list = []
    apikey = get_config('lolicon', 'apikey')
    # NOTE(review): this tests the truthiness of the imported name itself,
    # not a call result; if key_vaildable_query is a function the guard can
    # never trigger — possibly a missing call. Confirm against .config.
    if not key_vaildable_query:
        return image_list
    data = {}
    url = 'https://api.lolicon.app/setu'
    params = {
        'apikey': apikey,
        'r18': r18,
        'num': 10,
    }
    if keyword:
        params['keyword'] = keyword
    if get_config('lolicon', 'use_thumb'):
        # ask the API for 1200px thumbnails instead of full-size originals
        params['size1200'] = 'true'
    if get_config('lolicon', 'pixiv_direct'):
        # disable the API-side proxy; images will be fetched from pixiv directly
        params['proxy'] = 'disable'
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, params=params, proxy=get_config('lolicon', 'lolicon_proxy')) as resp:
                data = await resp.json(content_type='application/json')
    except Exception:
        traceback.print_exc()
        return image_list
    if 'code' not in data:
        return image_list
    if data['code'] != 0:
        rin.logger.error(f'[ERROR]lolicon api error:{data["code"]},msg:{data["msg"]}')
        if data['code'] == 429:
            # quota exhausted: remember the reset time and invalidate the key
            quota_limit_time = datetime.datetime.now(
            ) + datetime.timedelta(seconds=data['quota_min_ttl'])
            rin.logger.error(f'[ERROR]lolicon api 已到达本日调用额度上限,次数+1时间:{quota_limit_time}s')
            set_key_invaild(apikey, quota_limit_time)
        return image_list
    for item in data['data']:
        # convert each API record into the internal image struct
        image = generate_image_struct()
        image['id'] = item['pid']
        image['title'] = item['title']
        image['url'] = item['url']
        image['tags'] = item['tags']
        image['r18'] = item['r18']
        image['author'] = item['author']
        image_list.append(image)
    return image_list
async def download_image(url: str):
    """Fetch an image via the configured proxy and return it as JPEG bytes.

    Returns ``None`` when the download or the conversion fails.
    """
    rin.logger.info(f'[INFO]lolicon downloading image:{url}')
    try:
        proxy = get_config('lolicon', 'lolicon_proxy')
        async with aiohttp.ClientSession() as session:
            async with session.get(url, proxy=proxy) as resp:
                raw = await resp.read()
        # re-encode whatever arrived as an RGB JPEG
        img = Image.open(io.BytesIO(raw))
        if img.mode != 'RGB':
            img = img.convert('RGB')
        out = io.BytesIO()
        img.save(out, format='JPEG')
        return out.getvalue()
    except Exception as e:
        rin.logger.error(
            '[ERROR]download image {} failed,error {}'.format(url, e))
        return None
async def download_pixiv_image(url: str, id):
    """Fetch an image directly from pixiv and return it as JPEG bytes.

    Pixiv requires a ``referer`` header pointing at the illustration page.
    Returns ``None`` when the download or the conversion fails.
    """
    # BUGFIX: the url was passed as a stray positional argument to
    # logger.info() without a %s placeholder and therefore silently
    # dropped; use an f-string like download_image() does.
    rin.logger.info(f'[INFO]lolicon downloading pixiv image:{url}')
    headers = {
        'referer': f'https://www.pixiv.net/member_illust.php?mode=medium&illust_id={id}'
    }
    try:
        async with aiohttp.ClientSession(headers=headers) as session:
            async with session.get(url, proxy=get_config('lolicon', 'pixiv_proxy')) as resp:
                data = await resp.read()
                # re-encode the downloaded image as an RGB JPEG
                byte_stream = io.BytesIO(data)
                roiImg = Image.open(byte_stream)
                if roiImg.mode != 'RGB':
                    roiImg = roiImg.convert('RGB')
                imgByteArr = io.BytesIO()
                roiImg.save(imgByteArr, format='JPEG')
                return imgByteArr.getvalue()
    except Exception as e:
        rin.logger.error(
            '[ERROR]download image {} failed,error {}'.format(url, e))
        return None
def save_image(image):
    """Persist a downloaded image and its metadata under setu_mix/."""
    subdir = 'lolicon_r18' if image['r18'] else 'lolicon'
    base = f'setu_mix/{subdir}/{image["id"]}'
    with open(R.img(base + '.jpg').path, 'wb') as f:
        f.write(image['data'])
    info = {
        'title': image['title'],
        'author': image['author'],
        'url': image['url'],
        'tags': image['tags'],
    }
    with open(R.img(base + '.json').path, 'w', encoding='utf8') as f:
        json.dump(info, f, ensure_ascii=False, indent=2)
async def get_setu_online(num, r18=0, keyword=None):
    """Fetch up to ``num`` images from the lolicon API.

    Candidates already cached on disk are served from the local copy; new
    downloads are cached when mode == 2.  Returns a list of image structs
    (possibly shorter than ``num``).
    """
    image_list = await query_setu(r18=r18, keyword=keyword)
    # BUGFIX: compare with None by identity ("is None", not "== None").
    if get_api_num() != 1:
        # with several API keys available, retry until a key answers
        while image_list is None:
            image_list = await query_setu(r18=r18, keyword=keyword)
    else:
        if image_list is None:
            return
    valid_list = []
    while len(image_list) > 0:
        # examine candidates in random order
        image = image_list.pop(random.randint(0, len(image_list) - 1))
        # serve from the local cache when this pixiv id was stored before
        path = f'setu_mix/lolicon/{image["id"]}.jpg'
        if image['r18']:
            path = f'setu_mix/lolicon_r18/{image["id"]}.jpg'
        res = R.img(path)
        if os.path.exists(res.path):
            image['data'] = res.path
            image['native'] = True
        else:
            if get_config('lolicon', 'pixiv_direct'):
                image['data'] = await download_pixiv_image(image['url'], image['id'])
            else:
                image['data'] = await download_image(image['url'])
            image['native'] = False
            if image['data'] and get_config('lolicon', 'mode') == 2:
                # cache-and-serve mode: store the download locally
                save_image(image)
                image['data'] = res.path
        if image['data']:
            valid_list.append(image)
        if len(valid_list) >= num:
            break
    return valid_list
def get_setu_native(r18=0, uid=0):
    """Pick an image from the local cache.

    r18: 0 = safe only, 1 = r18 only, 2 = 50/50 mix.
    uid: pixiv id to load; 0 picks a random cached file.
    Returns an image struct; ``data`` stays None when nothing was found.
    """
    image = generate_image_struct()
    path = f'setu_mix/lolicon'
    if r18 == 1:
        path = f'setu_mix/lolicon_r18'
    elif r18 == 2:
        # mixed mode: coin flip between the safe and the r18 cache
        if random.randint(1, 100) > 50:
            path = f'setu_mix/lolicon_r18'
    res = R.img(path)
    if not os.path.exists(res.path):
        return image
    if uid == 0:
        fn = random.choice(os.listdir(res.path))
        if fn.split('.')[0].isdigit():
            uid = int(fn.split('.')[0])
    if not uid:
        return image
    image['id'] = int(uid)
    image['native'] = True
    path += f'/{uid}'
    res = R.img(path)
    try:
        image['data'] = res.path + '.jpg'
        with open(res.path + '.json', encoding='utf8') as f:
            d = json.load(f)
        if 'title' in d:
            image['title'] = d['title']
        if 'author' in d:
            image['author'] = d['author']
        if 'url' in d:
            image['url'] = d['url']
    except Exception:
        # BUGFIX: a bare "except:" also swallowed SystemExit and
        # KeyboardInterrupt; metadata is best-effort, so ordinary
        # errors are still ignored.
        pass
    return image
def search_setu_native(keyword, r18, num):
    """Search the offline metadata caches for ``keyword`` and load matches.

    r18: 0 = safe only, 1 = r18 only, 2 = both caches.
    At most ``num`` randomly sampled matches are returned.
    """
    candidates = []
    if r18 in (0, 2):
        candidates.extend(
            {'uid': uid, 'r18': 0}
            for uid, tags in native_info.items()
            if keyword in tags
        )
    if r18 in (1, 2):
        candidates.extend(
            {'uid': uid, 'r18': 1}
            for uid, tags in native_r18_info.items()
            if keyword in tags
        )
    if len(candidates) > num:
        candidates = random.sample(candidates, num)
    image_list = []
    for cand in candidates:
        image = get_setu_native(cand['r18'], cand['uid'])
        if image['data']:
            image_list.append(image)
    return image_list
# r18: 0 = safe, 1 = r18, 2 = mixed
async def lolicon_get_setu(r18):
    """Return a single image struct, or None when nothing is available.

    Mode >= 2 serves from the local cache; mode 1 fetches online.
    """
    mode = get_config('lolicon', 'mode')
    if mode >= 2:
        return get_setu_native(r18)
    if mode == 1:
        images = await get_setu_online(1, r18)
        return images[0] if len(images) > 0 else None
    return None
# r18: 0 = safe, 1 = r18, 2 = mixed
async def lolicon_search_setu(keyword, r18, num):
    """Search for up to ``num`` images matching ``keyword``."""
    mode = get_config('lolicon', 'mode')
    if mode in (1, 2):
        return await get_setu_online(num, r18, keyword)
    if mode == 3:  # offline mode
        return search_setu_native(keyword, r18, num)
    return None
async def lolicon_fetch_process():
    """Periodic prefetch job: warm the local cache when mode == 2."""
    if get_config('lolicon', 'mode') != 2:
        return
    rin.logger.info('[INFO]fetch lolicon setu')
    await get_setu_online(10, 0)
    # r18 prefetch only when enabled both per-module and globally
    if not get_config('lolicon', 'r18') or not get_config('default', 'lolicon_r18'):
        return
    rin.logger.info('[INFO]fetch lolicon r18 setu')
    await get_setu_online(10, 1)
def lolicon_init():
    """Populate the offline search caches when running in offline mode."""
    global native_info
    global native_r18_info
    if get_config('lolicon', 'mode') != 3:
        return
    native_info = load_native_info('lolicon')
    native_r18_info = load_native_info('lolicon_r18')
| 25.392523
| 100
| 0.655502
| 0
| 0
| 0
| 0
| 0
| 0
| 4,956
| 0.600946
| 1,906
| 0.231114
|
a7fa41d77e47cb4e42dcb175ead24d162418cceb
| 363
|
py
|
Python
|
Python Backend/diarization/build/lib/s4d/__init__.py
|
AdityaK1211/Final_Year_Project_SCET
|
1a6092e1345dad473375ada787fb5cb00ee7515f
|
[
"MIT"
] | 1
|
2022-02-15T02:49:22.000Z
|
2022-02-15T02:49:22.000Z
|
Python Backend/diarization/build/lib/s4d/__init__.py
|
AdityaK1211/Final_Year_Project_SCET
|
1a6092e1345dad473375ada787fb5cb00ee7515f
|
[
"MIT"
] | null | null | null |
Python Backend/diarization/build/lib/s4d/__init__.py
|
AdityaK1211/Final_Year_Project_SCET
|
1a6092e1345dad473375ada787fb5cb00ee7515f
|
[
"MIT"
] | 2
|
2021-07-11T12:42:43.000Z
|
2022-02-15T02:49:24.000Z
|
__author__ = 'meignier'
import s4d.clustering.hac_bic
import s4d.clustering.hac_clr
import s4d.clustering.hac_iv
import s4d.clustering.hac_utils
import s4d.model_iv
from s4d.clustering.cc_iv import ConnectedComponent
from s4d.diar import Diar
from s4d.segmentation import sanity_check, bic_linear, div_gauss
from s4d.viterbi import Viterbi
__version__ = "0.0.1"
| 27.923077
| 64
| 0.837466
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 17
| 0.046832
|
a7faceab673a31a756534245b8aaabc503d661d6
| 1,217
|
py
|
Python
|
docs/demos/theme_explorer/list_group.py
|
sthagen/facultyai-dash-bootstrap-components
|
2dd5eaf1c1494b2077bcee82eb7968ec2e23af46
|
[
"Apache-2.0"
] | 50
|
2018-09-23T08:57:28.000Z
|
2019-02-02T19:59:35.000Z
|
docs/demos/theme_explorer/list_group.py
|
sthagen/dash-bootstrap-components
|
d79ad7f8fdf4c26165038e6989e24f2ac17663b1
|
[
"Apache-2.0"
] | 99
|
2018-09-21T11:06:29.000Z
|
2019-02-04T09:04:07.000Z
|
docs/demos/theme_explorer/list_group.py
|
sthagen/dash-bootstrap-components
|
d79ad7f8fdf4c26165038e6989e24f2ac17663b1
|
[
"Apache-2.0"
] | 3
|
2018-09-25T02:16:24.000Z
|
2018-12-22T20:56:31.000Z
|
import dash_bootstrap_components as dbc
from dash import html
from .util import make_subheading
# Demo section for the theme explorer: shows every ListGroupItem color
# variant, an action item, an active item, and an item with child markup.
list_group = html.Div(
    [
        make_subheading("ListGroup", "list_group"),
        dbc.ListGroup(
            [
                dbc.ListGroupItem("No color applied"),
                dbc.ListGroupItem("The primary item", color="primary"),
                dbc.ListGroupItem("A secondary item", color="secondary"),
                dbc.ListGroupItem("A successful item", color="success"),
                dbc.ListGroupItem("A warning item", color="warning"),
                dbc.ListGroupItem("A dangerous item", color="danger"),
                dbc.ListGroupItem("An informative item", color="info"),
                dbc.ListGroupItem("A light item", color="light"),
                dbc.ListGroupItem("A dark item", color="dark"),
                dbc.ListGroupItem("An action item", action=True),
                dbc.ListGroupItem("An active item", active=True),
                dbc.ListGroupItem(
                    [
                        html.H5("Item 4 heading"),
                        html.P("Item 4 text"),
                    ]
                ),
            ]
        ),
    ],
    className="mb-4",
)
| 36.878788
| 73
| 0.520953
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 310
| 0.254725
|
a7fd2734a072f2bcad84ee84ad66f361e1da1371
| 865
|
py
|
Python
|
products/migrations/0004_auto_20210715_2006.py
|
keeks-mtl/go-tennis
|
af3f325a9cfb2faba4d935824492f4aea6d10309
|
[
"W3C",
"PostgreSQL"
] | null | null | null |
products/migrations/0004_auto_20210715_2006.py
|
keeks-mtl/go-tennis
|
af3f325a9cfb2faba4d935824492f4aea6d10309
|
[
"W3C",
"PostgreSQL"
] | null | null | null |
products/migrations/0004_auto_20210715_2006.py
|
keeks-mtl/go-tennis
|
af3f325a9cfb2faba4d935824492f4aea6d10309
|
[
"W3C",
"PostgreSQL"
] | null | null | null |
# Generated by Django 3.2.3 on 2021-07-15 20:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Tighten max_length on category and product character fields."""
    # Auto-generated by Django (makemigrations); edit with care.
    dependencies = [
        ('products', '0003_auto_20210709_0117'),
    ]
    operations = [
        migrations.AlterField(
            model_name='category',
            name='friendly_name',
            field=models.CharField(blank=True, max_length=50),
        ),
        migrations.AlterField(
            model_name='category',
            name='name',
            field=models.CharField(max_length=50),
        ),
        migrations.AlterField(
            model_name='product',
            name='name',
            field=models.CharField(max_length=100),
        ),
        migrations.AlterField(
            model_name='product',
            name='sku',
            field=models.CharField(max_length=50),
        ),
    ]
| 25.441176
| 62
| 0.557225
| 772
| 0.892486
| 0
| 0
| 0
| 0
| 0
| 0
| 152
| 0.175723
|
a7fd5742db5fc95146713081d7d7a20b702afa5b
| 5,967
|
py
|
Python
|
task_landscape.py
|
aspnetcs/RecurJac-and-CROWN
|
5b3fcaaa7a275483e26164506f66a618538ee881
|
[
"BSD-2-Clause"
] | 54
|
2020-09-09T12:43:43.000Z
|
2022-03-17T17:31:19.000Z
|
task_landscape.py
|
huanzhang12/RecurJac-Jacobian-Bounds
|
163c84e7a8d345d18c718cf6b0bc61baa8a1a78a
|
[
"BSD-2-Clause"
] | 8
|
2020-09-23T05:11:31.000Z
|
2022-03-12T00:47:29.000Z
|
task_landscape.py
|
huanzhang12/RecurJac
|
163c84e7a8d345d18c718cf6b0bc61baa8a1a78a
|
[
"BSD-2-Clause"
] | 5
|
2020-09-10T07:19:43.000Z
|
2021-07-24T06:28:04.000Z
|
## task_landscape.py
##
## Run RecurJac/FastLip bounds for exploring local optimization landscape
##
## Copyright (C) 2018, Huan Zhang <huan@huan-zhang.com> and contributors
##
## This program is licenced under the BSD 2-Clause License,
## contained in the LICENCE file in this directory.
## See CREDITS for a list of contributors.
##
import time
import numpy as np
from collections import defaultdict
from utils import binary_search
from bound_base import compute_bounds
from bound_spectral import spectral_bound
class task(object):
    """Explore the local optimization landscape around input points.

    For every input image the task finds, via binary search, the largest
    perturbation radius at which some neurons are still stable, scans
    local Lipschitz bounds over that radius range, and cross-checks the
    zero-radius bound against the actual network gradient norm.
    """

    def __init__(self, **kwargs):
        # add all arguments as instance attributes
        # BUGFIX: use setattr() instead of exec() on a formatted code
        # string; exec broke on keys that are not valid identifiers and
        # defeats static analysis.
        for k, v in kwargs.items():
            if not k.startswith("__"):
                setattr(self, k, v)
        assert self.args.jacbndalg == "recurjac" or self.args.jacbndalg == "fastlip"
        assert self.args.layerbndalg == "crown-general" or self.args.layerbndalg == "crown-adaptive" or self.args.layerbndalg == "fastlin"
        assert self.targeted == True
        self.n_points = 0
        self.sum_max_eps = 0.0
        self.sum_lipschitz_max = 0.0
        print("starting stationary point discovery on {} images!".format(len(self.inputs)))

    def warmup(self, **kwargs):
        """Run one bound computation once so later timings exclude JIT cost."""
        args = self.args
        compute_bounds(self.weights, self.biases, 0, -1, self.inputs[0], self.preds[0], self.numlayer, args.norm, 0.01, args.layerbndalg, args.jacbndalg, untargeted = not self.targeted, use_quad = args.quad, activation = self.activation, activation_param = self.activation_param, lipsdir = args.lipsdir, lipsshift = args.lipsshift)

    def _update_stats(self, current, lipschitz_const, n_uns):
        """Record the Lipschitz bound and uncertain-neuron count at radius ``current``."""
        self.min_lipschitz = min(self.min_lipschitz, lipschitz_const)
        self.max_lipschitz = max(self.max_lipschitz, lipschitz_const)
        self.all_lipschitz[current] = lipschitz_const
        self.all_n_uns[current] = n_uns

    def run_single(self, i):
        """Process image ``i``: find the max radius, scan bounds, verify gradient."""
        args = self.args
        weights = self.weights
        biases = self.biases
        inputs = self.inputs
        preds = self.preds
        eps = args.eps
        self.n_points += 1
        predict_label = np.argmax(self.true_labels[i])
        target_label = -1
        start = time.time()
        self.all_lipschitz = defaultdict(float)
        self.all_n_uns = defaultdict(float)
        self.min_lipschitz = np.inf
        self.max_lipschitz = 0.0
        # binary search to find the largest eps that has at least one non-zero element
        def binary_search_cond(current):
            _, _, lipschitz_const, n_uns = compute_bounds(weights, biases, predict_label, target_label, inputs[i], preds[i], self.numlayer, args.norm, current, args.layerbndalg, args.jacbndalg, untargeted = not self.targeted, use_quad = args.quad, activation = self.activation, activation_param = self.activation_param, lipsdir = args.lipsdir, lipsshift = args.lipsshift)
            # condition holds while not every first-layer neuron is uncertain
            return n_uns < weights[0].shape[1], n_uns
        # the search's upper limit depends on the perturbation norm
        if args.norm == 1:
            upper_limit = 100.0
        if args.norm == 2:
            upper_limit = 10.0
        if args.norm == np.inf:
            upper_limit = 1.0
        max_eps = binary_search(binary_search_cond, eps, upper_limit = upper_limit)
        self.sum_max_eps += max_eps
        # then do a linear scan with args.lipstep steps
        for current in np.linspace(0.0, max_eps, args.lipsteps + 1):
            _, _, lipschitz_const, n_uns = compute_bounds(weights, biases, predict_label, target_label, inputs[i], preds[i], self.numlayer, args.norm, current, args.layerbndalg, args.jacbndalg, untargeted = not self.targeted, use_quad = args.quad, activation = self.activation, activation_param = self.activation_param, lipsdir = args.lipsdir, lipsshift = args.lipsshift)
            self._update_stats(current, lipschitz_const[0], n_uns)
        s = []
        for current in np.linspace(0.0, max_eps, args.lipsteps + 1):
            s.append("lipschitz[{:.5f}] = {:.5f}".format(current, self.all_lipschitz[current]))
        print("[L1] " + ", ".join(s))
        s = []
        for current in np.linspace(0.0, max_eps, args.lipsteps + 1):
            s.append("uncertain[{:.5f}] = {:.5f}".format(current, self.all_n_uns[current]))
        print("[L1] " + ", ".join(s))
        self.sum_lipschitz_max += self.max_lipschitz
        # get the gradient at this data point
        gradients = self.model.get_gradient(inputs[i:i+1])
        obj_grad = gradients[predict_label]
        # q is the dual norm of the perturbation norm p
        q = int(1.0/ (1.0 - 1.0/args.norm)) if args.norm != 1 else np.inf
        grad_norm = np.linalg.norm(obj_grad.flatten(), ord = q)
        predictions = self.model.model.predict(inputs[i:i+1])
        margin = predictions[0][predict_label]
        print("[L1] model = {}, seq = {}, id = {}, true_class = {}, target_class = {}, info = {}, lipschitz_min = {:.5f}, lipschitz_max = {:.5f}, max_eps = {}, margin = {:.4f}, grad_norm = {:.4f}, time = {:.4f}".format(self.modelfile, i, self.true_ids[i], predict_label, target_label, self.img_info[i], self.min_lipschitz, self.max_lipschitz, max_eps, margin, grad_norm, time.time() - start))
        # sanity check: at radius 0 the bound must equal the gradient norm
        assert(np.allclose(grad_norm, self.min_lipschitz))

    def summary(self, **kwargs):
        """Print averaged statistics; add a spectral global bound when forced."""
        # compute and report global Lipschitz constant
        if self.force_label:
            _, lipschitz_const = spectral_bound(self.weights, self.biases, self.force_label, -1, self.inputs[0], self.preds[0], self.numlayer, self.activation, self.args.norm, not self.targeted)
            print("[L0] model = {}, numimage = {}, avg_max_eps = {:.5f}, avg_lipschitz_max = {:.4f}, opnorm_global_lipschitz = {:.4f}".format(self.modelfile, self.n_points, self.sum_max_eps / self.n_points, self.sum_lipschitz_max / self.n_points, lipschitz_const))
        else:
            print("[L0] model = {}, numimage = {}, avg_max_eps = {:.5f}, avg_lipschitz_max = {:.4f}".format(self.modelfile, self.n_points, self.sum_max_eps / self.n_points, self.sum_lipschitz_max / self.n_points))
| 54.743119
| 392
| 0.657282
| 5,447
| 0.912854
| 0
| 0
| 0
| 0
| 0
| 0
| 1,180
| 0.197754
|
a7fe6e62a19f61aac68612f736fdb8db8ad2bc69
| 5,956
|
py
|
Python
|
tests/test_prepareDeploymentContainerDefinitionsStep.py
|
AdventielFr/ecs-crd-cli
|
f1421055be0b2b25e5334aef277d27bc30f161e5
|
[
"MIT"
] | 1
|
2020-07-22T15:18:51.000Z
|
2020-07-22T15:18:51.000Z
|
tests/test_prepareDeploymentContainerDefinitionsStep.py
|
AdventielFr/ecs-crd-cli
|
f1421055be0b2b25e5334aef277d27bc30f161e5
|
[
"MIT"
] | 4
|
2020-03-24T17:30:40.000Z
|
2021-06-02T00:23:48.000Z
|
tests/test_prepareDeploymentContainerDefinitionsStep.py
|
AdventielFr/ecs-crd-cli
|
f1421055be0b2b25e5334aef277d27bc30f161e5
|
[
"MIT"
] | 2
|
2019-09-24T15:21:56.000Z
|
2021-07-05T07:25:20.000Z
|
import pytest
from unittest.mock import MagicMock
import logging
from ecs_crd.canaryReleaseInfos import CanaryReleaseInfos
from ecs_crd.prepareDeploymentContainerDefinitionsStep import PrepareDeploymentContainerDefinitionsStep
from ecs_crd.canaryReleaseInfos import ScaleInfos
# Shared fixtures: a dummy logger and one step instance reused by every test.
logger = logging.Logger('mock')
infos = CanaryReleaseInfos(action='test')
step = PrepareDeploymentContainerDefinitionsStep(infos, logger)
def test_process_container_name_valid():
    """Default and explicit container names are applied to the target."""
    # default
    source = {}
    target = {}
    step._process_container_name(source, target)
    # BUGFIX: these were bare comparisons whose results were silently
    # discarded; they are now real assertions.
    assert target['Name'] == 'default'
    # with name
    source = {}
    source['name'] = 'test'
    target = {}
    step._process_container_name(source, target)
    assert target['Name'] == source['name']
def test_process_container_image_valid():
    """Image defaults to the ECR url built from account/region/service.

    BUGFIX: this function was also named test_process_container_name_valid,
    which shadowed the previous test (so it never ran); renamed to match
    the _process_container_image behaviour it actually checks.
    """
    # default
    source = {}
    target = {}
    step.infos.account_id = '123456789'
    step.infos.region = 'eu-west-3'
    step.infos.service_name = 'service'
    step.infos.service_version = 'latest'
    step._process_container_image(source, target)
    assert target['Image'] == '123456789.dkr.ecr.eu-west-3.amazonaws.com/service:latest'
    # with explicit image
    source = {}
    source['image'] = 'test'
    target = {}
    step._process_container_image(source, target)
    assert target['Image'] == source['image']
def test_process_container_cpu_invalid():
    """A non-numeric cpu value raises ValueError."""
    with pytest.raises(ValueError):
        step._process_container_cpu({'cpu': 'a'}, {})
def test_process_container_cpu_valid():
    """Cpu defaults to 128 and otherwise copies the source value."""
    # default value
    target = {}
    step._process_container_cpu({}, target)
    assert target['Cpu'] == 128
    # explicit value
    target = {}
    step._process_container_cpu({'cpu': 256}, target)
    assert target['Cpu'] == 256
def test_process_container_entry_point_valid():
    """A list entry point is joined into a comma-separated string."""
    target = {}
    step._process_container_entry_point({'entry_point': ['a', 'b']}, target)
    assert target['EntryPoint'] == 'a,b'
def test_process_container_entry_point_invalid():
    """A non-list entry point raises ValueError."""
    with pytest.raises(ValueError):
        step._process_container_entry_point({'entry_point': 'a'}, {})
def test_process_container_command_valid():
    """A command list is copied element-wise to the target."""
    target = {}
    step._process_container_command({'command': ['a', 'b']}, target)
    assert len(target['Command']) == 2
    assert target['Command'][0] == 'a'
    assert target['Command'][1] == 'b'
def test_process_container_command_invalid():
    """A non-list command raises ValueError."""
    with pytest.raises(ValueError):
        step._process_container_command({'command': 'b'}, {})
def test_process_container_dns_search_domains_valid():
    """A list of search domains is copied element-wise to the target."""
    target = {}
    step._process_container_dns_search_domains(
        {'dns_search_domains': ['a', 'b']}, target)
    assert len(target['DnsSearchDomains']) == 2
    assert target['DnsSearchDomains'][0] == 'a'
    assert target['DnsSearchDomains'][1] == 'b'
def test_process_container_dns_search_domains_invalid():
    """A non-list dns_search_domains raises ValueError.

    BUGFIX: the function was missing the ``test_`` prefix, so pytest
    never collected or ran it.
    """
    source = {}
    source['dns_search_domains'] = 'b'
    target = {}
    with pytest.raises(ValueError):
        step._process_container_dns_search_domains(source, target)
def test_process_container_disable_networking_valid():
    """Both True and False disable_networking values are copied through."""
    for flag in (True, False):
        target = {}
        step._process_container_disable_networking(
            {'disable_networking': flag}, target)
        assert target['DisableNetworking'] == flag
def test_process_container_disable_networking_invalid():
    """A non-boolean disable_networking raises ValueError.

    BUGFIX: the function was missing the ``test_`` prefix, so pytest
    never collected or ran it.
    """
    source = {}
    source['disable_networking'] = 'b'
    target = {}
    with pytest.raises(ValueError):
        step._process_container_disable_networking(source, target)
def test_process_container_dns_servers_valid():
    """A list of dns servers is copied element-wise to the target."""
    target = {}
    step._process_container_dns_servers({'dns_servers': ['a', 'b']}, target)
    assert len(target['DnsServers']) == 2
    assert target['DnsServers'][0] == 'a'
    assert target['DnsServers'][1] == 'b'
def test_process_container_dns_servers_invalid():
    """A non-list dns_servers raises ValueError.

    BUGFIX: the function was missing the ``test_`` prefix, so pytest
    never collected or ran it.
    """
    source = {}
    source['dns_servers'] = 'b'
    target = {}
    with pytest.raises(ValueError):
        step._process_container_dns_servers(source, target)
def test_process_container_start_timeout_invalid():
    """A non-numeric start_timeout raises ValueError."""
    with pytest.raises(ValueError):
        step._process_container_start_timeout({'start_timeout': 'a'}, {})
def test_process_container_start_timeout_valid():
    """A numeric start_timeout is copied to the target."""
    target = {}
    step._process_container_start_timeout({'start_timeout': 60}, target)
    assert target['StartTimeout'] == 60
def test_process_container_stop_timeout_invalid():
    """A non-numeric stop_timeout raises ValueError."""
    with pytest.raises(ValueError):
        step._process_container_stop_timeout({'stop_timeout': 'a'}, {})
def test_process_container_stop_timeout_valid():
    """A numeric stop_timeout is copied to the target."""
    target = {}
    step._process_container_stop_timeout({'stop_timeout': 60}, target)
    assert target['StopTimeout'] == 60
def test_process_container_hostname_valid():
    """An explicit hostname is copied to the target."""
    target = {}
    step._process_container_hostname({'hostname': 'a'}, target)
    assert target['Hostname'] == 'a'
| 30.54359
| 103
| 0.684184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 978
| 0.164204
|
a7fef4c124b33416bb39eb6677220ad02a959e38
| 4,871
|
py
|
Python
|
Sim/A_star/A_star.py
|
Chains99/Battlefield-Simulator
|
9dc209c34aac5160232e47d6799bbe1b1bfcebad
|
[
"MIT"
] | null | null | null |
Sim/A_star/A_star.py
|
Chains99/Battlefield-Simulator
|
9dc209c34aac5160232e47d6799bbe1b1bfcebad
|
[
"MIT"
] | null | null | null |
Sim/A_star/A_star.py
|
Chains99/Battlefield-Simulator
|
9dc209c34aac5160232e47d6799bbe1b1bfcebad
|
[
"MIT"
] | null | null | null |
from Sim.A_star.heap import node_heap,heapify_down,heapify_up,extract_min,append_node
from math import inf
from math import pow
# Return the Euclidean distance between two (row, col) points.
def euclidean_distance(x, y):
    """Compute the straight-line distance between matrix positions x and y."""
    dx = x[0] - y[0]
    dy = x[1] - y[1]
    return (dx * dx + dy * dy) ** 0.5
# Build a heap node for every map cell, each starting at value inf.
def init_nodes(map):
    """Create a node_heap entry per cell; the node id is its (row, col) tuple."""
    rows = len(map)
    cols = len(map[0])
    return [node_heap((i, j), inf, inf)
            for i in range(rows)
            for j in range(cols)]
# Add the heuristic value to every cell's movement cost, in place.
def make_hw_map(map, hs):
    """Return ``map`` with hs((i, j)) added to each cell's cost."""
    for i, row in enumerate(map):
        for j in range(len(row)):
            row[j] = row[j] + hs((i, j))
    return map
# Yield the cells adjacent to pos in map (including pos itself), clamped
# to the matrix bounds.
def adyacent_nodes(map, pos):
    """Generate every (row, col) in the 3x3 neighbourhood of pos."""
    row_lo = max(pos[0] - 1, 0)
    row_hi = min(pos[0] + 1, len(map) - 1)
    col_lo = max(pos[1] - 1, 0)
    col_hi = min(pos[1] + 1, len(map[0]) - 1)
    for i in range(row_lo, row_hi + 1):
        for j in range(col_lo, col_hi + 1):
            yield (i, j)
# Build the direct path from s to d out of the predecessor table.
def make_path(path, s, d, cols, map):
    """Yield the cells of the path from s to d, starting at s.

    ``path`` maps each flattened cell index to its predecessor; when d is
    unreachable, an adjacent reachable cell is tried instead.  If none is
    reachable either, the generator yields nothing (note: ``return None``
    inside a generator just ends iteration).
    """
    if path[d[0] * cols + d[1]] == inf:
        # destination unreachable: fall back to a reachable neighbour
        d = adyacent_free_square(path, d, map, cols)
        if path[d[0] * cols + d[1]] == inf:
            return None
    reversed_path = []
    node = d
    while node != s:
        reversed_path.append(node)
        node = path[node[0] * cols + node[1]]
    reversed_path.append(node)
    # emit the path in s -> d order
    yield from reversed(reversed_path)
def adyacent_free_square(path, d, map, cols):
    """Return the first neighbor of d that the search reached, or d itself."""
    for candidate in adyacent_nodes(map, d):
        if path[candidate[0] * cols + candidate[1]] != inf:
            return candidate
    return d
"""
Hs funcion lambda : lambda x -> y
Donde y es el valor de la heuristica asociado a x
"""
"""
a_star(matriz, hs, s, d)
parametros:
matriz: matriz con los valores de consumo de movimiento de cada casilla (costo),
si una casilla no puede ser transitada toma valor inf
hs funcion lambda : lambda x -> y donde y es el valor de la heuristica asociado a x
s: nodo inicial (i,j) fila i columna j
d: nodo destino (i,j)
terrain_map: mapa del terreno
soldiers_positions_matrix: matriz booleana con las posiciones del los soldados
Algoritmo de A-star para encontrar encontrar el camino mas corto de un punto de una matriz a otro
Devuelve una lista tupla (i,j) que representa el camino optimo de s a d
"""
def a_star(map, hs, s, d, terrain_map, soldiers_positions_matrix):
    """Run A* from s to d over `map` and return a generator of (i, j) path cells.

    :param map: cost matrix; impassable cells hold inf
    :param hs: heuristic callable, hs((i, j)) -> estimated cost to the goal
    :param s: start cell (row, col)
    :param d: destination cell (row, col)
    :param terrain_map: terrain grid; only cells whose .available is truthy
        are walkable (checked in dijkstra)
    :param soldiers_positions_matrix: boolean grid of occupied cells
    """
    visited = []
    for i in range(len(map)):
        visited.append([False]*len(map[0]))
    nodes = init_nodes(map)
    # initialize the heap with the starting node
    heap = []
    # s_pos: index of s in the flat node list (row-major order)
    s_pos = s[0]*len(map[0]) + s[1]
    nodes[s_pos].value = 0
    append_node(heap, nodes[s_pos])
    # initialize predecessor list; inf marks cells never reached
    path = [inf]*len(nodes)
    path = dijkstra(map, nodes, heap, visited, path, d, hs, terrain_map, soldiers_positions_matrix)
    return make_path(path, s, d, len(map[0]), map)
def dijkstra(w_graph, nodes, heap, visited, path, destiny, hs, terrain_map, soldiers_positions_matrix):
    """Best-first expansion loop; fills `path` with each cell's predecessor.

    Pops the cheapest node, marks it visited and relaxes its neighbors.
    Stops early as soon as `destiny` is popped from the heap.
    """
    while len(heap) > 0:
        u = extract_min(heap)
        visited[u.id[0]][u.id[1]] = True
        if u.id == destiny:
            return path
        for item in adyacent_nodes(w_graph, u.id):
            # Skip impassable cells (cost inf), cells occupied by a
            # soldier, and terrain marked as not available.
            if not w_graph[item[0]][item[1]] == inf and not soldiers_positions_matrix[item[0]][item[1]] and terrain_map[item[0]][item[1]].available:
                if not visited[item[0]][item[1]]:
                    # First time this cell is seen: push it onto the heap.
                    if nodes[item[0]*len(w_graph[0]) + item[1]].value == inf:
                        append_node(heap, nodes[item[0]*len(w_graph[0]) + item[1]])
                    relax(u.id, item, w_graph, nodes, heap, path, hs)
    return path
def relax(u, v, graph, nodes, heap, path, hs):
    """Classic edge relaxation: lower v's cost via u when that is cheaper.

    The candidate cost is u's current cost plus the terrain cost of v
    plus the heuristic value at v. On improvement, v's predecessor in
    `path` and its position in the heap are updated.

    The original computed every flat index and the heuristic twice per
    call; they are hoisted here (behavior unchanged for a pure hs).
    """
    cols = len(graph[0])
    v_idx = v[0] * cols + v[1]
    u_idx = u[0] * cols + u[1]
    candidate = nodes[u_idx].value + graph[v[0]][v[1]] + hs((v[0], v[1]))
    if nodes[v_idx].value > candidate:
        nodes[v_idx].value = candidate
        path[v_idx] = u
        # Update position in heap
        heapify_up(heap, nodes[v_idx])
| 31.836601
| 148
| 0.624512
| 0
| 0
| 819
| 0.168138
| 0
| 0
| 0
| 0
| 1,574
| 0.323137
|
a7ff7c5a80c329a083ab577506e02644e8986047
| 919
|
py
|
Python
|
setup.py
|
danielcliu/youtube-transcript-channel-api
|
3102c23379ad86231374b4763716310890133553
|
[
"MIT"
] | 7
|
2020-10-21T08:55:38.000Z
|
2021-03-22T02:53:20.000Z
|
setup.py
|
danielcliu/youtube-transcript-channel-api
|
3102c23379ad86231374b4763716310890133553
|
[
"MIT"
] | 3
|
2021-06-02T12:30:15.000Z
|
2022-02-11T12:46:13.000Z
|
setup.py
|
danielcliu/youtube-transcript-channel-api
|
3102c23379ad86231374b4763716310890133553
|
[
"MIT"
] | 1
|
2021-03-25T20:03:36.000Z
|
2021-03-25T20:03:36.000Z
|
# Packaging script for the youtube-channel-transcript-api distribution.
import setuptools
# Reuse the README contents as the long description rendered on PyPI.
with open("README.md", "r") as fh:
    long_description = fh.read()
setuptools.setup(
    name="youtube-channel-transcript-api", # Replace with your own username
    version="0.0.1",
    author="Daniel Liu",
    author_email="dcliu@ucdavis.edu",
    description="A python package the utilizes the Youtube Data V3 API to get all transcripts from a given channel/playlist.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/danielcliu/youtube-channel-transcript-api",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.6',
    # Runtime dependencies installed alongside the package.
    install_requires=[
        'requests',
        'google-api-python-client',
        'youtube-transcript-api',
    ],
)
| 32.821429
| 126
| 0.669206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 482
| 0.524483
|
c50026f8c76e0a37813e1e12579a0e280470bcd9
| 1,252
|
py
|
Python
|
51-100/67.py
|
yshshadow/Leetcode
|
5097f69bb0050d963c784d6bc0e88a7e871568ed
|
[
"MIT"
] | null | null | null |
51-100/67.py
|
yshshadow/Leetcode
|
5097f69bb0050d963c784d6bc0e88a7e871568ed
|
[
"MIT"
] | null | null | null |
51-100/67.py
|
yshshadow/Leetcode
|
5097f69bb0050d963c784d6bc0e88a7e871568ed
|
[
"MIT"
] | null | null | null |
# Given two binary strings, return their sum (also a binary string).
#
# For example,
# a = "11"
# b = "1"
# Return "100".
class Solution:
    def addBinary(self, a, b):
        """Return the sum of two binary strings as a binary string.

        Rewritten from the original if/elif chain into a single pass with
        divmod; also returns "0" (rather than "") when both inputs are
        empty strings.

        :type a: str
        :type b: str
        :rtype: str
        """
        digits = []
        carry = 0
        i, j = len(a) - 1, len(b) - 1
        # Walk both strings right-to-left until both are exhausted and
        # no carry remains.
        while i >= 0 or j >= 0 or carry:
            total = carry
            if i >= 0:
                total += int(a[i])
                i -= 1
            if j >= 0:
                total += int(b[j])
                j -= 1
            carry, bit = divmod(total, 2)
            digits.append(str(bit))
        # Edge case: both inputs empty -> still produce a valid "0".
        if not digits:
            digits.append("0")
        digits.reverse()
        return "".join(digits)
# Quick manual smoke test (prints "0").
s = Solution()
print(s.addBinary("0", "0"))
# print(s.addBinary("11", "1"))
| 23.185185
| 68
| 0.384185
| 1,050
| 0.838658
| 0
| 0
| 0
| 0
| 0
| 0
| 245
| 0.195687
|
c5013b78c012ef81d3fc817c0e0956eb0e420741
| 2,761
|
py
|
Python
|
src/mds/query.py
|
phs-rcg/metadata-service
|
227ab79f1d8eadc3265cdb0c0bfcfc54db1da3b8
|
[
"Apache-2.0"
] | 10
|
2020-04-28T10:20:02.000Z
|
2021-11-01T22:20:10.000Z
|
src/mds/query.py
|
phs-rcg/metadata-service
|
227ab79f1d8eadc3265cdb0c0bfcfc54db1da3b8
|
[
"Apache-2.0"
] | 23
|
2020-02-04T22:36:02.000Z
|
2022-03-24T18:26:49.000Z
|
src/mds/query.py
|
phs-rcg/metadata-service
|
227ab79f1d8eadc3265cdb0c0bfcfc54db1da3b8
|
[
"Apache-2.0"
] | 6
|
2020-01-14T20:44:50.000Z
|
2022-02-15T22:17:14.000Z
|
from fastapi import HTTPException, Query, APIRouter
from starlette.requests import Request
from starlette.status import HTTP_404_NOT_FOUND
from .models import db, Metadata
# Router collecting the metadata query endpoints; registered via init_app().
mod = APIRouter()
@mod.get("/metadata")
async def search_metadata(
    request: Request,
    data: bool = Query(
        False,
        description="Switch to returning a list of GUIDs (false), "
        "or GUIDs mapping to their metadata (true).",
    ),
    limit: int = Query(
        10, description="Maximum number of records returned. (max: 2000)"
    ),
    offset: int = Query(0, description="Return results at this given offset."),
):
    """Search the metadata.
    Without filters, this will return all data. Add filters as query strings like this:
    GET /metadata?a=1&b=2
    This will match all records that have metadata containing all of:
    {"a": 1, "b": 2}
    The values are always treated as strings for filtering. Nesting is supported:
    GET /metadata?a.b.c=3
    Matching records containing:
    {"a": {"b": {"c": 3}}}
    Providing the same key with more than one value filters records whose value of the
    given key matches any of the given values. But values of different keys must all
    match. For example:
    GET /metadata?a.b.c=3&a.b.c=33&a.b.d=4
    Matches these:
    {"a": {"b": {"c": 3, "d": 4}}}
    {"a": {"b": {"c": 33, "d": 4}}}
    {"a": {"b": {"c": "3", "d": 4, "e": 5}}}
    But won't match these:
    {"a": {"b": {"c": 3}}}
    {"a": {"b": {"c": 3, "d": 5}}}
    {"a": {"b": {"d": 5}}}
    {"a": {"b": {"c": "333", "d": 4}}}
    """
    # Hard cap to protect the database from oversized page requests.
    limit = min(limit, 2000)
    # Collect every non-pagination query parameter as a metadata filter;
    # repeated keys accumulate into a list (OR semantics per key).
    queries = {}
    for key, value in request.query_params.multi_items():
        if key not in {"data", "limit", "offset"}:
            queries.setdefault(key, []).append(value)
    def add_filter(query):
        # AND across different keys, OR across the values of one key;
        # dotted keys index into the nested JSON via a path list.
        for path, values in queries.items():
            query = query.where(
                db.or_(Metadata.data[list(path.split("."))].astext == v for v in values)
            )
        return query.offset(offset).limit(limit)
    if data:
        # data=true: mapping of guid -> full metadata document.
        return {
            metadata.guid: metadata.data
            for metadata in await add_filter(Metadata.query).gino.all()
        }
    else:
        # data=false: plain list of GUIDs only.
        return [
            row[0]
            for row in await add_filter(db.select([Metadata.guid]))
            .gino.return_model(False)
            .all()
        ]
@mod.get("/metadata/{guid:path}")
async def get_metadata(guid):
    """Return the metadata document stored under the given GUID, or 404."""
    record = await Metadata.get(guid)
    if not record:
        raise HTTPException(HTTP_404_NOT_FOUND, f"Not found: {guid}")
    return record.data
def init_app(app):
    """Register the query router on the given FastAPI application."""
    app.include_router(mod, tags=["Query"])
| 27.61
| 88
| 0.574067
| 0
| 0
| 0
| 0
| 2,498
| 0.904745
| 2,442
| 0.884462
| 1,294
| 0.468671
|
c5013dd528c7dae240161ed939f71c7b9ea2e1ef
| 695
|
py
|
Python
|
src/RTmission/storeinfo/forms.py
|
shehabkotb/RTmission_backend
|
90afb06e7d290e934e3e5f77e789b9c5227805f7
|
[
"BSD-3-Clause"
] | null | null | null |
src/RTmission/storeinfo/forms.py
|
shehabkotb/RTmission_backend
|
90afb06e7d290e934e3e5f77e789b9c5227805f7
|
[
"BSD-3-Clause"
] | null | null | null |
src/RTmission/storeinfo/forms.py
|
shehabkotb/RTmission_backend
|
90afb06e7d290e934e3e5f77e789b9c5227805f7
|
[
"BSD-3-Clause"
] | null | null | null |
from django import forms
from django.core import validators
from .models import UserInfo
class UserInfoForm(forms.ModelForm):
    """ModelForm for UserInfo with radio-button gender and age validation."""
    class Meta:
        model = UserInfo
        fields = [
            'name',
            'email',
            'phone',
            'age',
            'gender',
            'comment'
        ]
    # (stored value, display label) pairs for the gender radio buttons.
    CHOICES = [
        ('MALE', 'Male'),
        ('FEMALE', 'Female')
    ]
    gender = forms.ChoiceField(choices=CHOICES, widget=forms.RadioSelect, required=False)
    def clean_age(self):
        """Validate that age, when given, lies in 1..200."""
        age = self.cleaned_data.get("age")
        # cleaned_data.get may return None when the field was left blank;
        # the original compared None < 1 and raised TypeError.
        if age is None:
            return age
        if age < 1 or age > 200:
            raise forms.ValidationError("enter a valid age from 1 to 200")
        return age
| 22.419355
| 89
| 0.539568
| 602
| 0.866187
| 0
| 0
| 0
| 0
| 0
| 0
| 108
| 0.155396
|
c50321a74b64b29dc6c4a647f031c7b97620662c
| 2,668
|
py
|
Python
|
python/54.spiral-matrix.py
|
kadaliao/leetcode
|
32170b1c2ba24b3765d22f9379534651080bab26
|
[
"MIT"
] | null | null | null |
python/54.spiral-matrix.py
|
kadaliao/leetcode
|
32170b1c2ba24b3765d22f9379534651080bab26
|
[
"MIT"
] | null | null | null |
python/54.spiral-matrix.py
|
kadaliao/leetcode
|
32170b1c2ba24b3765d22f9379534651080bab26
|
[
"MIT"
] | null | null | null |
# @lc app=leetcode.cn id=54 lang=python3
#
# [54] 螺旋矩阵
#
# https://leetcode-cn.com/problems/spiral-matrix/description/
#
# algorithms
# Medium (43.22%)
# Total Accepted: 129.4K
# Total Submissions: 284.1K
# Testcase Example: '[[1,2,3],[4,5,6],[7,8,9]]'
#
# 给你一个 m 行 n 列的矩阵 matrix ,请按照 顺时针螺旋顺序 ,返回矩阵中的所有元素。
#
#
# 示例 1:
#
#
# 输入:matrix = [[1,2,3],[4,5,6],[7,8,9]]
# 输出:[1,2,3,6,9,8,7,4,5]
#
#
# 示例 2:
#
#
# 输入:matrix = [[1,2,3,4],[5,6,7,8],[9,10,11,12]]
# 输出:[1,2,3,4,8,12,11,10,9,5,6,7]
#
#
#
#
# 提示:
#
#
# m == matrix.length
# n == matrix[i].length
# 1
# -100
#
#
#
from typing import List
class Solution:
    def spiralOrder0(self, matrix: List[List[int]]) -> List[int]:
        """Simulation approach: walk clockwise, turning whenever the next
        cell is out of bounds or already visited."""
        if not matrix or not matrix[0]:
            return []
        n_rows, n_cols = len(matrix), len(matrix[0])
        seen = [[False] * n_cols for _ in range(n_rows)]
        # direction order: right, down, left, up
        moves = ((0, 1), (1, 0), (0, -1), (-1, 0))
        d = 0
        r = c = 0
        order = []
        for _ in range(n_rows * n_cols):
            order.append(matrix[r][c])
            seen[r][c] = True
            nr, nc = r + moves[d][0], c + moves[d][1]
            if 0 <= nr < n_rows and 0 <= nc < n_cols and not seen[nr][nc]:
                r, c = nr, nc
            else:
                # blocked: rotate clockwise and step in the new direction
                d = (d + 1) % 4
                r, c = r + moves[d][0], c + moves[d][1]
        return order

    def spiralOrder(self, matrix: List[List[int]]) -> List[int]:
        """Layer-peeling approach: collect one rectangular boundary at a
        time, shrinking the bounds after each lap."""
        if not matrix or not matrix[0]:
            return []
        out = []
        left, right = 0, len(matrix[0]) - 1
        top, bottom = 0, len(matrix) - 1
        while left <= right and top <= bottom:
            # top row, left -> right
            out.extend(matrix[top][col] for col in range(left, right + 1))
            # right column, top+1 -> bottom
            out.extend(matrix[row][right] for row in range(top + 1, bottom + 1))
            # only a true rectangle has a distinct bottom row / left column
            if left < right and top < bottom:
                # bottom row, right-1 -> left+1
                out.extend(matrix[bottom][col] for col in range(right - 1, left, -1))
                # left column, bottom -> top+1
                out.extend(matrix[row][left] for row in range(bottom, top, -1))
            left, right, top, bottom = left + 1, right - 1, top + 1, bottom - 1
        return out
# m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
# ret = Solution().spiralOrder(m)
# print(ret)
# ret = Solution().spiralOrder0(m)
# print(ret)
| 24.477064
| 87
| 0.467391
| 1,933
| 0.694574
| 0
| 0
| 0
| 0
| 0
| 0
| 862
| 0.309738
|
c503e7668b1bca9c8fa3e9b2fad69b66aea6dd54
| 660
|
py
|
Python
|
tests/test_path.py
|
Infinidat/infi.gevent-utils
|
7aef923fb19c2ea7abfe9f8341d2dfcb7b7eebdd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_path.py
|
Infinidat/infi.gevent-utils
|
7aef923fb19c2ea7abfe9f8341d2dfcb7b7eebdd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_path.py
|
Infinidat/infi.gevent-utils
|
7aef923fb19c2ea7abfe9f8341d2dfcb7b7eebdd
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
from infi.gevent_utils.os import path
import sys
import os
sys.path.append(os.path.dirname(__file__))
from utils import GreenletCalledValidatorTestCase
class PathTestCase(GreenletCalledValidatorTestCase):
    """Verifies how many greenlet switches each os.path wrapper triggers."""
    def test_exists(self):
        # path.exists hits the filesystem -- expect exactly one switch.
        self.switch_validator.assert_called(0)
        self.assertFalse(path.exists("/this_path_probably_doesnt_exist_or_else_the_test_will_fail"))
        self.switch_validator.assert_called(1)
    def test_basename(self):
        # basename is pure string manipulation -- no switch expected.
        self.switch_validator.assert_called(0)
        self.assertEqual("a.text", path.basename("/a/b/c/a.text"))
        self.switch_validator.assert_called(0)
| 33
| 100
| 0.771212
| 465
| 0.704545
| 0
| 0
| 0
| 0
| 0
| 0
| 84
| 0.127273
|
c5041849eb6e20166cf188e490e80a877301469d
| 2,951
|
py
|
Python
|
download-from-web/govori.py
|
miroslavradojevic/python-snippets
|
753e1c15dc077d3bcf5de4fd5d3a675daf0da27c
|
[
"MIT"
] | null | null | null |
download-from-web/govori.py
|
miroslavradojevic/python-snippets
|
753e1c15dc077d3bcf5de4fd5d3a675daf0da27c
|
[
"MIT"
] | null | null | null |
download-from-web/govori.py
|
miroslavradojevic/python-snippets
|
753e1c15dc077d3bcf5de4fd5d3a675daf0da27c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Download .mp3 podcast files of Radio Belgrade show Govori da bih te video
# (Speak so that I can see you): grab all mp3s and save them with parsed
# name and date to the output folder.
#
# Fixes over the original: the mp3 file handle is now closed via a context
# manager, and the stray "saved to ..." print that ran BEFORE the download
# was removed (success is reported only after the file is written).
import requests
import os
import time
import xml.dom.minidom
from urllib.parse import urlparse
url = "https://www.rts.rs/page/radio/sr/podcast/5433/govori-da-bih-te-video/audio.html"
# url results with xml that is further parsed
timestamp = time.strftime("%Y%m%d-%H%M%S")
out_dir = os.path.join("govori_" + timestamp)
doc_path = "govori_" + timestamp + ".xml"
if not os.path.exists(out_dir):
    os.makedirs(out_dir)
try:
    req = requests.get(url)
    req.raise_for_status()
    doc = xml.dom.minidom.parseString(req.text)  # TODO check if it is valid XML
    items = doc.getElementsByTagName("item")
    print("found ", len(items), " items")
    for item in items:
        links = item.getElementsByTagName("link")
        if len(links) > 0:
            print(links[0].firstChild.data)  # read element data value
            # keep only the filename of the .html page; it names the episode
            a = urlparse(links[0].firstChild.data)
            out_fname_pname = os.path.basename(a.path).replace('.html', '')
        else:
            out_fname_pname = "NA"
        enclosures = item.getElementsByTagName("enclosure")
        if len(enclosures) > 0:
            url_value = enclosures[0].attributes["url"].value  # read attribute value
            print(url_value)
            if url_value.endswith('.mp3'):
                url_elements = urlparse(url_value).path.split('/')
                if len(url_elements) >= 5:
                    # the date is spread over three consecutive path segments
                    out_fname_date = ''.join(url_elements[-5:-2])
                else:
                    out_fname_date = "NA"
                out_file = out_fname_date + "_" + out_fname_pname + ".mp3"
                # download the mp3 from url_value into out_file
                print("saving... ", end='')
                try:
                    req = requests.get(url_value)
                    req.raise_for_status()
                    # context manager guarantees the handle is closed even on error
                    with open(os.path.join(out_dir, out_file), 'wb') as mp3_file:
                        mp3_file.write(req.content)
                    print("saved to " + os.path.join(out_dir, out_file))
                except requests.exceptions.HTTPError as err:
                    print(err)
                    # raise SystemExit(err)
        print("")
    # save rss xml next to the downloaded episodes
    with open(os.path.join(out_dir, doc_path), "w", encoding="utf-8") as f:
        f.write(doc.toprettyxml())
    print(os.path.join(out_dir, doc_path))
except requests.exceptions.HTTPError as err:
    print(err)
    # raise SystemExit(err)
| 36.432099
| 105
| 0.597763
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 941
| 0.318875
|
c506aceeb7ea06c9672cd06b35d80f96cd51d00c
| 830
|
py
|
Python
|
setup.py
|
uhlerlab/conditional_independence
|
aa4b5117b6f24bf39433d427d490312864e9bd69
|
[
"BSD-3-Clause"
] | 4
|
2021-01-29T20:27:31.000Z
|
2022-02-01T11:55:33.000Z
|
setup.py
|
uhlerlab/conditional_independence
|
aa4b5117b6f24bf39433d427d490312864e9bd69
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
uhlerlab/conditional_independence
|
aa4b5117b6f24bf39433d427d490312864e9bd69
|
[
"BSD-3-Clause"
] | 1
|
2021-09-12T13:41:21.000Z
|
2021-09-12T13:41:21.000Z
|
# Packaging script for the conditional_independence distribution.
import setuptools
setuptools.setup(
    name='conditional_independence',
    version='0.1a.4',
    description='Parametric and non-parametric conditional independence tests.',
    long_description='',
    author='Chandler Squires',
    author_email='chandlersquires18@gmail.com',
    packages=setuptools.find_packages(exclude=['tests']),
    python_requires='>3.5.0',
    zip_safe=False,
    classifiers=[
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Mathematics',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
    ],
    # Runtime dependencies; the commented-out entries are kept as a record
    # of packages that were deliberately excluded.
    install_requires=[
        'scipy',
        'dataclasses',
        'numpy',
        # 'scikit_sparse',
        'numexpr',
        'scikit_learn',
        'typing',
        'pygam',
        'tqdm',
        # 'numba',
        'ipdb',
    ]
)
| 25.151515
| 80
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 407
| 0.490361
|
c509490417a8c93598f13380d18986bf96b33fd7
| 200
|
py
|
Python
|
feedbacks/urls.py
|
mpyatishev/djfeedback
|
fc1ebf0646d4449371ed80560db7cbb3f7996156
|
[
"MIT"
] | null | null | null |
feedbacks/urls.py
|
mpyatishev/djfeedback
|
fc1ebf0646d4449371ed80560db7cbb3f7996156
|
[
"MIT"
] | null | null | null |
feedbacks/urls.py
|
mpyatishev/djfeedback
|
fc1ebf0646d4449371ed80560db7cbb3f7996156
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from . import views
# Route feedback submissions to FeedbackView.
# NOTE(review): `patterns()` was removed in Django 1.10 -- this module
# targets an older Django release; confirm the project's Django version.
urlpatterns = patterns(
    '',
    url(r'feedback$', views.FeedbackView.as_view(), name='feedback-post')
)
| 15.384615
| 73
| 0.655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 52
| 0.26
|
c5095d645afe3699b5e0ecd4c38a1042890d0c0e
| 6,466
|
py
|
Python
|
pygeoapi/provider/mongo.py
|
paul121/pygeoapi
|
21c4d36a408f510ac83ff6c1d56932338ddb6d6e
|
[
"MIT"
] | null | null | null |
pygeoapi/provider/mongo.py
|
paul121/pygeoapi
|
21c4d36a408f510ac83ff6c1d56932338ddb6d6e
|
[
"MIT"
] | null | null | null |
pygeoapi/provider/mongo.py
|
paul121/pygeoapi
|
21c4d36a408f510ac83ff6c1d56932338ddb6d6e
|
[
"MIT"
] | 1
|
2021-10-02T14:04:20.000Z
|
2021-10-02T14:04:20.000Z
|
# =================================================================
#
# Authors: Timo Tuunanen <timo.tuunanen@rdvelho.com>
#
# Copyright (c) 2019 Timo Tuunanen
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
from datetime import datetime
import logging
from bson import Code
from pymongo import MongoClient
from pymongo import GEOSPHERE
from pymongo import ASCENDING, DESCENDING
from pymongo.collection import ObjectId
from pygeoapi.provider.base import BaseProvider, ProviderItemNotFoundError
LOGGER = logging.getLogger(__name__)
class MongoProvider(BaseProvider):
    """Generic pygeoapi feature provider backed by a MongoDB collection."""
    def __init__(self, provider_def):
        """
        MongoProvider Class constructor

        :param provider_def: provider definitions from yml pygeoapi-config.
                             data, id_field, name set in parent class
        :returns: pygeoapi.provider.mongo.MongoProvider
        """
        # this is dummy value never used in case of Mongo.
        # Mongo id field is _id
        provider_def.setdefault('id_field', '_id')
        BaseProvider.__init__(self, provider_def)
        LOGGER.info('Mongo source config: {}'.format(self.data))
        dbclient = MongoClient(self.data)
        self.featuredb = dbclient.get_default_database()
        self.collection = provider_def['collection']
        # 2dsphere index supports the $geoWithin bbox queries issued below
        self.featuredb[self.collection].create_index([("geometry", GEOSPHERE)])
    def get_fields(self):
        """
        Get provider field information (names, types)

        :returns: distinct property names found across the collection

        NOTE(review): map_reduce is deprecated in recent MongoDB/PyMongo
        releases -- confirm against the deployed server version.
        """
        # emit every key of each document's `properties` object; the
        # distinct('_id') on the result yields the set of property names
        map = Code(
            "function() { for (var key in this.properties) "
            "{ emit(key, null); } }")
        reduce = Code("function(key, stuff) { return null; }")
        result = self.featuredb[self.collection].map_reduce(
            map, reduce, "myresults")
        return result.distinct('_id')
    def _get_feature_list(self, filterObj, sortList=[], skip=0, maxitems=1):
        # Fetch matching features plus the total match count for paging.
        # NOTE(review): mutable default sortList=[] is shared across calls;
        # harmless as long as it is never mutated -- it is only read here.
        featurecursor = self.featuredb[self.collection].find(filterObj)
        if sortList:
            featurecursor = featurecursor.sort(sortList)
        matchCount = self.featuredb[self.collection].count_documents(filterObj)
        featurecursor.skip(skip)
        featurecursor.limit(maxitems)
        featurelist = list(featurecursor)
        # expose Mongo's ObjectId as a plain string 'id' field
        for item in featurelist:
            item['id'] = str(item.pop('_id'))
        return featurelist, matchCount
    def query(self, startindex=0, limit=10, resulttype='results',
              bbox=[], datetime_=None, properties=[], sortby=[],
              select_properties=[], skip_geometry=False):
        """
        query the provider

        :param startindex: number of records to skip
        :param limit: maximum number of records returned
        :param resulttype: 'results' (features) or 'hits' (count only)
        :param bbox: [minx, miny, maxx, maxy] spatial filter
        :param properties: list of (name, value) equality filters
        :param sortby: list of {'property': ..., 'order': 'A'|'D'} dicts

        :returns: dict of 0..n GeoJSON features
        """
        and_filter = []
        if len(bbox) == 4:
            x, y, w, h = map(float, bbox)
            and_filter.append(
                {'geometry': {'$geoWithin': {'$box': [[x, y], [w, h]]}}})
        # This parameter is not working yet!
        # gte is not sufficient to check date range
        if datetime_ is not None:
            assert isinstance(datetime_, datetime)
            and_filter.append({'properties.datetime': {'$gte': datetime_}})
        for prop in properties:
            and_filter.append({"properties."+prop[0]: {'$eq': prop[1]}})
        # all conditions must hold simultaneously ($and)
        filterobj = {'$and': and_filter} if and_filter else {}
        sort_list = [("properties." + sort['property'],
                      ASCENDING if (sort['order'] == 'A') else DESCENDING)
                     for sort in sortby]
        featurelist, matchcount = self._get_feature_list(filterobj,
                                                         sortList=sort_list,
                                                         skip=startindex,
                                                         maxitems=limit)
        # 'hits' requests only want the count, not the features themselves
        if resulttype == 'hits':
            featurelist = []
        feature_collection = {
            'type': 'FeatureCollection',
            'features': featurelist,
            'numberMatched': matchcount,
            'numberReturned': len(featurelist)
        }
        return feature_collection
    def get(self, identifier):
        """
        query the provider by id

        :param identifier: feature id
        :returns: dict of single GeoJSON feature
        :raises ProviderItemNotFoundError: when no document has this id
        """
        featurelist, matchcount = self._get_feature_list(
            {'_id': ObjectId(identifier)})
        if featurelist:
            return featurelist[0]
        else:
            err = 'item {} not found'.format(identifier)
            LOGGER.error(err)
            raise ProviderItemNotFoundError(err)
    def create(self, new_feature):
        """Create a new feature

        :param new_feature: GeoJSON feature dictionary to insert
        """
        self.featuredb[self.collection].insert_one(new_feature)
    def update(self, identifier, updated_feature):
        """Updates an existing feature id with new_feature

        :param identifier: feature id
        :param new_feature: new GeoJSON feature dictionary
        """
        # strip the synthetic 'id' key so the immutable _id is never touched
        data = {k: v for k, v in updated_feature.items() if k != 'id'}
        self.featuredb[self.collection].update_one(
            {'_id': ObjectId(identifier)}, {"$set": data})
    def delete(self, identifier):
        """Deletes an existing feature

        :param identifier: feature id
        """
        self.featuredb[self.collection].delete_one(
            {'_id': ObjectId(identifier)})
| 35.333333
| 79
| 0.604547
| 4,844
| 0.749149
| 0
| 0
| 0
| 0
| 0
| 0
| 2,756
| 0.42623
|
c509a2151c61ed3015af0423248b9cd0ce672927
| 1,975
|
py
|
Python
|
examples/ecs/server_interface.py
|
wangrui1121/huaweicloud-sdk-python
|
240abe00288760115d1791012d4e3c4592d77ad1
|
[
"Apache-2.0"
] | 43
|
2018-12-19T08:39:15.000Z
|
2021-07-21T02:45:43.000Z
|
examples/ecs/server_interface.py
|
wangrui1121/huaweicloud-sdk-python
|
240abe00288760115d1791012d4e3c4592d77ad1
|
[
"Apache-2.0"
] | 11
|
2019-03-17T13:28:56.000Z
|
2020-09-23T23:57:50.000Z
|
examples/ecs/server_interface.py
|
wangrui1121/huaweicloud-sdk-python
|
240abe00288760115d1791012d4e3c4592d77ad1
|
[
"Apache-2.0"
] | 47
|
2018-12-19T05:14:25.000Z
|
2022-03-19T15:28:30.000Z
|
# -*-coding:utf-8 -*-
from openstack import connection
from openstack import exceptions
# create connection
# NOTE: replace the placeholder credentials below before running.
username = "xxxxxx"
password = "xxxxxx"
projectId = "xxxxxxxxxxxxxxxxxxxxxxxxxxxx"  # tenant ID
userDomainId = "xxxxxxxxxxxxxxxxxxxxxxxxxxxx"  # user account ID
auth_url = "xxxxxxxxxxxxxxxxxxxxxxxxxxxx"  # endpoint url
conn = connection.Connection(auth_url=auth_url,
                             user_domain_id=userDomainId,
                             project_id=projectId,
                             username=username,
                             password=password)
# create server interface
def create_server_interface(server_id, net_id=None, port_id=None,
                            fixed_ip=None):
    """Attach a network interface to a server.

    At least one of net_id, port_id or fixed_ip must be supplied.

    :param server_id: id of the server to attach the interface to
    :param net_id: id of the network to attach
    :param port_id: id of an existing port to attach
    :param fixed_ip: fixed IP address to assign
    :returns: the created server interface
    :raises exceptions.SDKException: when no optional argument is given
        (the original referenced `exceptions` without importing it and
        raised NameError here; the import is now at the top of the file)
    """
    attrs = {"net_id": net_id, "port_id": port_id, "fixed_ip": fixed_ip}
    # keep only the arguments that were actually provided
    kwargs = {key: value for key, value in attrs.items() if value}
    print(kwargs)
    if not kwargs:
        message = "Parameter error"
        raise exceptions.SDKException(message)
    server = conn.compute.create_server_interface(server_id, **kwargs)
    print(server)
    return server
# delete interface
def delete_server_interface(server_interface, servr_id):
    """Detach and delete the given interface from the server."""
    conn.compute.delete_server_interface(server_interface, server=servr_id)
# show interface detail
def get_server_interface(server_interface, servr_id):
    """Fetch and print the details of one server interface."""
    server_ifa = conn.compute.get_server_interface(server_interface,
                                                   server=servr_id)
    print(server_ifa)
# get list of interface
def server_interfaces(server_id):
    """Print every network interface attached to the server."""
    server_ifas = conn.compute.server_interfaces(server_id)
    for ifa in server_ifas:
        print(ifa)
if __name__ == "__main__":
    # demo flow: attach an interface, inspect it, list all, then detach it
    server_id = "8700184b-79ff-414b-ab8e-11ed01bd3d3d"
    net_id = "e2103034-dcf3-4ac3-b551-6d5dd8fadb6e"
    server = create_server_interface(server_id, net_id)
    get_server_interface(server.id, server_id)
    server_interfaces(server_id)
    delete_server_interface(server.id, server_id)
| 32.377049
| 75
| 0.671392
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 407
| 0.206076
|
c50a6cdccc88ffe721b0e07a35e407563cda966e
| 9,060
|
py
|
Python
|
sdk/python/pulumi_google_native/dlp/v2/stored_info_type.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/dlp/v2/stored_info_type.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/dlp/v2/stored_info_type.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['StoredInfoTypeArgs', 'StoredInfoType']
# Auto-generated input-args class -- hand edits will be overwritten by the
# Pulumi SDK generator (see the file header warning).
@pulumi.input_type
class StoredInfoTypeArgs:
    def __init__(__self__, *,
                 config: pulumi.Input['GooglePrivacyDlpV2StoredInfoTypeConfigArgs'],
                 location: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 stored_info_type_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a StoredInfoType resource.
        :param pulumi.Input['GooglePrivacyDlpV2StoredInfoTypeConfigArgs'] config: Configuration of the storedInfoType to create.
        :param pulumi.Input[str] stored_info_type_id: The storedInfoType ID can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular expression: `[a-zA-Z\d-_]+`. The maximum length is 100 characters. Can be empty to allow the system to generate one.
        """
        pulumi.set(__self__, "config", config)
        # optional arguments are only set when provided
        if location is not None:
            pulumi.set(__self__, "location", location)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if stored_info_type_id is not None:
            pulumi.set(__self__, "stored_info_type_id", stored_info_type_id)
    @property
    @pulumi.getter
    def config(self) -> pulumi.Input['GooglePrivacyDlpV2StoredInfoTypeConfigArgs']:
        """
        Configuration of the storedInfoType to create.
        """
        return pulumi.get(self, "config")
    @config.setter
    def config(self, value: pulumi.Input['GooglePrivacyDlpV2StoredInfoTypeConfigArgs']):
        pulumi.set(self, "config", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "project")
    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)
    @property
    @pulumi.getter(name="storedInfoTypeId")
    def stored_info_type_id(self) -> Optional[pulumi.Input[str]]:
        """
        The storedInfoType ID can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular expression: `[a-zA-Z\d-_]+`. The maximum length is 100 characters. Can be empty to allow the system to generate one.
        """
        return pulumi.get(self, "stored_info_type_id")
    @stored_info_type_id.setter
    def stored_info_type_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "stored_info_type_id", value)
class StoredInfoType(pulumi.CustomResource):
    # NOTE: auto-generated Pulumi resource wrapper for the GCP DLP v2
    # StoredInfoType API. The double `__init__` @overload pattern lets callers
    # pass either keyword arguments or a StoredInfoTypeArgs object.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 config: Optional[pulumi.Input[pulumi.InputType['GooglePrivacyDlpV2StoredInfoTypeConfigArgs']]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 stored_info_type_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Creates a pre-built stored infoType to be used for inspection. See https://cloud.google.com/dlp/docs/creating-stored-infotypes to learn more.
        Auto-naming is currently not supported for this resource.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['GooglePrivacyDlpV2StoredInfoTypeConfigArgs']] config: Configuration of the storedInfoType to create.
        :param pulumi.Input[str] stored_info_type_id: The storedInfoType ID can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular expression: `[a-zA-Z\d-_]+`. The maximum length is 100 characters. Can be empty to allow the system to generate one.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: StoredInfoTypeArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates a pre-built stored infoType to be used for inspection. See https://cloud.google.com/dlp/docs/creating-stored-infotypes to learn more.
        Auto-naming is currently not supported for this resource.

        :param str resource_name: The name of the resource.
        :param StoredInfoTypeArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch to whichever overload form the caller used.
        resource_args, opts = _utilities.get_resource_args_opts(StoredInfoTypeArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 config: Optional[pulumi.Input[pulumi.InputType['GooglePrivacyDlpV2StoredInfoTypeConfigArgs']]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 stored_info_type_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both public __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to looking up an existing one).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = StoredInfoTypeArgs.__new__(StoredInfoTypeArgs)
            if config is None and not opts.urn:
                raise TypeError("Missing required property 'config'")
            __props__.__dict__["config"] = config
            __props__.__dict__["location"] = location
            __props__.__dict__["project"] = project
            __props__.__dict__["stored_info_type_id"] = stored_info_type_id
            # Output-only fields are initialized to None; the engine fills them in.
            __props__.__dict__["current_version"] = None
            __props__.__dict__["name"] = None
            __props__.__dict__["pending_versions"] = None
        super(StoredInfoType, __self__).__init__(
            'google-native:dlp/v2:StoredInfoType',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'StoredInfoType':
        """
        Get an existing StoredInfoType resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = StoredInfoTypeArgs.__new__(StoredInfoTypeArgs)
        __props__.__dict__["current_version"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["pending_versions"] = None
        return StoredInfoType(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="currentVersion")
    def current_version(self) -> pulumi.Output['outputs.GooglePrivacyDlpV2StoredInfoTypeVersionResponse']:
        """
        Current version of the stored info type.
        """
        return pulumi.get(self, "current_version")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="pendingVersions")
    def pending_versions(self) -> pulumi.Output[Sequence['outputs.GooglePrivacyDlpV2StoredInfoTypeVersionResponse']]:
        """
        Pending versions of the stored info type. Empty if no versions are pending.
        """
        return pulumi.get(self, "pending_versions")
| 45.757576
| 294
| 0.663135
| 8,621
| 0.951545
| 0
| 0
| 6,427
| 0.709382
| 0
| 0
| 3,862
| 0.426269
|
c50a6f36d8c6b2d26bcac12eab8fe5a236ca18f3
| 7,795
|
py
|
Python
|
python/GafferSceneUI/SceneHistoryUI.py
|
pier-robot/gaffer
|
9267f2ba3822b14430d8a283c745261110b0f570
|
[
"BSD-3-Clause"
] | null | null | null |
python/GafferSceneUI/SceneHistoryUI.py
|
pier-robot/gaffer
|
9267f2ba3822b14430d8a283c745261110b0f570
|
[
"BSD-3-Clause"
] | null | null | null |
python/GafferSceneUI/SceneHistoryUI.py
|
pier-robot/gaffer
|
9267f2ba3822b14430d8a283c745261110b0f570
|
[
"BSD-3-Clause"
] | null | null | null |
##########################################################################
#
# Copyright (c) 2019, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import functools
import IECore
import IECoreScene
import Gaffer
import GafferUI
import GafferScene
import GafferSceneUI
def appendViewContextMenuItems( viewer, view, menuDefinition ) :
if not isinstance( view, GafferSceneUI.SceneView ) :
return None
menuDefinition.append(
"/History",
{
"subMenu" : functools.partial(
__historySubMenu,
context = view.getContext(),
scene = view["in"],
selectedPath = __sceneViewSelectedPath( view )
)
}
)
def connectToEditor( editor ) :
if isinstance( editor, GafferUI.Viewer ) :
editor.keyPressSignal().connect( __viewerKeyPress, scoped = False )
elif isinstance( editor, GafferSceneUI.HierarchyView ) or isinstance( editor, GafferSceneUI.LightEditor ) :
editor.keyPressSignal().connect( __hierarchyViewKeyPress, scoped = False )
elif isinstance( editor, GafferUI.Editor ) :
editor.keyPressSignal().connect( __nodeEditorKeyPress, scoped = False )
##########################################################################
# Internal implementation
##########################################################################
def __historySubMenu( menu, context, scene, selectedPath ) :
menuDefinition = IECore.MenuDefinition()
menuDefinition.append(
"/Edit Source...",
{
"active" : selectedPath is not None,
"command" : functools.partial( __editSourceNode, context, scene, selectedPath ),
"shortCut" : "Alt+E",
}
)
menuDefinition.append(
"/Edit Tweaks...",
{
"active" : selectedPath is not None,
"command" : functools.partial( __editTweaksNode, context, scene, selectedPath ),
"shortCut" : "Alt+Shift+E",
}
)
return menuDefinition
def __sceneViewSelectedPath( sceneView ) :
sceneGadget = sceneView.viewportGadget().getPrimaryChild()
if sceneGadget.getSelection().size() == 1 :
return sceneGadget.getSelection().paths()[0]
else :
return None
def __contextSelectedPath( context ) :
selection = GafferSceneUI.ContextAlgo.getSelectedPaths( context )
if selection.size() != 1 :
return None
return selection.paths()[0]
def __editSourceNode( context, scene, path, nodeEditor = None ) :
with context :
source = GafferScene.SceneAlgo.source( scene, path )
if source is None :
return
node = source.node()
node = __ancestorWithNonViewableChildNodes( node ) or node
if nodeEditor is not None :
nodeEditor.setNodeSet( Gaffer.StandardSet( [ node ] ) )
else :
GafferUI.NodeEditor.acquire( node, floating = True )
def __tweaksNode( scene, path ) :
tweaks = GafferScene.SceneAlgo.objectTweaks( scene, path )
if tweaks is not None :
return tweaks
attributes = scene.fullAttributes( path )
shaderAttributeNames = [ x[0] for x in attributes.items() if isinstance( x[1], IECoreScene.ShaderNetwork ) ]
# Just happens to order as Surface, Light, Displacement, which is what we want.
shaderAttributeNames = list( reversed( sorted( shaderAttributeNames ) ) )
if not len( shaderAttributeNames ) :
return None
return GafferScene.SceneAlgo.shaderTweaks( scene, path, shaderAttributeNames[0] )
def __editTweaksNode( context, scene, path, nodeEditor = None ) :
with context :
tweaks = __tweaksNode( scene, path )
if tweaks is None :
return
node = __ancestorWithNonViewableChildNodes( tweaks ) or tweaks
if nodeEditor is not None :
nodeEditor.setNodeSet( Gaffer.StandardSet( [ node ] ) )
else :
GafferUI.NodeEditor.acquire( node, floating = True )
def __ancestorWithNonViewableChildNodes( node ) :
result = None
while isinstance( node, Gaffer.Node ) :
if Gaffer.Metadata.value( node, "graphEditor:childrenViewable" ) == False :
result = node
node = node.parent()
return result
__editSourceKeyPress = GafferUI.KeyEvent( "E", GafferUI.KeyEvent.Modifiers.Alt )
__editTweaksKeyPress = GafferUI.KeyEvent(
"E",
GafferUI.KeyEvent.Modifiers(
GafferUI.KeyEvent.Modifiers.Alt | GafferUI.KeyEvent.Modifiers.Shift
)
)
def __viewerKeyPress( viewer, event ) :
view = viewer.view()
if not isinstance( view, GafferSceneUI.SceneView ) :
return False
if event == __editSourceKeyPress :
selectedPath = __sceneViewSelectedPath( view )
if selectedPath is not None :
__editSourceNode( view.getContext(), view["in"], selectedPath )
return True
elif event == __editTweaksKeyPress :
selectedPath = __sceneViewSelectedPath( view )
if selectedPath is not None :
__editTweaksNode( view.getContext(), view["in"], selectedPath )
return True
def __hierarchyViewKeyPress( hierarchyView, event ) :
if event == __editSourceKeyPress :
selectedPath = __contextSelectedPath( hierarchyView.getContext() )
if selectedPath is not None :
__editSourceNode( hierarchyView.getContext(), hierarchyView.scene(), selectedPath )
return True
elif event == __editTweaksKeyPress :
selectedPath = __contextSelectedPath( hierarchyView.getContext() )
if selectedPath is not None :
__editTweaksNode( hierarchyView.getContext(), hierarchyView.scene(), selectedPath )
return True
def __nodeEditorKeyPress( nodeEditor, event ) :
layout = nodeEditor.ancestor( GafferUI.CompoundEditor )
if layout is None :
return False
## \todo In Gaffer 0.61, we should get the scene directly from the focus node.
scene = None
for hierarchyView in layout.editors( GafferSceneUI.HierarchyView ) :
if hierarchyView.scene() is not None :
scene = hierarchyView.scene()
break
if scene is None :
for viewer in layout.editors( GafferUI.Viewer ) :
if isinstance( viewer.view(), GafferSceneUI.SceneView ) :
scene = viewer.view()["in"]
break
if scene is None :
return False
context = layout.scriptNode().context()
if event == __editSourceKeyPress :
selectedPath = __contextSelectedPath( context )
if selectedPath is not None :
__editSourceNode( context, scene, selectedPath, nodeEditor )
return True
elif event == __editTweaksKeyPress :
selectedPath = __contextSelectedPath( context )
if selectedPath is not None :
__editTweaksNode( context, scene, selectedPath, nodeEditor )
return True
| 31.946721
| 109
| 0.710969
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,276
| 0.291982
|
c50ac3b029d23e93f95a2998c1cb8c9b33f3b8ee
| 294
|
py
|
Python
|
core/middleware/scheduler.py
|
jiangxuewen16/hq-crawler
|
f03ec1e454513307e335943f224f4d927eaf2bbf
|
[
"MIT"
] | 1
|
2021-02-25T08:33:40.000Z
|
2021-02-25T08:33:40.000Z
|
core/middleware/scheduler.py
|
jiangxuewen16/hq-crawler
|
f03ec1e454513307e335943f224f4d927eaf2bbf
|
[
"MIT"
] | null | null | null |
core/middleware/scheduler.py
|
jiangxuewen16/hq-crawler
|
f03ec1e454513307e335943f224f4d927eaf2bbf
|
[
"MIT"
] | 2
|
2021-03-08T07:25:16.000Z
|
2021-12-07T15:28:02.000Z
|
from django.utils.deprecation import MiddlewareMixin
from django.utils.autoreload import logger
class Scheduler(MiddlewareMixin):
    """Django middleware hook points for the crawler scheduler.

    Currently a pass-through: requests and responses are not modified.
    """

    def process_request(self, request):
        """Inbound hook; intentionally a no-op."""
        pass

    def process_response(self, request, response):
        """Outbound hook; hands the response back unchanged."""
        return response
| 21
| 52
| 0.731293
| 194
| 0.659864
| 0
| 0
| 0
| 0
| 0
| 0
| 22
| 0.07483
|
c50d8c67882d7ef410bf79b36de881a95ed1d06e
| 631
|
py
|
Python
|
python/cw/letterfreq2.py
|
vesche/snippets
|
7a9d598df99c26c4e0c63669f9f95a94eeed0d08
|
[
"Unlicense"
] | 7
|
2016-01-03T19:42:07.000Z
|
2018-10-23T14:03:12.000Z
|
python/cw/letterfreq2.py
|
vesche/snippets
|
7a9d598df99c26c4e0c63669f9f95a94eeed0d08
|
[
"Unlicense"
] | null | null | null |
python/cw/letterfreq2.py
|
vesche/snippets
|
7a9d598df99c26c4e0c63669f9f95a94eeed0d08
|
[
"Unlicense"
] | 1
|
2018-03-09T08:52:01.000Z
|
2018-03-09T08:52:01.000Z
|
#!/usr/bin/env python
"""Print the relative frequency of letters (a-z and space) in a text file."""
import sys
from collections import Counter
from string import ascii_lowercase


def letter_frequencies(lines):
    """Return {char: percentage} for lowercase letters and spaces in *lines*.

    Characters outside a-z and ' ' are ignored; input is lowercased first.
    Percentages are rounded to two decimals. Empty/irrelevant input yields {}.
    """
    allowed = set(ascii_lowercase + ' ')
    counts = Counter(
        ch for line in lines for ch in line.lower() if ch in allowed
    )
    total = sum(counts.values())
    return {ch: round(100 * n / total, 2) for ch, n in counts.items()}


def main():
    """Read the file named on the command line and print sorted frequencies."""
    with open(sys.argv[1]) as f:
        lines = f.read().splitlines()
    # Sort by descending percentage, then alphabetically for ties.
    for ch, pct in sorted(letter_frequencies(lines).items(),
                          key=lambda kv: (-kv[1], kv[0])):
        print("'{}' {}%".format(ch, pct))


if __name__ == '__main__':
    main()
| 21.033333
| 55
| 0.557845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 42
| 0.066561
|
c50e54be42b3e46e041c3408bc115beca68acf17
| 946
|
py
|
Python
|
function/python/brightics/function/extraction/test/label_encoder_test.py
|
GSByeon/studio
|
782cf484541c6d68e1451ff6a0d3b5dc80172664
|
[
"Apache-2.0"
] | null | null | null |
function/python/brightics/function/extraction/test/label_encoder_test.py
|
GSByeon/studio
|
782cf484541c6d68e1451ff6a0d3b5dc80172664
|
[
"Apache-2.0"
] | null | null | null |
function/python/brightics/function/extraction/test/label_encoder_test.py
|
GSByeon/studio
|
782cf484541c6d68e1451ff6a0d3b5dc80172664
|
[
"Apache-2.0"
] | 1
|
2020-11-19T06:44:15.000Z
|
2020-11-19T06:44:15.000Z
|
import unittest
from brightics.function.extraction.encoder import label_encoder, \
label_encoder_model
from brightics.common.datasets import load_iris
import random
def get_iris_randomgroup():
    """Return the iris dataset augmented with two random grouping columns.

    `random_group1` holds ints in {1, 2}; `random_group2` holds 'A'/'B'.
    """
    df = load_iris()
    id_to_label = {1: 'A', 2: 'B'}
    group_ids = []
    group_labels = []
    # One pair of draws per row, in the same order as before.
    for _ in range(len(df)):
        group_ids.append(random.randint(1, 2))
        group_labels.append(id_to_label[random.randint(1, 2)])
    df['random_group1'] = group_ids
    df['random_group2'] = group_labels
    return df
class LabelEncoderTest(unittest.TestCase):
    # Smoke tests for label_encoder / label_encoder_model on grouped data.

    def test_groupby1(self):
        # Encode 'species' per (random_group1, random_group2) group, then
        # re-apply the fitted model to the same frame; prints are for
        # manual inspection only — there are no assertions here.
        df = get_iris_randomgroup()
        enc_out = label_encoder(df, input_col='species', group_by=['random_group1', 'random_group2'])
        print(enc_out['out_table'])
        print(enc_out['model'].keys())
        model_out = label_encoder_model(df, enc_out['model'])
        print(model_out['out_table'])
| 30.516129
| 101
| 0.684989
| 389
| 0.411205
| 0
| 0
| 0
| 0
| 0
| 0
| 111
| 0.117336
|
c510311a203699e5c3e0a6d1d76232cf2598509a
| 4,629
|
py
|
Python
|
data/query-with-params/parameter_supported_query_results.py
|
samelamin/setup
|
73f7807ad1bd37bfc7e7021c8c71f9ef34c8b9b4
|
[
"BSD-2-Clause"
] | 7
|
2021-08-20T22:48:39.000Z
|
2022-01-29T04:07:43.000Z
|
redash_parameter_supported_query_results_query_runner/parameter_supported_query_results.py
|
ariarijp/redash-parameter-supported-query-results-query-runner
|
09e688c2be91354a8be76051b9a8e27c4cde5e4c
|
[
"BSD-2-Clause"
] | null | null | null |
redash_parameter_supported_query_results_query_runner/parameter_supported_query_results.py
|
ariarijp/redash-parameter-supported-query-results-query-runner
|
09e688c2be91354a8be76051b9a8e27c4cde5e4c
|
[
"BSD-2-Clause"
] | null | null | null |
import hashlib
import json
import logging
import re
import sqlite3
from typing import List, Optional, Tuple
import pystache
from redash.models import Query, User
from redash.query_runner import TYPE_STRING, guess_type, register
from redash.query_runner.query_results import Results, _load_query, create_table
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
class ChildQueryExecutionError(Exception):
    """Raised when a referenced child query fails to execute."""
    pass
class ChildQuery:
    """One `query_<id>` reference extracted from a parent query.

    Attributes:
        query_id: numeric id of the referenced Redash query.
        params: parameter dict parsed from the inline JSON (may be empty).
        table: sqlite temp-table name the results will be loaded into.
        token: the raw token text to be replaced in the parent query.
    """
    query_id: int
    params: dict
    table: str
    token: str

    def __init__(self, query_id: int, params: dict, table: str, token: str) -> None:
        super().__init__()
        self.query_id = query_id
        self.params = params
        self.table = table
        self.token = token

    def __repr__(self) -> str:
        # Added for debuggability; not part of the original interface.
        return (f"{type(self).__name__}(query_id={self.query_id!r}, "
                f"params={self.params!r}, table={self.table!r}, token={self.token!r})")
def _extract_child_queries(query: str) -> List[ChildQuery]:
    """Build a ChildQuery for every `query_<id>[('{json}')]` token in *query*."""
    child_queries = []
    for token, query_id, raw_params in _collect_tokens(query):
        # Inline JSON params are optional; fall back to an empty dict.
        params = json.loads(raw_params) if raw_params else {}
        child_queries.append(
            ChildQuery(query_id, params, _tmp_table_name(query_id, token), token)
        )
    return child_queries
def _collect_tokens(query: str) -> list:
pattern = re.compile(r"\s(query_(\d+)(?:\(\s*'({.+})'\s*\))?)", re.IGNORECASE)
matches = pattern.findall(query)
return [(m[0], int(m[1]), m[2]) for m in list(matches)]
def _tmp_table_name(query_id: int, child_query_token: str):
return f"tmp_query{query_id}_{hashlib.sha256(child_query_token.encode('utf-8')).hexdigest()}"
def _create_tables_from_child_queries(
    user: User,
    connection: sqlite3.Connection,
    query: str,
    child_queries: List[ChildQuery],
) -> str:
    """Run each child query, load its results into a sqlite table on
    *connection*, and rewrite *query* so each token names that table.

    Returns the rewritten parent query.
    Raises ChildQueryExecutionError when a child query's runner reports an error.
    """
    for i, child_query in enumerate(child_queries):
        loaded_child_query = _load_query(user, child_query.query_id)
        # Inline parameters win; otherwise use the query's declared defaults.
        params = (
            child_query.params
            if child_query.params
            else get_default_params(loaded_child_query)
        )
        _rendered_child_query = pystache.render(loaded_child_query.query_text, params)
        logger.debug(
            f"ResultsWithParams child_queries[{i}], query_id={child_query.query_id} : {_rendered_child_query}"
        )
        results, error = loaded_child_query.data_source.query_runner.run_query(
            _rendered_child_query, user
        )
        if error:
            raise ChildQueryExecutionError(
                f"Failed loading results for query id {loaded_child_query.id}."
            )
        results = json.loads(results)
        table_name = child_query.table
        create_table(connection, table_name, results)
        # Replace only the first occurrence: each token instance was
        # collected (and hashed into a table name) individually.
        query = query.replace(child_query.token, table_name, 1)
    return query
def get_default_params(query: "Query") -> dict:
    """Map parameter name -> declared default value for *query*'s parameters."""
    # String annotation avoids import-time evaluation of the Query type.
    defaults = {}
    for parameter in query.options.get("parameters", {}):
        defaults[parameter["name"]] = parameter["value"]
    return defaults
class ParameterSupportedResults(Results):
    """Query-results runner variant allowing parameterized child queries."""

    @classmethod
    def name(cls):
        # Display name shown in the Redash data-source list.
        return "Parameter Supported Query Results(PoC)"

    def run_query(
        self, query: Query, user: User
    ) -> Tuple[Optional[str], Optional[str]]:
        """Execute *query* against an in-memory sqlite DB seeded from child queries.

        Returns the query-runner convention pair (json_results, error_message),
        exactly one of which is None.
        """
        child_queries = _extract_child_queries(query)
        connection = None
        cursor = None
        try:
            connection = sqlite3.connect(":memory:")
            # Materialize child-query results as tables and rewrite the SQL.
            query = _create_tables_from_child_queries(
                user, connection, query, child_queries
            )
            cursor = connection.cursor()
            cursor.execute(query)
            if cursor.description is None:
                return None, "Query completed but it returned no data."
            columns = self.fetch_columns([(d[0], None) for d in cursor.description])
            rows = []
            column_names = [c["name"] for c in columns]
            for i, row in enumerate(cursor):
                if i == 0:
                    # Guess column types from the first row only; on any later
                    # mismatch the column degrades to string.
                    for j, col in enumerate(row):
                        guess = guess_type(col)
                        if columns[j]["type"] is None:
                            columns[j]["type"] = guess
                        elif columns[j]["type"] != guess:
                            columns[j]["type"] = TYPE_STRING
                rows.append(dict(zip(column_names, row)))
            return json_dumps({"columns": columns, "rows": rows}), None
        except KeyboardInterrupt:
            if connection:
                connection.interrupt()
            return None, "Query cancelled by user."
        finally:
            # Always release sqlite resources, even on error paths.
            if cursor:
                cursor.close()
            if connection:
                connection.close()
register(ParameterSupportedResults)
| 29.864516
| 110
| 0.62022
| 2,069
| 0.446965
| 0
| 0
| 87
| 0.018795
| 0
| 0
| 476
| 0.10283
|
c5116b08a2ee4021d6233bcbecfc48a6ba698572
| 1,111
|
py
|
Python
|
notes/migrations/0005_auto_20160130_0015.py
|
nicbou/markdown-notes
|
a5d398b032b7a837909b684bb3121c7b68f49e7b
|
[
"CC0-1.0"
] | 121
|
2015-04-11T20:59:48.000Z
|
2021-05-12T02:15:36.000Z
|
notes/migrations/0005_auto_20160130_0015.py
|
nicbou/markdown-notes
|
a5d398b032b7a837909b684bb3121c7b68f49e7b
|
[
"CC0-1.0"
] | 56
|
2015-08-10T08:16:35.000Z
|
2022-03-11T23:12:33.000Z
|
notes/migrations/0005_auto_20160130_0015.py
|
nicbou/markdown-notes
|
a5d398b032b7a837909b684bb3121c7b68f49e7b
|
[
"CC0-1.0"
] | 32
|
2015-08-11T02:50:44.000Z
|
2021-09-02T10:15:00.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
    # Introduces the Notebook model and adds a nullable `notebook` FK to Note
    # (SET_NULL on delete, so deleting a notebook orphans its notes rather
    # than deleting them). Auto-generated; do not restyle.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('notes', '0004_auto_20151022_1517'),
    ]

    operations = [
        migrations.CreateModel(
            name='Notebook',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=255)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['title'],
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='note',
            name='notebook',
            field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='notes.Notebook', null=True),
            preserve_default=True,
        ),
    ]
| 30.861111
| 126
| 0.591359
| 936
| 0.842484
| 0
| 0
| 0
| 0
| 0
| 0
| 135
| 0.121512
|
c5118009a2cf132e4b87f2f696c2abdd36248815
| 5,479
|
py
|
Python
|
Coursework_02/Q3/airport/scenarios.py
|
eBe02/COMP0037-21_22
|
c0872548ff4b653e3f786734666838813db2149a
|
[
"Apache-2.0"
] | null | null | null |
Coursework_02/Q3/airport/scenarios.py
|
eBe02/COMP0037-21_22
|
c0872548ff4b653e3f786734666838813db2149a
|
[
"Apache-2.0"
] | null | null | null |
Coursework_02/Q3/airport/scenarios.py
|
eBe02/COMP0037-21_22
|
c0872548ff4b653e3f786734666838813db2149a
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on 25 Jan 2022
@author: ucacsjj
'''
from .airport_map import MapCellType
from .airport_map import AirportMap
# This file contains a set of functions which build different maps. Only
# two of these are needed for the coursework. Others are ones which were
# used for developing and testing the algorithms and might be of use.
# Helper function which fills sets the type of all cells in a rectangular
# region to have the same type.
def _set_block_to_single_type(airport_map, cell_type, start_coords, end_coords):
for x in range(start_coords[0], end_coords[0] + 1):
for y in range(start_coords[1], end_coords[1] + 1):
airport_map.set_cell_type(x, y, cell_type)
# This scenario can be used to test the different traversability costs
def test_traversability_costs_scenario():
    """Map for exercising traversability costs: a wall with one secret door."""
    world = AirportMap("Test Traversabilty Map", 15, 15)
    # Wall across most of row 7 (x = 0..13), with a secret door in the middle.
    for column in range(0, 14):
        world.set_wall(column, 7)
    world.add_secret_door(7, 7)
    return world, 200
def one_row_scenario():
    """A single 15x1 corridor ending at a robot end station."""
    world = AirportMap("One Row Scenario", 15, 1)
    world.add_robot_end_station(14, 0, 100)
    return world, 200
def two_row_scenario():
    """A 15x2 map with a zero-reward robot end station in the corner."""
    world = AirportMap("Two Row Scenario", 15, 2)
    world.add_robot_end_station(14, 0, 0)
    return world, 200
def two_2x2_scenario():
    """The smallest useful map: 2x2 with one robot end station."""
    world = AirportMap("2x2 Scenario", 2, 2)
    world.add_robot_end_station(0, 1, 100)
    return world, 800
def test_3x3_scenario():
    """A 3x3 map with one robot end station in a corner."""
    world = AirportMap("3x3 Scenario", 3, 3)
    world.add_robot_end_station(0, 2, 100)
    return world, 800
def three_row_scenario():
    """A 15x3 map with a single wall cell and a zero-reward end station."""
    world = AirportMap("Three Row Scenario", 15, 3)
    world.set_cell_type(2, 1, MapCellType.WALL)
    world.add_robot_end_station(14, 0, 0)
    return world, 200
def corridor_scenario():
    """A walled 20x7 corridor with chairs and end stations along one edge."""
    world = AirportMap("Three Row Scenario", 20, 7)
    # Top and bottom walls.
    _set_block_to_single_type(world, MapCellType.WALL, (0, 0), (19, 0))
    _set_block_to_single_type(world, MapCellType.WALL, (0, 6), (19, 6))
    # A short row of chairs near the entrance.
    _set_block_to_single_type(world, MapCellType.CHAIR, (2, 1), (5, 1))
    # End stations stacked on the right-hand edge.
    for row in range(3, 7):
        world.add_robot_end_station(19, row, 100)
    return world, 450
def mini_scenario():
    """A small 15x15 airport with a dividing wall, customs area and amenities."""
    world = AirportMap("Mini Scenario", 15, 15)
    # Dividing wall across row 7, with a customs crossing at x = 5..6.
    for column in range(0, 15):
        world.set_wall(column, 7)
    for column in range(5, 7):
        world.set_customs_area(column, 7)
    world.add_charging_station(4, 4, 1, 1)
    world.add_secret_door(14, 7)
    world.add_toilet(4, 1)
    world.add_robot_end_station(0, 14, 100)
    return world, 800
def full_scenario():
    """The complete 60x40 airport map used by the coursework.

    Returns (airport_map, budget).
    """
    airport_map = AirportMap("Full Scenario", 60, 40)
    # The wall separating the two areas, including the customs area
    # and the secret door
    _set_block_to_single_type(airport_map, MapCellType.WALL, (0, 18), (59, 20))
    _set_block_to_single_type(airport_map, MapCellType.CUSTOMS_AREA, (25, 18), (35, 20))
    _set_block_to_single_type(airport_map, MapCellType.SECRET_DOOR, (59, 18), (59, 20))
    # The reclaim areas, each flanked by a rubbish bin
    airport_map.add_rubbish_bin(2, 33)
    _set_block_to_single_type(airport_map, MapCellType.BAGGAGE_CLAIM, (5, 30), (8, 36))
    airport_map.add_rubbish_bin(11, 33)
    _set_block_to_single_type(airport_map, MapCellType.BAGGAGE_CLAIM, (15, 28), (18, 39))
    airport_map.add_rubbish_bin(22, 38)
    _set_block_to_single_type(airport_map, MapCellType.BAGGAGE_CLAIM, (25, 28), (28, 39))
    airport_map.add_rubbish_bin(31, 38)
    _set_block_to_single_type(airport_map, MapCellType.BAGGAGE_CLAIM, (35, 28), (38, 39))
    airport_map.add_rubbish_bin(41, 38)
    _set_block_to_single_type(airport_map, MapCellType.BAGGAGE_CLAIM, (45, 28), (48, 39))
    airport_map.add_rubbish_bin(51, 33)
    _set_block_to_single_type(airport_map, MapCellType.BAGGAGE_CLAIM, (55, 30), (58, 36))
    # Add the horizontal chairs with bins at either end
    for i in range(5):
        y_coord = 2 + i * 3
        _set_block_to_single_type(airport_map, MapCellType.CHAIR, (5, y_coord), (18, y_coord))
        airport_map.add_rubbish_bin(4, y_coord)
        airport_map.add_rubbish_bin(19, y_coord)
    # Add the vertical chairs with bins at either end
    for i in range(5):
        x_coord = 42 + i * 3
        _set_block_to_single_type(airport_map, MapCellType.CHAIR, (x_coord, 2), (x_coord, 14))
        airport_map.add_rubbish_bin(x_coord, 1)
        airport_map.add_rubbish_bin(x_coord, 15)
    # The toilets. These generate rubbish to be collected
    airport_map.add_toilet(0, 21)
    airport_map.add_toilet(0, 17)
    airport_map.add_toilet(38, 0)
    airport_map.add_toilet(58, 21)
    # These charge the robot back up again
    airport_map.add_charging_station(1, 38, 15, 1)
    airport_map.add_charging_station(58, 38, 15, 1)
    airport_map.add_charging_station(36, 0, 30, 1)
    airport_map.add_charging_station(59, 0, 40, 1)
    airport_map.add_robot_end_station(1, 21, 50)
    return airport_map, 800
| 32.613095
| 94
| 0.686804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,075
| 0.196204
|
c511d2974df6ea839e2f08eec91ae6a38dd211bf
| 332
|
py
|
Python
|
setup.py
|
abkfenris/adm_locations
|
266915ab7e7559bd4c66d4090bcd69a2a93ab563
|
[
"MIT"
] | null | null | null |
setup.py
|
abkfenris/adm_locations
|
266915ab7e7559bd4c66d4090bcd69a2a93ab563
|
[
"MIT"
] | null | null | null |
setup.py
|
abkfenris/adm_locations
|
266915ab7e7559bd4c66d4090bcd69a2a93ab563
|
[
"MIT"
] | null | null | null |
from setuptools import setup
# Package metadata for the `csv_locate` command-line tool.
setup(
    name='csv_locate',
    version='0.1',
    py_modules=['csv_to_json'],
    install_requires=['click', 'colorama', 'geocoder', 'geojson', 'jinja2'],
    entry_points='''
        [console_scripts]
        csv_locate=csv_to_json:convert
    ''',
)
| 17.473684
| 38
| 0.539157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 150
| 0.451807
|
c511e84604115905acf9bda9b841b2099fbb06a7
| 1,600
|
py
|
Python
|
app.py
|
LuizGGoncalves/Corona-Graphics-Python
|
e2374abcff6e67f226a3e8f3d5a3dec5b4f2a5a8
|
[
"MIT"
] | null | null | null |
app.py
|
LuizGGoncalves/Corona-Graphics-Python
|
e2374abcff6e67f226a3e8f3d5a3dec5b4f2a5a8
|
[
"MIT"
] | null | null | null |
app.py
|
LuizGGoncalves/Corona-Graphics-Python
|
e2374abcff6e67f226a3e8f3d5a3dec5b4f2a5a8
|
[
"MIT"
] | null | null | null |
from flask import Flask
from Config import app_config, app_active
from flask import render_template
from flask_sqlalchemy import SQLAlchemy
import Forms
import LDB
import gGraficos
config = app_config[app_active]
def create_app(config_name):
    """Build and configure the Flask application.

    NOTE(review): the active configuration actually comes from the
    module-level `app_active`; `config_name` is currently unused but the
    parameter is kept for backward compatibility with existing callers.
    """
    app = Flask(__name__, template_folder='templates')
    app.secret_key = config.SECRET
    app.config.from_object(app_config[app_active])
    app.config.from_pyfile('Config.py')
    # TODO(review): database credentials belong in configuration, not source.
    app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+mysqlconnector://root:@localhost:3306/csv'
    # SQLAlchemy(app) already binds the extension; the previous extra
    # db.__init__(app) re-ran the constructor redundantly and was removed.
    db = SQLAlchemy(app)

    @app.route('/')
    def index():
        """Landing page."""
        return render_template('index.html')

    @app.route('/grafico', methods=["POST", "GET"])
    def grafico():
        """Chart page: renders a placeholder until the filter form is submitted."""
        info = Forms.info()
        if info.idade.data is None and info.sexo.data is None:
            # No filters submitted yet: show the placeholder figure.
            arq = 'Figura'
            if info.situacao_cidade.data is None:
                # Fixed: original assigned to `.date` (a typo), which had no effect.
                info.situacao_cidade.data = False
            return render_template('grafico.html',
                                   info=info,
                                   arq=arq)
        dados = LDB.leitura(info.idade.data, info.sexo.data, info.cidade.data, info.situacao_cidade.data)
        if len(dados) == 2:
            # Pad a missing third series so the chart call below stays safe.
            dados.append((0, 0))
        arq = gGraficos.criarimg(dados[0][0], dados[0][1], dados[1][0], dados[1][1],
                                 dados[2][0], dados[2][1], tipo=info.tipo.data)
        arq = 'png-grafico/' + str(arq)
        return render_template('grafico.html',
                               info=info,
                               arq=arq)

    return app
| 34.782609
| 134
| 0.59
| 0
| 0
| 0
| 0
| 1,006
| 0.62875
| 0
| 0
| 182
| 0.11375
|
c5137e8834c331e474c1e9e3483b2dcf01683c8b
| 4,852
|
py
|
Python
|
kitti_meters/frustum.py
|
HaochengWan/PVT
|
95818d303ee63084f044a057344b2049d1fa4492
|
[
"MIT"
] | 27
|
2021-12-14T02:10:37.000Z
|
2022-03-31T09:54:09.000Z
|
kitti_meters/frustum.py
|
HaochengWan/PVT
|
95818d303ee63084f044a057344b2049d1fa4492
|
[
"MIT"
] | 3
|
2022-02-20T09:42:01.000Z
|
2022-03-21T07:32:46.000Z
|
kitti_meters/frustum.py
|
HaochengWan/PVT
|
95818d303ee63084f044a057344b2049d1fa4492
|
[
"MIT"
] | 2
|
2021-12-30T05:43:41.000Z
|
2022-02-15T13:47:21.000Z
|
import numpy as np
import torch
from modules.frustum import get_box_corners_3d
from kitti_meters.util import get_box_iou_3d
__all__ = ['MeterFrustumKitti']
class MeterFrustumKitti:
    """Running meter for frustum-based KITTI detection metrics.

    Accumulates per-batch statistics and reports the metric selected at
    construction time (box IoU averages, segmentation accuracy, or
    IoU-threshold detection accuracy, optionally per class).
    """

    def __init__(self, num_heading_angle_bins, num_size_templates, size_templates, class_name_to_class_id,
                 metric='iou_3d'):
        super().__init__()
        assert metric in ['iou_2d', 'iou_3d', 'accuracy', 'iou_3d_accuracy', 'iou_3d_class_accuracy']
        self.metric = metric
        self.num_heading_angle_bins = num_heading_angle_bins
        self.num_size_templates = num_size_templates
        # (NS, 3) template box sizes.
        self.size_templates = size_templates.view(self.num_size_templates, 3)
        # Centers of NH evenly spaced heading bins over [0, 2*pi).
        self.heading_angle_bin_centers = torch.arange(0, 2 * np.pi, 2 * np.pi / self.num_heading_angle_bins)
        self.class_name_to_class_id = class_name_to_class_id
        self.reset()

    def reset(self):
        """Zero every running counter (per-class counters included)."""
        self.total_seen_num = 0
        self.total_correct_num = 0
        self.iou_3d_corrent_num = 0
        self.iou_2d_sum = 0
        self.iou_3d_sum = 0
        self.iou_3d_corrent_num_per_class = {cls: 0 for cls in self.class_name_to_class_id.keys()}
        self.total_seen_num_per_class = {cls: 0 for cls in self.class_name_to_class_id.keys()}

    def update(self, outputs, targets):
        """Fold one batch of predictions/targets into the running counters.

        For 'accuracy' only mask logits are compared; otherwise predicted and
        target 3D boxes are decoded (bin center + residual) and their IoU
        accumulated. Expected tensor shapes are noted inline (B = batch).
        """
        if self.metric == 'accuracy':
            mask_logits = outputs['mask_logits']
            mask_logits_target = targets['mask_logits']
            self.total_seen_num += mask_logits_target.numel()
            self.total_correct_num += torch.sum(mask_logits.argmax(dim=1) == mask_logits_target).item()
        else:
            center = outputs['center']  # (B, 3)
            heading_scores = outputs['heading_scores']  # (B, NH)
            heading_residuals = outputs['heading_residuals']  # (B, NH)
            size_scores = outputs['size_scores']  # (B, NS)
            size_residuals = outputs['size_residuals']  # (B, NS, 3)
            center_target = targets['center']  # (B, 3)
            heading_bin_id_target = targets['heading_bin_id']  # (B, )
            heading_residual_target = targets['heading_residual']  # (B, )
            size_template_id_target = targets['size_template_id']  # (B, )
            size_residual_target = targets['size_residual']  # (B, 3)
            class_id_target = targets['class_id'].cpu().numpy()  # (B, )
            batch_size = center.size(0)
            batch_id = torch.arange(batch_size, device=center.device)
            self.size_templates = self.size_templates.to(center.device)
            self.heading_angle_bin_centers = self.heading_angle_bin_centers.to(center.device)
            # Decode prediction: pick best bin/template, add its residual.
            heading_bin_id = torch.argmax(heading_scores, dim=1)
            heading = self.heading_angle_bin_centers[heading_bin_id] + heading_residuals[batch_id, heading_bin_id]
            size_template_id = torch.argmax(size_scores, dim=1)
            size = self.size_templates[size_template_id] + size_residuals[batch_id, size_template_id]  # (B, 3)
            corners = get_box_corners_3d(centers=center, headings=heading, sizes=size, with_flip=False)  # (B, 8, 3)
            # Decode ground truth the same way from the target bin ids.
            heading_target = self.heading_angle_bin_centers[heading_bin_id_target] + heading_residual_target  # (B, )
            size_target = self.size_templates[size_template_id_target] + size_residual_target  # (B, 3)
            corners_target = get_box_corners_3d(centers=center_target, headings=heading_target,
                                                sizes=size_target, with_flip=False)  # (B, 8, 3)
            iou_3d, iou_2d = get_box_iou_3d(corners.cpu().detach().numpy(), corners_target.cpu().detach().numpy())
            self.iou_2d_sum += iou_2d.sum()
            self.iou_3d_sum += iou_3d.sum()
            # KITTI convention: 0.7 IoU threshold overall; per class, 0.7 for
            # 'Car' and 0.5 for the remaining classes.
            self.iou_3d_corrent_num += np.sum(iou_3d >= 0.7)
            self.total_seen_num += batch_size
            for cls, cls_id in self.class_name_to_class_id.items():
                mask = (class_id_target == cls_id)
                self.iou_3d_corrent_num_per_class[cls] += np.sum(iou_3d[mask] >= (0.7 if cls == 'Car' else 0.5))
                self.total_seen_num_per_class[cls] += np.sum(mask)

    def compute(self):
        """Return the configured metric from the accumulated counters."""
        if self.metric == 'iou_3d':
            return self.iou_3d_sum / self.total_seen_num
        elif self.metric == 'iou_2d':
            return self.iou_2d_sum / self.total_seen_num
        elif self.metric == 'accuracy':
            return self.total_correct_num / self.total_seen_num
        elif self.metric == 'iou_3d_accuracy':
            return self.iou_3d_corrent_num / self.total_seen_num
        elif self.metric == 'iou_3d_class_accuracy':
            # Mean over classes of per-class accuracy (max(..., 1) guards
            # against division by zero for unseen classes).
            return sum(self.iou_3d_corrent_num_per_class[cls] / max(self.total_seen_num_per_class[cls], 1)
                       for cls in self.class_name_to_class_id.keys()) / len(self.class_name_to_class_id)
        else:
            raise KeyError
| 54.516854
| 117
| 0.650041
| 4,692
| 0.967024
| 0
| 0
| 0
| 0
| 0
| 0
| 493
| 0.101608
|