hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4f3acba8758e2f7d838cec407a87762f476e5617 | 2,846 | py | Python | analysis/merge_eval_files.py | xiaoeric/bmusegan | 3b54448bb488d7426c1fc4c0f9a65d373dc8c05f | [
"MIT"
] | 52 | 2018-04-19T04:59:22.000Z | 2022-02-21T10:06:54.000Z | analysis/merge_eval_files.py | xiaoeric/bmusegan | 3b54448bb488d7426c1fc4c0f9a65d373dc8c05f | [
"MIT"
] | 9 | 2018-10-30T07:35:15.000Z | 2021-08-06T08:33:03.000Z | analysis/merge_eval_files.py | xiaoeric/bmusegan | 3b54448bb488d7426c1fc4c0f9a65d373dc8c05f | [
"MIT"
] | 14 | 2018-04-26T06:06:37.000Z | 2021-08-05T08:19:37.000Z | """This file provides functions for merging evaluation files in a directory
"""
import os
import joblib
import numpy as np
# Names of the experiment runs whose evaluation results are merged.
EXP_NAMES = [
    'lastfm_alternative_train_g_proposed_d_proposed_r_proposed_round',
    'lastfm_alternative_train_g_proposed_d_proposed_r_proposed_bernoulli',
    'lastfm_alternative_train_joint_g_proposed_d_proposed_r_proposed_round',
    'lastfm_alternative_train_joint_g_proposed_d_proposed_r_proposed_bernoulli',
    'lastfm_alternative_end2end_g_proposed_small_d_proposed_r_proposed_round',
    'lastfm_alternative_end2end_g_proposed_small_d_proposed_r_proposed_'
    'bernoulli',
    'lastfm_alternative_pretrain_g_proposed_d_proposed',
    'lastfm_alternative_pretrain_g_proposed_d_ablated',
    'lastfm_alternative_pretrain_g_proposed_d_baseline',
]
# Source directories: <repo root>/exp/<exp_name>/eval (one per experiment).
SRC_DIRS = [
    os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
                 'exp', exp_name, 'eval')
    for exp_name in EXP_NAMES
]
# Destination .npz files, parallel to EXP_NAMES (same index order).
DST_PATHS = [
    os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data',
                 'eval_training_process', exp_name + '.npz')
    for exp_name in EXP_NAMES
]
def get_npy_files(target_dir):
    """Return the names of all the .npy files in *target_dir*.

    Note: this returns bare file names, not full paths; callers join
    them with the directory themselves (see load()).
    """
    return [entry for entry in os.listdir(target_dir)
            if entry.endswith('.npy')]
def load(filepath, eval_dir):
    """Load an evaluation file and return the stored data.

    :param filepath: file name such as "1000.npy"; the training step is
        parsed from the stem (e.g. 1000).
    :param eval_dir: directory containing the file.
    :return: tuple ``(step, score_matrix_mean, score_pair_matrix_mean)``.
    """
    step = int(os.path.splitext(filepath)[0])
    # The files store a dict wrapped in a 0-d object array (hence the
    # [()] unwrapping), so allow_pickle=True is required to read them on
    # NumPy >= 1.16.3 where pickle loading defaults to disabled.
    data = np.load(os.path.join(eval_dir, filepath), allow_pickle=True)[()]
    return (step, data['score_matrix_mean'],
            data['score_pair_matrix_mean'])
def main():
    """Merge the per-step .npy evaluation files of every experiment into
    one .npz file per experiment, sorted by training step."""
    for idx, eval_dir in enumerate(SRC_DIRS):
        filepaths = get_npy_files(eval_dir)
        # Load all evaluation files in parallel; each item is
        # (step, score_matrix_mean, score_pair_matrix_mean).
        collected = joblib.Parallel(n_jobs=30, verbose=5)(
            joblib.delayed(load)(filepath, eval_dir) for filepath in filepaths)
        steps = []
        score_matrix_means = []
        score_pair_matrix_means = []
        for item in collected:
            steps.append(item[0])
            score_matrix_means.append(item[1])
            score_pair_matrix_means.append(item[2])
        steps = np.array(steps)
        score_matrix_means = np.stack(score_matrix_means)
        score_pair_matrix_means = np.stack(score_pair_matrix_means)
        # Sort everything by step so the merged arrays are chronological.
        argsort = steps.argsort()
        steps = steps[argsort]
        score_matrix_means = score_matrix_means[argsort]
        score_pair_matrix_means = score_pair_matrix_means[argsort]
        # DST_PATHS is index-aligned with SRC_DIRS.
        np.savez(DST_PATHS[idx], steps=steps,
                 score_matrix_means=score_matrix_means,
                 score_pair_matrix_means=score_pair_matrix_means)
if __name__ == "__main__":
    main()
| 35.135802 | 81 | 0.682713 |
44e191cc4e9b39673336a211ab40278be721f11f | 252 | py | Python | setup.py | Newsha612/pip-package | cdf7b34918547e0da620611a30bdd8998ff695b9 | [
"MIT"
] | null | null | null | setup.py | Newsha612/pip-package | cdf7b34918547e0da620611a30bdd8998ff695b9 | [
"MIT"
] | null | null | null | setup.py | Newsha612/pip-package | cdf7b34918547e0da620611a30bdd8998ff695b9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed May 8 13:04:20 2019
@author: newsh
"""
from setuptools import setup
# Package metadata for the gym_Path environment.
# NOTE(review): removed the trailing dataset residue ("| 18 | 39 | ...")
# that was fused onto the closing parenthesis and made the file
# syntactically invalid.
setup(name='gym_Path',
      version='0.0.1',
      install_requires=['gym>=0.2.3',
                        'hfo_py>=0.2']
      )
29e0c69bf58dc8c15ac5e70b71afe031a7311b2f | 1,136 | py | Python | mathematics_dataset/generate_settings.py | PhysicsTeacher13/Mathematics_Dataset | 7f13bf661e6f36d61542bf0360b27f31eb9efe20 | [
"Apache-2.0"
] | 1,577 | 2019-04-03T10:05:30.000Z | 2022-03-29T17:56:14.000Z | mathematics_dataset/generate_settings.py | PhysicsTeacher13/Mathematics_Dataset | 7f13bf661e6f36d61542bf0360b27f31eb9efe20 | [
"Apache-2.0"
] | 15 | 2019-04-03T18:55:03.000Z | 2022-01-12T16:04:12.000Z | mathematics_dataset/generate_settings.py | LaudateCorpus1/mathematics_dataset | e91dba649b843597c14b9d84dfe92bff79b7d299 | [
"Apache-2.0"
] | 226 | 2019-04-03T13:28:36.000Z | 2022-03-27T18:41:01.000Z | # Copyright 2018 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Settings for generation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import string
# Maximum lengths (in characters) for generated questions and answers.
MAX_QUESTION_LENGTH = 160
MAX_ANSWER_LENGTH = 30
# Vocabulary for encoding questions: an empty padding symbol, space, and
# all ASCII letters, digits and punctuation.
QUESTION_CHARS = (
    ['', ' '] + list(string.ascii_letters + string.digits + string.punctuation))
# Index of the padding (empty-string) symbol.
EMPTY_INDEX = QUESTION_CHARS.index('')
NUM_INDICES = len(QUESTION_CHARS)
# Bidirectional char <-> index lookup tables.
CHAR_TO_INDEX = {char: index for index, char in enumerate(QUESTION_CHARS)}
INDEX_TO_CHAR = {index: char for index, char in enumerate(QUESTION_CHARS)}
| 34.424242 | 80 | 0.772007 |
74022a34beb2e38928d649e9f748eac77abf725e | 220 | py | Python | serwer/app/__init__.py | DawidPiechota/Mapa-literacka-dla-korpus-w | c2460c77bb2165d32bb9c2ad917bc0f69c0bd0e0 | [
"MIT"
] | null | null | null | serwer/app/__init__.py | DawidPiechota/Mapa-literacka-dla-korpus-w | c2460c77bb2165d32bb9c2ad917bc0f69c0bd0e0 | [
"MIT"
] | 2 | 2021-01-31T11:25:31.000Z | 2021-02-01T17:38:02.000Z | serwer/app/__init__.py | DawidPiechota/Mapa-literacka-dla-korpus-w | c2460c77bb2165d32bb9c2ad917bc0f69c0bd0e0 | [
"MIT"
] | 1 | 2021-03-22T20:20:56.000Z | 2021-03-22T20:20:56.000Z | from app.controllers import init_routes
from flask_cors import CORS
from app.services.app import app as flask_app
from app.services.db import init_db
def init_app():
    """Wire up the Flask application: enable CORS, initialise the
    database, then register the routes."""
    CORS(flask_app)  # allow cross-origin requests to the API
    init_db()
    init_routes()
| 20 | 45 | 0.768182 |
1ba71007f9ed003b7c98d8c32698b4f37ee58e79 | 462 | py | Python | test_etl.py | kokare-darshan/incubyte | a3a419ac75b2fb116ece2b50f6947c1935131061 | [
"MIT"
] | null | null | null | test_etl.py | kokare-darshan/incubyte | a3a419ac75b2fb116ece2b50f6947c1935131061 | [
"MIT"
] | null | null | null | test_etl.py | kokare-darshan/incubyte | a3a419ac75b2fb116ece2b50f6947c1935131061 | [
"MIT"
] | null | null | null | import unittest
from etl import extract, MongoComm, mongo_srv_link
class TestETL(unittest.TestCase):
def test_extract(self):
self.assertIsNone(extract())
def test_mongo(self):
mongo = MongoComm(mongo_srv_link)
data = {"First Name": "Darshan", "Last Name": "Kokare", "Email":"kokare.darshan9@gmail.com"}
collection_name = "test_country"
ret = mongo.insert_data(data, collection_name)
self.assertTrue(ret) | 33 | 100 | 0.681818 |
11374fe75f0f6f379ab0386b42f764594f5d75db | 1,349 | py | Python | 2020/20200407/practice02.py | cbchoi/SIT32004 | 699598fc321845e46e5cce81c6c2a60999698e6e | [
"MIT"
] | 1 | 2019-03-04T05:35:37.000Z | 2019-03-04T05:35:37.000Z | 2020/20200407/practice02.py | cbchoi/SIT32004 | 699598fc321845e46e5cce81c6c2a60999698e6e | [
"MIT"
] | null | null | null | 2020/20200407/practice02.py | cbchoi/SIT32004 | 699598fc321845e46e5cce81c6c2a60999698e6e | [
"MIT"
] | 6 | 2019-03-10T23:39:10.000Z | 2020-03-20T11:37:12.000Z | import sqlite3
# Connect to SQLite DB (the file is created on first use).
con = sqlite3.connect("retak2e.db")
with con:
    # Create Cursor from connection object
    cur = con.cursor()
    # Table schema, for reference (created once, outside this script):
    # CREATE TABLE customer(id integer primary key autoincrement,
    #                       name text not null, category integer, region text);
    infoList = [('r.choi', 1, 'Seoul'), ('H.Kim', 2, 'Busan'), ('N.Lee', 2, 'Daejeon')]
    # Show the table contents before the inserts.
    cur.execute("SELECT * FROM customer")
    for item in cur.fetchall():
        print(item)
    # Parameterized bulk insert: replaces the previous f-string
    # interpolation, which was vulnerable to SQL injection and quoting
    # bugs. Inserts the same rows with the same values.
    cur.executemany(
        "insert into customer (name, category, region) values (?, ?, ?)",
        infoList)
    # Show the table contents after the inserts.
    cur.execute("SELECT * FROM customer")
    for item in cur.fetchall():
        print(item)
    # Reflect to Database
    con.commit()
| 32.902439 | 131 | 0.633062 |
2eec65635582e626f940cea23f62bcf5bc85d60a | 8,889 | py | Python | scheduler/scheduler.py | weakiwi/haipproxy | 15293d1d2ccd7020fff5935719a56baf00cfeb82 | [
"MIT"
] | 1 | 2018-03-20T09:14:33.000Z | 2018-03-20T09:14:33.000Z | scheduler/scheduler.py | weakiwi/haipproxy | 15293d1d2ccd7020fff5935719a56baf00cfeb82 | [
"MIT"
] | null | null | null | scheduler/scheduler.py | weakiwi/haipproxy | 15293d1d2ccd7020fff5935719a56baf00cfeb82 | [
"MIT"
] | 1 | 2018-09-08T08:06:54.000Z | 2018-09-08T08:06:54.000Z | """
This module schedules all the tasks according to config.rules.
"""
import time
from multiprocessing import Pool
import click
import schedule
from twisted.internet import reactor
from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging
from scrapy.utils.project import get_project_settings
from client import SquidClient
# from logger import (
# crawler_logger, scheduler_logger,
# client_logger)
from config.rules import (
CRAWLER_TASKS, VALIDATOR_TASKS,
CRAWLER_TASK_MAPS, TEMP_TASK_MAPS)
from crawler.spiders import all_spiders
from crawler.validators import all_validators
from config.settings import (
SPIDER_COMMON_TASK, SPIDER_AJAX_TASK,
SPIDER_GFW_TASK, SPIDER_AJAX_GFW_TASK,
TEMP_HTTP_QUEUE, TEMP_HTTPS_QUEUE,
TIMER_RECORDER, TTL_VALIDATED_RESOURCE)
from utils import (
get_redis_conn, acquire_lock,
release_lock)
# Task queues / spiders used when the command line names none explicitly.
DEFAULT_CRAWLER_TASKS = [
    SPIDER_COMMON_TASK, SPIDER_AJAX_TASK,
    SPIDER_GFW_TASK, SPIDER_AJAX_GFW_TASK]
DEFAULT_VALIDATORS_TASKS = [TEMP_HTTP_QUEUE, TEMP_HTTPS_QUEUE]
DEFAULT_CRAWLERS = all_spiders
DEFAULT_VALIDATORS = all_validators
class BaseCase:
    """Pairs a spider with the logic for matching it against a task name."""
    def __init__(self, spider):
        self.spider = spider
    def check(self, task, maps):
        """Return True when *task*, looked up in *maps*, names this
        spider's task queue."""
        return maps.get(task) == self.spider.task_queue
class BaseScheduler:
    """Common scheduling machinery; subclasses implement
    schedule_task_with_lock()."""
    def __init__(self, name, tasks, task_queues=None):
        """
        init function for schedulers.
        :param name: scheduler name, generally the value is usage of the scheduler
        :param tasks: tasks in config.rules
        :param task_queues: for crawler, the value is task_queue,while for validator, it's task name
        """
        self.name = name
        self.tasks = tasks
        self.task_queues = list() if not task_queues else task_queues
    def schedule_with_delay(self):
        # Register each task at its own interval, then block forever
        # running pending jobs. This method never returns.
        for task in self.tasks:
            interval = task.get('interval')
            schedule.every(interval).minutes.do(self.schedule_task_with_lock, task)
        while True:
            schedule.run_pending()
            time.sleep(1)
    def schedule_all_right_now(self):
        # Fire every task once, immediately, one process per task.
        with Pool() as pool:
            pool.map(self.schedule_task_with_lock, self.tasks)
    def get_lock(self, conn, task):
        # Acquire the per-task redis lock; returns None for disabled or
        # non-selected tasks, otherwise the lock identifier.
        if not task.get('enable'):
            return None
        task_queue = task.get('task_queue')
        if task_queue not in self.task_queues:
            return None
        task_name = task.get('name')
        lock_indentifier = acquire_lock(conn, task_name)
        return lock_indentifier
    def schedule_task_with_lock(self, task):
        # Subclass hook: schedule one task under a redis lock.
        raise NotImplementedError
class CrawlerScheduler(BaseScheduler):
    def schedule_task_with_lock(self, task):
        """Crawler scheduler filters tasks according to task type"""
        # Returns None when the task is skipped (disabled, not selected,
        # or scheduled too recently), False when the lock could not be
        # acquired, True when URLs were pushed to redis.
        if not task.get('enable'):
            return None
        task_queue = task.get('task_queue')
        if task_queue not in self.task_queues:
            return None
        conn = get_redis_conn()
        task_name = task.get('name')
        interval = task.get('interval')
        urls = task.get('resource')
        lock_indentifier = acquire_lock(conn, task_name)
        if not lock_indentifier:
            return False
        pipe = conn.pipeline(True)
        try:
            now = int(time.time())
            # Last-run timestamp for this task, stored in TIMER_RECORDER.
            pipe.hget(TIMER_RECORDER, task_name)
            r = pipe.execute()[0]
            # Only re-schedule once the configured interval has elapsed.
            if not r or (now - int(r.decode('utf-8'))) >= interval * 60:
                pipe.lpush(task_queue, *urls)
                pipe.hset(TIMER_RECORDER, task_name, now)
                pipe.execute()
                # scheduler_logger.info('crawler task {} has been stored into redis successfully'.format(task_name))
                return True
            else:
                return None
        finally:
            release_lock(conn, task_name, lock_indentifier)
class ValidatorScheduler(BaseScheduler):
    def schedule_task_with_lock(self, task):
        """Validator scheduler filters tasks according to task name
        since it's task name stands for task type"""
        # Same return convention as CrawlerScheduler: None = skipped,
        # False = lock not acquired, True = proxies queued.
        if not task.get('enable'):
            return None
        task_queue = task.get('task_queue')
        if task_queue not in self.task_queues:
            return None
        conn = get_redis_conn()
        interval = task.get('interval')
        task_name = task.get('name')
        resource_queue = task.get('resource')
        lock_indentifier = acquire_lock(conn, task_name)
        if not lock_indentifier:
            return False
        pipe = conn.pipeline(True)
        try:
            now = int(time.time())
            pipe.hget(TIMER_RECORDER, task_name)
            # Fetch all proxies from the scored resource set, best first.
            pipe.zrevrangebyscore(resource_queue, '+inf', '-inf')
            r, proxies = pipe.execute()
            if not r or (now - int(r.decode('utf-8'))) >= interval * 60:
                if not proxies:
                    # scheduler_logger.warning('fetched no proxies from task {}'.format(task_name))
                    print('fetched no proxies from task {}'.format(task_name))
                    return None
                pipe.sadd(task_queue, *proxies)
                pipe.hset(TIMER_RECORDER, task_name, now)
                pipe.execute()
                # scheduler_logger.info('validator task {} has been stored into redis successfully'.format(task_name))
                return True
            else:
                return None
        finally:
            release_lock(conn, task_name, lock_indentifier)
@click.command()
@click.option('--usage', type=click.Choice(['crawler', 'validator']), default='crawler')
@click.argument('task_queues', nargs=-1)
def scheduler_start(usage, task_queues):
    """Start specified scheduler."""
    # scheduler_logger.info('{} scheduler is starting...'.format(usage))
    print('{} scheduler is starting...'.format(usage))
    # Pick the task definitions, allowed queues, name->queue map and
    # scheduler class matching the requested usage.
    default_tasks = CRAWLER_TASKS if usage == 'crawler' else VALIDATOR_TASKS
    default_allow_tasks = DEFAULT_CRAWLER_TASKS if usage == 'crawler' else DEFAULT_VALIDATORS_TASKS
    maps = CRAWLER_TASK_MAPS if usage == 'crawler' else TEMP_TASK_MAPS
    SchedulerCls = CrawlerScheduler if usage == 'crawler' else ValidatorScheduler
    scheduler = SchedulerCls(usage, default_tasks)
    if not task_queues:
        # No explicit queues: run everything allowed for this usage.
        scheduler.task_queues = default_allow_tasks
    else:
        for task_queue in task_queues:
            allow_task_queue = maps.get(task_queue)
            if not allow_task_queue:
                # scheduler_logger.warning('scheduler task {} is an invalid task, the allowed tasks are {}'.format(
                #     task_queue, list(maps.keys())))
                print('scheduler task {} is an invalid task, the allowed tasks are {}'.format(
                    task_queue, list(maps.keys())))
                continue
            scheduler.task_queues.append(allow_task_queue)
    # Run each task once immediately, then loop on the timer forever.
    scheduler.schedule_all_right_now()
    scheduler.schedule_with_delay()
@click.command()
@click.option('--usage', type=click.Choice(['crawler', 'validator']), default='crawler')
@click.argument('tasks', nargs=-1)
def crawler_start(usage, tasks):
    """Start specified spiders or validators from cmd with scrapy core api.
    There are four kinds of spiders: common, ajax, gfw, ajax_gfw.If you don't
    assign any tasks, all the spiders will run.
    """
    maps = CRAWLER_TASK_MAPS if usage == 'crawler' else TEMP_TASK_MAPS
    origin_spiders = DEFAULT_CRAWLERS if usage == 'crawler' else DEFAULT_VALIDATORS
    if not tasks:
        spiders = origin_spiders
    else:
        # Keep only spiders whose task queue matches a requested task.
        spiders = list()
        cases = list(map(BaseCase, origin_spiders))
        for task in tasks:
            for case in cases:
                if case.check(task, maps):
                    spiders.append(case.spider)
                    break
            else:
                # for-else: no case matched this task name.
                # crawler_logger.warning('spider task {} is an invalid task, the allowed tasks are {}'.format(
                #     task, list(maps.keys())))
                pass
    if not spiders:
        #crawler_logger.warning('no spider starts up, please check your task input')
        return
    # Run all selected spiders in one twisted reactor; stop it when the
    # last crawl finishes. reactor.run() blocks until then.
    settings = get_project_settings()
    configure_logging(settings)
    runner = CrawlerRunner(settings)
    for spider in spiders:
        runner.crawl(spider)
    d = runner.join()
    d.addBoth(lambda _: reactor.stop())
    reactor.run()
@click.command()
@click.option('--usage', default='https', help='Usage of squid')
@click.option('--interval', default=TTL_VALIDATED_RESOURCE, help='Updating frequency of squid conf.')
def squid_conf_update(usage, interval):
    """Timertask for updating proxies for squid config file"""
    # NOTE(review): removed trailing dataset residue ("| 36.580247 ...")
    # that was fused onto the final line and broke the syntax; also fixed
    # the "frenquency" typo in the --interval help text.
    # client_logger.info('the updating task is starting...')
    client = SquidClient(usage)
    # Update once immediately, then every `interval` minutes forever.
    client.update_conf()
    schedule.every(interval).minutes.do(client.update_conf)
    while True:
        schedule.run_pending()
        time.sleep(1)
69c186171d0286a275656ad7492a32a289e83e47 | 794 | py | Python | setup.py | RockefellerArchiveCenter/ElectronBonder | eeb7d6fd8146687059c53cc1e6cc243368105d18 | [
"MIT"
] | null | null | null | setup.py | RockefellerArchiveCenter/ElectronBonder | eeb7d6fd8146687059c53cc1e6cc243368105d18 | [
"MIT"
] | 7 | 2020-04-28T22:06:59.000Z | 2021-11-18T16:00:33.000Z | setup.py | RockefellerArchiveCenter/ElectronBonder | eeb7d6fd8146687059c53cc1e6cc243368105d18 | [
"MIT"
] | null | null | null | from setuptools import find_packages, setup
# Read the long description from the README so PyPI renders it.
with open("README.md", "r") as fh:
    long_description = fh.read()
setup(
    name="ElectronBonder",
    url="https://github.com/RockefellerArchiveCenter/ElectronBonder",
    description="Project Electron Client Library",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="Rockefeller Archive Center",
    author_email="archive@rockarch.org",
    version="1.0",
    license='MIT',
    packages=find_packages(),
    zip_safe=False,
    classifiers=[
        'Programming Language :: Python :: 3',
        'Intended Audience :: Other Audience',
        'License :: OSI Approved :: MIT License',
    ],
    # NOTE(review): ">=2.7" conflicts with the Python-3-only classifier
    # above — confirm the intended minimum version.
    python_requires=">=2.7",
    install_requires=[
        "requests",
        "six",
    ],
)
| 27.37931 | 69 | 0.657431 |
427e66192e9b157b95eed1956777cc1640b641ea | 3,664 | py | Python | src/mailman_rss/scraper.py | bluesabre/mailman-rss | 5f80d8ad6fc66fff64c38ba3e9e394c28f22f8a8 | [
"MIT"
] | 2 | 2019-05-02T21:35:35.000Z | 2020-07-28T05:34:11.000Z | src/mailman_rss/scraper.py | bluesabre/mailman-rss | 5f80d8ad6fc66fff64c38ba3e9e394c28f22f8a8 | [
"MIT"
] | null | null | null | src/mailman_rss/scraper.py | bluesabre/mailman-rss | 5f80d8ad6fc66fff64c38ba3e9e394c28f22f8a8 | [
"MIT"
] | 4 | 2018-03-28T17:41:00.000Z | 2020-07-04T09:33:04.000Z | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
from mailman_rss.mailman import MailmanArchive
from collections import namedtuple
from contextlib import closing
from logging import getLogger
import os
from datetime import datetime
import time
logger = getLogger(__file__)
try:
import sqlite3
except ImportError as e:
logger.error(e)
class HeaderScraper(object):
    """ Mailman archive header scraper with cache storage. """
    def __init__(self, archive_url, db_path):
        # archive_url: URL of the Mailman archive to scrape.
        # db_path: path of the sqlite file used as the local cache.
        self.archive_url = archive_url
        self.db_path = db_path
        self._connect()
    def __del__(self):
        # NOTE(review): if _connect() raised, self._conn may not exist
        # and this would raise AttributeError during GC — confirm.
        if self._conn:
            self._conn.close()
    def _connect(self):
        """Open the sqlite cache and create the headers table if needed."""
        self._conn = sqlite3.connect(
            self.db_path,
            detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES
        )
        # Map DATETIME columns through the TIMESTAMP converter so they
        # come back as datetime objects.
        sqlite3.dbapi2.converters['DATETIME'] = (
            sqlite3.dbapi2.converters['TIMESTAMP'])
        self._conn.row_factory = sqlite3.Row
        with self._conn as conn:
            conn.execute("""
            CREATE TABLE IF NOT EXISTS headers (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                author VARCHAR(64) NOT NULL,
                subject VARCHAR(256) NOT NULL,
                url VARCHAR(128) UNIQUE,
                fetched_at DATETIME NOT NULL,
                read_at DATETIME DEFAULT NULL
            )""")
    def fetch(self, max_items=10):
        """ Fetch new posts from archive. """
        # Stops at max_items or at the first header whose URL is already
        # cached (archive headers are assumed newest-first here).
        archive = MailmanArchive(self.archive_url)
        with self._conn as conn:
            c = conn.cursor()
            for index, header in enumerate(archive.iter_headers()):
                if index >= max_items:
                    logger.info("Max fetches reached: {}".format(index))
                    break
                c.execute("SELECT COUNT(*) as c FROM headers WHERE url = ?",
                          (header.url,))
                if int(c.fetchone()[0]):
                    # Record already fetched.
                    logger.info("Last fetched URL: {}".format(header.url))
                    break
                c.execute("INSERT INTO headers "
                          "(author, subject, url, fetched_at) "
                          "VALUES (?, ?, ?, ?)",
                          (header.author, header.subject, header.url,
                           datetime.now()))
    def iter_unread(self, mark_unread=False):
        """ Iterate over unread message headers. """
        # NOTE(review): despite its name, mark_unread=True marks each
        # yielded row as *read* (sets read_at) — consider renaming.
        with self._conn as conn:
            c = conn.cursor()
            c.execute("""
            SELECT * FROM headers
            WHERE read_at IS NULL
            ORDER BY fetched_at DESC
            """)
            for row in c.fetchall():
                yield row
                if mark_unread:
                    c.execute(
                        "UPDATE headers SET read_at = ? WHERE id = ?",
                        (datetime.now(), row[0]))
    def iter_all(self, mark_unread=False):
        """ Iterate over all headers. """
        # NOTE(review): mark_unread is accepted but never used here.
        with self._conn as conn:
            c = conn.cursor()
            c.execute("SELECT * FROM headers")
            for row in c.fetchall():
                yield row
    def count(self, unread=False):
        """ Count the number of fetched headers. """
        # unread=True counts only rows never marked as read.
        with self._conn as conn:
            c = conn.cursor()
            if unread:
                c.execute(
                    "SELECT COUNT(*) as c FROM headers WHERE read_at IS NULL")
            else:
                c.execute("SELECT COUNT(*) FROM headers")
            return int(c.fetchone()[0])
| 34.566038 | 78 | 0.525109 |
6f4385c6ae7fbe5e965768077ee555789b3bd0ad | 3,246 | py | Python | pandajedi/jeditest/addTestTask.py | PanDAWMS/panda-jedi | e4c90563b3b9e9521cb73ccdedaa8ecaa38af5ed | [
"Apache-2.0"
] | 2 | 2020-04-17T10:24:09.000Z | 2020-05-12T17:59:06.000Z | pandajedi/jeditest/addTestTask.py | PanDAWMS/panda-jedi | e4c90563b3b9e9521cb73ccdedaa8ecaa38af5ed | [
"Apache-2.0"
] | 20 | 2015-08-25T13:40:14.000Z | 2022-03-29T12:50:46.000Z | pandajedi/jeditest/addTestTask.py | PanDAWMS/panda-jedi | e4c90563b3b9e9521cb73ccdedaa8ecaa38af5ed | [
"Apache-2.0"
] | 10 | 2015-05-27T14:01:42.000Z | 2021-09-20T17:38:02.000Z | import sys
import uuid
from pandajedi.jedicore.JediTaskBufferInterface import JediTaskBufferInterface
from pandajedi.jedicore.JediTaskSpec import JediTaskSpec
from pandajedi.jedicore.JediDatasetSpec import JediDatasetSpec
# Insert a complete test task into the JEDI task buffer:
# one task, two input datasets (data + DBRelease), an output dataset,
# a log dataset, and a job-parameters template.
tbIF = JediTaskBufferInterface()
tbIF.setupInterface()
# Task definition; the task ID comes from the command line.
task = JediTaskSpec()
task.jediTaskID = sys.argv[1]
task.taskName = 'pandatest.{0}'.format(uuid.uuid4())
task.status = 'defined'
task.userName = 'pandasrv1'
task.vo = 'atlas'
task.prodSourceLabel = 'managed'
task.taskPriority = 100
task.currentPriority = task.taskPriority
task.architecture = 'i686-slc5-gcc43-opt'
task.transUses = 'Atlas-17.2.7'
task.transHome = 'AtlasProduction-17.2.8.10'
task.transPath = 'Reco_trf.py'
task.workQueue_ID = 3
tbIF.insertTask_JEDI(task)
# Primary (master) input dataset; name may be overridden via argv[2].
ds = JediDatasetSpec()
ds.jediTaskID = task.jediTaskID
if len(sys.argv) > 2:
    ds.datasetName = sys.argv[2]
else:
    ds.datasetName = 'data12_8TeV.00214651.physics_Egamma.merge.AOD.f489_m1261'
ds.type = 'input'
ds.vo = task.vo
ds.cloud = 'US'
ds.streamName = 'IN'
ds.status = 'defined'
ds.nFiles = 0
ds.nFilesUsed = 0
ds.nFilesFinished = 0
ds.nFilesFailed = 0
st,datasetID = tbIF.insertDataset_JEDI(ds)
# Secondary DBRelease input, attached to the master dataset above.
ds = JediDatasetSpec()
ds.jediTaskID = task.jediTaskID
ds.datasetName = 'ddo.000001.Atlas.Ideal.DBRelease.v220701'
ds.type = 'input'
ds.vo = task.vo
ds.cloud = 'US'
ds.streamName = 'DBR'
ds.status = 'defined'
ds.nFiles = 0
ds.nFilesUsed = 0
ds.nFilesFinished = 0
ds.nFilesFailed = 0
ds.masterID = datasetID
ds.attributes = 'repeat,nosplit'
tbIF.insertDataset_JEDI(ds)
# Output dataset plus its filename template.
ds = JediDatasetSpec()
ds.jediTaskID = task.jediTaskID
ds.datasetName = 'panda.jeditest.GEN.{0}'.format(uuid.uuid4())
ds.type = 'output'
ds.vo = task.vo
ds.cloud = 'US'
ds.streamName = 'OUT'
ds.status = 'defined'
ds.nFiles = 0
ds.nFilesUsed = 0
ds.nFilesFinished = 0
ds.nFilesFailed = 0
st,datasetID = tbIF.insertDataset_JEDI(ds)
tbIF.insertOutputTemplate_JEDI([{'jediTaskID':task.jediTaskID,
                                 'datasetID':datasetID,
                                 'filenameTemplate':'{0}.${{SN}}.pool.root'.format(ds.datasetName),
                                 'serialNr':1,
                                 'streamName':'OUT',
                                 'outtype':ds.type}])
# Log dataset plus its filename template.
ds = JediDatasetSpec()
ds.jediTaskID = task.jediTaskID
ds.datasetName = 'panda.jeditest.log.{0}'.format(uuid.uuid4())
ds.type = 'log'
ds.vo = task.vo
ds.cloud = 'US'
ds.streamName = 'LOG'
ds.status = 'defined'
ds.nFiles = 0
ds.nFilesUsed = 0
ds.nFilesFinished = 0
ds.nFilesFailed = 0
st,datasetID = tbIF.insertDataset_JEDI(ds)
tbIF.insertOutputTemplate_JEDI([{'jediTaskID':task.jediTaskID,
                                 'datasetID':datasetID,
                                 'filenameTemplate':'{0}.${{SN}}.log.tgz'.format(ds.datasetName),
                                 'serialNr':1,
                                 'streamName':'LOG',
                                 'outtype':ds.type}])
# Job parameter template; ${IN}/${DBR}/${OUT} reference the streams above.
tbIF.insertJobParamsTemplate_JEDI(task.jediTaskID,'inputAODFile=${IN} maxEvents=1000 RunNumber=213816 autoConfiguration=everything preExec="from BTagging.BTaggingFlags import BTaggingFlags;BTaggingFlags.CalibrationTag=\"BTagCalibALL-07-02\"" DBRelease=${DBR} AMITag=p1462 outputNTUP_EMBLLDNFile=${OUT}')
| 29.243243 | 303 | 0.677141 |
56e532ce3c140b1895613b837ae29fabf38b40c4 | 2,169 | py | Python | students/K33402/Barabanov Denis/lr2/lr2/settings.py | dEbAR38/ITMO_ICT_WebDevelopment_2020-2021 | 208cbc6d2b6d40c3043d35ce773a3433b377f671 | [
"MIT"
] | null | null | null | students/K33402/Barabanov Denis/lr2/lr2/settings.py | dEbAR38/ITMO_ICT_WebDevelopment_2020-2021 | 208cbc6d2b6d40c3043d35ce773a3433b377f671 | [
"MIT"
] | null | null | null | students/K33402/Barabanov Denis/lr2/lr2/settings.py | dEbAR38/ITMO_ICT_WebDevelopment_2020-2021 | 208cbc6d2b6d40c3043d35ce773a3433b377f671 | [
"MIT"
] | null | null | null | from pathlib import Path
import os
# Project root directory (two levels above this settings file).
BASE_DIR = Path(__file__).resolve().parent.parent
# SECURITY NOTE(review): the secret key is hard-coded and DEBUG is on —
# fine for coursework, but both must be changed before any deployment.
SECRET_KEY = 'p=00&+=$@30lq%1qt@34&m%@k3vz(js_x8z)m&((qjk^&#(xpy'
DEBUG = True
ALLOWED_HOSTS = []
STATIC_URL = '/static/'
# Custom user model defined in the homework app.
AUTH_USER_MODEL = 'homework.CustomUser'
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'homework.apps.HwSystemConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'lr2.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'lr2.wsgi.application'
# SQLite database stored next to the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization / time-zone settings.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
| 25.821429 | 91 | 0.667589 |
a8f8cf2cd104abac5f1efc801eb892adeec0b298 | 3,753 | py | Python | source/python-vsmclient/vsmclient/v1/devices.py | ramkrsna/virtual-storage-manager | 78125bfb4dd4d78ff96bc3274c8919003769c545 | [
"Apache-2.0"
] | 172 | 2015-01-07T08:40:17.000Z | 2019-02-18T07:01:11.000Z | source/python-vsmclient/vsmclient/v1/devices.py | ramkrsna/virtual-storage-manager | 78125bfb4dd4d78ff96bc3274c8919003769c545 | [
"Apache-2.0"
] | 83 | 2015-03-06T07:47:03.000Z | 2018-07-05T15:10:19.000Z | source/python-vsmclient/vsmclient/v1/devices.py | ramkrsna/virtual-storage-manager | 78125bfb4dd4d78ff96bc3274c8919003769c545 | [
"Apache-2.0"
] | 125 | 2015-01-05T12:22:15.000Z | 2019-02-18T07:01:39.000Z | # Copyright 2014 Intel Corporation, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Devices interface.
"""
import urllib
from vsmclient import base
class Device(base.Resource):
    """A device is a disk on server for osd as data or journal."""
    def __repr__(self):
        # Human-readable form used in logs and the interactive shell.
        return "<Device: {0}>".format(self.id)
class DeviceManager(base.ManagerWithFind):
    """
    Manage :class:`Device` resources.

    NOTE(review): removed the trailing dataset residue ("| 30.024 ...")
    that was fused onto the final ``return smart_info`` line and made the
    file syntactically invalid. The code itself is Python 2
    (``dict.iteritems``, ``urllib.urlencode``) and is kept unchanged.
    """
    resource_class = Device
    def get(self, device_id):
        """
        Get a device.
        :param device_id: The ID of the device.
        :rtype: :class:`Device`
        """
        return self._get("/devices/%s" % device_id, "device")
    def list(self, detailed=False, search_opts=None):
        """
        Get a list of all devices.
        :param detailed: when True, query the /detail endpoint.
        :param search_opts: optional dict of query parameters; falsy
            values are dropped before building the query string.
        :rtype: list of :class:`Device`
        """
        if search_opts is None:
            search_opts = {}
        qparams = {}
        for opt, val in search_opts.iteritems():
            if val:
                qparams[opt] = val
        query_string = "?%s" % urllib.urlencode(qparams) if qparams else ""
        detail = ""
        if detailed:
            detail = "/detail"
        ret = self._list("/devices%s%s" % (detail, query_string),
                         "devices")
        return ret
    def get_available_disks(self, search_opts=None):
        """
        Get a list of available disks.

        With ``search_opts['result_mode'] == 'get_disks'`` returns
        ``{'disks': [...]}``; otherwise verifies that every path in
        ``search_opts['path']`` is known to the server and returns
        ``{'ret': 1}`` on success or ``{'ret': 0, 'message': ...}``
        listing the unavailable paths.
        """
        if search_opts is None:
            search_opts = {}
        qparams = {}
        for opt, val in search_opts.iteritems():
            if val:
                qparams[opt] = val
        query_string = "?%s" % urllib.urlencode(qparams) if qparams else ""
        resp, body = self.api.client.get("/devices/get_available_disks%s" % (query_string))
        body = body.get("available_disks")
        result_mode = search_opts.get('result_mode')
        if result_mode == 'get_disks':
            return {'disks': body}
        ret = {"ret":1}
        message = []
        paths = search_opts.get("path")
        # A disk may be addressed by name, by-path or by-uuid; collect
        # every known alias so any of them can match a requested path.
        disks = []
        for disk in body:
            disk_name = disk.get('disk_name','')
            by_path = disk.get('by_path','')
            by_uuid = disk.get('by_uuid','')
            if disk_name:
                disks.append(disk_name)
            if by_path:
                disks.append(by_path)
            if by_uuid:
                disks.append(by_uuid)
        if paths:
            unaviable_paths = [path for path in paths if path not in disks]
            if unaviable_paths:
                message.append('There is no %s '%(','.join(unaviable_paths)))
        if message:
            ret = {"ret":0,'message':'.'.join(message)}
        return ret
    def get_smart_info(self, search_opts=None):
        """
        Get a dict of smart info for a device, as returned by the
        /devices/get_smart_info endpoint.
        """
        if search_opts is None:
            search_opts = {}
        qparams = {}
        for opt, val in search_opts.iteritems():
            if val:
                qparams[opt] = val
        query_string = "?%s" % urllib.urlencode(qparams) if qparams else ""
        resp, body = self.api.client.get("/devices/get_smart_info%s" % (query_string))
        smart_info = body.get("smart_info")
        return smart_info
97fbb59fce64a0232259b2c03d8c8118ce376ef1 | 264 | py | Python | backend/base/serializers.py | slk007/Ecommerce-django | f588a8d9b69fd7e16624b9c4fc9d2713ae47ac03 | [
"MIT"
] | null | null | null | backend/base/serializers.py | slk007/Ecommerce-django | f588a8d9b69fd7e16624b9c4fc9d2713ae47ac03 | [
"MIT"
] | null | null | null | backend/base/serializers.py | slk007/Ecommerce-django | f588a8d9b69fd7e16624b9c4fc9d2713ae47ac03 | [
"MIT"
] | null | null | null | from dataclasses import field
from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Product
class ProductSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the ``Product`` model."""

    class Meta:
        # "__all__" mirrors the model exactly; replace with an explicit
        # field list if some columns must stay internal to the API.
        model = Product
        fields = "__all__"
| 24 | 53 | 0.768939 |
6643e910916db9c0d89244834e5b9207bbc84e1a | 3,783 | bzl | Python | repositories.bzl | rdesgroppes/rules_jvm | efa25bf3082e2cac752081b2869d5149dfb7133a | [
"Apache-2.0"
] | 8 | 2021-12-13T10:22:03.000Z | 2022-03-31T22:40:48.000Z | repositories.bzl | rdesgroppes/rules_jvm | efa25bf3082e2cac752081b2869d5149dfb7133a | [
"Apache-2.0"
] | 8 | 2021-12-17T12:41:54.000Z | 2022-03-28T13:41:12.000Z | repositories.bzl | rdesgroppes/rules_jvm | efa25bf3082e2cac752081b2869d5149dfb7133a | [
"Apache-2.0"
] | 5 | 2021-12-13T09:50:32.000Z | 2022-03-25T21:49:33.000Z | load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("//java/private:zip_repository.bzl", "zip_repository")
def contrib_rules_jvm_deps():
    """Declare the external repositories used by contrib_rules_jvm.

    Every repository is wrapped in `maybe`, so a workspace that already
    declared a repository with the same name keeps its own pin.
    """
    maybe(
        http_archive,
        name = "apple_rules_lint",
        sha256 = "8feab4b08a958b10cb2abb7f516652cd770b582b36af6477884b3bba1f2f0726",
        strip_prefix = "apple_rules_lint-0.1.1",
        url = "https://github.com/apple/apple_rules_lint/archive/0.1.1.zip",
    )
    maybe(
        http_archive,
        name = "io_bazel_stardoc",
        sha256 = "c9794dcc8026a30ff67cf7cf91ebe245ca294b20b071845d12c192afe243ad72",
        url = "https://github.com/bazelbuild/stardoc/releases/download/0.5.0/stardoc-0.5.0.tar.gz",
    )
    maybe(
        http_archive,
        name = "bazel_skylib",
        sha256 = "07b4117379dde7ab382345c3b0f5edfc6b7cff6c93756eac63da121e0bbcc5de",
        strip_prefix = "bazel-skylib-1.1.1",
        url = "https://github.com/bazelbuild/bazel-skylib/archive/1.1.1.tar.gz",
    )
    maybe(
        http_archive,
        name = "com_google_protobuf",
        sha256 = "c6003e1d2e7fefa78a3039f19f383b4f3a61e81be8c19356f85b6461998ad3db",
        strip_prefix = "protobuf-3.17.3",
        urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.17.3.tar.gz"],
    )
    # Local zip of pinned Maven dependencies, served via zip_repository.
    maybe(
        zip_repository,
        name = "contrib_rules_jvm_deps",
        path = "@contrib_rules_jvm//java/private:contrib_rules_jvm_deps.zip",
    )
    maybe(
        http_archive,
        name = "io_grpc_grpc_java",
        sha256 = "e3781bcab2a410a7cd138f13b2e6a643e111575f6811b154c570f4d020e87507",
        strip_prefix = "grpc-java-1.44.0",
        urls = ["https://github.com/grpc/grpc-java/archive/v1.44.0.tar.gz"],
    )
    # Patched so that rules_jvm_external's docs targets are visible here.
    maybe(
        http_archive,
        name = "rules_jvm_external",
        sha256 = "cd1a77b7b02e8e008439ca76fd34f5b07aecb8c752961f9640dea15e9e5ba1ca",
        strip_prefix = "rules_jvm_external-4.2",
        url = "https://github.com/bazelbuild/rules_jvm_external/archive/4.2.zip",
        patches = [
            "@contrib_rules_jvm//java/private:make-docs-visible.patch",
        ],
        patch_args = ["-p1"],
    )
def contrib_rules_jvm_gazelle_deps():
    """Declare the external repositories used by the Gazelle integration.

    As with contrib_rules_jvm_deps, `maybe` lets a workspace override any
    of these pins by declaring the repository itself first.
    """
    maybe(
        http_archive,
        name = "bazel_gazelle",
        sha256 = "dae13a7c6adb742174aafd340ebcb36016de231bd4f926f79c140c7d9b599fb0",
        strip_prefix = "bazel-gazelle-757e291d1befe9174fb1fcf0d9ade733cbb6b904",
        urls = [
            "https://github.com/bazelbuild/bazel-gazelle/archive/757e291d1befe9174fb1fcf0d9ade733cbb6b904.zip",
        ],
    )
    maybe(
        http_archive,
        name = "com_github_bazelbuild_buildtools",
        sha256 = "d368c47bbfc055010f118efb2962987475418737e901f7782d2a966d1dc80296",
        strip_prefix = "buildtools-4.2.5",
        url = "https://github.com/bazelbuild/buildtools/archive/4.2.5.tar.gz",
    )
    maybe(
        http_archive,
        name = "io_bazel_rules_go",
        sha256 = "f2dcd210c7095febe54b804bb1cd3a58fe8435a909db2ec04e31542631cf715c",
        urls = [
            "https://github.com/bazelbuild/rules_go/releases/download/v0.31.0/rules_go-v0.31.0.zip",
        ],
    )
    maybe(
        http_archive,
        name = "io_grpc_grpc_java",
        sha256 = "e3781bcab2a410a7cd138f13b2e6a643e111575f6811b154c570f4d020e87507",
        strip_prefix = "grpc-java-1.44.0",
        urls = ["https://github.com/grpc/grpc-java/archive/v1.44.0.tar.gz"],
    )
    maybe(
        http_archive,
        name = "rules_proto",
        sha256 = "66bfdf8782796239d3875d37e7de19b1d94301e8972b3cbd2446b332429b4df1",
        strip_prefix = "rules_proto-4.0.0",
        url = "https://github.com/bazelbuild/rules_proto/archive/refs/tags/4.0.0.tar.gz",
    )
| 36.028571 | 111 | 0.659529 |
3522799f6f03fc4836f689264c061a70c588957a | 1,054 | py | Python | setup.py | delpass/aiokurento | eddbd908c24ea049db4a9f04a0e2c96c74ccd288 | [
"MIT"
] | 2 | 2016-08-20T15:07:07.000Z | 2017-03-28T08:33:13.000Z | setup.py | delpass/aiokurento | eddbd908c24ea049db4a9f04a0e2c96c74ccd288 | [
"MIT"
] | null | null | null | setup.py | delpass/aiokurento | eddbd908c24ea049db4a9f04a0e2c96c74ccd288 | [
"MIT"
] | null | null | null | # import os
# Packaging metadata for the aiokurento distribution (KMS driver for AsyncIO).
from setuptools import setup
setup(
    name="aiokurento",
    version="0.1.0",
    author="Yaroslav Sazonov",
    author_email="delpass@gmail.com",
    description="KMS driver for AsyncIO",
    license="MIT",
    keywords="kms kurento aio asyncio ",
    url="http://packages.python.org/aiokurento",
    packages=['kurento'],
    # The README is used verbatim as the PyPI long description.
    long_description=open('README.md').read(),
    classifiers=[
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        #'Development Status :: 1 - Planning',
        # 'Development Status :: 2 - Pre-Alpha',
        'Development Status :: 3 - Alpha',
        # 'Development Status :: 4 - Beta',
        # 'Development Status :: 5 - Production/Stable',
        # 'Development Status :: 6 - Mature',
        # 'Development Status :: 7 - Inactive',
        "Environment :: Other Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    install_requires=['aiohttp']
)
| 32.9375 | 56 | 0.602467 |
a4778d8810c06a8426c694c102a3527a5dbe5e68 | 2,991 | py | Python | multinet/processing/__init__.py | multinet-app/multinet-girder | f34c87849d92c7fe2f8589760f97bebbe04bd4af | [
"Apache-2.0"
] | 3 | 2019-10-22T15:21:10.000Z | 2020-02-13T17:40:07.000Z | multinet/processing/__init__.py | multinet-app/multinet | f34c87849d92c7fe2f8589760f97bebbe04bd4af | [
"Apache-2.0"
] | 183 | 2019-08-01T14:27:00.000Z | 2020-03-04T17:47:49.000Z | multinet/processing/__init__.py | multinet-app/multinet-girder | f34c87849d92c7fe2f8589760f97bebbe04bd4af | [
"Apache-2.0"
] | 2 | 2020-08-20T11:57:17.000Z | 2020-11-10T22:54:19.000Z | """Functions for processing multinet data."""
from multinet.validation import ValidationFailure, TypeConversionFailure, MissingColumn
from multinet.processing.processors import (
process_number_entry,
process_boolean_entry,
process_date_entry,
)
from multinet.types import ColumnMetadata
from multinet.processing.types import (
UnprocessedTableRow,
ProcessedTableRow,
TableRowEntryProcessor,
)
# Import types
from typing import List, Dict, Tuple
# Maps types to the functions responsible for processing their entries
entry_processing_dict: Dict[str, TableRowEntryProcessor] = {
    # Column types absent from this map (e.g. label, category) are left
    # untouched by process_row.
    "number": process_number_entry,
    "boolean": process_boolean_entry,
    "date": process_date_entry,
}
def process_row(
    row_index: int, row: UnprocessedTableRow, columns: List[ColumnMetadata]
) -> Tuple[ProcessedTableRow, List[ValidationFailure]]:
    """Process a single row, returning the processed row, and any errors."""
    errors: List[ValidationFailure] = []
    # Work on a copy so the caller's row is never mutated.
    processed: ProcessedTableRow = dict(row)
    for column in columns:
        raw = row.get(column.key)
        if raw is None:
            # The row carries no value for this declared column.
            errors.append(MissingColumn(key=column.key))
            continue
        converter = entry_processing_dict.get(column.type)
        if converter is None:
            # Types with no registered converter (e.g. label, category)
            # pass through unchanged.
            continue
        try:
            # Empty strings become None (null); other values are converted.
            processed[column.key] = converter(raw) if raw else None
        except ValueError:
            # Conversion failed: record the failure and keep the raw value.
            errors.append(
                TypeConversionFailure(
                    message=f"Cannot convert entry '{raw}' to type: {column.type}",
                    row=row_index,
                    column=column.key,
                )
            )
    return (processed, errors)
def process_rows(
    initial_rows: List[UnprocessedTableRow], col_metadata: List[ColumnMetadata]
) -> Tuple[List[ProcessedTableRow], List[ValidationFailure]]:
    """Perform any processing of table rows with the supplied metadata."""
    # With no metadata (or no rows) there is nothing to convert; still return
    # copies so callers can mutate the result without touching the input.
    if not col_metadata or not initial_rows:
        copied: List[ProcessedTableRow] = [dict(row) for row in initial_rows]
        return (copied, [])
    processed: List[ProcessedTableRow] = []
    all_errors: List[ValidationFailure] = []
    for index, raw_row in enumerate(initial_rows):
        new_row, row_errors = process_row(index, raw_row, col_metadata)
        processed.append(new_row)
        all_errors.extend(row_errors)
    return (processed, all_errors)
| 33.606742 | 87 | 0.676697 |
493604932f7991ff7e87c512ff183fd0e1d339ab | 609 | py | Python | setup.py | atmelaku/TextAdvnetureGameApp | c917863f5c4d6c5147eeb74aa594a915bd939d19 | [
"MIT"
] | null | null | null | setup.py | atmelaku/TextAdvnetureGameApp | c917863f5c4d6c5147eeb74aa594a915bd939d19 | [
"MIT"
] | 1 | 2020-03-29T20:49:37.000Z | 2020-03-29T20:49:37.000Z | setup.py | atmelaku/TextAdvnetureGameApp | c917863f5c4d6c5147eeb74aa594a915bd939d19 | [
"MIT"
] | null | null | null | import setuptools
# Packaging script for the TextAdventureGameApp distribution.
with open("README.md", "r") as f:
    long_description = f.read()
setuptools.setup(
    # Fixed: a missing comma after the name made this a syntax error.
    name="TextAdventureGameApp",
    version="0.0.1",
    author="Alebachew Melaku",
    author_email="alebachewtegegne2017@yahoo.com",
    url="https://github.com/yourusername/yourproject",
    description="simple math game",
    # Fixed: this was a bare, unquoted sentence (syntax error). Pass the
    # README text read above, matching long_description_content_type.
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=setuptools.find_packages(),
    install_requires=[],
    # Fixed: extras_require must be a mapping of extra-name -> requirements,
    # not a list; an empty list is rejected by setuptools.
    extras_require={},
    tests_require=['pytest'],
    python_requires='>=3.6',
)
| 27.681818 | 76 | 0.70936 |
0ebda6ee99e131aedf6612d9f59cff63242d7eef | 17,354 | py | Python | mercurial/hgweb/hgweb_mod.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | mercurial/hgweb/hgweb_mod.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | mercurial/hgweb/hgweb_mod.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | # hgweb/hgweb_mod.py - Web interface for a repository.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import contextlib
import os
from .common import (
ErrorResponse,
HTTP_BAD_REQUEST,
HTTP_NOT_FOUND,
HTTP_NOT_MODIFIED,
HTTP_OK,
HTTP_SERVER_ERROR,
caching,
cspvalues,
permhooks,
)
from .request import wsgirequest
from .. import (
encoding,
error,
hg,
hook,
profiling,
repoview,
templatefilters,
templater,
ui as uimod,
util,
)
from . import (
protocol,
webcommands,
webutil,
wsgicgi,
)
# Maps wire-protocol commands to the permission ('pull' or 'push') that
# check_perm must grant before the command is allowed to run.
perms = {
    'changegroup': 'pull',
    'changegroupsubset': 'pull',
    'getbundle': 'pull',
    'stream_out': 'pull',
    'listkeys': 'pull',
    'unbundle': 'push',
    'pushkey': 'push',
}
# Supported archive formats, keyed by type. Each value is a 4-tuple; only
# the file extension (spec[2]) is read in this module (archivelist and the
# 'archive' command dispatch).
archivespecs = util.sortdict((
    ('zip', ('application/zip', 'zip', '.zip', None)),
    ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)),
    ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)),
))
def makebreadcrumb(url, prefix=''):
    '''Return a 'URL breadcrumb' list

    A 'URL breadcrumb' is a list of URL-name pairs, one entry per path
    component of *url*; it can be used to create path navigation entries.
    '''
    # Drop a trailing slash, then graft the optional prefix onto the URL.
    if url.endswith('/'):
        url = url[:-1]
    if prefix:
        url = '/' + prefix + url
    # Strip at most one leading slash to obtain the relative path.
    relpath = url[1:] if url.startswith('/') else url
    # Walk the path from the deepest component upwards, pairing each
    # component name with the URL that leads to it.
    crumbs = []
    cursor = url
    for component in reversed([''] + relpath.split('/')):
        if not component or not cursor:
            break
        crumbs.append({'url': cursor, 'name': component})
        cursor = os.path.dirname(cursor)
    # The walk collected deepest-first; present shallow-first.
    return reversed(crumbs)
class requestcontext(object):
    """Holds state/context for an individual request.
    Servers can be multi-threaded. Holding state on the WSGI application
    is prone to race conditions. Instances of this class exist to hold
    mutable and race-free state for requests.
    """
    def __init__(self, app, repo):
        self.repo = repo
        self.reponame = app.reponame
        self.archivespecs = archivespecs
        # Per-request snapshots of frequently used web.* config values.
        self.maxchanges = self.configint('web', 'maxchanges', 10)
        self.stripecount = self.configint('web', 'stripes', 1)
        self.maxshortchanges = self.configint('web', 'maxshortchanges', 60)
        self.maxfiles = self.configint('web', 'maxfiles', 10)
        self.allowpull = self.configbool('web', 'allowpull', True)
        # we use untrusted=False to prevent a repo owner from using
        # web.templates in .hg/hgrc to get access to any file readable
        # by the user running the CGI script
        self.templatepath = self.config('web', 'templates', untrusted=False)
        # This object is more expensive to build than simple config values.
        # It is shared across requests. The app will replace the object
        # if it is updated. Since this is a reference and nothing should
        # modify the underlying object, it should be constant for the lifetime
        # of the request.
        self.websubtable = app.websubtable
        # Content-Security-Policy header value and per-request nonce.
        self.csp, self.nonce = cspvalues(self.repo.ui)
    # Trust the settings from the .hg/hgrc files by default.
    def config(self, section, name, default=uimod._unset, untrusted=True):
        return self.repo.ui.config(section, name, default,
                                   untrusted=untrusted)
    def configbool(self, section, name, default=uimod._unset, untrusted=True):
        return self.repo.ui.configbool(section, name, default,
                                       untrusted=untrusted)
    def configint(self, section, name, default=uimod._unset, untrusted=True):
        return self.repo.ui.configint(section, name, default,
                                       untrusted=untrusted)
    def configlist(self, section, name, default=uimod._unset, untrusted=True):
        return self.repo.ui.configlist(section, name, default,
                                       untrusted=untrusted)
    def archivelist(self, nodeid):
        """Yield a descriptor for each archive type enabled in config."""
        allowed = self.configlist('web', 'allow_archive')
        for typ, spec in self.archivespecs.items():
            if typ in allowed or self.configbool('web', 'allow%s' % typ):
                yield {'type': typ, 'extension': spec[2], 'node': nodeid}
    def templater(self, req):
        # determine scheme, port and server name
        # this is needed to create absolute urls
        proto = req.env.get('wsgi.url_scheme')
        if proto == 'https':
            proto = 'https'
            default_port = '443'
        else:
            proto = 'http'
            default_port = '80'
        port = req.env['SERVER_PORT']
        # legacy and/or conditional: empty string when the port is the default
        port = port != default_port and (':' + port) or ''
        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
        logourl = self.config('web', 'logourl', 'https://mercurial-scm.org/')
        logoimg = self.config('web', 'logoimg', 'hglogo.png')
        staticurl = self.config('web', 'staticurl') or req.url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'
        # some functions for the templater
        def motd(**map):
            # read the motd config lazily, each time a template expands it
            yield self.config('web', 'motd', '')
        # figure out which style to use
        vars = {}
        styles = (
            req.form.get('style', [None])[0],
            self.config('web', 'style'),
            'paper',
        )
        style, mapfile = templater.stylemap(styles, self.templatepath)
        if style == styles[0]:
            # the style came from the request; propagate it in session vars
            vars['style'] = style
        start = req.url[-1] == '?' and '&' or '?'
        sessionvars = webutil.sessionvars(vars, start)
        if not self.reponame:
            self.reponame = (self.config('web', 'name')
                             or req.env.get('REPO_NAME')
                             or req.url.strip('/') or self.repo.root)
        def websubfilter(text):
            return templatefilters.websub(text, self.websubtable)
        # create the templater
        defaults = {
            'url': req.url,
            'logourl': logourl,
            'logoimg': logoimg,
            'staticurl': staticurl,
            'urlbase': urlbase,
            'repo': self.reponame,
            'encoding': encoding.encoding,
            'motd': motd,
            'sessionvars': sessionvars,
            'pathdef': makebreadcrumb(req.url),
            'style': style,
            'nonce': self.nonce,
        }
        tmpl = templater.templater.frommapfile(mapfile,
                                               filters={'websub': websubfilter},
                                               defaults=defaults)
        return tmpl
class hgweb(object):
    """HTTP server for individual repositories.
    Instances of this class serve HTTP responses for a particular
    repository.
    Instances are typically used as WSGI applications.
    Some servers are multi-threaded. On these servers, there may
    be multiple active threads inside __call__.
    """
    def __init__(self, repo, name=None, baseui=None):
        # `repo` may be a path (string) or an already-open repository object.
        if isinstance(repo, str):
            if baseui:
                u = baseui.copy()
            else:
                u = uimod.ui.load()
            r = hg.repository(u, repo)
        else:
            # we trust caller to give us a private copy
            r = repo
        r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
        r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
        r.ui.setconfig('ui', 'nontty', 'true', 'hgweb')
        r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb')
        # resolve file patterns relative to repo root
        r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
        r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
        # displaying bundling progress bar while serving feel wrong and may
        # break some wsgi implementation.
        r.ui.setconfig('progress', 'disable', 'true', 'hgweb')
        r.baseui.setconfig('progress', 'disable', 'true', 'hgweb')
        # stack of cached repo instances handed out to request threads;
        # see _obtainrepo for the checkout/return protocol
        self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
        self._lastrepo = self._repos[0]
        hook.redirect(True)
        self.reponame = name
    def _webifyrepo(self, repo):
        """Apply the configured changeset filter and load the websub table."""
        repo = getwebview(repo)
        self.websubtable = webutil.getwebsubs(repo)
        return repo
    @contextlib.contextmanager
    def _obtainrepo(self):
        """Obtain a repo unique to the caller.
        Internally we maintain a stack of cachedlocalrepo instances
        to be handed out. If one is available, we pop it and return it,
        ensuring it is up to date in the process. If one is not available,
        we clone the most recently used repo instance and return it.
        It is currently possible for the stack to grow without bounds
        if the server allows infinite threads. However, servers should
        have a thread limit, thus establishing our limit.
        """
        if self._repos:
            cached = self._repos.pop()
            r, created = cached.fetch()
        else:
            cached = self._lastrepo.copy()
            r, created = cached.fetch()
        if created:
            r = self._webifyrepo(r)
        self._lastrepo = cached
        self.mtime = cached.mtime
        try:
            yield r
        finally:
            # return the instance to the pool for reuse by later requests
            self._repos.append(cached)
    def run(self):
        """Start a server from CGI environment.
        Modern servers should be using WSGI and should avoid this
        method, if possible.
        """
        if not encoding.environ.get('GATEWAY_INTERFACE',
                                    '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be "
                               "called while running as a CGI script.")
        wsgicgi.launch(self)
    def __call__(self, env, respond):
        """Run the WSGI application.
        This may be called by multiple threads.
        """
        req = wsgirequest(env, respond)
        return self.run_wsgi(req)
    def run_wsgi(self, req):
        """Internal method to run the WSGI application.
        This is typically only called by Mercurial. External consumers
        should be using instances of this class as the WSGI application.
        """
        with self._obtainrepo() as repo:
            profile = repo.ui.configbool('profiling', 'enabled')
            with profiling.profile(repo.ui, enabled=profile):
                for r in self._runwsgi(req, repo):
                    yield r
    def _runwsgi(self, req, repo):
        """Dispatch one request: protocol command, web command, or error page."""
        rctx = requestcontext(self, repo)
        # This state is global across all threads.
        encoding.encoding = rctx.config('web', 'encoding', encoding.encoding)
        rctx.repo.ui.environ = req.env
        if rctx.csp:
            # hgwebdir may have added CSP header. Since we generate our own,
            # replace it.
            req.headers = [h for h in req.headers
                           if h[0] != 'Content-Security-Policy']
            req.headers.append(('Content-Security-Policy', rctx.csp))
        # work with CGI variables to create coherent structure
        # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
        req.url = req.env['SCRIPT_NAME']
        if not req.url.endswith('/'):
            req.url += '/'
        if req.env.get('REPO_NAME'):
            req.url += req.env['REPO_NAME'] + '/'
        if 'PATH_INFO' in req.env:
            parts = req.env['PATH_INFO'].strip('/').split('/')
            repo_parts = req.env.get('REPO_NAME', '').split('/')
            if parts[:len(repo_parts)] == repo_parts:
                parts = parts[len(repo_parts):]
            query = '/'.join(parts)
        else:
            query = req.env['QUERY_STRING'].partition('&')[0]
            query = query.partition(';')[0]
        # process this if it's a protocol request
        # protocol bits don't need to create any URLs
        # and the clients always use the old URL structure
        cmd = req.form.get('cmd', [''])[0]
        if protocol.iscmd(cmd):
            try:
                if query:
                    raise ErrorResponse(HTTP_NOT_FOUND)
                if cmd in perms:
                    # enforce pull/push permission for protocol commands
                    self.check_perm(rctx, req, perms[cmd])
                return protocol.call(rctx.repo, req, cmd)
            except ErrorResponse as inst:
                # A client that sends unbundle without 100-continue will
                # break if we respond early.
                if (cmd == 'unbundle' and
                    (req.env.get('HTTP_EXPECT',
                                 '').lower() != '100-continue') or
                    req.env.get('X-HgHttp2', '')):
                    req.drain()
                else:
                    req.headers.append(('Connection', 'Close'))
                req.respond(inst, protocol.HGTYPE,
                            body='0\n%s\n' % inst)
                return ''
        # translate user-visible url structure to internal structure
        args = query.split('/', 2)
        if 'cmd' not in req.form and args and args[0]:
            cmd = args.pop(0)
            # a "style-command" path segment carries the style as a prefix
            style = cmd.rfind('-')
            if style != -1:
                req.form['style'] = [cmd[:style]]
                cmd = cmd[style + 1:]
            # avoid accepting e.g. style parameter as command
            if util.safehasattr(webcommands, cmd):
                req.form['cmd'] = [cmd]
            if cmd == 'static':
                req.form['file'] = ['/'.join(args)]
            else:
                if args and args[0]:
                    node = args.pop(0).replace('%2F', '/')
                    req.form['node'] = [node]
                if args:
                    req.form['file'] = args
            ua = req.env.get('HTTP_USER_AGENT', '')
            if cmd == 'rev' and 'mercurial' in ua:
                req.form['style'] = ['raw']
            if cmd == 'archive':
                # split "node.ext" into the node and the archive type
                fn = req.form['node'][0]
                for type_, spec in rctx.archivespecs.items():
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.form['node'] = [fn[:-len(ext)]]
                        req.form['type'] = [type_]
        # process the web interface request
        try:
            tmpl = rctx.templater(req)
            ctype = tmpl('mimetype', encoding=encoding.encoding)
            ctype = templater.stringify(ctype)
            # check read permissions non-static content
            if cmd != 'static':
                self.check_perm(rctx, req, None)
            if cmd == '':
                req.form['cmd'] = [tmpl.cache['default']]
                cmd = req.form['cmd'][0]
            # Don't enable caching if using a CSP nonce because then it wouldn't
            # be a nonce.
            if rctx.configbool('web', 'cache', True) and not rctx.nonce:
                caching(self, req) # sets ETag header or raises NOT_MODIFIED
            if cmd not in webcommands.__all__:
                msg = 'no such method: %s' % cmd
                raise ErrorResponse(HTTP_BAD_REQUEST, msg)
            elif cmd == 'file' and 'raw' in req.form.get('style', []):
                # NOTE: the rawfile branch does not call req.respond here;
                # presumably rawfile handles the response itself — confirm.
                rctx.ctype = ctype
                content = webcommands.rawfile(rctx, req, tmpl)
            else:
                content = getattr(webcommands, cmd)(rctx, req, tmpl)
                req.respond(HTTP_OK, ctype)
            return content
        except (error.LookupError, error.RepoLookupError) as err:
            req.respond(HTTP_NOT_FOUND, ctype)
            msg = str(err)
            if (util.safehasattr(err, 'name') and
                not isinstance(err, error.ManifestLookupError)):
                msg = 'revision not found: %s' % err.name
            return tmpl('error', error=msg)
        except (error.RepoError, error.RevlogError) as inst:
            req.respond(HTTP_SERVER_ERROR, ctype)
            return tmpl('error', error=str(inst))
        except ErrorResponse as inst:
            req.respond(inst, ctype)
            if inst.code == HTTP_NOT_MODIFIED:
                # Not allowed to return a body on a 304
                return ['']
            return tmpl('error', error=str(inst))
    def check_perm(self, rctx, req, op):
        """Run every registered permission hook for this request."""
        for permhook in permhooks:
            permhook(rctx, req, op)
def getwebview(repo):
    """Return *repo* filtered according to the 'web.view' config option.

    Possible values are ``served`` (the default), ``visible`` and ``all``.
    ``served`` shows only changesets that can be pulled from this hgweb
    instance; ``visible`` includes secret changesets but still excludes
    hidden ones; ``all`` disables filtering entirely. See the repoview
    module for details.

    The option has been around undocumented since Mercurial 2.5, but no
    user ever asked about it, so it is deliberately kept undocumented.
    """
    viewconfig = repo.ui.config('web', 'view', 'served', untrusted=True)
    if viewconfig == 'all':
        return repo.unfiltered()
    if viewconfig in repoview.filtertable:
        return repo.filtered(viewconfig)
    return repo.filtered('served')
| 35.781443 | 80 | 0.560044 |
bc0d5847af177216bd7b5995a5c71323ba83a4d6 | 158 | py | Python | yatube/context_processor.py | Venatorr/yatube | 87dbb67fc9157994bde2765cc5d42fcf55cea308 | [
"BSD-3-Clause"
] | null | null | null | yatube/context_processor.py | Venatorr/yatube | 87dbb67fc9157994bde2765cc5d42fcf55cea308 | [
"BSD-3-Clause"
] | 10 | 2021-03-19T12:08:00.000Z | 2022-03-12T00:48:42.000Z | yatube/context_processor.py | Venatorr/django_blog | 87dbb67fc9157994bde2765cc5d42fcf55cea308 | [
"BSD-3-Clause"
] | null | null | null | import datetime as dt
def year(request):
    """
    Context processor adding the current year to the template context.
    """
    return {"year": dt.datetime.now().year}
c130a51666e905ab0901eb68b649d4c75412567f | 742 | py | Python | bgmapi/spiders/user.py | wattlebird/Bangumi_Spider | f221614714e9f257c1d667fa5a7f1fcb43587371 | [
"BSD-2-Clause"
] | 19 | 2015-01-28T08:09:34.000Z | 2021-12-18T05:40:52.000Z | bgmapi/spiders/user.py | wattlebird/Bangumi_Spider | f221614714e9f257c1d667fa5a7f1fcb43587371 | [
"BSD-2-Clause"
] | null | null | null | bgmapi/spiders/user.py | wattlebird/Bangumi_Spider | f221614714e9f257c1d667fa5a7f1fcb43587371 | [
"BSD-2-Clause"
] | 7 | 2015-06-16T06:32:11.000Z | 2021-11-02T11:40:03.000Z | # -*- coding: utf-8 -*-
import scrapy
import json
from bgmapi.items import User
class UserSpider(scrapy.Spider):
    """Crawl user records from the Bangumi API mirror over a uid range."""

    name = 'user-api'
    # Fixed: allowed_domains entries must be bare host names, not URLs;
    # a URL here makes scrapy's offsite filtering reject every request.
    allowed_domains = ['mirror.api.bgm.rin.cat']

    def __init__(self, id_min=1, id_max=300000, *args, **kwargs):
        super(UserSpider, self).__init__(*args, **kwargs)
        # id_min/id_max may arrive as strings via `scrapy crawl -a ...`.
        # NOTE(review): range() excludes id_max — confirm the top uid is
        # meant to be skipped.
        self.start_urls = ["http://mirror.api.bgm.rin.cat/user/{0}".format(i) for i in range(int(id_min), int(id_max))]

    def parse(self, response):
        """Yield a User item per response; skip API error payloads."""
        data = json.loads(response.body_as_unicode())
        if 'error' in data:
            return
        yield User(
            name=data['username'],
            nickname=data['nickname'],
            uid=data['id'],
            group=data['usergroup']
        )
6eb794984ff17f26b4139ff1812077d9f4ff865e | 3,153 | py | Python | caldavclientlibrary/admin/xmlaccounts/commands/changepassword.py | skarra/CalDAVClientLibrary | b925efdeee7aba581586a15544af12f399c59c9d | [
"Apache-2.0"
] | 1 | 2015-05-26T04:15:19.000Z | 2015-05-26T04:15:19.000Z | caldavclientlibrary/admin/xmlaccounts/commands/changepassword.py | skarra/CalDAVClientLibrary | b925efdeee7aba581586a15544af12f399c59c9d | [
"Apache-2.0"
] | null | null | null | caldavclientlibrary/admin/xmlaccounts/commands/changepassword.py | skarra/CalDAVClientLibrary | b925efdeee7aba581586a15544af12f399c59c9d | [
"Apache-2.0"
] | null | null | null | ##
# Copyright (c) 2007-2013 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from caldavclientlibrary.admin.xmlaccounts.commands.command import Command
import getopt
class ChangePassword(Command):
    """
    Command to change the password of an existing directory record.

    Note: this module uses Python 2 print statements.
    """
    CMDNAME = "passwd"
    def __init__(self):
        super(ChangePassword, self).__init__(self.CMDNAME, "Change the password for a record.")
        # UID of the record to modify; set from the --uid option.
        self.uid = None
    def usage(self):
        print """USAGE: %s TYPE [OPTIONS]
TYPE: One of "users", "groups", "locations" or "resources". Also,
    "u", "g", "l" or "r" as shortcuts.
Options:
-f file path to accounts.xml
--uid UID of record to change
""" % (self.cmdname,)
    def execute(self, argv):
        """
        Execute the command specified by the command line arguments.
        @param argv: command line arguments.
        @type argv: C{list}
        @return: 1 for success, 0 for failure.
        @rtype: C{int}
        """
        # Check first argument for type
        argv = self.getTypeArgument(argv)
        if argv is None:
            return 0
        # -f takes a value; --uid is a long option with a required value.
        opts, args = getopt.getopt(argv, 'f:h', ["help", "uid=", ])
        for name, value in opts:
            if name == "-f":
                self.path = value
            elif name in ("-h", "--help"):
                self.usage()
                return 1
            elif name == "--uid":
                self.uid = value
            else:
                print "Unknown option: %s." % (name,)
                self.usage()
                return 0
        # Both a file path and a UID are mandatory; no positional args allowed.
        if not self.path:
            print "Must specify a path."
            self.usage()
            return 0
        if not self.uid:
            print "Must specify a UID."
            self.usage()
            return 0
        if args:
            print "Arguments not allowed."
            self.usage()
            return 0
        if not self.loadAccounts():
            return 0
        return self.doCommand()
    def doCommand(self):
        """
        Run the command.
        """
        # Persist the accounts file only if the password change succeeded.
        if self.doChangePassword():
            return self.writeAccounts()
        return 0
    def doChangePassword(self):
        """
        Prompts the user for details and then changes the password of a record in the directory.
        """
        # First check record exists
        record = self.directory.getRecord(self.recordType, self.uid)
        if record is None:
            print "No '%s' record matching uid '%s'" % (self.recordType, self.uid,)
            return 0
        record.password = self.promptPassword()
        return 1
| 27.417391 | 96 | 0.575325 |
07e05b5fa761b037859a34127eaac4568589a38b | 718 | py | Python | fastats/linear_algebra/inv.py | AaronCritchley/fastats | 3c8e50fffc08ee0dd44c615f4ed93adc47c08ffe | [
"MIT"
] | 26 | 2017-07-17T09:19:53.000Z | 2021-11-30T01:36:56.000Z | fastats/linear_algebra/inv.py | AaronCritchley/fastats | 3c8e50fffc08ee0dd44c615f4ed93adc47c08ffe | [
"MIT"
] | 320 | 2017-09-02T16:26:25.000Z | 2021-07-28T05:19:49.000Z | fastats/linear_algebra/inv.py | AaronCritchley/fastats | 3c8e50fffc08ee0dd44c615f4ed93adc47c08ffe | [
"MIT"
] | 13 | 2017-07-06T19:02:29.000Z | 2020-01-22T11:36:34.000Z |
import numpy as np
from fastats.linear_algebra.det import det
from fastats.linear_algebra.matrix_minor import matrix_minor
def inv(A):
    """
    Returns the inverse of A using the adjoint method.

    >>> import numpy as np
    >>> A = np.array([[4, 3], [3, 2]])
    >>> A_inv = inv(A)
    >>> A_inv
    array([[-2.,  3.],
           [ 3., -4.]])
    """
    rows, cols = A.shape
    # Cofactor matrix: signed determinant of each first minor.
    cofactors = np.empty_like(A, dtype=np.float64)
    for r in range(cols):
        for c in range(rows):
            sign = -1.0 if (r + c) % 2 else 1.0
            cofactors[r, c] = sign * det(matrix_minor(A, r, c))
    # The adjoint (adjugate) is the transposed cofactor matrix;
    # dividing by det(A) yields the inverse.
    return cofactors.T / det(A)
if __name__ == '__main__':
    # Run this module's tests directly via pytest.
    import pytest
    pytest.main([__file__])
| 21.117647 | 61 | 0.557103 |
3165e86995f2bbce800bcfcc29862de91d542b20 | 1,817 | py | Python | pages/product_page.py | varulven/selenium_course | 63d512ef7fa3a3e152cef8440eddfdc971740df8 | [
"Apache-2.0"
] | null | null | null | pages/product_page.py | varulven/selenium_course | 63d512ef7fa3a3e152cef8440eddfdc971740df8 | [
"Apache-2.0"
] | 1 | 2021-06-02T01:33:42.000Z | 2021-06-02T01:33:42.000Z | pages/product_page.py | varulven/selenium_course | 63d512ef7fa3a3e152cef8440eddfdc971740df8 | [
"Apache-2.0"
] | null | null | null | from .base_page import BasePage
from .locators import ProductPageLocators
class ProductPage(BasePage):
    """Page object for a product page: cart actions and assertion helpers."""

    def add_to_cart(self):
        """Click the 'add to basket' button."""
        button = self.browser.find_element(*ProductPageLocators.ADD_TO_BASKET)
        button.click()
    def should_be_message_about_add_to_cart(self):
        """Assert the 'item added' message is shown after adding to cart."""
        assert self.is_element_present(*ProductPageLocators.MESSAGE_ABOUT_ITEM), "Message about item in cart is not " \
                                                                                "presented "
    def should_be_message_about_price(self):
        """Assert the basket-price message is shown."""
        assert self.is_element_present(*ProductPageLocators.MESSAGE_PRICE), "Message about price in cart is not " \
                                                                           "presented "
    def text_in_message_about_add_to_cart(self):
        """Assert the item name in the message matches the product name."""
        item_name = self.browser.find_element(*ProductPageLocators.ITEM_NAME).text
        item_name_in_message = self.browser.find_element(*ProductPageLocators.MESSAGE_ABOUT_ITEM).text
        assert item_name == item_name_in_message, f"expected item_name is '{item_name}', but u got'{item_name_in_message}' "
    def price_in_message_about_price(self):
        """Assert the product price appears in the basket-price message."""
        price = self.browser.find_element(*ProductPageLocators.PRICE).text
        price_in_cart = self.browser.find_element(*ProductPageLocators.MESSAGE_PRICE).text
        assert price in price_in_cart, f"expected item_cost is '{price}', but u got in cart'{price_in_cart}' "
    def should_not_be_success_message(self):
        """Assert no success message is currently present."""
        assert self.is_not_element_present(*ProductPageLocators.SUCCESS_MESSAGE), \
            "Success message is presented, but should not be"
    def should_disappeared_success_message(self):
        """Assert the success message disappears within the wait timeout."""
        assert self.is_disappeared(*ProductPageLocators.SUCCESS_MESSAGE), \
            "Success message is not disappeared, but should be"
| 51.914286 | 124 | 0.690149 |
7e34b3c91c16c440f12c51415c509400e1f315dc | 2,272 | py | Python | python/paddle/v2/framework/tests/test_lrn_op.py | shenchaohua/Paddle | 9c5942db13308d53cc115708058c1e885f4b57a3 | [
"Apache-2.0"
] | null | null | null | python/paddle/v2/framework/tests/test_lrn_op.py | shenchaohua/Paddle | 9c5942db13308d53cc115708058c1e885f4b57a3 | [
"Apache-2.0"
] | 9 | 2017-09-13T07:39:31.000Z | 2017-10-18T05:58:23.000Z | python/paddle/v2/framework/tests/test_lrn_op.py | shenchaohua/Paddle | 9c5942db13308d53cc115708058c1e885f4b57a3 | [
"Apache-2.0"
] | null | null | null | import unittest
import numpy as np
from op_test import OpTest
class TestLRNOp(OpTest):
    """Numpy reference implementation test for the LRN
    (local response normalization) operator."""

    def get_input(self):
        ''' TODO(gongweibao): why it's grad diff is so large?
        x = np.ndarray(
            shape=(self.N, self.C, self.H, self.W), dtype=float, order='C')
        for m in range(0, self.N):
            for i in range(0, self.C):
                for h in range(0, self.H):
                    for w in range(0, self.W):
                        x[m][i][h][w] = m * self.C * self.H * self.W + \
                                i * self.H * self.W + \
                                h * self.W + w + 1
        '''
        # Random inputs shifted into (1, 2) keep the normalization
        # denominator well away from zero.
        x = np.random.rand(self.N, self.C, self.H, self.W).astype("float32")
        return x + 1

    def get_out(self):
        """Compute the reference LRN output.

        Returns:
            (out, mid): ``mid = k + alpha * sum_of_squares`` over the local
            channel window, and ``out = x * mid**(-beta)``.
        """
        # BUGFIX: use floor division so start/end stay ints. Plain `/`
        # yields a float under Python 3 and range() then raises TypeError;
        # `//` gives the identical value that Python 2 int division gave.
        start = -(self.n - 1) // 2
        end = start + self.n
        mid = np.empty((self.N, self.C, self.H, self.W), dtype=float)
        mid.fill(self.k)
        for m in range(0, self.N):
            for i in range(0, self.C):
                # Accumulate squared activations from neighboring channels,
                # skipping window positions that fall outside [0, C).
                for c in range(start, end + 1):
                    ch = i + c
                    if ch < 0 or ch >= self.C:
                        continue
                    s = mid[m][i][:][:]
                    r = self.x[m][ch][:][:]
                    s += np.square(r) * self.alpha
        mid2 = np.power(mid, -self.beta)
        return np.multiply(self.x, mid2), mid

    def get_attrs(self):
        """Return the operator attributes dict fed to the C++ op."""
        attrs = {
            'n': self.n,
            'k': self.k,
            'alpha': self.alpha,
            'beta': self.beta
        }
        return attrs

    def setUp(self):
        # Small fixed-size problem; values match the defaults of the op.
        self.op_type = "lrn"
        self.N = 2
        self.C = 3
        self.H = 5
        self.W = 5
        self.n = 5
        self.k = 2.0
        self.alpha = 0.0001
        self.beta = 0.75
        self.x = self.get_input()
        self.out, self.mid_out = self.get_out()
        self.inputs = {'X': self.x}
        self.outputs = {'Out': self.out, 'MidOut': self.mid_out}
        self.attrs = self.get_attrs()

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X'], 'Out', max_relative_error=0.01)
if __name__ == "__main__":
    # NOTE: the suite is deliberately short-circuited because the C++ LRN
    # gradient implementation is known to be wrong; drop exit(0) once fixed.
    exit(0)  # LRN grad implement wrong
    unittest.main()
| 28.759494 | 76 | 0.456866 |
2c1044d1ee314eee412c9ecaa921ef3a32780029 | 318 | py | Python | examples/load.py | thombashi/appconfigpy | 71aaa3110341ede8706a213cb1eb018a654ca8a6 | [
"MIT"
] | 3 | 2018-09-16T13:06:48.000Z | 2021-02-17T08:51:17.000Z | examples/load.py | thombashi/appconfigpy | 71aaa3110341ede8706a213cb1eb018a654ca8a6 | [
"MIT"
] | null | null | null | examples/load.py | thombashi/appconfigpy | 71aaa3110341ede8706a213cb1eb018a654ca8a6 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import json
from common import app_config_mgr
# Demo: show where the shared app config manager persists its file, then
# dump the loaded configuration as pretty-printed JSON.
print("loading configuration file path: {:s}".format(app_config_mgr.config_filepath))
print("configuration values: {}".format(json.dumps(app_config_mgr.load(), indent=4)))
| 22.714286 | 85 | 0.751572 |
180f123609217ad3ff0c12a789c0142a88780d95 | 16,874 | py | Python | src/dynamic_graph/sot_talos_balance/test/appli_dcmCopControl.py | nim65s/sot-talos-balance | e24b9a3bd4377b0a0ea474dce44295282332661b | [
"BSD-2-Clause"
] | null | null | null | src/dynamic_graph/sot_talos_balance/test/appli_dcmCopControl.py | nim65s/sot-talos-balance | e24b9a3bd4377b0a0ea474dce44295282332661b | [
"BSD-2-Clause"
] | 8 | 2020-03-12T11:03:07.000Z | 2021-11-05T14:44:20.000Z | src/dynamic_graph/sot_talos_balance/test/appli_dcmCopControl.py | nim65s/sot-talos-balance | e24b9a3bd4377b0a0ea474dce44295282332661b | [
"BSD-2-Clause"
] | 4 | 2019-05-07T13:12:29.000Z | 2020-06-09T17:02:53.000Z | # flake8: noqa
from math import sqrt
import numpy as np
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT, Derivator_of_Vector, FeaturePosture, MatrixHomoToPoseQuaternion, Task
from dynamic_graph.sot.core.matrix_util import matrixToTuple
from dynamic_graph.sot.core.meta_tasks_kine import MetaTaskKine6d, MetaTaskKineCom, gotoNd
from dynamic_graph.sot.dynamic_pinocchio import DynamicPinocchio
from dynamic_graph.tracer_real_time import TracerRealTime
import dynamic_graph.sot_talos_balance.talos.base_estimator_conf as base_estimator_conf
import dynamic_graph.sot_talos_balance.talos.control_manager_conf as cm_conf
import dynamic_graph.sot_talos_balance.talos.distribute_conf as distribute_conf
import dynamic_graph.sot_talos_balance.talos.ft_calibration_conf as ft_conf
import dynamic_graph.sot_talos_balance.talos.parameter_server_conf as param_server_conf
from dynamic_graph.sot_talos_balance.create_entities_utils import *
from dynamic_graph.sot_talos_balance.simple_controller_6d import SimpleController6d
# NOTE(review): flat configuration script run inside the SoT interpreter; it
# mutates a global `robot` object that is presumably injected by the launcher
# before this file is executed — confirm against the run script.
cm_conf.CTRL_MAX = 1000.0  # temporary hack
robot.timeStep = robot.device.getTimeStep()
dt = robot.timeStep
# --- Pendulum parameters
robot_name = 'robot'
robot.dynamic.com.recompute(0)
robotDim = robot.dynamic.getDimension()
mass = robot.dynamic.data.mass[0]
h = robot.dynamic.com.value[2]
g = 9.81
# omega: natural frequency of the linear inverted pendulum model (sqrt(g/h)).
omega = sqrt(g / h)
# --- Parameter server
robot.param_server = create_parameter_server(param_server_conf, dt)
# --- Initial feet and waist
robot.dynamic.createOpPoint('LF', robot.OperationalPointsMap['left-ankle'])
robot.dynamic.createOpPoint('RF', robot.OperationalPointsMap['right-ankle'])
robot.dynamic.createOpPoint('WT', robot.OperationalPointsMap['waist'])
robot.dynamic.LF.recompute(0)
robot.dynamic.RF.recompute(0)
robot.dynamic.WT.recompute(0)
# -------------------------- DESIRED TRAJECTORY --------------------------
# --- Trajectory generators
# --- CoM
robot.comTrajGen = create_com_trajectory_generator(dt, robot)
# --- Left foot
robot.lfTrajGen = create_pose_rpy_trajectory_generator(dt, robot, 'LF')
# robot.lfTrajGen.x.recompute(0) # trigger computation of initial value
robot.lfToMatrix = PoseRollPitchYawToMatrixHomo('lf2m')
plug(robot.lfTrajGen.x, robot.lfToMatrix.sin)
# --- Right foot
robot.rfTrajGen = create_pose_rpy_trajectory_generator(dt, robot, 'RF')
# robot.rfTrajGen.x.recompute(0) # trigger computation of initial value
robot.rfToMatrix = PoseRollPitchYawToMatrixHomo('rf2m')
plug(robot.rfTrajGen.x, robot.rfToMatrix.sin)
# --- Waist
robot.waistTrajGen = create_orientation_rpy_trajectory_generator(dt, robot, 'WT')
# robot.waistTrajGen.x.recompute(0) # trigger computation of initial value
# Mix position (sin1, constant zero) with the generated orientation (sin2).
robot.waistMix = Mix_of_vector("waistMix")
robot.waistMix.setSignalNumber(3)
robot.waistMix.addSelec(1, 0, 3)
robot.waistMix.addSelec(2, 3, 3)
robot.waistMix.default.value = [0.0] * 6
robot.waistMix.signal("sin1").value = [0.0] * 3
plug(robot.waistTrajGen.x, robot.waistMix.signal("sin2"))
robot.waistToMatrix = PoseRollPitchYawToMatrixHomo('w2m')
plug(robot.waistMix.sout, robot.waistToMatrix.sin)
# --- Rho
robot.rhoTrajGen = create_scalar_trajectory_generator(dt, 0.5, 'rhoTrajGen')
robot.rhoScalar = Component_of_vector("rho_scalar")
robot.rhoScalar.setIndex(0)
plug(robot.rhoTrajGen.x, robot.rhoScalar.sin)
# --- Interface with controller entities
wp = DummyWalkingPatternGenerator('dummy_wp')
wp.init()
wp.omega.value = omega
plug(robot.waistToMatrix.sout, wp.waist)
plug(robot.lfToMatrix.sout, wp.footLeft)
plug(robot.rfToMatrix.sout, wp.footRight)
plug(robot.comTrajGen.x, wp.com)
plug(robot.comTrajGen.dx, wp.vcom)
plug(robot.comTrajGen.ddx, wp.acom)
robot.wp = wp
# --- Compute the values to use them in initialization
robot.wp.comDes.recompute(0)
robot.wp.dcmDes.recompute(0)
robot.wp.zmpDes.recompute(0)
# -------------------------- ESTIMATION --------------------------
# --- Base Estimation
robot.device_filters = create_device_filters(robot, dt)
robot.imu_filters = create_imu_filters(robot, dt)
robot.base_estimator = create_base_estimator(robot, dt, base_estimator_conf)
robot.m2qLF = MatrixHomoToPoseQuaternion('m2qLF')
plug(robot.dynamic.LF, robot.m2qLF.sin)
plug(robot.m2qLF.sout, robot.base_estimator.lf_ref_xyzquat)
robot.m2qRF = MatrixHomoToPoseQuaternion('m2qRF')
plug(robot.dynamic.RF, robot.m2qRF.sin)
plug(robot.m2qRF.sout, robot.base_estimator.rf_ref_xyzquat)
# robot.be_filters = create_be_filters(robot, dt)
## --- Reference frame
#rf = SimpleReferenceFrame('rf')
#rf.init(robot_name)
#plug(robot.dynamic.LF, rf.footLeft)
#plug(robot.dynamic.RF, rf.footRight)
#rf.reset.value = 1
#robot.rf = rf
## --- State transformation
#stf = StateTransformation("stf")
#stf.init()
#plug(robot.rf.referenceFrame,stf.referenceFrame)
#plug(robot.base_estimator.q,stf.q_in)
#plug(robot.base_estimator.v,stf.v_in)
#robot.stf = stf
# --- Conversion
e2q = EulerToQuat('e2q')
plug(robot.base_estimator.q, e2q.euler)
robot.e2q = e2q
# --- Kinematic computations
robot.rdynamic = DynamicPinocchio("real_dynamics")
robot.rdynamic.setModel(robot.dynamic.model)
robot.rdynamic.setData(robot.rdynamic.model.createData())
#plug(robot.base_estimator.q,robot.rdynamic.position)
# Free-flyer pose comes from the base estimator (first 6 dofs), the joint
# part from the device state.
robot.baseselec = Selec_of_vector("base_selec")
robot.baseselec.selec(0, 6)
plug(robot.base_estimator.q, robot.baseselec.sin)
plug(robot.baseselec.sout, robot.rdynamic.ffposition)
plug(robot.device.state, robot.rdynamic.position)
robot.rdynamic.velocity.value = [0.0] * robotDim
robot.rdynamic.acceleration.value = [0.0] * robotDim
# --- CoM Estimation
cdc_estimator = DcmEstimator('cdc_estimator')
cdc_estimator.init(dt, robot_name)
plug(robot.e2q.quaternion, cdc_estimator.q)
plug(robot.base_estimator.v, cdc_estimator.v)
robot.cdc_estimator = cdc_estimator
# --- DCM Estimation
estimator = DummyDcmEstimator("dummy")
estimator.omega.value = omega
estimator.mass.value = 1.0
plug(robot.cdc_estimator.c, estimator.com)
plug(robot.cdc_estimator.dc, estimator.momenta)
estimator.init()
robot.estimator = estimator
# --- Force calibration
robot.ftc = create_ft_calibrator(robot, ft_conf)
# --- ZMP estimation
zmp_estimator = SimpleZmpEstimator("zmpEst")
robot.rdynamic.createOpPoint('sole_LF', 'left_sole_link')
robot.rdynamic.createOpPoint('sole_RF', 'right_sole_link')
plug(robot.rdynamic.sole_LF, zmp_estimator.poseLeft)
plug(robot.rdynamic.sole_RF, zmp_estimator.poseRight)
plug(robot.ftc.left_foot_force_out, zmp_estimator.wrenchLeft)
plug(robot.ftc.right_foot_force_out, zmp_estimator.wrenchRight)
zmp_estimator.init()
robot.zmp_estimator = zmp_estimator
# -------------------------- ADMITTANCE CONTROL --------------------------
# --- DCM controller
Kp_dcm = [5.0, 5.0, 5.0]
Ki_dcm = [0.0, 0.0, 0.0]  # zero (to be set later)
gamma_dcm = 0.2
dcm_controller = DcmController("dcmCtrl")
dcm_controller.Kp.value = Kp_dcm
dcm_controller.Ki.value = Ki_dcm
dcm_controller.decayFactor.value = gamma_dcm
dcm_controller.mass.value = mass
dcm_controller.omega.value = omega
plug(robot.cdc_estimator.c, dcm_controller.com)
plug(robot.estimator.dcm, dcm_controller.dcm)
plug(robot.wp.zmpDes, dcm_controller.zmpDes)
plug(robot.wp.dcmDes, dcm_controller.dcmDes)
dcm_controller.init(dt)
robot.dcm_control = dcm_controller
Ki_dcm = [1.0, 1.0, 1.0]  # this value is employed later
# --- Distribute wrench
distribute = create_distribute_wrench(distribute_conf)
plug(robot.e2q.quaternion, distribute.q)
plug(robot.dcm_control.wrenchRef, distribute.wrenchDes)
plug(robot.rhoScalar.sout, distribute.rho)
distribute.init(robot_name)
robot.distribute = distribute
# --- Ankle controllers
LeftPitchJoint = 4
LeftRollJoint = 5
RightPitchJoint = 10
RightRollJoint = 11
Kp_ankles = [1e-3] * 2
# --- Right ankle
controller = AnkleAdmittanceController("rightController")
plug(robot.ftc.right_foot_force_out, controller.wrench)
# gains start at zero; presumably ramped up interactively — confirm.
controller.gainsXY.value = [0.] * 2
plug(robot.distribute.copRight, controller.pRef)
controller.init()
robot.rightAnkleController = controller
# --- Left ankle
controller = AnkleAdmittanceController("leftController")
plug(robot.ftc.left_foot_force_out, controller.wrench)
controller.gainsXY.value = [0.] * 2
plug(robot.distribute.copLeft, controller.pRef)
controller.init()
robot.leftAnkleController = controller
# --- Control Manager
robot.cm = create_ctrl_manager(cm_conf, dt, robot_name='robot')
robot.cm.addCtrlMode('sot_input')
robot.cm.setCtrlMode('all', 'sot_input')
robot.cm.addEmergencyStopSIN('zmp')
robot.cm.addEmergencyStopSIN('distribute')
# -------------------------- SOT CONTROL --------------------------
# --- Upper body
robot.taskUpperBody = Task('task_upper_body')
robot.taskUpperBody.feature = FeaturePosture('feature_upper_body')
q = list(robot.dynamic.position.value)
robot.taskUpperBody.feature.state.value = q
robot.taskUpperBody.feature.posture.value = q
# robotDim = robot.dynamic.getDimension() # 38
# Freeze dofs 18-37 (upper body) at their current posture.
robot.taskUpperBody.feature.selectDof(18, True)
robot.taskUpperBody.feature.selectDof(19, True)
robot.taskUpperBody.feature.selectDof(20, True)
robot.taskUpperBody.feature.selectDof(21, True)
robot.taskUpperBody.feature.selectDof(22, True)
robot.taskUpperBody.feature.selectDof(23, True)
robot.taskUpperBody.feature.selectDof(24, True)
robot.taskUpperBody.feature.selectDof(25, True)
robot.taskUpperBody.feature.selectDof(26, True)
robot.taskUpperBody.feature.selectDof(27, True)
robot.taskUpperBody.feature.selectDof(28, True)
robot.taskUpperBody.feature.selectDof(29, True)
robot.taskUpperBody.feature.selectDof(30, True)
robot.taskUpperBody.feature.selectDof(31, True)
robot.taskUpperBody.feature.selectDof(32, True)
robot.taskUpperBody.feature.selectDof(33, True)
robot.taskUpperBody.feature.selectDof(34, True)
robot.taskUpperBody.feature.selectDof(35, True)
robot.taskUpperBody.feature.selectDof(36, True)
robot.taskUpperBody.feature.selectDof(37, True)
robot.taskUpperBody.controlGain.value = 100.0
robot.taskUpperBody.add(robot.taskUpperBody.feature.name)
plug(robot.dynamic.position, robot.taskUpperBody.feature.state)
# --- CONTACTS
#define contactLF and contactRF
robot.contactLF = MetaTaskKine6d('contactLF', robot.dynamic, 'LF', robot.OperationalPointsMap['left-ankle'])
robot.contactLF.feature.frame('desired')
robot.contactLF.gain.setConstant(300)
plug(robot.wp.footLeftDes, robot.contactLF.featureDes.position)  #.errorIN?
plug(robot.leftAnkleController.vDes, robot.contactLF.featureDes.velocity)
robot.contactLF.task.setWithDerivative(True)
locals()['contactLF'] = robot.contactLF
robot.contactRF = MetaTaskKine6d('contactRF', robot.dynamic, 'RF', robot.OperationalPointsMap['right-ankle'])
robot.contactRF.feature.frame('desired')
robot.contactRF.gain.setConstant(300)
plug(robot.wp.footRightDes, robot.contactRF.featureDes.position)  #.errorIN?
plug(robot.rightAnkleController.vDes, robot.contactRF.featureDes.velocity)
robot.contactRF.task.setWithDerivative(True)
locals()['contactRF'] = robot.contactRF
# --- COM
robot.taskCom = MetaTaskKineCom(robot.dynamic)
plug(robot.wp.comDes, robot.taskCom.featureDes.errorIN)
robot.taskCom.task.controlGain.value = 10
# --- Waist
robot.keepWaist = MetaTaskKine6d('keepWaist', robot.dynamic, 'WT', robot.OperationalPointsMap['waist'])
robot.keepWaist.feature.frame('desired')
robot.keepWaist.gain.setConstant(300)
plug(robot.wp.waistDes, robot.keepWaist.featureDes.position)
robot.keepWaist.feature.selec.value = '111000'
locals()['keepWaist'] = robot.keepWaist
# --- SOT solver
robot.sot = SOT('sot')
robot.sot.setSize(robot.dynamic.getDimension())
# --- Plug SOT control to device through control manager
plug(robot.sot.control, robot.cm.ctrl_sot_input)
plug(robot.cm.u_safe, robot.device.control)
# Task stack, highest priority first.
robot.sot.push(robot.taskUpperBody.name)
robot.sot.push(robot.contactRF.task.name)
robot.sot.push(robot.contactLF.task.name)
robot.sot.push(robot.taskCom.task.name)
robot.sot.push(robot.keepWaist.task.name)
# --- Fix robot.dynamic inputs
plug(robot.device.velocity, robot.dynamic.velocity)
robot.dvdt = Derivator_of_Vector("dv_dt")
robot.dvdt.dt.value = dt
plug(robot.device.velocity, robot.dvdt.sin)
plug(robot.dvdt.sout, robot.dynamic.acceleration)
# -------------------------- PLOTS --------------------------
# --- ROS PUBLISHER
robot.publisher = create_rospublish(robot, 'robot_publisher')
create_topic(robot.publisher, robot.wp, 'comDes', robot=robot, data_type='vector')  # desired CoM
create_topic(robot.publisher, robot.cdc_estimator, 'c', robot=robot, data_type='vector')  # estimated CoM
create_topic(robot.publisher, robot.cdc_estimator, 'dc', robot=robot, data_type='vector')  # estimated CoM velocity
create_topic(robot.publisher, robot.dynamic, 'com', robot=robot, data_type='vector')  # resulting SOT CoM
create_topic(robot.publisher, robot.dcm_control, 'dcmDes', robot=robot, data_type='vector')  # desired DCM
create_topic(robot.publisher, robot.estimator, 'dcm', robot=robot, data_type='vector')  # estimated DCM
create_topic(robot.publisher, robot.dcm_control, 'zmpDes', robot=robot, data_type='vector')  # desired ZMP
create_topic(robot.publisher, robot.dynamic, 'zmp', robot=robot, data_type='vector')  # SOT ZMP
create_topic(robot.publisher, robot.zmp_estimator, 'zmp', robot=robot, data_type='vector')  # estimated ZMP
create_topic(robot.publisher, robot.dcm_control, 'zmpRef', robot=robot, data_type='vector')  # reference ZMP
create_topic(robot.publisher, robot.dcm_control, 'wrenchRef', robot=robot,
             data_type='vector')  # unoptimized reference wrench
create_topic(robot.publisher, robot.distribute, 'wrenchLeft', robot=robot, data_type='vector')  # reference left wrench
create_topic(robot.publisher, robot.distribute, 'wrenchRight', robot=robot,
             data_type='vector')  # reference right wrench
create_topic(robot.publisher, robot.distribute, 'wrenchRef', robot=robot,
             data_type='vector')  # optimized reference wrench
#create_topic(robot.publisher, robot.device, 'forceLLEG', robot = robot, data_type='vector') # measured left wrench
#create_topic(robot.publisher, robot.device, 'forceRLEG', robot = robot, data_type='vector') # measured right wrench
#create_topic(robot.publisher, robot.device_filters.ft_LF_filter, 'x_filtered', robot = robot, data_type='vector') # filtered left wrench
#create_topic(robot.publisher, robot.device_filters.ft_RF_filter, 'x_filtered', robot = robot, data_type='vector') # filtered right wrench
create_topic(robot.publisher, robot.ftc, 'left_foot_force_out', robot=robot,
             data_type='vector')  # calibrated left wrench
create_topic(robot.publisher, robot.ftc, 'right_foot_force_out', robot=robot,
             data_type='vector')  # calibrated right wrench
create_topic(robot.publisher, robot.zmp_estimator, 'copRight', robot=robot, data_type='vector')
create_topic(robot.publisher, robot.rightAnkleController, 'pRef', robot=robot, data_type='vector')
create_topic(robot.publisher, robot.rightAnkleController, 'dRP', robot=robot, data_type='vector')
create_topic(robot.publisher, robot.zmp_estimator, 'copLeft', robot=robot, data_type='vector')
create_topic(robot.publisher, robot.leftAnkleController, 'pRef', robot=robot, data_type='vector')
create_topic(robot.publisher, robot.leftAnkleController, 'dRP', robot=robot, data_type='vector')
## --- TRACER
#robot.tracer = TracerRealTime("com_tracer")
#robot.tracer.setBufferSize(80*(2**20))
#robot.tracer.open('/tmp','dg_','.dat')
#robot.device.after.addSignal('{0}.triger'.format(robot.tracer.name))
#addTrace(robot.tracer, robot.wp, 'comDes') # desired CoM
#addTrace(robot.tracer, robot.cdc_estimator, 'c') # estimated CoM
#addTrace(robot.tracer, robot.cdc_estimator, 'dc') # estimated CoM velocity
#addTrace(robot.tracer, robot.com_admittance_control, 'comRef') # reference CoM
#addTrace(robot.tracer, robot.dynamic, 'com') # resulting SOT CoM
#addTrace(robot.tracer, robot.dcm_control, 'dcmDes') # desired DCM
#addTrace(robot.tracer, robot.estimator, 'dcm') # estimated DCM
#addTrace(robot.tracer, robot.dcm_control, 'zmpDes') # desired ZMP
#addTrace(robot.tracer, robot.dynamic, 'zmp') # SOT ZMP
#addTrace(robot.tracer, robot.zmp_estimator, 'zmp') # estimated ZMP
#addTrace(robot.tracer, robot.dcm_control, 'zmpRef') # reference ZMP
#addTrace(robot.tracer, robot.dcm_control, 'wrenchRef') # unoptimized reference wrench
#addTrace(robot.tracer, robot.distribute, 'wrenchLeft') # reference left wrench
#addTrace(robot.tracer, robot.distribute, 'wrenchRight') # reference right wrench
#addTrace(robot.tracer, robot.distribute, 'wrenchRef') # optimized reference wrench
#addTrace(robot.tracer, robot.ftc, 'left_foot_force_out') # calibrated left wrench
#addTrace(robot.tracer, robot.ftc, 'right_foot_force_out') # calibrated right wrench
#robot.tracer.start()
| 40.272076 | 138 | 0.764253 |
1e1a40c9e5dacace1b7468321e89ed0049a68db2 | 1,099 | py | Python | examples/user_interfaces/mpl_with_glade_316.py | yuvallanger/matplotlib | e0020d318a9a9685594c6bff4631f74599321459 | [
"MIT",
"BSD-3-Clause"
] | 8 | 2017-04-11T08:55:30.000Z | 2022-03-25T04:31:26.000Z | examples/user_interfaces/mpl_with_glade_316.py | epgauss/matplotlib | c9898ea9a30c67c579ab27cd61b68e2abae0fb0e | [
"MIT",
"BSD-3-Clause"
] | null | null | null | examples/user_interfaces/mpl_with_glade_316.py | epgauss/matplotlib | c9898ea9a30c67c579ab27cd61b68e2abae0fb0e | [
"MIT",
"BSD-3-Clause"
] | 14 | 2015-10-05T04:15:46.000Z | 2020-06-11T18:06:02.000Z | #!/usr/bin/env python3
from gi.repository import Gtk
from matplotlib.figure import Figure
from matplotlib.axes import Subplot
from numpy import arange, sin, pi
from matplotlib.backends.backend_gtk3agg import FigureCanvasGTK3Agg as FigureCanvas
class Window1Signals(object):
    """Signal handlers wired up via Gtk.Builder.connect_signals()."""

    def on_window1_destroy(self, widget):
        """Quit the GTK main loop when the top-level window is closed."""
        Gtk.main_quit()
def main():
    """Load the Glade UI and embed a Matplotlib sine plot inside it."""
    ui_builder = Gtk.Builder()
    ui_builder.add_objects_from_file("mpl_with_glade_316.glade", ("window1", ""))
    ui_builder.connect_signals(Window1Signals())
    top_window = ui_builder.get_object("window1")
    scroll_area = ui_builder.get_object("scrolledwindow1")
    # Start of Matplotlib specific code: plot sin(2*pi*t) for t in [0, 3).
    fig = Figure(figsize=(8, 6), dpi=71)
    ax = fig.add_subplot(111)
    times = arange(0.0, 3.0, 0.01)
    volts = sin(2 * pi * times)
    ax.plot(times, volts)
    ax.set_xlabel('time [s]')
    ax.set_ylabel('voltage [V]')
    plot_canvas = FigureCanvas(fig)  # a Gtk.DrawingArea
    plot_canvas.set_size_request(800, 600)
    scroll_area.add_with_viewport(plot_canvas)
    # End of Matplotlib specific code
    top_window.show_all()
    Gtk.main()
# Script entry point: build the window and enter the GTK event loop.
if __name__ == "__main__":
    main()
| 25.55814 | 83 | 0.700637 |
76bbd901e81e5640c5a674959359811495c7506d | 10,374 | py | Python | mandala/storages/kv_impl/sqlite_impl.py | amakelov/mandala | a9ec051ef730ada4eed216c62a07b033126e78d5 | [
"Apache-2.0"
] | 9 | 2022-02-22T19:24:01.000Z | 2022-03-23T04:46:41.000Z | mandala/storages/kv_impl/sqlite_impl.py | amakelov/mandala | a9ec051ef730ada4eed216c62a07b033126e78d5 | [
"Apache-2.0"
] | null | null | null | mandala/storages/kv_impl/sqlite_impl.py | amakelov/mandala | a9ec051ef730ada4eed216c62a07b033126e78d5 | [
"Apache-2.0"
] | null | null | null | from ..kv import KVStore, KVIndex
from ...common_imports import *
class SQLiteStorage(KVStore):
    # Key/value store backed by a single SQLite table ``var`` with a 32-char
    # primary key column (UID_COL) and an opaque value column. Values are
    # serialized with pickle by default.
    UID_COL = '__index__'
    def __init__(self,
                 root:Path=None,
                 dtype:str='BLOB',
                 serializer:TCallable=None,
                 deserializer:TCallable=None,
                 journal_mode:str='WAL',
                 page_size:int=32768,
                 mmap_size_MB:int=256,
                 cache_size_pages:int=1000,
                 synchronous:str='normal'):
        """Configure the store; if ``root`` is given, create the db file now.

        ``root`` is the directory that will contain ``data.db``.
        ``dtype`` is the SQL column type used for the value column.
        ``serializer``/``deserializer`` form the value codec (pickle default).
        The remaining arguments are SQLite PRAGMA tuning knobs applied on
        every connection (see get_optimizations()).
        """
        self._root = root
        self.dtype = dtype
        if serializer is None:
            serializer = pickle.dumps
        self.serializer = serializer
        if deserializer is None:
            deserializer = pickle.loads
        self.deserializer = deserializer
        self.journal_mode = journal_mode
        self.page_size = page_size
        self.mmap_size_MB = mmap_size_MB
        self.cache_size_pages = cache_size_pages
        self.synchronous = synchronous
        self.path = None
        self.address = None
        self.uri = True
        # Deferred initialization: a rootless store can be configured first
        # and pointed at a directory later via the ``root`` setter.
        if self.root is not None:
            self._init(root=self.root)
            self.setup()
    @property
    def root(self) -> TOption[Path]:
        # Directory holding the database file (None until assigned).
        return self._root
    @root.setter
    def root(self, value:Path=None):
        # NOTE(review): passing None here will fail inside _init's
        # is_absolute() assert — presumably only real paths are expected.
        self._init(root=value)
        self.setup()
    def get_meta(self) -> TAny:
        """Return the serializable configuration of this store."""
        return {
            'dtype': self.dtype,
            'journal_mode': self.journal_mode,
            'page_size': self.page_size,
            'mmap_size_MB': self.mmap_size_MB,
            'cache_size_pages': self.cache_size_pages,
            'synchronous': self.synchronous
        }
    def reflect_root_change(self, new_root: Path):
        # Re-point path/address at a new root without re-running setup().
        self._init(root=new_root)
    @staticmethod
    def from_meta(root:Path, desc:TAny) -> 'KVStore':
        # Not implemented for this backend.
        raise NotImplementedError()
    def _init(self, root:Path):
        """
        Sets dynamic attributes associated with this root
        """
        # sets root, path, address and uri
        assert root.is_absolute()
        if not root.exists():
            root.mkdir(parents=True)
        self._root = root
        self.path = self._root / 'data.db'
        self.address = self.path
    ############################################################################
    ### SQLite internals
    ############################################################################
    def get_optimizations(self) -> TList[str]:
        """
        NOTE:
        - you cannot change `page_size` after setting `journal_mode = WAL`
        - `journal_mode = WAL` is persistent across database connections
        - `cache_size` is in pages when positive, in kB when negative
        """
        if self.mmap_size_MB is None:
            mmap_size = 0
        else:
            mmap_size = self.mmap_size_MB * 1024**2
        # Ordered on purpose: page_size must be applied before journal_mode.
        pragma_dict = OrderedDict([
            # 'temp_store': 'memory',
            ('synchronous', self.synchronous),
            ('page_size', self.page_size),
            ('cache_size', self.cache_size_pages),
            ('journal_mode', self.journal_mode),
            ('mmap_size', mmap_size),
        ])
        lines = [f'PRAGMA {k} = {v};' for k, v in pragma_dict.items()]
        return lines
    def apply_optimizations(self, conn:sqlite3.Connection):
        # Execute each PRAGMA statement on the given connection.
        opts = self.get_optimizations()
        for line in opts:
            conn.execute(line)
    def conn(self) -> sqlite3.Connection:
        """Open a fresh autocommit connection to the database file."""
        return sqlite3.connect(str(self.address),
                               isolation_level=None, uri=self.uri)
    def setup(self):
        """Create the backing table if it does not exist yet."""
        query = f"""CREATE TABLE IF NOT EXISTS var({self.UID_COL} CHAR(32) PRIMARY KEY NOT NULL, value {self.dtype} NOT NULL); """
        with self.conn() as conn:
            # certain pragmas, such as page_size, must be given before the database
            # is even written to disk
            # self.run_optimization(conn=conn)
            self.apply_optimizations(conn=conn)
            conn.execute(query)
    def execute(self, query:str, parameters:TIter[TAny]=None):
        """Run a single (optionally parameterized) statement and return the cursor."""
        # NOTE(review): a new connection is opened per call; sqlite3's
        # ``with conn`` commits but does not close — acceptable here since
        # isolation_level=None autocommits, but connections rely on GC.
        with self.conn() as conn:
            self.apply_optimizations(conn=conn)
            if parameters is not None:
                res = conn.execute(query, parameters)
            else:
                res = conn.execute(query)
            return res
    def read(self, query:str) -> TList[TTuple[TAny,...]]:
        """Run ``query`` and return all result rows."""
        return self.execute(query=query).fetchall()
    def where_clause(self, keys:TUnion[TList[str], pd.Index]) -> str:
        # Quote and comma-join keys for an ``IN (...)`` clause. Keys are
        # interpolated, not bound — safe only because keys are hex digests.
        return ', '.join([f"'{k}'" for k in keys])
    def disk_usage(self) -> float:
        """
        Return disk usage in megabytes
        """
        assert self.path is not None
        return os.stat(self.path).st_size / 1024**2
    ############################################################################
    ### core interface
    ############################################################################
    def exists(self, k:str) -> bool:
        """Return True if key ``k`` is present."""
        query = f"""SELECT EXISTS(SELECT 1 FROM var WHERE var.{self.UID_COL} = '{k}')"""
        results = self.execute(query).fetchall()
        return results[0][0] == 1
    def mexists(self, ks: TList[str]) -> TList[bool]:
        """Vectorized exists(); preserves order and duplicates in ``ks``."""
        # Deduplicate, query once, then scatter results back per input key.
        unique_ks, key_to_unique_idx = np.unique(ar=ks, return_inverse=True)
        unique_exists = self.mexists_unique(ks=unique_ks)
        return [unique_exists[i] for i in key_to_unique_idx]
    def mexists_unique(self, ks:TList[str]) -> TList[bool]:
        """exists() for a list of keys assumed to be unique."""
        query = f"""SELECT {self.UID_COL} FROM var WHERE {self.UID_COL} IN ({self.where_clause(keys=ks)})"""
        result = set([elt[0] for elt in self.read(query=query)])
        return [k in result for k in ks]
    def set(self, k:str, v:TAny):
        """Insert or overwrite the value for key ``k``."""
        query = f"""INSERT OR REPLACE INTO var({self.UID_COL}, value) VALUES('{k}', ?)"""
        self.execute(query=query, parameters=[self.serializer(v)])
    def mset(self, mapping:TMutMap, chunk_size:int=65_536):
        """Bulk insert-or-replace of a {key: value} mapping."""
        values_list = []
        parameters = []
        for k, v in mapping.items():
            values_list.append(f"('{k}', ?)")
            parameters.append(self.serializer(v))
        # you need to break this up into chunks b/c sqlite has limits on how
        # many parameters you can have
        num_items = len(mapping)
        num_chunks = math.ceil(num_items / chunk_size)
        value_chunks = [values_list[chunk_size*i:chunk_size*(i+1)]
                        for i in range(num_chunks)]
        parameter_chunks = [parameters[chunk_size*i:chunk_size*(i+1)]
                            for i in range(num_chunks)]
        with self.conn() as conn:
            for value_chunk, parameter_chunk in zip(value_chunks, parameter_chunks):
                values = ',\n'.join(value_chunk)
                query = f"""INSERT OR REPLACE INTO var({self.UID_COL}, value) VALUES {values}; """
                conn.execute(query, parameter_chunk)
    def get(self, k:str) -> TAny:
        """Return the deserialized value for ``k``; raise KeyError if absent."""
        query = f"""SELECT * FROM var WHERE {self.UID_COL} = '{k}'"""
        res = self.read(query=query)
        if not res:
            raise KeyError(k)
        return self.deserializer(res[0][1])
    def mget(self, ks: TList[str]) -> TList[TAny]:
        """Vectorized get(); preserves order and duplicates in ``ks``."""
        unique_ks, key_to_unique_idx = np.unique(ar=ks, return_inverse=True)
        unique_vs = self.mget_unique(ks=unique_ks)
        return [unique_vs[i] for i in key_to_unique_idx]
    def mget_unique(self, ks:TList[str]=None) -> TList[TAny]:
        """get() for unique keys; with ks=None, fetch all stored values."""
        if ks is None:
            ks = self.keys()
        query = f"""SELECT {self.UID_COL}, value FROM var WHERE {self.UID_COL} IN ({self.where_clause(keys=ks)})"""
        res = {elt[0]: self.deserializer(elt[1])
               for elt in self.read(query=query)}
        # NOTE(review): a missing key raises KeyError here, matching get().
        return [res[k] for k in ks]
    def delete(self, k:str, must_exist:bool=False):
        """Delete key ``k``; with must_exist=True, assert it was present."""
        if must_exist:
            assert self.exists(k=k)
        query = f"""DELETE FROM var WHERE {self.UID_COL} = '{k}'"""
        self.execute(query)
    def mdelete(self, ks: TList[str], must_exist:bool=False):
        """Vectorized delete() over possibly-duplicated keys."""
        unique_ks, key_to_unique_idx = np.unique(ar=ks, return_inverse=True)
        self.mdelete_unique(ks=unique_ks, must_exist=must_exist)
    def mdelete_unique(self, ks:TList[str], must_exist:bool=False):
        """delete() for unique keys in a single statement."""
        if must_exist:
            # With must_exist, silently narrow to the keys that do exist.
            exist_mask = self.mexists_unique(ks=ks)
            ks = [k for i, k in enumerate(ks) if exist_mask[i]]
        values = self.where_clause(keys=ks)
        query = f"""DELETE FROM var WHERE {self.UID_COL} IN ({values})"""
        self.execute(query)
    ############################################################################
    ###
    ############################################################################
    def __len__(self) -> int:
        # Number of stored key/value pairs.
        query = """
        SELECT count(*) FROM var;
        """
        return self.execute(query).fetchall()[0][0]
    def keys(self, limit:int=None) -> TList[str]:
        """Return stored keys, optionally capped at ``limit`` (unordered)."""
        if limit is None:
            query = f"""
            SELECT {self.UID_COL} from var
            """
            return [elt[0] for elt in self.read(query=query)]
        else:
            query = f"""
            SELECT {self.UID_COL} from var
            LIMIT {limit}"""
            return [elt[0] for elt in self.read(query=query)]
    @property
    def empty(self) -> bool:
        # Cheap emptiness check: fetch at most one key.
        keys = self.keys(limit=1)
        return len(keys) == 0
    def where(self, pred:TCallable, keys:TList[str]=None) -> TList[str]:
        """Return the keys whose value satisfies ``pred`` (best-effort:
        values on which ``pred`` raises are silently skipped)."""
        keys = self.keys() if keys is None else list(set(keys))
        vals = self.mget_unique(ks=keys)
        result = []
        for k, val in zip(keys, vals):
            try:
                if pred(val):
                    result.append(k)
            except:
                pass
        return result
    def isin(self, rng:TList[TAny], keys:TList[str] = None) -> TList[str]:
        """Return the keys whose value is contained in ``rng`` (best-effort:
        unhashable/incomparable values are silently skipped)."""
        keys = self.keys() if keys is None else list(set(keys))
        vals = self.mget_unique(ks=keys)
        result = []
        for k, val in zip(keys, vals):
            try:
                if val in rng:
                    result.append(k)
            except:
                pass
        return result
    def __repr__(self) -> str:
        return f'SQLiteStorage(root={self._root})'
KVIndex.register(impl=SQLiteStorage, impl_id=SQLiteStorage.get_impl_id()) | 37.451264 | 130 | 0.53904 |
0b374d14edf70ed2e3019e1657ac399ed9c2ab25 | 1,526 | py | Python | services/movies_ugc/etl/models/models.py | fuodorov/yacinema | 43ad869575fbaab7c7056229538638666aa87110 | [
"MIT"
] | null | null | null | services/movies_ugc/etl/models/models.py | fuodorov/yacinema | 43ad869575fbaab7c7056229538638666aa87110 | [
"MIT"
] | null | null | null | services/movies_ugc/etl/models/models.py | fuodorov/yacinema | 43ad869575fbaab7c7056229538638666aa87110 | [
"MIT"
] | 1 | 2021-09-30T09:49:40.000Z | 2021-09-30T09:49:40.000Z | import datetime
from pydantic import BaseModel, validator
class UserEvent(BaseModel):
    """A single playback event emitted while a user watches a movie."""
    movie_id: str
    user_id: str
    event: str
    frame: int
    event_time: datetime.datetime

    def ch_table_properties(self) -> [str, str]:
        """Return the target table name and its comma-separated column list."""
        column_list = ', '.join(self.__fields__.keys())
        return "user_events", column_list

    @validator('event_time', pre=True, always=True)
    def event_time_validator(cls, v):
        """Parse the raw 'DD-MM-YYYY HH:MM:SS' string into a datetime."""
        return datetime.datetime.strptime(v, "%d-%m-%Y %H:%M:%S")
class UserLoginHistory(BaseModel):
    """One login record: who logged in, from where, and when."""
    user_id: str
    user_ip: str
    user_agent: str
    login_time: datetime.datetime

    def ch_table_properties(self) -> [str, str]:
        """Return the target table name and its comma-separated column list."""
        column_list = ', '.join(self.__fields__.keys())
        return "users_login", column_list

    @validator('login_time', pre=True, always=True)
    def login_time_validator(cls, v):
        """Parse the raw 'DD-MM-YYYY HH:MM:SS' string into a datetime."""
        return datetime.datetime.strptime(v, "%d-%m-%Y %H:%M:%S")
class CreateQueries(BaseModel):
    # SQL text used to create the database, the table, and to insert data.
    database: str
    table: str
    data: str
class ReadQueries(BaseModel):
    # SQL text used to list databases/tables and to read data rows.
    databases: str
    tables: str
    data: str
class Queries(BaseModel):
    # Full CRUD query bundle; update/delete are free-form dicts for now.
    create: CreateQueries
    read: ReadQueries
    update: dict
    delete: dict
| 22.776119 | 74 | 0.626474 |
64e81ea2f6bd5612b860d89ea42821954a37c62f | 281 | py | Python | tests/plugins/facebook.py | alfie-max/Publish | 0014cc45f2496c44c9171353dc42a58d73dd5490 | [
"MIT"
] | 1 | 2016-03-08T07:17:46.000Z | 2016-03-08T07:17:46.000Z | tests/plugins/facebook.py | alfie-max/Publish | 0014cc45f2496c44c9171353dc42a58d73dd5490 | [
"MIT"
] | 5 | 2021-03-18T19:55:25.000Z | 2022-03-11T23:11:27.000Z | tests/plugins/facebook.py | alfie-max/Publish | 0014cc45f2496c44c9171353dc42a58d73dd5490 | [
"MIT"
] | null | null | null | class Facebook(object):
def __init__(self):
self.__fields__ = ['Message']
def VerifyCredentials(self):
return True
def VerifyFields(self, fields):
return True
def SendMsg(self, msg):
return 0
# Module-level plugin registration attributes.
# NOTE(review): presumably discovered by the host application's plugin
# loader via these well-known names -- confirm against the loader code.
__plugin__ = Facebook
__cname__ = 'facebook'
| 17.5625 | 35 | 0.651246 |
e19f2236ca6fe79779878c21cb24085538cf63f8 | 13,408 | py | Python | src/core/datasets.py | spencerpomme/GSPNet | ff165de95ec0f258ba444ff343d18d812a066b8f | [
"MIT"
] | null | null | null | src/core/datasets.py | spencerpomme/GSPNet | ff165de95ec0f258ba444ff343d18d812a066b8f | [
"MIT"
] | null | null | null | src/core/datasets.py | spencerpomme/GSPNet | ff165de95ec0f258ba444ff343d18d812a066b8f | [
"MIT"
] | null | null | null | '''
Copyright <2019> <COPYRIGHT Pingcheng Zhang>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Customized datset classes defined here.
A part of GSPNet project.
'''
import numpy as np
import pandas as pd
import pickle as pkl
import matplotlib.pyplot as plt
import os
import re
import time
import torch
from torch import nn, optim
from torch.utils import data
from torch.utils.data import TensorDataset, DataLoader
from torch.utils.data import SubsetRandomSampler, SequentialSampler
from glob import iglob, glob
from matplotlib import pyplot as plt
from matplotlib.legend_handler import HandlerLine2D
from tqdm import tqdm
# Environment global variable: gate for multi-GPU code paths.
# Hard-disabled here; the original dynamic check (kept in the trailing
# comment) was based on the number of visible CUDA devices.
TRAIN_ON_MULTI_GPUS = False  # (torch.cuda.device_count() >= 2)
# util functions
def decide_label(file: str) -> int:
    """Map a serialized-tensor file name to its time-of-day class label.

    File names embed the snapshot period as
    ``YYYY-MM-DD_HH;MM;SS-YYYY-MM-DD_HH;MM;SS`` (start-end).  The hour of
    the *start* timestamp indexes the hard-coded label table below:
    hours 07-10 map to class 1, every other hour to class 0.

    Args:
        file: path to a serialized tensor file (the basename encodes
            the snapshot period).

    Returns:
        int: class label, 0 or 1.
    """
    pattern = re.compile(
        r'^(\d{4})-([0-1]\d)-([0-3]\d)_([0-1]\d|[2][0-3]);([0-5]\d);([0-5]\d)-(\d{4})-([0-1]\d)-([0-3]\d)_([0-1]\d|[2][0-3]);([0-5]\d);([0-5]\d)')
    # Take the basename portably.  The original code split on '\\' and
    # indexed element [1], which only worked for single-level
    # Windows-style paths and raised IndexError for POSIX paths.
    name = os.path.basename(file.replace('\\', '/'))
    hour = int(pattern.findall(name)[0][3])
    # Label table indexed by hour of day (24 entries): hours 7-10 -> 1.
    labels = [0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
              0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    return labels[hour]
# Customized RNN/LSTM datasets when dataset are to big to load at once into RAM
# Data feeders, type 1 (using classes)
class F2FDataset(data.Dataset):
    '''
    Frame-to-frame dataset: sample ``i`` pairs tensor ``i`` (input)
    with tensor ``i+1`` (target), both loaded lazily from disk.
    '''
    def __init__(self, datadir, seq_len):
        '''
        Index the serialized tensors found under ``datadir``.

        Args:
            datadir: directory containing ``*.pkl`` tensor files
            seq_len: timestep length; the file list is truncated to a
                whole multiple of this value
        '''
        self.paths = glob(datadir + '/*.pkl')
        self.seq_len = seq_len
        # Keep only a whole number of seq_len-sized groups.
        self.length = (len(self.paths) // seq_len) * seq_len
        self.idict = {pos: path
                      for pos, path in enumerate(self.paths[:self.length])}
        ordered = list(self.idict)
        self.input_ids = ordered[:-1]
        self.label_ids = ordered[1:]

    def __len__(self):
        '''
        Number of (input, target) pairs that can be drawn.
        '''
        return self.length - self.seq_len

    def __getitem__(self, index):
        '''
        Load and return the ``(input, target)`` tensor pair at ``index``.
        '''
        source = torch.load(self.idict[self.input_ids[index]])
        target = torch.load(self.idict[self.label_ids[index]])
        return source, target
class F2FDatasetRAM(data.Dataset):
    '''
    Frame to frame dataset, fully pre-loaded into RAM.

    Sample ``i`` is the pair ``(frame[i], frame[i+1])`` of flattened
    float32 frames (a next-frame prediction target).
    '''
    def __init__(self, datadir, seq_len):
        '''
        Initialization.
        Args:
            datadir: directory of serialized tensors
            seq_len: timestep length of tensors
        '''
        self.paths = glob(datadir + '/*.pkl')
        # only want full seq_len sized length numbers
        self.seq_len = seq_len
        self.length = len(self.paths) // seq_len * seq_len
        self.idict = {}
        for i in range(self.length):
            self.idict[i] = self.paths[i]
        # NOTE(review): input_ids/label_ids are built here but never used
        # by __getitem__ below, which indexes self.tensors directly.
        self.input_ids = list(self.idict.keys())[:-1]
        self.label_ids = list(self.idict.keys())[1:]
        # load all tensor into RAM
        tensors = []
        for path in tqdm(self.paths, total=self.length, ascii=True):
            tensor = torch.load(path).numpy()
            tensors.append(tensor)
        # pad one at the end of the sequence with first state
        pad_tensor = torch.load(self.paths[0]).numpy()
        tensors.append(pad_tensor)
        tensors = np.array(tensors).astype('float32')
        # NOTE(review): with the pad appended, ``tensors`` holds
        # len(self.paths) + 1 frames, so this reshape to self.length rows
        # only succeeds when the total element count divides evenly, and
        # it shifts data across frame boundaries -- confirm this is the
        # intended behaviour.
        self.tensors = tensors.reshape((self.length, -1))
    def __len__(self):
        '''
        Denotes the total number of samples
        '''
        return self.length - self.seq_len
    def __getitem__(self, index):
        '''
        Generates one sample of data: (current row, next row).
        '''
        # Load data and get label
        X = self.tensors[index]
        y = self.tensors[index+1]
        X = torch.from_numpy(X)
        y = torch.from_numpy(y)
        return X, y
class S2FDataset(data.Dataset):
    '''
    Sequence-to-frame dataset: a window of ``seq_len`` flattened frames
    predicts the single frame that follows it.  Frames are loaded
    lazily from disk.
    '''
    def __init__(self, datadir, seq_len):
        '''
        Index the serialized tensors found under ``datadir``.

        Args:
            datadir: directory containing ``*.pkl`` tensor files
            seq_len: timestep length of each input window
        '''
        all_paths = glob(datadir + '/*.pkl')
        # Truncate to a whole number of seq_len-sized groups.
        usable = (len(all_paths) // seq_len) * seq_len
        self.paths = all_paths[:usable]
        self.seq_len = seq_len
        self.length = usable
        self.idict = dict(enumerate(self.paths))

    def __len__(self):
        '''
        Number of (window, target) samples that can be drawn.
        '''
        return self.length - self.seq_len

    def __getitem__(self, index):
        '''
        Build the ``seq_len``-frame window starting at ``index`` and its
        target frame at ``index + seq_len``.
        '''
        frames = [torch.load(self.idict[index + offset]).numpy()
                  for offset in range(self.seq_len)]
        window = np.array(frames).astype('float32')
        window = torch.from_numpy(window.reshape((len(window), -1)))
        # Target is flattened so it matches one row of the window.
        target = torch.load(self.idict[index + self.seq_len]).type(torch.float32)
        return window, target.flatten()
class S2FDatasetRAM(data.Dataset):
    '''
    Sequence-to-frame dataset with every tensor pre-loaded into RAM.

    A window of ``seq_len`` flattened frames predicts the frame that
    follows it.
    '''
    def __init__(self, datadir, seq_len):
        '''
        Load every serialized tensor under ``datadir`` into memory.

        Args:
            datadir: directory containing ``*.pkl`` tensor files
            seq_len: timestep length of each input window
        '''
        found = glob(datadir + '/*.pkl')
        # Truncate to a whole number of seq_len-sized groups.
        usable = (len(found) // seq_len) * seq_len
        self.paths = found[:usable]
        self.seq_len = seq_len
        self.length = len(self.paths)
        # Materialise every frame up front; tqdm reports load progress.
        frames = [torch.load(p).numpy()
                  for p in tqdm(self.paths, total=self.length, ascii=True)]
        stacked = np.array(frames).astype('float32')
        self.tensors = stacked.reshape((self.length, -1))

    def __len__(self):
        '''
        Number of (window, target) samples that can be drawn.
        '''
        return self.length - self.seq_len

    def __getitem__(self, index):
        '''
        Return the pre-loaded window at ``index`` and its target frame.
        '''
        window = torch.from_numpy(self.tensors[index: index + self.seq_len])
        target = torch.from_numpy(self.tensors[index + self.seq_len])
        return window, target
# Classification dataset
# TODO: Please clean unused variables
class SnapshotClassificationDataset(data.Dataset):
    '''
    Dataset that treats each time snapshot as a classification sample.

    Labels are derived lazily from the tensor file name via the
    module-level ``decide_label`` helper, so one class corresponds to a
    band of hours of the day.
    '''
    def __init__(self, datadir: str):
        '''
        Index the serialized tensors found under ``datadir``.

        Args:
            datadir: directory containing `tensors` and `viz_images` folder
        '''
        self.datadir = datadir
        self.paths = glob(self.datadir + '/*.pkl')
        # Kept for compatibility although unused internally: matches the
        # trailing number in a tensor file name (naming convention).
        self.pattern = re.compile(r'(?<=-)\d+(?=.pkl)')

    def __len__(self):
        '''
        Total number of snapshot samples.
        '''
        return len(self.paths)

    def __getitem__(self, index):
        '''
        Load the tensor at ``index`` and derive its label on the fly.
        '''
        tensor_path = self.paths[index]
        return torch.load(tensor_path), decide_label(tensor_path)
# TODO: Please clean unused variables
class SnapshotClassificationDatasetRAM(data.Dataset):
    '''
    Same as SnapshotClassificationDataset, but eagerly loads every
    tensor (and its label) into memory at construction time.
    '''
    def __init__(self, datadir: str):
        '''
        Load all tensors and labels from ``datadir`` into RAM.

        Args:
            datadir: directory containing `tensors` and `viz_images` folder
        '''
        self.datadir = datadir
        self.paths = glob(self.datadir + '/*.pkl')
        assert len(self.paths) != 0, 'glob error!'
        # Kept for compatibility although unused internally: matches the
        # trailing number in a tensor file name (naming convention).
        # The unused ``dir_pattern``/``interval`` locals of the original
        # were removed (see the TODO above); they also made directory
        # names without an ``_NNmin`` suffix raise IndexError.
        self.pattern = re.compile(r'(?<=-)\d+(?=.pkl)')
        # Eagerly load every tensor and derive its label from the name.
        self.Xs = []
        self.ys = []
        for path in tqdm(self.paths, total=len(self.paths), ascii=True):
            X = torch.load(path).type(torch.FloatTensor)
            # NOTE(review): axes (2, 1, 0) -- presumably converting the
            # stored HxWxC layout to channels-first; confirm.
            X = X.permute(2, 1, 0)
            self.Xs.append(X)
            self.ys.append(decide_label(path))

    def __len__(self):
        '''
        Total number of snapshot samples.
        '''
        return len(self.paths)

    def __getitem__(self, index):
        '''
        Return the pre-loaded ``(tensor, label)`` pair at ``index``.
        '''
        return self.Xs[index], self.ys[index]
class EncoderDatasetRAM(data.Dataset):
    '''
    Autoencoder dataset held entirely in RAM: each sample's
    reconstruction target is the sample itself (flattened frame).
    '''
    def __init__(self, datadir):
        '''
        Load every serialized tensor under ``datadir`` into memory.

        Args:
            datadir: directory of serialized tensors
        '''
        self.paths = glob(datadir + '/*.pkl')
        self.length = len(self.paths)
        # Materialise every frame up front; tqdm reports load progress.
        frames = [torch.load(p).numpy()
                  for p in tqdm(self.paths, total=self.length, ascii=True)]
        stacked = np.array(frames).astype('float32')
        self.tensors = stacked.reshape((self.length, -1))

    def __len__(self):
        '''
        Number of usable samples.
        '''
        return self.length - 1

    def __getitem__(self, index):
        '''
        Return the flattened frame at ``index`` as both input and target.
        '''
        flattened = self.tensors[index]
        # Two tensor objects sharing the same underlying buffer,
        # matching the original behaviour.
        return torch.from_numpy(flattened), torch.from_numpy(flattened)
class ConvEncoderDatasetRAM(data.Dataset):
    '''
    Convolutional autoencoder dataset held entirely in RAM; images are
    stored axes-swapped (numpy image layout -> pytorch image layout).
    '''
    def __init__(self, datadir):
        '''
        Load every serialized tensor under ``datadir`` into memory.

        Args:
            datadir: directory of serialized tensors
        '''
        self.paths = glob(datadir + '/*.pkl')
        self.length = len(self.paths)
        # Materialise every image up front; tqdm reports load progress.
        loaded = [torch.load(p).numpy()
                  for p in tqdm(self.paths, total=self.length, ascii=True)]
        stacked = np.array(loaded).astype('float32')
        # numpy image to pytorch image: swap axes 1 and 3.
        self.tensors = np.swapaxes(stacked, 1, 3)
        # Report the pre-swap (numpy-layout) shape, as before.
        print(f'Conv Autoencoder data shape: {stacked.shape}')

    def __len__(self):
        '''
        Number of usable samples.
        '''
        return self.length - 1

    def __getitem__(self, index):
        '''
        Return the image at ``index`` as both input and target.
        '''
        image = torch.from_numpy(self.tensors[index])
        target = torch.from_numpy(self.tensors[index])
        return image, target
# Smoke check: prints a marker when the module is executed directly.
if __name__ == '__main__':
    print('In module datasets.py')
| 29.211329 | 145 | 0.592184 |
618050544cfb6f83887e7e97bdf5839d9eaba27b | 3,847 | py | Python | zat/json_log_to_dataframe.py | SuperCowPowers/zat | 7f0de8bb052e8c84ab9bd00f195514d957eac9ec | [
"Apache-2.0"
] | 146 | 2019-11-07T20:57:27.000Z | 2022-03-24T00:03:55.000Z | zat/json_log_to_dataframe.py | Kitware/BroUtils | fed88c4310cf70c8b01c9a7eb0918b8c4d117e77 | [
"Apache-2.0"
] | 43 | 2017-09-18T06:22:35.000Z | 2018-09-05T19:59:43.000Z | zat/json_log_to_dataframe.py | Kitware/BroUtils | fed88c4310cf70c8b01c9a7eb0918b8c4d117e77 | [
"Apache-2.0"
] | 41 | 2017-09-11T09:59:47.000Z | 2018-09-01T18:36:47.000Z | """JSONLogToDataFrame: Converts a Zeek JSON log to a Pandas DataFrame"""
import os
# Third Party
import pandas as pd
# Local Imports
class JSONLogToDataFrame(object):
    """Convert a Zeek JSON log (one JSON object per line) to a DataFrame.

    Notes:
        JSON output loses the type information present in native Zeek
        logs, so most column dtypes must be inferred; prefer the
        standard Zeek log format for faster and better dataframes.

    Todo:
        1. Have a more formal column mapping
        2. Convert Categorial columns
    """

    def __init__(self):
        """Set up the (currently empty) column type-conversion map."""
        self.type_map = {}

    def create_dataframe(self, log_filename, ts_index=True, aggressive_category=True, maxrows=None):
        """Create a Pandas dataframe from a Zeek JSON log file.

        Args:
            log_filename (string): full path to the Zeek log
            ts_index (bool): set the index to the 'ts' field (default True)
            aggressive_category (bool): currently has no effect (kept for
                interface compatibility)
            maxrows: read in a subset of rows for testing/inspecting

        Returns:
            pandas.DataFrame: the parsed log, or an empty frame when the
            file does not exist.
        """
        # Guard clause: missing files yield an empty frame, not an error.
        if not os.path.isfile(log_filename):
            print(f'Could not find file: {log_filename}')
            return pd.DataFrame()

        frame = pd.read_json(log_filename, nrows=maxrows, lines=True)

        # Zeek writes epoch-second timestamps; convert them (and
        # optionally promote the column to the index).
        if 'ts' in frame.columns:
            frame['ts'] = pd.to_datetime(frame['ts'], unit='s')
            if ts_index:
                frame.set_index('ts', inplace=True)

        return frame
# Simple test of the functionality
def test():
    """Test for JSONLogToDataFrame Class.

    Exercises the converter against the bundled sample Zeek JSON logs
    under ``../data/json`` (requires the zat repository layout); output
    is printed rather than asserted.
    """
    import os
    pd.set_option('display.width', 1000)
    from zat.utils import file_utils
    # Grab a test file
    data_path = file_utils.relative_dir(__file__, '../data/json')
    log_path = os.path.join(data_path, 'conn.log')
    # Convert it to a Pandas DataFrame
    log_to_df = JSONLogToDataFrame()
    my_df = log_to_df.create_dataframe(log_path)
    # Print out the head
    print(my_df.head())
    # Print out the datatypes
    print(my_df.dtypes)
    # Test a bunch of different Zeek log types
    tests = ['capture_loss.log', 'dhcp.log', 'http.log', 'ntp.log', 'smb_mapping.log', 'weird.log',
             'conn.log', 'dns.log', 'kerberos.log', 'packet_filter.log', 'ssl.log', 'x509.log',
             'dce_rpc.log', 'files.log', 'loaded_scripts.log', 'smb_files.log', 'stats.log']
    for log_path in [os.path.join(data_path, log) for log in tests]:
        print('Testing: {:s}...'.format(log_path))
        my_df = log_to_df.create_dataframe(log_path)
        print(my_df.head())
        print(my_df.dtypes)
    # Test out maxrows arg
    conn_path = os.path.join(data_path, 'conn.log')
    my_df = log_to_df.create_dataframe(conn_path, maxrows=3)
    print(my_df.head())
    # Test an empty log (a log with header/close but no data rows)
    log_path = os.path.join(data_path, 'http_empty.log')
    my_df = log_to_df.create_dataframe(log_path)
    # Print out the head
    print(my_df.head())
    # Print out the datatypes
    print(my_df.dtypes)
    print('JSONLogToDataFrame Test successful!')
if __name__ == '__main__':
    # Run the test for easy testing/debugging
    # Setup Pandas output options so wide frames print readably
    pd.options.display.max_colwidth = 20
    pd.options.display.max_columns = 10
    test()
| 34.348214 | 100 | 0.642839 |
acaa184b5f7113792b8858295793ae6b6caf7031 | 15,755 | py | Python | Lib/distutils/command/bdist_wininst.py | thatneat/cpython | 2cd07920bb7d2d319999394092190f37935dc421 | [
"CNRI-Python-GPL-Compatible"
] | 1 | 2020-04-07T03:48:28.000Z | 2020-04-07T03:48:28.000Z | Lib/distutils/command/bdist_wininst.py | thatneat/cpython | 2cd07920bb7d2d319999394092190f37935dc421 | [
"CNRI-Python-GPL-Compatible"
] | null | null | null | Lib/distutils/command/bdist_wininst.py | thatneat/cpython | 2cd07920bb7d2d319999394092190f37935dc421 | [
"CNRI-Python-GPL-Compatible"
] | 1 | 2019-11-25T00:28:23.000Z | 2019-11-25T00:28:23.000Z | """distutils.command.bdist_wininst
Implements the Distutils 'bdist_wininst' command: create a windows installer
exe-program."""
import sys, os
from distutils.core import Command
from distutils.util import get_platform
from distutils.dir_util import create_tree, remove_tree
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log
class bdist_wininst(Command):
    """Distutils command that wraps a built distribution into a
    self-extracting Windows installer (.exe).

    The installer is assembled as: pre-built wininst stub exe + optional
    bitmap + ini-style configuration + binary header + zip archive of a
    pseudo-installation tree.
    """
    description = "create an executable installer for MS Windows"
    user_options = [('bdist-dir=', None,
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('target-version=', None,
                     "require a specific python version" +
                     " on the target system"),
                    ('no-target-compile', 'c',
                     "do not compile .py to .pyc on the target system"),
                    ('no-target-optimize', 'o',
                     "do not compile .py to .pyo (optimized) "
                     "on the target system"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('bitmap=', 'b',
                     "bitmap to use for the installer instead of python-powered logo"),
                    ('title=', 't',
                     "title to display on the installer background instead of default"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('install-script=', None,
                     "basename of installation script to be run after "
                     "installation or before deinstallation"),
                    ('pre-install-script=', None,
                     "Fully qualified filename of a script to be run before "
                     "any files are installed. This script need not be in the "
                     "distribution"),
                    ('user-access-control=', None,
                     "specify Vista's UAC handling - 'none'/default=no "
                     "handling, 'auto'=use UAC if target Python installed for "
                     "all users, 'force'=always use UAC"),
                   ]
    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
                       'skip-build']
    # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows
    _unsupported = (sys.platform != "win32")
    def initialize_options(self):
        """Set every user option to its 'not yet specified' default."""
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.no_target_compile = 0
        self.no_target_optimize = 0
        self.target_version = None
        self.dist_dir = None
        self.bitmap = None
        self.title = None
        self.skip_build = None
        self.install_script = None
        self.pre_install_script = None
        self.user_access_control = None
    def finalize_options(self):
        """Fill in unset options from the 'bdist' command and validate
        target-version / install-script consistency."""
        self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
        if self.bdist_dir is None:
            if self.skip_build and self.plat_name:
                # If build is skipped and plat_name is overridden, bdist will
                # not see the correct 'plat_name' - so set that up manually.
                bdist = self.distribution.get_command_obj('bdist')
                bdist.plat_name = self.plat_name
                # next the command will be initialized using that name
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'wininst')
        if not self.target_version:
            self.target_version = ""
        if not self.skip_build and self.distribution.has_ext_modules():
            # Extension modules pin the installer to the running Python.
            short_version = get_python_version()
            if self.target_version and self.target_version != short_version:
                raise DistutilsOptionError(
                      "target version can only be %s, or the '--skip-build'" \
                      " option must be specified" % (short_version,))
            self.target_version = short_version
        self.set_undefined_options('bdist',
                                   ('dist_dir', 'dist_dir'),
                                   ('plat_name', 'plat_name'),
                                  )
        if self.install_script:
            # The install script must be one of the distribution's scripts.
            for script in self.distribution.scripts:
                if self.install_script == os.path.basename(script):
                    break
            else:
                raise DistutilsOptionError(
                      "install_script '%s' not found in scripts"
                      % self.install_script)
    def run(self):
        """Build the project, pseudo-install it, zip the tree, and wrap
        the archive into a self-extracting installer exe."""
        if (sys.platform != "win32" and
            (self.distribution.has_ext_modules() or
             self.distribution.has_c_libraries())):
            raise DistutilsPlatformError \
                  ("distribution contains extensions and/or C libraries; "
                   "must be compiled on a Windows 32 platform")
        if not self.skip_build:
            self.run_command('build')
        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0
        install.plat_name = self.plat_name
        install_lib = self.reinitialize_command('install_lib')
        # we do not want to include pyc or pyo files
        install_lib.compile = 0
        install_lib.optimize = 0
        if self.distribution.has_ext_modules():
            # If we are building an installer for a Python version other
            # than the one we are currently running, then we need to ensure
            # our build_lib reflects the other Python version rather than ours.
            # Note that for target_version!=sys.version, we must have skipped the
            # build step, so there is no issue with enforcing the build of this
            # version.
            target_version = self.target_version
            if not target_version:
                assert self.skip_build, "Should have already checked this"
                target_version = '%d.%d' % sys.version_info[:2]
            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
            build = self.get_finalized_command('build')
            build.build_lib = os.path.join(build.build_base,
                                           'lib' + plat_specifier)
        # Use a custom scheme for the zip-file, because we have to decide
        # at installation time which scheme to use.
        for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
            value = key.upper()
            if key == 'headers':
                value = value + '/Include/$dist_name'
            setattr(install,
                    'install_' + key,
                    value)
        log.info("installing to %s", self.bdist_dir)
        install.ensure_finalized()
        # avoid warning of 'install_lib' about installing
        # into a directory not in sys.path
        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
        install.run()
        del sys.path[0]
        # And make an archive relative to the root of the
        # pseudo-installation tree.
        from tempfile import mktemp
        archive_basename = mktemp()
        fullname = self.distribution.get_fullname()
        arcname = self.make_archive(archive_basename, "zip",
                                    root_dir=self.bdist_dir)
        # create an exe containing the zip-file
        self.create_exe(arcname, fullname, self.bitmap)
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_wininst', pyversion,
                                             self.get_installer_filename(fullname)))
        # remove the zip-file again
        log.debug("removing temporary file '%s'", arcname)
        os.remove(arcname)
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
    def get_inidata(self):
        """Return the ini-formatted [metadata]/[Setup] text embedded in
        the installer to describe the installation."""
        # Return data describing the installation.
        lines = []
        metadata = self.distribution.metadata
        # Write the [metadata] section.
        lines.append("[metadata]")
        # 'info' will be displayed in the installer's dialog box,
        # describing the items to be installed.
        info = (metadata.long_description or '') + '\n'
        # Escape newline characters
        def escape(s):
            return s.replace("\n", "\\n")
        for name in ["author", "author_email", "description", "maintainer",
                     "maintainer_email", "name", "url", "version"]:
            data = getattr(metadata, name, "")
            if data:
                info = info + ("\n %s: %s" % \
                              (name.capitalize(), escape(data)))
                lines.append("%s=%s" % (name, escape(data)))
        # The [setup] section contains entries controlling
        # the installer runtime.
        lines.append("\n[Setup]")
        if self.install_script:
            lines.append("install_script=%s" % self.install_script)
        lines.append("info=%s" % escape(info))
        lines.append("target_compile=%d" % (not self.no_target_compile))
        lines.append("target_optimize=%d" % (not self.no_target_optimize))
        if self.target_version:
            lines.append("target_version=%s" % self.target_version)
        if self.user_access_control:
            lines.append("user_access_control=%s" % self.user_access_control)
        title = self.title or self.distribution.get_fullname()
        lines.append("title=%s" % escape(title))
        import time
        import distutils
        build_info = "Built %s with distutils-%s" % \
                     (time.ctime(time.time()), distutils.__version__)
        lines.append("build_info=%s" % build_info)
        return "\n".join(lines)
    def create_exe(self, arcname, fullname, bitmap=None):
        """Write the installer exe: stub + optional bitmap + mbcs-encoded
        config data (+ pre-install script) + binary header + zip archive."""
        import struct
        self.mkpath(self.dist_dir)
        cfgdata = self.get_inidata()
        installer_name = self.get_installer_filename(fullname)
        self.announce("creating %s" % installer_name)
        if bitmap:
            with open(bitmap, "rb") as f:
                bitmapdata = f.read()
            bitmaplen = len(bitmapdata)
        else:
            bitmaplen = 0
        with open(installer_name, "wb") as file:
            file.write(self.get_exe_bytes())
            if bitmap:
                file.write(bitmapdata)
            # Convert cfgdata from unicode to ascii, mbcs encoded
            if isinstance(cfgdata, str):
                cfgdata = cfgdata.encode("mbcs")
            # Append the pre-install script
            cfgdata = cfgdata + b"\0"
            if self.pre_install_script:
                # We need to normalize newlines, so we open in text mode and
                # convert back to bytes. "latin-1" simply avoids any possible
                # failures.
                with open(self.pre_install_script, "r",
                          encoding="latin-1") as script:
                    script_data = script.read().encode("latin-1")
                cfgdata = cfgdata + script_data + b"\n\0"
            else:
                # empty pre-install script
                cfgdata = cfgdata + b"\0"
            file.write(cfgdata)
            # The 'magic number' 0x1234567B is used to make sure that the
            # binary layout of 'cfgdata' is what the wininst.exe binary
            # expects. If the layout changes, increment that number, make
            # the corresponding changes to the wininst.exe sources, and
            # recompile them.
            header = struct.pack("<iii",
                                 0x1234567B, # tag
                                 len(cfgdata), # length
                                 bitmaplen, # number of bytes in bitmap
                                 )
            file.write(header)
            with open(arcname, "rb") as f:
                file.write(f.read())
    def get_installer_filename(self, fullname):
        """Return the output .exe path (includes the target Python
        version when one was requested)."""
        # Factored out to allow overriding in subclasses
        if self.target_version:
            # if we create an installer for a specific python version,
            # it's better to include this in the name
            installer_name = os.path.join(self.dist_dir,
                                          "%s.%s-py%s.exe" %
                                           (fullname, self.plat_name, self.target_version))
        else:
            installer_name = os.path.join(self.dist_dir,
                                          "%s.%s.exe" % (fullname, self.plat_name))
        return installer_name
    def get_exe_bytes(self):
        """Return the raw bytes of the wininst stub exe matching the
        target Python's C runtime (MSVC) version and platform suffix."""
        # If a target-version other than the current version has been
        # specified, then using the MSVC version from *this* build is no good.
        # Without actually finding and executing the target version and parsing
        # its sys.version, we just hard-code our knowledge of old versions.
        # NOTE: Possible alternative is to allow "--target-version" to
        # specify a Python executable rather than a simple version string.
        # We can then execute this program to obtain any info we need, such
        # as the real sys.version string for the build.
        cur_version = get_python_version()
        # If the target version is *later* than us, then we assume they
        # use what we use
        # string compares seem wrong, but are what sysconfig.py itself uses
        if self.target_version and self.target_version < cur_version:
            if self.target_version < "2.4":
                bv = '6.0'
            elif self.target_version == "2.4":
                bv = '7.1'
            elif self.target_version == "2.5":
                bv = '8.0'
            elif self.target_version <= "3.2":
                bv = '9.0'
            elif self.target_version <= "3.4":
                bv = '10.0'
            else:
                bv = '14.0'
        else:
            # for current version - use authoritative check.
            try:
                from msvcrt import CRT_ASSEMBLY_VERSION
            except ImportError:
                # cross-building, so assume the latest version
                bv = '14.0'
            else:
                # as far as we know, CRT is binary compatible based on
                # the first field, so assume 'x.0' until proven otherwise
                major = CRT_ASSEMBLY_VERSION.partition('.')[0]
                bv = major + '.0'
        # wininst-x.y.exe is in the same directory as this file
        directory = os.path.dirname(__file__)
        # we must use a wininst-x.y.exe built with the same C compiler
        # used for python. XXX What about mingw, borland, and so on?
        # if plat_name starts with "win" but is not "win32"
        # we want to strip "win" and leave the rest (e.g. -amd64)
        # for all other cases, we don't want any suffix
        if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
            sfix = self.plat_name[3:]
        else:
            sfix = ''
        filename = os.path.join(directory, "wininst-%s%s.exe" % (bv, sfix))
        f = open(filename, "rb")
        try:
            return f.read()
        finally:
            f.close()
| 42.581081 | 91 | 0.556776 |
3f246990ab650bc190f2b8209cb5eae87541d16b | 794 | py | Python | seq2seq/util/seed.py | suhyeon0123/SplitRegex | b5f2fe890d81d335cda30ddd10414fbc75537d0e | [
"Apache-2.0"
] | null | null | null | seq2seq/util/seed.py | suhyeon0123/SplitRegex | b5f2fe890d81d335cda30ddd10414fbc75537d0e | [
"Apache-2.0"
] | null | null | null | seq2seq/util/seed.py | suhyeon0123/SplitRegex | b5f2fe890d81d335cda30ddd10414fbc75537d0e | [
"Apache-2.0"
] | null | null | null | import os
import torch
import numpy as np
import random
def seed_all(seed: int = 3000):
    """Seed every random number generator used by the project.

    Covers the interpreter hash seed, ``random``, NumPy and PyTorch
    (CPU plus all CUDA devices), and forces deterministic cuDNN
    behaviour for reproducible runs.

    Args:
        seed: the seed value to apply everywhere (default 3000).
    """
    print("Using Seed Number {}".format(seed))
    # Pin the interpreter's hash randomisation as well.
    os.environ["PYTHONHASHSEED"] = str(seed)
    # Python's built-in and NumPy's pseudo-random generators.
    random.seed(seed)
    np.random.seed(seed)
    # PyTorch: CPU plus every CUDA device (no-ops without CUDA).
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    # Trade speed for reproducibility in cuDNN.
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.enabled = False
def seed_worker(_worker_id):
    """DataLoader worker-init fn: re-seed ``random``/NumPy from torch.

    Derives a 32-bit seed from the worker's torch seed so every worker
    gets a distinct but reproducible stream in both libraries.
    """
    derived_seed = torch.initial_seed() % 2 ** 32
    random.seed(derived_seed)
    np.random.seed(derived_seed)
| 28.357143 | 76 | 0.706549 |
375470ba34364a59575f74f017d9ea51314f6227 | 1,351 | py | Python | source/util/database.py | Remote-Area-Monitoring/Remote-Area-Monitoring | 1dc602ed52eed124d3a58215cf025b9204dad426 | [
"MIT"
] | null | null | null | source/util/database.py | Remote-Area-Monitoring/Remote-Area-Monitoring | 1dc602ed52eed124d3a58215cf025b9204dad426 | [
"MIT"
] | null | null | null | source/util/database.py | Remote-Area-Monitoring/Remote-Area-Monitoring | 1dc602ed52eed124d3a58215cf025b9204dad426 | [
"MIT"
] | null | null | null | from tinydb import TinyDB
from tinydb import Query
from source.util.timekeeper import Timestamps
class Database:
    """Thin convenience wrapper around a TinyDB file store."""

    def __init__(self, database_file):
        """Open (or create) the TinyDB store at ``database_file``."""
        self.db = TinyDB(database_file)
        self.ts = Timestamps()

    def insert(self, dataobj):
        """Store a single record."""
        self.db.insert(dataobj)

    def insert_multiple(self, data_list):
        """Store a batch of records in one call."""
        self.db.insert_multiple(data_list)

    def get_all(self):
        """Return every record in the store."""
        return self.db.all()

    def get_records(self, start_date: float = None):
        """Return records with ``timestamp >= start_date`` (all records
        when ``start_date`` is omitted); ``None`` when nothing matches.
        """
        if start_date is None:
            records = self.db.all()
        else:
            records = list(self.db.search(Query()['timestamp'] >= start_date))
        return records if records else None

    def get_data_single_field(self, field, value, timestamp=None):
        """Return records where ``field == value``, optionally limited
        to ``timestamp`` and later."""
        if timestamp is None:
            return self.db.search(Query()[field] == value)
        return self.db.search(
            (Query()[field] == value) & (Query()['timestamp'] >= timestamp))

    def get_data_from_obj(self, dataobj, timestamp=None):
        """Return records containing ``dataobj`` as a fragment,
        optionally limited to ``timestamp`` and later."""
        if timestamp is None:
            return self.db.search(Query().fragment(dataobj))
        return self.db.search(
            (Query().fragment(dataobj)) & (Query()['timestamp'] >= timestamp))

    def remove_single_record(self, dataobj):
        """Remove records matching the ``dataobj`` fragment and return
        TinyDB's removal result."""
        return self.db.remove(Query().fragment(dataobj))
| 31.418605 | 100 | 0.626943 |
acf7598b1354a20394cb15ba6d132ba43b324dce | 2,789 | py | Python | local/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/utils.py | sahilsdei/django_ecommerce | edc2513e41aca178d1ccae14ebaa6c7b1d709e73 | [
"MIT"
] | null | null | null | local/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/utils.py | sahilsdei/django_ecommerce | edc2513e41aca178d1ccae14ebaa6c7b1d709e73 | [
"MIT"
] | null | null | null | local/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/utils.py | sahilsdei/django_ecommerce | edc2513e41aca178d1ccae14ebaa6c7b1d709e73 | [
"MIT"
] | null | null | null | ##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from __future__ import print_function
import sys
import traceback
from regression.python_test_utils import test_utils as utils
def create_domain(server, db_name, schema_name, schema_id, domain_name):
    """
    This function is used to add the domain to existing schema
    :param server: server details
    :type server: dict
    :param db_name: database name
    :type db_name: str
    :param schema_name: schema name
    :type schema_name: str
    :param schema_id: schema id
    :type schema_id: int
    :param domain_name: domain name
    :type domain_name: str
    :return: (oid, typname) row for the new domain, or None on failure
    """
    connection = None
    try:
        connection = utils.get_db_connection(db_name,
                                             server['username'],
                                             server['db_password'],
                                             server['host'],
                                             server['port'])
        pg_cursor = connection.cursor()
        # NOTE: identifiers are interpolated directly into the SQL; this is
        # tolerable only because this is a test helper fed trusted names.
        query = 'CREATE DOMAIN ' + schema_name + '.' + domain_name + \
                ' AS character(10) DEFAULT 1'
        pg_cursor.execute(query)
        connection.commit()
        # Get 'oid' from newly created domain
        pg_cursor.execute("SELECT d.oid, d.typname FROM pg_type d WHERE"
                          " d.typname='%s' AND d.typnamespace='%s'" %
                          (domain_name, schema_id))
        domains = pg_cursor.fetchone()
        return domains
    except Exception:
        # Keep the original best-effort behaviour: report and return None.
        traceback.print_exc(file=sys.stderr)
    finally:
        # Always release the connection — the original version leaked it
        # whenever domain creation raised before connection.close().
        if connection is not None:
            connection.close()
def verify_domain(server, db_name, schema_id, domain_name):
    """
    This function get the oid & name of the domain
    :param server: server details
    :type server: dict
    :param db_name: db name
    :type db_name: str
    :param schema_id: schema id
    :type schema_id: int
    :param domain_name: domain name
    :type domain_name: str
    :return: (oid, typname) row for the domain, or None when absent
    """
    connection = utils.get_db_connection(db_name,
                                         server['username'],
                                         server['db_password'],
                                         server['host'],
                                         server['port'])
    try:
        pg_cursor = connection.cursor()
        pg_cursor.execute("SELECT d.oid, d.typname FROM pg_type d WHERE"
                          " d.typname='%s' AND d.typnamespace='%s'" %
                          (domain_name, schema_id))
        domains = pg_cursor.fetchone()
        return domains
    finally:
        # Close the connection even if the query raises (the original only
        # closed it on the success path).
        connection.close()
| 34.8625 | 74 | 0.534242 |
76a9462bb6d0c56839f505166b0deabd2d575bd9 | 2,441 | py | Python | JMRPiFoundations/Skeleton/RPiSparkProvider.py | PowerRocker/rpi_spark_foundation | 3373399b72760a17a8cfc5bb4a67494fab8b615c | [
"MIT"
] | 2 | 2018-06-08T04:53:38.000Z | 2018-06-08T06:40:04.000Z | JMRPiFoundations/Skeleton/RPiSparkProvider.py | mobinrg/rpi_spark_foundations | 3373399b72760a17a8cfc5bb4a67494fab8b615c | [
"MIT"
] | null | null | null | JMRPiFoundations/Skeleton/RPiSparkProvider.py | mobinrg/rpi_spark_foundations | 3373399b72760a17a8cfc5bb4a67494fab8b615c | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# RPi.Spark Provider
#
# Author: Kunpeng Zhang
# 2018.6.7
#
# See LICENSE for details.
def initSpark():
    """!
    Create a RPiSpark object instance, it contain OLED, Keybuttons, Audio,
    Speaker, etc. objects instance
    @return a RPiSpark object instance
    """
    from JMRPiFoundations.Devices.rpi_spark_z_1_0_0 import RPiSparkConfig
    from JMRPiFoundations.Skeleton.RPiSpark import RPiSpark
    from JMRPiSpark.Drives.Screen.SScreenSSD1306 import SScreenSSD1306
    from JMRPiSpark.Drives.Display.SSD1306 import SSD1306_128x64
    from JMRPiSpark.Drives.Key.RPiKeyButtons import RPiKeyButtons
    from JMRPiSpark.Drives.Audio.RPiAudio import RPiAudioDevice
    from JMRPiSpark.Drives.Attitude.MPU6050 import MPU6050
    from JMRPiSpark.Drives.Attitude.MPU6050 import DEF_MPU6050_ADDRESS
    # from JMRPiSpark.Drives.Audio.RPiTone import RPiTonePlayer
    import spidev

    try:
        # open spi bus
        spi = spidev.SpiDev()
        spi.open( RPiSparkConfig.DSP_SPI_PORT, RPiSparkConfig.DSP_SPI_DEVICE)
        spi.max_speed_hz = RPiSparkConfig.DSP_SPI_MAX_SPEED_HZ
        spi.cshigh = False
        spi.mode = 0

        # create display
        myDisplay = SSD1306_128x64(
            spi,
            spiDC = RPiSparkConfig.DSP_DC,
            spiReset = RPiSparkConfig.DSP_RESET,
            mirrorH = RPiSparkConfig.DSP_MIRROR_H,
            mirrorV = RPiSparkConfig.DSP_MIRROR_V
        )
        myDisplay.init()
        myDisplay.on()

        myScreen = SScreenSSD1306(
            myDisplay,
            bufferColorMode = RPiSparkConfig.SCREEN_BUFFER_COLOR_MODE_BW,
            displayDirection = RPiSparkConfig.SCREEN_ROTATING
        )
    except Exception:
        # Was a bare "except:", which also swallows SystemExit and
        # KeyboardInterrupt; keep the degraded (screenless) mode instead.
        myScreen = None
        print("Warning: We can not turn on OLED display, please check your RPi-Spark pHAT")

    try:
        myMPU6050 = MPU6050( RPiSparkConfig.ATTITUDE_SENSOR_ADDR )
    except Exception:
        # Same fix as above: never catch with a bare "except:".
        myMPU6050 = None
        print("Warning: We can not find attitude sensor, please check your RPi-Spark pHAT")

    # return RPiSpark
    return RPiSpark(
        version = RPiSparkConfig.HW_VERSION,
        screen = myScreen,
        keyboard = RPiKeyButtons(),
        attitude = myMPU6050,
        audio = RPiAudioDevice( pinRight = RPiSparkConfig.AUDIO_R, pinLeft = RPiSparkConfig.AUDIO_L )
        # tone = RPiTonePlayer( RPiSparkConfig.SPEAKER )
    )
4b1da3e4eb8a2611264cc7f7dc464268b4c2d3b2 | 13,964 | py | Python | tutorials/03-gravity/plot_inv_1a_gravity_anomaly.py | Prithwijit-Chak/simpeg | d93145d768b5512621cdd75566b4a8175fee9ed3 | [
"MIT"
] | 358 | 2015-03-11T05:48:41.000Z | 2022-03-26T02:04:12.000Z | tutorials/03-gravity/plot_inv_1a_gravity_anomaly.py | thast/simpeg | 8021082b8b53f3c08fa87fc085547bdd56437c6b | [
"MIT"
] | 885 | 2015-01-19T09:23:48.000Z | 2022-03-29T12:08:34.000Z | tutorials/03-gravity/plot_inv_1a_gravity_anomaly.py | thast/simpeg | 8021082b8b53f3c08fa87fc085547bdd56437c6b | [
"MIT"
] | 214 | 2015-03-11T05:48:43.000Z | 2022-03-02T01:05:11.000Z | """
Least-Squares Inversion of Gravity Anomaly Data
===============================================
Here we invert gravity anomaly data to recover a density contrast model. We
formulate the inverse problem as a least-squares optimization problem. For
this tutorial, we focus on the following:
- Defining the survey from xyz formatted data
- Generating a mesh based on survey geometry
- Including surface topography
- Defining the inverse problem (data misfit, regularization, optimization)
- Specifying directives for the inversion
- Plotting the recovered model and data misfit
Although we consider gravity anomaly data in this tutorial, the same approach
can be used to invert gradiometry and other types of geophysical data.
"""
#########################################################################
# Import modules
# --------------
#
import os
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import tarfile
from discretize import TensorMesh
from SimPEG.utils import plot2Ddata, surface2ind_topo, model_builder
from SimPEG.potential_fields import gravity
from SimPEG import (
maps,
data,
data_misfit,
inverse_problem,
regularization,
optimization,
directives,
inversion,
utils,
)
# sphinx_gallery_thumbnail_number = 3
#############################################
# Define File Names
# -----------------
#
# File paths for assets we are loading. To set up the inversion, we require
# topography and field observations. The true model defined on the whole mesh
# is loaded to compare with the inversion result. These files are stored as a
# tar-file on our google cloud bucket:
# "https://storage.googleapis.com/simpeg/doc-assets/gravity.tar.gz"
#
# storage bucket where we have the data
data_source = "https://storage.googleapis.com/simpeg/doc-assets/gravity.tar.gz"
# download the data
downloaded_data = utils.download(data_source, overwrite=True)
# unzip the tarfile
tar = tarfile.open(downloaded_data, "r")
tar.extractall()
tar.close()
# path to the directory containing our data
dir_path = downloaded_data.split(".")[0] + os.path.sep
# files to work with
topo_filename = dir_path + "gravity_topo.txt"
data_filename = dir_path + "gravity_data.obs"
#############################################
# Load Data and Plot
# ------------------
#
# Here we load and plot synthetic gravity anomaly data. Topography is generally
# defined as an (N, 3) array. Gravity data is generally defined with 4 columns:
# x, y, z and data.
#
# Load topography
xyz_topo = np.loadtxt(str(topo_filename))
# Load field data
dobs = np.loadtxt(str(data_filename))
# Define receiver locations and observed data
receiver_locations = dobs[:, 0:3]
dobs = dobs[:, -1]
# Plot
mpl.rcParams.update({"font.size": 12})
fig = plt.figure(figsize=(7, 5))
ax1 = fig.add_axes([0.1, 0.1, 0.73, 0.85])
plot2Ddata(receiver_locations, dobs, ax=ax1, contourOpts={"cmap": "bwr"})
ax1.set_title("Gravity Anomaly")
ax1.set_xlabel("x (m)")
ax1.set_ylabel("y (m)")
ax2 = fig.add_axes([0.8, 0.1, 0.03, 0.85])
norm = mpl.colors.Normalize(vmin=-np.max(np.abs(dobs)), vmax=np.max(np.abs(dobs)))
cbar = mpl.colorbar.ColorbarBase(
ax2, norm=norm, orientation="vertical", cmap=mpl.cm.bwr, format="%.1e"
)
cbar.set_label("$mgal$", rotation=270, labelpad=15, size=12)
plt.show()
#############################################
# Assign Uncertainties
# --------------------
#
# Inversion with SimPEG requires that we define the standard deviation of our data.
# This represents our estimate of the noise in our data. For a gravity inversion,
# a constant floor value is generally applied to all data. For this tutorial,
# the standard deviation on each datum will be 1% of the maximum observed
# gravity anomaly value.
#
maximum_anomaly = np.max(np.abs(dobs))
uncertainties = 0.01 * maximum_anomaly * np.ones(np.shape(dobs))
#############################################
# Defining the Survey
# -------------------
#
# Here, we define the survey that will be used for this tutorial. Gravity
# surveys are simple to create. The user only needs an (N, 3) array to define
# the xyz locations of the observation locations. From this, the user can
# define the receivers and the source field.
#
# Define the receivers. The data consists of vertical gravity anomaly measurements.
# The set of receivers must be defined as a list.
receiver_list = gravity.receivers.Point(receiver_locations, components="gz")
receiver_list = [receiver_list]
# Define the source field
source_field = gravity.sources.SourceField(receiver_list=receiver_list)
# Define the survey
survey = gravity.survey.Survey(source_field)
#############################################
# Defining the Data
# -----------------
#
# Here is where we define the data that is inverted. The data is defined by
# the survey, the observation values and the standard deviation.
#
data_object = data.Data(survey, dobs=dobs, standard_deviation=uncertainties)
#############################################
# Defining a Tensor Mesh
# ----------------------
#
# Here, we create the tensor mesh that will be used to invert gravity anomaly
# data. If desired, we could define an OcTree mesh.
#
dh = 5.0
hx = [(dh, 5, -1.3), (dh, 40), (dh, 5, 1.3)]
hy = [(dh, 5, -1.3), (dh, 40), (dh, 5, 1.3)]
hz = [(dh, 5, -1.3), (dh, 15)]
mesh = TensorMesh([hx, hy, hz], "CCN")
########################################################
# Starting/Reference Model and Mapping on Tensor Mesh
# ---------------------------------------------------
#
# Here, we create starting and/or reference models for the inversion as
# well as the mapping from the model space to the active cells. Starting and
# reference models can be a constant background value or contain a-priori
# structures. Here, the background is 1e-6 g/cc.
#
# Define density contrast values for each unit in g/cc. Don't make this 0!
# Otherwise the gradient for the 1st iteration is zero and the inversion will
# not converge.
background_density = 1e-6
# Find the indices of the active cells in forward model (ones below surface)
ind_active = surface2ind_topo(mesh, xyz_topo)
# Define mapping from model to active cells
nC = int(ind_active.sum())
model_map = maps.IdentityMap(nP=nC) # model consists of a value for each active cell
# Define and plot starting model
starting_model = background_density * np.ones(nC)
##############################################
# Define the Physics
# ------------------
#
# Here, we define the physics of the gravity problem by using the simulation
# class.
#
simulation = gravity.simulation.Simulation3DIntegral(
survey=survey, mesh=mesh, rhoMap=model_map, actInd=ind_active
)
#######################################################################
# Define the Inverse Problem
# --------------------------
#
# The inverse problem is defined by 3 things:
#
# 1) Data Misfit: a measure of how well our recovered model explains the field data
# 2) Regularization: constraints placed on the recovered model and a priori information
# 3) Optimization: the numerical approach used to solve the inverse problem
#
# Define the data misfit. Here the data misfit is the L2 norm of the weighted
# residual between the observed data and the data predicted for a given model.
# Within the data misfit, the residual between predicted and observed data are
# normalized by the data's standard deviation.
dmis = data_misfit.L2DataMisfit(data=data_object, simulation=simulation)
# Define the regularization (model objective function).
reg = regularization.Simple(mesh, indActive=ind_active, mapping=model_map)
# Define how the optimization problem is solved. Here we will use a projected
# Gauss-Newton approach that employs the conjugate gradient solver.
opt = optimization.ProjectedGNCG(
maxIter=10, lower=-1.0, upper=1.0, maxIterLS=20, maxIterCG=10, tolCG=1e-3
)
# Here we define the inverse problem that is to be solved
inv_prob = inverse_problem.BaseInvProblem(dmis, reg, opt)
#######################################################################
# Define Inversion Directives
# ---------------------------
#
# Here we define any directives that are carried out during the inversion. This
# includes the cooling schedule for the trade-off parameter (beta), stopping
# criteria for the inversion and saving inversion results at each iteration.
#
# Defining a starting value for the trade-off parameter (beta) between the data
# misfit and the regularization.
starting_beta = directives.BetaEstimate_ByEig(beta0_ratio=1e1)
# Defining the fractional decrease in beta and the number of Gauss-Newton solves
# for each beta value.
beta_schedule = directives.BetaSchedule(coolingFactor=5, coolingRate=1)
# Options for outputting recovered models and predicted data for each beta.
save_iteration = directives.SaveOutputEveryIteration(save_txt=False)
# Updating the preconditioner if it is model-dependent.
update_jacobi = directives.UpdatePreconditioner()
# Setting a stopping criteria for the inversion.
target_misfit = directives.TargetMisfit(chifact=1)
# Add sensitivity weights
sensitivity_weights = directives.UpdateSensitivityWeights(everyIter=False)
# The directives are defined as a list.
directives_list = [
sensitivity_weights,
starting_beta,
beta_schedule,
save_iteration,
update_jacobi,
target_misfit,
]
#####################################################################
# Running the Inversion
# ---------------------
#
# To define the inversion object, we need to define the inversion problem and
# the set of directives. We can then run the inversion.
#
# Here we combine the inverse problem and the set of directives
inv = inversion.BaseInversion(inv_prob, directives_list)
# Run inversion
recovered_model = inv.run(starting_model)
############################################################
# Recreate True Model
# -------------------
#
# Define density contrast values for each unit in g/cc
background_density = 0.0
block_density = -0.2
sphere_density = 0.2
# Define model. Models in SimPEG are vector arrays.
true_model = background_density * np.ones(nC)
# You could find the indices of specific cells within the model and change their
# value to add structures.
ind_block = (
(mesh.gridCC[ind_active, 0] > -50.0)
& (mesh.gridCC[ind_active, 0] < -20.0)
& (mesh.gridCC[ind_active, 1] > -15.0)
& (mesh.gridCC[ind_active, 1] < 15.0)
& (mesh.gridCC[ind_active, 2] > -50.0)
& (mesh.gridCC[ind_active, 2] < -30.0)
)
true_model[ind_block] = block_density
# You can also use SimPEG utilities to add structures to the model more concisely
ind_sphere = model_builder.getIndicesSphere(np.r_[35.0, 0.0, -40.0], 15.0, mesh.gridCC)
ind_sphere = ind_sphere[ind_active]
true_model[ind_sphere] = sphere_density
############################################################
# Plotting True Model and Recovered Model
# ---------------------------------------
#
# Plot True Model
fig = plt.figure(figsize=(9, 4))
plotting_map = maps.InjectActiveCells(mesh, ind_active, np.nan)
ax1 = fig.add_axes([0.1, 0.1, 0.73, 0.8])
mesh.plotSlice(
plotting_map * true_model,
normal="Y",
ax=ax1,
ind=int(mesh.nCy / 2),
grid=True,
clim=(np.min(true_model), np.max(true_model)),
pcolorOpts={"cmap": "viridis"},
)
ax1.set_title("Model slice at y = 0 m")
ax2 = fig.add_axes([0.85, 0.1, 0.05, 0.8])
norm = mpl.colors.Normalize(vmin=np.min(true_model), vmax=np.max(true_model))
cbar = mpl.colorbar.ColorbarBase(
ax2, norm=norm, orientation="vertical", cmap=mpl.cm.viridis, format="%.1e"
)
cbar.set_label("$g/cm^3$", rotation=270, labelpad=15, size=12)
plt.show()
# Plot Recovered Model
fig = plt.figure(figsize=(9, 4))
plotting_map = maps.InjectActiveCells(mesh, ind_active, np.nan)
ax1 = fig.add_axes([0.1, 0.1, 0.73, 0.8])
mesh.plotSlice(
plotting_map * recovered_model,
normal="Y",
ax=ax1,
ind=int(mesh.nCy / 2),
grid=True,
clim=(np.min(recovered_model), np.max(recovered_model)),
pcolorOpts={"cmap": "viridis"},
)
ax1.set_title("Model slice at y = 0 m")
ax2 = fig.add_axes([0.85, 0.1, 0.05, 0.8])
norm = mpl.colors.Normalize(vmin=np.min(recovered_model), vmax=np.max(recovered_model))
cbar = mpl.colorbar.ColorbarBase(
ax2, norm=norm, orientation="vertical", cmap=mpl.cm.viridis
)
cbar.set_label("$g/cm^3$", rotation=270, labelpad=15, size=12)
plt.show()
###################################################################
# Plotting Predicted Data and Normalized Misfit
# ---------------------------------------------
#
# Predicted data with final recovered model
# SimPEG uses a right-handed coordinate system where Z is positive upward.
# This can make gravity signals look "inconsistent" with density values in visualization.
dpred = inv_prob.dpred
# Observed data | Predicted data | Normalized data misfit
data_array = np.c_[dobs, dpred, (dobs - dpred) / uncertainties]
fig = plt.figure(figsize=(17, 4))
plot_title = ["Observed", "Predicted", "Normalized Misfit"]
plot_units = ["mgal", "mgal", ""]
ax1 = 3 * [None]
ax2 = 3 * [None]
norm = 3 * [None]
cbar = 3 * [None]
cplot = 3 * [None]
v_lim = [np.max(np.abs(dobs)), np.max(np.abs(dobs)), np.max(np.abs(data_array[:, 2]))]
for ii in range(0, 3):
ax1[ii] = fig.add_axes([0.33 * ii + 0.03, 0.11, 0.23, 0.84])
cplot[ii] = plot2Ddata(
receiver_list[0].locations,
data_array[:, ii],
ax=ax1[ii],
ncontour=30,
clim=(-v_lim[ii], v_lim[ii]),
contourOpts={"cmap": "bwr"},
)
ax1[ii].set_title(plot_title[ii])
ax1[ii].set_xlabel("x (m)")
ax1[ii].set_ylabel("y (m)")
ax2[ii] = fig.add_axes([0.33 * ii + 0.25, 0.11, 0.01, 0.85])
norm[ii] = mpl.colors.Normalize(vmin=-v_lim[ii], vmax=v_lim[ii])
cbar[ii] = mpl.colorbar.ColorbarBase(
ax2[ii], norm=norm[ii], orientation="vertical", cmap=mpl.cm.bwr
)
cbar[ii].set_label(plot_units[ii], rotation=270, labelpad=15, size=12)
plt.show()
| 31.954233 | 91 | 0.662489 |
fe9b434746f06e7aea9a10ec7e7db29e2a08d3fb | 270 | py | Python | src/splitting.py | IBPA/dairyML | 6966a1d17fe06ed4962981e8bcb6e9e63264b705 | [
"Apache-2.0"
] | null | null | null | src/splitting.py | IBPA/dairyML | 6966a1d17fe06ed4962981e8bcb6e9e63264b705 | [
"Apache-2.0"
] | null | null | null | src/splitting.py | IBPA/dairyML | 6966a1d17fe06ed4962981e8bcb6e9e63264b705 | [
"Apache-2.0"
] | 1 | 2021-12-11T19:16:29.000Z | 2021-12-11T19:16:29.000Z | from sklearn.model_selection import KFold, RepeatedKFold
ten_fold_5_repeat = RepeatedKFold(n_splits=10,n_repeats=5,random_state=7)
ten_fold_no_repeat = KFold(n_splits=10,shuffle=True,random_state=7)
five_fold_no_repeat = KFold(n_splits=5,shuffle=True,random_state=7) | 33.75 | 73 | 0.840741 |
571d073b77272a837a4f5653f604a128a8f675cd | 23,966 | py | Python | tests/fixtures/core.py | Mu-L/meltano | 7bf8f370608ee9a8833b33ea94112c6e219c8161 | [
"MIT"
] | null | null | null | tests/fixtures/core.py | Mu-L/meltano | 7bf8f370608ee9a8833b33ea94112c6e219c8161 | [
"MIT"
] | null | null | null | tests/fixtures/core.py | Mu-L/meltano | 7bf8f370608ee9a8833b33ea94112c6e219c8161 | [
"MIT"
] | null | null | null | import datetime
import itertools
import logging
import os
import shutil
from collections import namedtuple
from pathlib import Path
import pytest
import yaml
from meltano.core import bundle
from meltano.core.behavior.canonical import Canonical
from meltano.core.compiler.project_compiler import ProjectCompiler
from meltano.core.config_service import ConfigService
from meltano.core.elt_context import ELTContextBuilder
from meltano.core.environment_service import EnvironmentService
from meltano.core.job import Job, Payload, State
from meltano.core.logging.job_logging_service import JobLoggingService
from meltano.core.plugin import PluginType
from meltano.core.plugin.settings_service import PluginSettingsService
from meltano.core.plugin_discovery_service import (
LockedDefinitionService,
PluginDiscoveryService,
)
from meltano.core.plugin_install_service import PluginInstallService
from meltano.core.plugin_invoker import invoker_factory
from meltano.core.project import Project
from meltano.core.project_add_service import ProjectAddService
from meltano.core.project_files import ProjectFiles
from meltano.core.project_init_service import ProjectInitService
from meltano.core.project_plugins_service import (
PluginAlreadyAddedException,
ProjectPluginsService,
)
from meltano.core.project_settings_service import ProjectSettingsService
from meltano.core.schedule_service import ScheduleAlreadyExistsError, ScheduleService
from meltano.core.state_service import StateService
from meltano.core.task_sets_service import TaskSetsService
from meltano.core.utils import merge
PROJECT_NAME = "a_meltano_project"
@pytest.fixture(scope="class")
def discovery(): # noqa: WPS213
with bundle.find("discovery.yml").open() as base:
discovery = yaml.safe_load(base)
discovery[PluginType.EXTRACTORS].append(
{
"name": "tap-mock",
"label": "Mock",
"namespace": "tap_mock",
"variants": [
{
"name": "meltano",
"pip_url": "tap-mock",
"executable": "tap-mock",
"capabilities": ["discover", "catalog", "state"],
"settings": [
{"name": "test", "value": "mock"},
{"name": "start_date"},
{"name": "protected", "protected": True},
{"name": "secure", "kind": "password"},
{"name": "port", "kind": "integer", "value": 5000},
{"name": "list", "kind": "array", "value": []},
{
"name": "object",
"aliases": ["data"],
"kind": "object",
"value": {"nested": "from_default"},
},
{"name": "hidden", "kind": "hidden", "value": 42},
{
"name": "boolean",
"kind": "boolean",
"env_aliases": ["TAP_MOCK_ENABLED", "!TAP_MOCK_DISABLED"],
},
{"name": "auth.username"},
{"name": "auth.password", "kind": "password"},
],
"commands": {
"cmd": {
"args": "cmd meltano",
"description": "a description of cmd",
},
"cmd-variant": "cmd-variant meltano",
"test": {
"args": "--test",
"description": "Run tests",
},
"test_extra": {
"args": "test_extra",
"description": "Run extra tests",
"executable": "test-extra",
},
},
},
{
"name": "singer-io",
"original": True,
"deprecated": True,
"pip_url": "singer-tap-mock",
},
],
}
)
discovery[PluginType.EXTRACTORS].append(
{
"name": "tap-mock-noinstall",
"label": "Mock",
"namespace": "tap_mock_noinstall",
"variants": [
{
"name": "meltano",
"executable": "tap-mock-noinstall",
"capabilities": ["discover", "catalog", "state"],
"settings": [
{"name": "test", "value": "mock"},
{"name": "start_date"},
],
},
],
}
)
discovery[PluginType.LOADERS].append(
{
"name": "target-mock",
"namespace": "mock",
"pip_url": "target-mock",
"settings": [{"name": "schema", "env": "MOCKED_SCHEMA"}],
}
)
discovery[PluginType.TRANSFORMS].append(
{
"name": "tap-mock-transform",
"namespace": "tap_mock",
"pip_url": "tap-mock-transform",
"package_name": "dbt_mock",
}
)
discovery[PluginType.MODELS].append(
{
"name": "model-gitlab",
"namespace": "tap_gitlab",
"pip_url": "git+https://gitlab.com/meltano/model-gitlab.git",
}
)
discovery[PluginType.DASHBOARDS].append(
{
"name": "dashboard-google-analytics",
"namespace": "tap_google_analytics",
"pip_url": "git+https://gitlab.com/meltano/dashboard-google-analytics.git",
}
)
discovery[PluginType.ORCHESTRATORS].append(
{
"name": "orchestrator-mock",
"namespace": "pytest",
"pip_url": "orchestrator-mock",
}
)
discovery[PluginType.TRANSFORMERS].append(
{
"name": "transformer-mock",
"namespace": "transformer_mock",
"pip_url": "transformer-mock",
}
)
discovery[PluginType.UTILITIES].append(
{
"name": "utility-mock",
"namespace": "utility_mock",
"pip_url": "utility-mock",
"executable": "utility-mock",
"commands": {
"cmd": {
"args": "--option $ENV_VAR_ARG",
"description": "description of utility command",
},
"alternate-exec": {
"args": "--option $ENV_VAR_ARG",
"executable": "other-utility",
},
"containerized": {
"args": "",
"container_spec": {
"image": "mock-utils/mock",
"ports": {
"5000": "5000",
},
"volumes": ["$MELTANO_PROJECT_ROOT/example/:/usr/app/"],
},
},
},
}
)
discovery[PluginType.MAPPERS].append(
{
"name": "mapper-mock",
"namespace": "mapper_mock",
"variants": [
{
"name": "meltano",
"executable": "mapper-mock-cmd",
"pip_url": "mapper-mock",
"package_name": "mapper-mock",
}
],
}
)
return discovery
@pytest.fixture(scope="class")
def plugin_discovery_service(project, discovery):
return PluginDiscoveryService(project, discovery=discovery)
@pytest.fixture(scope="class")
def locked_definition_service(project):
return LockedDefinitionService(project)
@pytest.fixture(scope="class")
def project_compiler(project):
return ProjectCompiler(project)
@pytest.fixture(scope="class")
def project_init_service():
return ProjectInitService(PROJECT_NAME)
@pytest.fixture(scope="class")
def plugin_install_service(project, project_plugins_service):
return PluginInstallService(project, plugins_service=project_plugins_service)
@pytest.fixture(scope="class")
def project_add_service(project, project_plugins_service):
return ProjectAddService(project, plugins_service=project_plugins_service)
@pytest.fixture(scope="class")
def plugin_settings_service_factory(project, project_plugins_service):
def _factory(plugin, **kwargs):
return PluginSettingsService(
project, plugin, plugins_service=project_plugins_service, **kwargs
)
return _factory
@pytest.fixture(scope="class")
def plugin_invoker_factory(
project, project_plugins_service, plugin_settings_service_factory
):
def _factory(plugin, **kwargs):
return invoker_factory(
project,
plugin,
plugins_service=project_plugins_service,
plugin_settings_service=plugin_settings_service_factory(plugin),
**kwargs,
)
return _factory
@pytest.fixture(scope="class")
def add_model(project, plugin_install_service, project_add_service):
models = [
"model-carbon-intensity",
"model-gitflix",
"model-salesforce",
"model-gitlab",
]
for model in models:
plugin = project_add_service.add(PluginType.MODELS, model)
plugin_install_service.install_plugin(plugin)
yield
# clean-up
with project.meltano_update() as meltano:
meltano["plugins"]["models"] = [
model_def
for model_def in meltano["plugins"]["models"]
if model_def["name"] not in models
]
for created_model in models:
shutil.rmtree(project.model_dir(created_model))
@pytest.fixture(scope="class")
def config_service(project):
return ConfigService(project, use_cache=False)
@pytest.fixture(scope="class")
def project_plugins_service(project, config_service, plugin_discovery_service):
return ProjectPluginsService(
project,
config_service=config_service,
discovery_service=plugin_discovery_service,
use_cache=False,
)
@pytest.fixture(scope="class")
def tap(project_add_service):
try:
return project_add_service.add(
PluginType.EXTRACTORS, "tap-mock", variant="meltano"
)
except PluginAlreadyAddedException as err:
return err.plugin
@pytest.fixture(scope="class")
def alternative_tap(project_add_service, tap):
try:
return project_add_service.add(
PluginType.EXTRACTORS,
"tap-mock--singer-io",
inherit_from=tap.name,
variant="singer-io",
)
except PluginAlreadyAddedException as err:
return err.plugin
@pytest.fixture(scope="class")
def inherited_tap(project_add_service, tap):
try:
return project_add_service.add(
PluginType.EXTRACTORS,
"tap-mock-inherited",
inherit_from=tap.name,
commands={
"cmd": "cmd inherited",
"cmd-inherited": "cmd-inherited",
},
)
except PluginAlreadyAddedException as err:
return err.plugin
@pytest.fixture(scope="class")
def nonpip_tap(project_add_service):
try:
return project_add_service.add(
PluginType.EXTRACTORS,
"tap-mock-noinstall",
executable="tap-mock-noinstall",
)
except PluginAlreadyAddedException as err:
return err.plugin
@pytest.fixture(scope="class")
def target(project_add_service):
try:
return project_add_service.add(PluginType.LOADERS, "target-mock")
except PluginAlreadyAddedException as err:
return err.plugin
@pytest.fixture(scope="class")
def alternative_target(project_add_service):
# We don't load the `target` fixture here since this ProjectPlugin should
# have a BasePlugin parent, not the `target` ProjectPlugin
try:
return project_add_service.add(
PluginType.LOADERS, "target-mock-alternative", inherit_from="target-mock"
)
except PluginAlreadyAddedException as err:
return err.plugin
@pytest.fixture(scope="class")
def dbt(project_add_service):
try:
return project_add_service.add(PluginType.TRANSFORMERS, "dbt")
except PluginAlreadyAddedException as err:
return err.plugin
@pytest.fixture(scope="class")
def utility(project_add_service):
try:
return project_add_service.add(PluginType.UTILITIES, "utility-mock")
except PluginAlreadyAddedException as err:
return err.plugin
@pytest.fixture(scope="class")
def schedule_service(project, project_plugins_service):
return ScheduleService(project, plugins_service=project_plugins_service)
@pytest.fixture(scope="class")
def task_sets_service(project):
return TaskSetsService(project)
@pytest.fixture(scope="class")
def schedule(project, tap, target, schedule_service):
try:
return schedule_service.add(
None,
"schedule-mock",
extractor=tap.name,
loader=target.name,
transform="skip",
interval="@once",
start_date=datetime.datetime.now(),
)
except ScheduleAlreadyExistsError as err:
return err.schedule
@pytest.fixture(scope="function")
def environment_service(project):
service = EnvironmentService(project)
yield service
# Cleanup: remove any added Environments
for environment in service.list_environments():
service.remove(environment.name)
@pytest.fixture(scope="class")
def elt_context_builder(project, project_plugins_service):
return ELTContextBuilder(project, plugins_service=project_plugins_service)
@pytest.fixture(scope="class")
def job_logging_service(project):
return JobLoggingService(project)
@pytest.fixture(scope="class")
def project(test_dir, project_init_service):
project = project_init_service.init(add_discovery=True)
logging.debug(f"Created new project at {project.root}")
# empty out the `plugins`
with project.meltano_update() as meltano:
meltano.plugins = Canonical()
ProjectSettingsService(project).set("snowplow.collector_endpoints", "[]")
# cd into the new project root
os.chdir(project.root)
yield project
# clean-up
Project.deactivate()
os.chdir(test_dir)
shutil.rmtree(project.root)
logging.debug(f"Cleaned project at {project.root}")
@pytest.fixture(scope="class")
def project_files(test_dir, compatible_copy_tree):
project_init_service = ProjectInitService("a_multifile_meltano_project_core")
project = project_init_service.init(add_discovery=False)
logging.debug(f"Created new project at {project.root}")
current_dir = Path(__file__).parent
multifile_project_root = current_dir.joinpath("multifile_project/")
os.remove(project.meltanofile)
compatible_copy_tree(multifile_project_root, project.root)
# cd into the new project root
os.chdir(project.root)
yield ProjectFiles(root=project.root, meltano_file_path=project.meltanofile)
# clean-up
Project.deactivate()
os.chdir(test_dir)
shutil.rmtree(project.root)
logging.debug(f"Cleaned project at {project.root}")
@pytest.fixture(scope="class")
def mapper(project_add_service):
try:
return project_add_service.add(
PluginType.MAPPERS,
"mapper-mock",
variant="meltano",
mappings=[
{
"name": "mock-mapping-0",
"config": {
"transformations": [
{
"field_id": "author_email",
"tap_stream_name": "commits",
"type": "MASK-HIDDEN",
}
]
},
},
{
"name": "mock-mapping-1",
"config": {
"transformations": [
{
"field_id": "given_name",
"tap_stream_name": "users",
"type": "lowercase",
}
]
},
},
],
)
except PluginAlreadyAddedException as err:
return err.plugin
def create_job_id(description: str, env: str = "dev") -> str:
    """Build a job ID of the form ``<env>:tap-<description>-to-target-<description>``."""
    job_id = f"{env}:tap-{description}-to-target-{description}"
    return job_id
@pytest.fixture
def num_params():
    """Size knob: number of mock state payloads (and bookmark offsets) generated."""
    count = 10
    return count
@pytest.fixture
def payloads(num_params):
    """Named bundle of mock payloads: Singer state payloads plus error/empty payloads.

    NOTE(review): the bookmark key only varies with the outer index, so each
    state payload ends up holding a single bookmark entry — confirm intended.
    """
    state_payloads = [
        {
            "singer_state": {
                f"bookmark-{idx_i}": idx_i + idx_j for idx_j in range(num_params)
            }
        }
        for idx_i in range(num_params)
    ]
    payload_fields = {
        "mock_state_payloads": state_payloads,
        "mock_error_payload": {"error": "failed"},
        "mock_empty_payload": {},
    }
    return namedtuple("payloads", payload_fields)(**payload_fields)
@pytest.fixture
def job_ids(num_params):
    """Named bundle of deterministic job IDs used across the state-merge tests."""
    descriptions = [
        "single-incomplete",
        "single-complete",
        "multiple-incompletes",
        "multiple-completes",
        "single-complete-then-multiple-incompletes",
        "single-incomplete-then-multiple-completes",
    ]
    # Field names mirror the description, e.g. "single-complete" -> single_complete_job_id.
    id_map = {
        f"{description.replace('-', '_')}_job_id": create_job_id(description)
        for description in descriptions
    }
    return namedtuple("job_ids", id_map)(**id_map)
@pytest.fixture
def mock_time():
    """Infinite generator of strictly increasing datetimes, one hour apart."""
    def _ticks():
        epoch = datetime.datetime(1, 1, 1)
        hour = 0
        while True:
            yield epoch + datetime.timedelta(hours=hour)
            hour += 1
    return _ticks()
@pytest.fixture()
def job_args():
    """Keyword-argument presets for building complete vs. incomplete ``Job``s."""
    complete = {"state": State.SUCCESS, "payload_flags": Payload.STATE}
    incomplete = {
        "state": State.FAIL,
        "payload_flags": Payload.INCOMPLETE_STATE,
    }
    presets = {
        "complete_job_args": complete,
        "incomplete_job_args": incomplete,
    }
    return namedtuple("job_args", presets)(**presets)
@pytest.fixture
def job_ids_with_jobs(job_ids, job_args, payloads, mock_time):
    """Map each test job ID to the list of mock ``Job``s that make up its scenario.

    Six scenarios are built: one incomplete job, one complete job, all-incomplete,
    all-complete, and the two mixed orderings.  Every job is stamped with strictly
    increasing start/end times from ``mock_time`` so "latest job" comparisons are
    deterministic.
    """
    jobs = {
        # One failed job carrying an incomplete state payload.
        job_ids.single_incomplete_job_id: [
            Job(
                job_id=job_ids.single_incomplete_job_id,
                **job_args.incomplete_job_args,
                payload=payloads.mock_state_payloads[0],
            )
        ],
        # One successful job carrying a full state payload.
        job_ids.single_complete_job_id: [
            Job(
                job_id=job_ids.single_complete_job_id,
                payload=payloads.mock_state_payloads[0],
                **job_args.complete_job_args,
            )
        ],
        # Every payload attached to a failed (incomplete) job.
        job_ids.multiple_incompletes_job_id: [
            Job(
                job_id=job_ids.multiple_incompletes_job_id,
                **job_args.incomplete_job_args,
                payload=payload,
            )
            for payload in payloads.mock_state_payloads
        ],
        # Every payload attached to a successful (complete) job.
        job_ids.multiple_completes_job_id: [
            Job(
                job_id=job_ids.multiple_completes_job_id,
                payload=payload,
                **job_args.complete_job_args,
            )
            for payload in payloads.mock_state_payloads
        ],
        # One success first, then failures carrying the remaining payloads.
        job_ids.single_complete_then_multiple_incompletes_job_id: [
            Job(
                job_id=job_ids.single_complete_then_multiple_incompletes_job_id,
                payload=payloads.mock_state_payloads[0],
                **job_args.complete_job_args,
            )
        ]
        + [
            Job(
                job_id=job_ids.single_complete_then_multiple_incompletes_job_id,
                payload=payload,
                **job_args.incomplete_job_args,
            )
            for payload in payloads.mock_state_payloads[1:]
        ],
        # One failure first, then successes carrying the remaining payloads.
        job_ids.single_incomplete_then_multiple_completes_job_id: [
            Job(
                job_id=job_ids.single_incomplete_then_multiple_completes_job_id,
                payload=payloads.mock_state_payloads[0],
                **job_args.incomplete_job_args,
            )
        ]
        + [
            Job(
                job_id=job_ids.single_incomplete_then_multiple_completes_job_id,
                payload=payload,
                **job_args.complete_job_args,
            )
            for payload in payloads.mock_state_payloads[1:]
        ],
    }
    # Stamp each job with increasing timestamps (started_at < ended_at, and
    # later list entries end later than earlier ones).
    for job_list in jobs.values():
        for job in job_list:
            job.started_at = next(mock_time)
            job.ended_at = next(mock_time)
    return jobs
@pytest.fixture
def jobs(job_ids_with_jobs):
    """Flat list of every mock ``Job`` across all job IDs."""
    flattened = []
    for job_list in job_ids_with_jobs.values():
        flattened.extend(job_list)
    return flattened
@pytest.fixture
def job_ids_with_expected_states(job_ids, payloads, job_ids_with_jobs):  # noqa: WPS210
    """Pair each job ID with the merged state payload the state machinery should report.

    For every job ID: take the latest complete job's payload, merge in every
    incomplete payload that ended after it, then merge in any STATE_EDIT
    ("dummy") payloads that ended after both.
    """
    # NOTE(review): `final_state` is computed but never referenced afterwards —
    # looks like dead code; confirm before removing.
    final_state = {}
    for state in payloads.mock_state_payloads:
        merge(state, final_state)
    # NOTE(review): these two seed entries are overwritten by the loop below
    # (each job_id is reset to {}), so they appear to have no effect.
    expectations = {
        job_ids.single_complete_job_id: payloads.mock_state_payloads[0],
        job_ids.single_incomplete_job_id: payloads.mock_empty_payload,
    }

    for job_id, job_list in job_ids_with_jobs.items():
        expectations[job_id] = {}
        complete_jobs = []
        incomplete_jobs = []
        dummy_jobs = []
        # Get latest complete non-dummy job.
        for job in job_list:
            if job.state == State.STATE_EDIT:
                dummy_jobs.append(job)
            elif job.payload_flags == Payload.STATE:
                complete_jobs.append(job)
            elif job.payload_flags == Payload.INCOMPLETE_STATE:
                incomplete_jobs.append(job)
        latest_complete_job = None
        if complete_jobs:
            latest_complete_job = max(complete_jobs, key=lambda _job: _job.ended_at)
        # Get all incomplete jobs since latest complete non-dummy job.
        latest_incomplete_job = None
        if incomplete_jobs:
            latest_incomplete_job = max(incomplete_jobs, key=lambda _job: _job.ended_at)
        if latest_complete_job:
            expectations[job_id] = merge(
                expectations[job_id], latest_complete_job.payload
            )
        # Incomplete payloads only count if they ended after the last complete one.
        for job in incomplete_jobs:
            if (not latest_complete_job) or (
                job.ended_at > latest_complete_job.ended_at
            ):
                expectations[job_id] = merge(expectations[job_id], job.payload)
        # Get all dummy jobs since latest non-dummy job.
        for job in dummy_jobs:
            if (
                not latest_complete_job or (job.ended_at > latest_complete_job.ended_at)
            ) and (
                (not latest_incomplete_job)
                or (job.ended_at > latest_incomplete_job.ended_at)
            ):
                expectations[job_id] = merge(expectations[job_id], job.payload)
    return [
        (test_job_id, expected_state)
        for test_job_id, expected_state in expectations.items()
    ]
@pytest.fixture
def job_history_session(jobs, session):
    """Yield the DB session after persisting every mock job into it."""
    for mock_job in jobs:
        mock_job.save(session)
    yield session
@pytest.fixture
def state_service(job_history_session):
    """A ``StateService`` bound to the pre-populated job-history session."""
    service = StateService(session=job_history_session)
    return service
@pytest.fixture
def project_with_environment(project: Project) -> Project:
    """Yield the project with its "dev" environment active and one env var set."""
    project.activate_environment("dev")
    env_vars = project.active_environment.env
    env_vars["ENVIRONMENT_ENV_VAR"] = "${MELTANO_PROJECT_ROOT}/file.txt"
    yield project
    # Deactivate the environment so later tests see a clean project.
    project.active_environment = None
| 31.575758 | 88 | 0.58462 |
3cf38873ed198c3fbd30552a9179ca1fb07f143f | 7,209 | py | Python | mapmanager.py | BJDev95/gym-vizdoom | e3e64e4fbcb33c79677c3e0eca09f48bfd3f745d | [
"MIT"
] | null | null | null | mapmanager.py | BJDev95/gym-vizdoom | e3e64e4fbcb33c79677c3e0eca09f48bfd3f745d | [
"MIT"
] | 1 | 2018-04-26T11:39:21.000Z | 2018-04-26T11:39:21.000Z | mapmanager.py | BJDev95/gym-vizdoom | e3e64e4fbcb33c79677c3e0eca09f48bfd3f745d | [
"MIT"
] | 1 | 2018-04-26T11:30:18.000Z | 2018-04-26T11:30:18.000Z | # Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
USAGE:
cd Documents/GitHub/NavigationByReinforcementLearning/Release/
from mapmanager import MapManager
mm = MapManager(height = 84, width = 84 ,firstmap = 2, lastmap = 400, render=False) #to load target images
mm.get_target_image(mapname) # to retrieve the targetimage of the specified map as numpy array, use without argument or with "map00" to retrieve black image
mm.get_random_map() #get a random map in specified range
"""
#Mostly self written
# ==============================================================================
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import numpy as np
import time, random
from abc import ABCMeta, abstractmethod
from vizdoom import *
from skimage.transform import resize
from skimage.color import rgb2gray
from skimage.io import imsave
import cv2
import os
from tensorforce.environments import Environment
import tensorforce.util
from PIL import Image
# Absolute directory of this file; anchors all relative resource paths below.
__location__ = os.path.realpath(
    os.path.join(os.getcwd(), os.path.dirname(__file__)))
pathtomap = os.path.join(__location__, 'gym_vizdoom/envs/D3_exploration_train.wad_manymaps.wad_snapshot.wad')  # source of target images (VizDoom WAD with many maps)
DIRECTORY = os.path.join(__location__, 'targetimages/')  # cache directory for generated target-image .npy files
MAPSTRING = "map"  # prefix used to build map names such as "map02"
class MapManager(Environment):
    """Loads (or generates and caches) one target image per VizDoom map.

    On construction, tries to load a cached numpy array of target images for
    the requested map range and size; if absent, spins up a VizDoom game,
    visits every map, grabs a screenshot per map, and saves the stack to disk.

    NOTE(review): reconstructed indentation — `main` (no ``self``) sits between
    methods here and looks misplaced inside the class; confirm against the
    original file.
    """
    def __init__(self, height = 84, width = 84, firstmap = 2, lastmap = 400, render = False):
        """Load target images from cache, or generate them via VizDoom.

        :param height/width: target image size in pixels
        :param firstmap: first map index (should be >= 1; index 0 is reserved)
        :param lastmap: last map index to generate a target for
        :param render: whether to show the VizDoom window during generation
        """
        self.iterator = firstmap  # increases to iterate through maps, starts with firstmap
        self.firstmap = firstmap  # should be larger or equal 1; 0 is the reserved blank image
        self.lastmap = lastmap  # last map to generate a target from
        self.target_images = None  # (height, width, lastmap+1) array once loaded
        self.height = height
        self.width = width
        self.mapstring = MAPSTRING
        self.directory = DIRECTORY
        # Cache filename encodes map range and image size so different configs coexist.
        self.filename = "targetimages_map_" + str(self.firstmap) + "_to_" + str(self.lastmap) + "_h" + str(self.height) + "w" + str(self.width) + ".npy"
        start_time = time.time()
        # check if targets with given specs already exist and load
        try:
            # if exists, load from file
            self.target_images = np.load(self.directory + self.filename)
            print("succesfully loaded file: " + self.filename + " from " + self.directory)
        except:
            print("No file found creating new target image file, please stand by...")
            # else load the VizDoom environment and execute target_generator()
            self.render = render  # render
            self.env = DoomGame()
            self.env.set_doom_scenario_path(pathtomap)
            self.env.set_doom_map("map02")  # MazeMap
            self.env.set_screen_resolution(ScreenResolution.RES_640X480)  # 160X120
            self.env.set_screen_format(ScreenFormat.GRAY8)
            self.env.set_render_hud(False)
            self.env.set_render_crosshair(False)
            self.env.set_render_weapon(False)
            self.env.set_render_decals(render)
            self.env.set_render_particles(render)
            self.env.add_available_button(Button.MOVE_FORWARD)
            #self.env.add_available_game_variable(GameVariable.POSITION_Z)
            self.env.add_available_game_variable(GameVariable.AMMO2)
            self.env.add_available_game_variable(GameVariable.POSITION_X)
            self.env.add_available_game_variable(GameVariable.POSITION_Y)
            self.env.set_episode_timeout(100010)
            self.env.set_episode_start_time(10)
            self.env.set_window_visible(render)
            self.env.set_sound_enabled(False)
            self.env.set_living_reward(0)
            self.env.set_mode(Mode.PLAYER)
            self.env.init()
            self.target_generator()
            self.target_saver()
            self.close()
            try:
                # reload what we just saved so the in-memory array is the int-scaled version
                self.target_images = np.load(self.directory + self.filename)
            except:
                print("Writing and Loading Failed debug!!")
        start_time = time.time() - start_time
        print("Needed " + str(start_time) + " seconds to load/generate target images")

    def close(self):
        """Release the VizDoom environment reference."""
        self.env = None

    def target_saver(self):
        """Scale float images to 0-255 ints and save the stack as a .npy file."""
        print("Saving new array of size:")
        print(self.target_images.shape)
        # scale [0,1] floats to integer pixel values before persisting
        processedtis = (self.target_images * 255.9).astype('int')
        np.save(self.directory + self.filename, processedtis)

    def initialize_target_array(self):
        """Initialize the target stack to all-ones placeholder images.

        NOTE(review): ones scale to 255 (white) in target_saver, while the
        module docstring describes the reserved map00 image as black — verify.
        A previous stacking-based implementation was removed as non-working.
        """
        self.target_images = np.ones((self.height, self.width, self.lastmap + 1))

    def target_generator(self):
        """Visit every map in [firstmap, lastmap] and store one screenshot per map."""
        self.initialize_target_array()
        while (self.iterator < self.lastmap + 1):  # iterate through all maps
            mapname = self.mapstring + str(self.iterator)
            if (self.iterator < 10):  # zero-pad single-digit map numbers
                mapname = self.mapstring + "0" + str(self.iterator)
            self.iterator = self.iterator + 1
            self.env.set_doom_map(mapname)
            s = self.env.get_state().screen_buffer
            s = self.process_image(s)
            s_expanded = np.expand_dims(s, axis=2)
            # shift the stack left by one and append the new image at the end
            self.target_images = np.append(self.target_images[:, :, 1:], s_expanded, axis=2)

    def process_image(self, image):
        """Resize a raw screen buffer to (height, width)."""
        s = resize(image, (self.height, self.width))
        return s

    def get_target_image(self, map = "map00"):
        """Return the target image for *map* as (height, width, 1) array.

        ``map00`` (the default) returns the reserved placeholder image.
        """
        mapnumber = map[3:]  # very specific to map name format "map000"
        mapnumber = int(mapnumber)
        temp = self.target_images[:, :, mapnumber]
        ti = temp[:, :, np.newaxis]
        return ti

    def imagedisplay(self, image, name = "image"):
        """Show *image* in an OpenCV window until a key is pressed (debug helper)."""
        cv2.imshow(name, image / 255)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
        pass

    def main():
        # NOTE(review): no `self` parameter; appears misplaced inside the class.
        print("Not supposed to be used as main file")

    def get_random_map(self):
        """Return a random zero-padded map name within [firstmap, lastmap]."""
        mapnumber = np.random.randint(self.firstmap, high=self.lastmap + 1)
        if (mapnumber < 10):
            return self.mapstring + "0" + str(mapnumber)
        else:
            return self.mapstring + str(mapnumber)
# NOTE(review): `main` appears to be defined inside MapManager above, so this
# module-level call may raise NameError when the file is run directly — verify.
if __name__ == '__main__':
    main()
| 38.142857 | 156 | 0.654044 |
e566f4506ecd9c4a6d3279672d953e74c06af03d | 279 | py | Python | build/android/pylib/utils/timeout_retry.py | TwistedCore/external_v8 | c6725dab9be251fbfc6fd7d53c3513a23e78c36c | [
"BSD-3-Clause"
] | 27 | 2016-04-27T01:02:03.000Z | 2021-12-13T08:53:19.000Z | build/android/pylib/utils/timeout_retry.py | TwistedCore/external_v8 | c6725dab9be251fbfc6fd7d53c3513a23e78c36c | [
"BSD-3-Clause"
] | 2 | 2017-03-09T09:00:50.000Z | 2017-09-21T15:48:20.000Z | build/android/pylib/utils/timeout_retry.py | TwistedCore/external_v8 | c6725dab9be251fbfc6fd7d53c3513a23e78c36c | [
"BSD-3-Clause"
] | 17 | 2016-04-27T02:06:39.000Z | 2019-12-18T08:07:00.000Z | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=unused-wildcard-import
# pylint: disable=wildcard-import
from devil.utils.timeout_retry import *
| 31 | 72 | 0.781362 |
3bbbde00a46cf57ef869ac9aebba9149e346857f | 11,123 | py | Python | tmp/test_invariance_on_lfw.py | sonuranjitjacob/facenet | 0ae6f9ae443cc8f1c163aaf76fb613e8fa79a0a6 | [
"MIT"
] | 13,408 | 2016-02-15T09:05:20.000Z | 2022-03-31T14:00:56.000Z | tmp/test_invariance_on_lfw.py | jasonmayes/facenet | 846db07945d5edaec41beb691da7c90b49b4e3ab | [
"MIT"
] | 1,132 | 2016-02-12T22:34:40.000Z | 2022-03-29T12:02:33.000Z | tmp/test_invariance_on_lfw.py | jasonmayes/facenet | 846db07945d5edaec41beb691da7c90b49b4e3ab | [
"MIT"
] | 5,366 | 2016-03-29T09:09:42.000Z | 2022-03-30T14:16:42.000Z | """Test invariance to translation, scaling and rotation on the "Labeled Faces in the Wild" dataset (http://vis-www.cs.umass.edu/lfw/).
This requires test images to be cropped a bit wider than the normal to give some room for the transformations.
"""
# MIT License
#
# Copyright (c) 2016 David Sandberg
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import argparse
import facenet
import lfw
import matplotlib.pyplot as plt
from scipy import misc
import os
import sys
import math
def main(args):
    """Evaluate LFW verification accuracy under translation, rotation and scaling.

    Loads the FaceNet model once, then for each enabled augmentation sweeps a
    symmetric range of magnitudes, evaluates verification accuracy on the LFW
    pairs, and writes a plot (.png) plus a two-column text file per sweep into
    ``result_dir``.

    Fix: the angle and scale sweeps previously sized their ranges with
    ``args.nrof_offsets`` (copy-paste); they now honour ``args.nrof_angles``
    and ``args.nrof_scales`` respectively.
    """
    pairs = lfw.read_pairs(os.path.expanduser(args.lfw_pairs))
    paths, actual_issame = lfw.get_paths(os.path.expanduser(args.lfw_dir), pairs)
    result_dir = '../data/'
    plt.ioff()  # Disable interactive plotting mode

    with tf.Graph().as_default():
        with tf.Session() as sess:
            # Load the model
            print('Loading model "%s"' % args.model_file)
            facenet.load_model(args.model_file)

            # Get input and output tensors
            images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")
            phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0")
            embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")
            image_size = int(images_placeholder.get_shape()[1])

            # Run test on LFW to check accuracy for different horizontal/vertical translations of input images
            if args.nrof_offsets > 0:
                step = 3
                offsets = np.asarray([x*step for x in range(-args.nrof_offsets//2+1, args.nrof_offsets//2+1)])
                horizontal_offset_accuracy = [None] * len(offsets)
                for idx, offset in enumerate(offsets):
                    accuracy = evaluate_accuracy(sess, images_placeholder, phase_train_placeholder, image_size, embeddings,
                        paths, actual_issame, translate_images, (offset, 0), 60, args.orig_image_size, args.seed)
                    print('Hoffset: %1.3f Accuracy: %1.3f+-%1.3f' % (offset, np.mean(accuracy), np.std(accuracy)))
                    horizontal_offset_accuracy[idx] = np.mean(accuracy)
                vertical_offset_accuracy = [None] * len(offsets)
                for idx, offset in enumerate(offsets):
                    accuracy = evaluate_accuracy(sess, images_placeholder, phase_train_placeholder, image_size, embeddings,
                        paths, actual_issame, translate_images, (0, offset), 60, args.orig_image_size, args.seed)
                    print('Voffset: %1.3f Accuracy: %1.3f+-%1.3f' % (offset, np.mean(accuracy), np.std(accuracy)))
                    vertical_offset_accuracy[idx] = np.mean(accuracy)
                fig = plt.figure(1)
                plt.plot(offsets, horizontal_offset_accuracy, label='Horizontal')
                plt.plot(offsets, vertical_offset_accuracy, label='Vertical')
                plt.legend()
                plt.grid(True)
                plt.title('Translation invariance on LFW')
                plt.xlabel('Offset [pixels]')
                plt.ylabel('Accuracy')
#                 plt.show()
                print('Saving results in %s' % result_dir)
                fig.savefig(os.path.join(result_dir, 'invariance_translation.png'))
                save_result(offsets, horizontal_offset_accuracy, os.path.join(result_dir, 'invariance_translation_horizontal.txt'))
                save_result(offsets, vertical_offset_accuracy, os.path.join(result_dir, 'invariance_translation_vertical.txt'))

            # Run test on LFW to check accuracy for different rotation of input images
            if args.nrof_angles > 0:
                step = 3
                # FIX: size the sweep with nrof_angles (was nrof_offsets).
                angles = np.asarray([x*step for x in range(-args.nrof_angles//2+1, args.nrof_angles//2+1)])
                rotation_accuracy = [None] * len(angles)
                for idx, angle in enumerate(angles):
                    accuracy = evaluate_accuracy(sess, images_placeholder, phase_train_placeholder, image_size, embeddings,
                        paths, actual_issame, rotate_images, angle, 60, args.orig_image_size, args.seed)
                    print('Angle: %1.3f Accuracy: %1.3f+-%1.3f' % (angle, np.mean(accuracy), np.std(accuracy)))
                    rotation_accuracy[idx] = np.mean(accuracy)
                fig = plt.figure(2)
                plt.plot(angles, rotation_accuracy)
                plt.grid(True)
                plt.title('Rotation invariance on LFW')
                plt.xlabel('Angle [deg]')
                plt.ylabel('Accuracy')
#                 plt.show()
                print('Saving results in %s' % result_dir)
                fig.savefig(os.path.join(result_dir, 'invariance_rotation.png'))
                save_result(angles, rotation_accuracy, os.path.join(result_dir, 'invariance_rotation.txt'))

            # Run test on LFW to check accuracy for different scaling of input images
            if args.nrof_scales > 0:
                step = 0.05
                # FIX: size the sweep with nrof_scales (was nrof_offsets).
                scales = np.asarray([x*step+1 for x in range(-args.nrof_scales//2+1, args.nrof_scales//2+1)])
                scale_accuracy = [None] * len(scales)
                for scale_idx, scale in enumerate(scales):
                    accuracy = evaluate_accuracy(sess, images_placeholder, phase_train_placeholder, image_size, embeddings,
                        paths, actual_issame, scale_images, scale, 60, args.orig_image_size, args.seed)
                    print('Scale: %1.3f Accuracy: %1.3f+-%1.3f' % (scale, np.mean(accuracy), np.std(accuracy)))
                    scale_accuracy[scale_idx] = np.mean(accuracy)
                fig = plt.figure(3)
                plt.plot(scales, scale_accuracy)
                plt.grid(True)
                plt.title('Scale invariance on LFW')
                plt.xlabel('Scale')
                plt.ylabel('Accuracy')
#                 plt.show()
                print('Saving results in %s' % result_dir)
                fig.savefig(os.path.join(result_dir, 'invariance_scale.png'))
                save_result(scales, scale_accuracy, os.path.join(result_dir, 'invariance_scale.txt'))
def save_result(aug, acc, filename):
    """Write paired augmentation magnitudes and accuracies to *filename*.

    One line per pair, two fixed-precision columns.
    """
    with open(filename, "w") as out:
        for index, aug_value in enumerate(aug):
            out.write('%6.4f %6.4f\n' % (aug_value, acc[index]))
def evaluate_accuracy(sess, images_placeholder, phase_train_placeholder, image_size, embeddings,
        paths, actual_issame, augment_images, aug_value, batch_size, orig_image_size, seed):
    """Embed all LFW images after applying an augmentation and return fold accuracies.

    Images are loaded in batches, transformed by *augment_images* with
    magnitude *aug_value*, embedded by the network, and scored by a
    threshold sweep over the verification pairs.
    """
    nrof_images = len(paths)
    nrof_batches = int(math.ceil(1.0 * nrof_images / batch_size))
    emb_list = []
    for batch_index in range(nrof_batches):
        lo = batch_index * batch_size
        hi = min(lo + batch_size, nrof_images)
        batch = facenet.load_data(paths[lo:hi], False, False, orig_image_size)
        augmented = augment_images(batch, aug_value, image_size)
        emb_list += sess.run([embeddings],
                             feed_dict={images_placeholder: augmented, phase_train_placeholder: False})
    # Stack the embeddings to a nrof_examples_per_epoch x 128 matrix
    emb_array = np.vstack(emb_list)
    thresholds = np.arange(0, 4, 0.01)
    embeddings1 = emb_array[0::2]
    embeddings2 = emb_array[1::2]
    _, _, accuracy = facenet.calculate_roc(thresholds, embeddings1, embeddings2, np.asarray(actual_issame), seed)
    return accuracy
def scale_images(images, scale, image_size):
    """Scale each image by *scale*, then center-crop back to image_size x image_size.

    :param images: batch of images, shape (N, H, W, C)
    :param scale: resize factor passed to ``misc.imresize``
    :param image_size: side length of the square crop
    :return: cropped batch, shape (N, image_size, image_size, C)
    """
    images_scale_list = [None] * images.shape[0]
    for i in range(images.shape[0]):
        images_scale_list[i] = misc.imresize(images[i, :, :, :], scale)
    images_scale = np.stack(images_scale_list, axis=0)
    # FIX: use floor division — this module enables true division, so `/ 2`
    # yields floats, which numpy rejects as slice indices.
    sz1 = images_scale.shape[1] // 2
    sz2 = image_size // 2
    images_crop = images_scale[:, (sz1-sz2):(sz1+sz2), (sz1-sz2):(sz1+sz2), :]
    return images_crop
def rotate_images(images, angle, image_size):
    """Rotate each image by *angle* degrees, then center-crop to image_size x image_size.

    :param images: batch of images, shape (N, H, W, C)
    :param angle: rotation angle in degrees passed to ``misc.imrotate``
    :param image_size: side length of the square crop
    :return: cropped batch, shape (N, image_size, image_size, C)
    """
    images_list = [None] * images.shape[0]
    for i in range(images.shape[0]):
        images_list[i] = misc.imrotate(images[i, :, :, :], angle)
    images_rot = np.stack(images_list, axis=0)
    # FIX: use floor division — this module enables true division, so `/ 2`
    # yields floats, which numpy rejects as slice indices.
    sz1 = images_rot.shape[1] // 2
    sz2 = image_size // 2
    images_crop = images_rot[:, (sz1-sz2):(sz1+sz2), (sz1-sz2):(sz1+sz2), :]
    return images_crop
def translate_images(images, offset, image_size):
    """Translate each image by (h, v) pixels via a shifted center crop.

    :param images: batch of images, shape (N, H, W, C)
    :param offset: (horizontal, vertical) shift in pixels
    :param image_size: side length of the square crop
    :return: cropped batch, shape (N, image_size, image_size, C)
    """
    h, v = offset
    # FIX: use floor division — this module enables true division, so `/ 2`
    # yields floats, which numpy rejects as slice indices.
    sz1 = images.shape[1] // 2
    sz2 = image_size // 2
    images_crop = images[:, (sz1-sz2+v):(sz1+sz2+v), (sz1-sz2+h):(sz1+sz2+h), :]
    return images_crop
def parse_arguments(argv):
    """Parse the command-line options for the invariance evaluation.

    :param argv: argument list (without the program name)
    :return: argparse namespace with all option values
    """
    arg_parser = argparse.ArgumentParser()

    # Model and sweep sizes.
    arg_parser.add_argument('--model_file', type=str,
        help='File containing the model parameters as well as the model metagraph (with extension ".meta")',
        default='~/models/facenet/20160514-234418/model.ckpt-500000')
    arg_parser.add_argument('--nrof_offsets', type=int,
        help='Number of horizontal and vertical offsets to evaluate.', default=21)
    arg_parser.add_argument('--nrof_angles', type=int,
        help='Number of angles to evaluate.', default=21)
    arg_parser.add_argument('--nrof_scales', type=int,
        help='Number of scales to evaluate.', default=21)

    # LFW dataset locations and evaluation settings.
    arg_parser.add_argument('--lfw_pairs', type=str,
        help='The file containing the pairs to use for validation.', default='../data/pairs.txt')
    arg_parser.add_argument('--lfw_dir', type=str,
        help='Path to the data directory containing aligned face patches.', default='~/datasets/lfw/lfw_realigned/')
    arg_parser.add_argument('--orig_image_size', type=int,
        help='Image size (height, width) in pixels of the original (uncropped/unscaled) images.', default=224)
    arg_parser.add_argument('--lfw_nrof_folds', type=int,
        help='Number of folds to use for cross validation. Mainly used for testing.', default=10)
    arg_parser.add_argument('--seed', type=int,
        help='Random seed.', default=666)

    return arg_parser.parse_args(argv)
# Script entry point: parse CLI flags (skipping argv[0]) and run the evaluation.
if __name__ == '__main__':
    main(parse_arguments(sys.argv[1:]))
| 51.976636 | 134 | 0.650094 |
b91d1b5f3f2862c67514835f9d31c2c17bbb7340 | 18,942 | py | Python | code/manager.py | nuvlabox/peripheral-manager-ethernet | cad56702b2592ce49b2183aa33c0927651969bd3 | [
"Apache-2.0"
] | null | null | null | code/manager.py | nuvlabox/peripheral-manager-ethernet | cad56702b2592ce49b2183aa33c0927651969bd3 | [
"Apache-2.0"
] | null | null | null | code/manager.py | nuvlabox/peripheral-manager-ethernet | cad56702b2592ce49b2183aa33c0927651969bd3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""NuvlaBox Peripheral Manager Network
This service provides network devices discovery.
"""
import logging
import requests
import time
import json
import os
import xmltodict
import re
import base64
from threading import Event
# Packages for Service Discovery
from ssdpy import SSDPClient
from xml.dom import minidom
from urllib.parse import urlparse
from wsdiscovery.discovery import ThreadedWSDiscovery as WSDiscovery
from zeroconf import ZeroconfServiceTypes, ServiceBrowser, Zeroconf
# Interval between discovery scans, in seconds.
# NOTE(review): not referenced in this chunk — presumably consumed by the main loop.
scanning_interval = 30
logging.basicConfig(level=logging.INFO)
# Present only when running inside a Kubernetes cluster (set by the kubelet).
KUBERNETES_SERVICE_HOST = os.getenv('KUBERNETES_SERVICE_HOST')
# Namespace the NuvlaBox components run in; defaults to 'nuvlabox'.
namespace = os.getenv('MY_NAMESPACE', 'nuvlabox')
def wait_bootstrap(api_url):
    """Block until the NuvlaBox Agent API answers its healthcheck.

    Polls ``<api_url>/healthcheck`` and retries every 15 seconds until it
    returns HTTP 200.

    :param api_url: base URL of the Agent API
    """
    while True:
        try:
            logging.info(f'Waiting for {api_url}...')
            r = requests.get(api_url + '/healthcheck')
            r.raise_for_status()
            if r.status_code == 200:
                break
        except requests.exceptions.RequestException:
            # Narrowed from a bare `except:` so signals like KeyboardInterrupt
            # are not swallowed while waiting for the agent to come up.
            time.sleep(15)

    logging.info('NuvlaBox has been initialized.')
    return
def network_per_exists_check(api_url, device_addr, peripheral_dir):
    """Check whether a peripheral is already registered.

    First asks the Agent API; if the API is unreachable, falls back to
    looking for the identifier among the files in *peripheral_dir*.

    :param api_url: base URL of the Agent peripherals API
    :param device_addr: peripheral identifier (device address)
    :param peripheral_dir: local directory where peripherals are persisted
    :return: True if the peripheral exists, False otherwise
    """
    identifier = device_addr
    try:
        r = requests.get(f'{api_url}/{identifier}')
        if r.status_code == 404:
            return False
        elif r.status_code == 200:
            return True
        else:
            # Any other status is unexpected: raise so the handlers below run.
            r.raise_for_status()
    except requests.exceptions.InvalidSchema:
        logging.error(f'The Agent API URL {api_url} seems to be malformed. Cannot continue...')
        raise
    except requests.exceptions.ConnectionError as ex:
        # API unreachable (possibly transient): fall back to the local files.
        logging.error(f'Cannot reach out to Agent API at {api_url}. Can be a transient issue: {str(ex)}')
        logging.info(f'Attempting to find out if peripheral {identifier} already exists, with local search')
        if identifier in os.listdir(f'{peripheral_dir}'):
            return True

        return False
    except requests.exceptions.HTTPError as e:
        # Unexpected status code: assume the peripheral is not registered.
        logging.warning(f'Could not lookup peripheral {identifier}. Assuming it does not exist')
        return False
def get_saved_peripherals(api_url, protocol):
    """Fetch the peripherals already registered for a given interface.

    Used at bootstrap so peripherals modified while the NuvlaBox was down
    can be reconciled (old ones deleted, new ones inserted).

    :param api_url: url of the agent api for peripherals
    :param protocol: protocol name = interface
    :return: map of device identifiers and content
    """
    lookup_url = f'{api_url}?parameter=interface&value={protocol}'
    response = requests.get(lookup_url)
    response.raise_for_status()
    return response.json()
def get_ssdp_device_xml_as_json(url):
    """Fetch an SSDP device-description XML file and return its <device> element as a dict.

    :param url: location URL advertised by the device (scheme optional)
    :return: dict parsed from the <device> element, or {} on any failure
    """
    if not url:
        return {}

    parsed_url = urlparse(url)
    try:
        # Devices sometimes advertise a bare host:port — default to http.
        if not parsed_url.scheme:
            url = f'http://{url}'
    except AttributeError:
        return {}

    try:
        r = requests.get(url)
        device_xml = minidom.parseString(r.content).getElementsByTagName('device')[0]
        device_json = xmltodict.parse(device_xml.toxml())
        return device_json.get('device', {})
    except Exception:
        # Narrowed from a bare `except:` — still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        logging.warning(f"Cannot get and parse XML for SSDP device info from {url}")
        return {}
def ssdpManager():
    """Discover SSDP/UPnP devices on the local network.

    Sends an M-SEARCH for ``ssdp:all`` and folds every response whose USN
    contains ``:device:`` into a peripheral dict keyed by the device UUID;
    responses advertising only services are skipped.  Extra metadata (name,
    vendor, product, serial) is pulled from the device's description XML
    when a location URL is advertised.

    :return: {identifier: peripheral-dict, ...}
    """
    client = SSDPClient()
    devices = client.m_search("ssdp:all")
    output = {
        'peripherals': {},
        'xml': {}
    }

    for device in devices:
        try:
            usn = device['usn']
        except KeyError:
            # NOTE(review): "missinng" typo below is a runtime log string,
            # deliberately left untouched here.
            logging.warning(f'SSDP device {device} missinng USN field, and thus is considered not compliant. Ignoring')
            continue

        if ":device:" in usn:
            # normally, USN = "uuid:XYZ::urn:schemas-upnp-org:device:DEVICETYPE:1"
            # if this substring is not there, then we are not interested (it might be a service)
            # TODO: consider aggregating all services being provided by a device
            try:
                device_class = usn.split(':device:')[1].split(':')[0]
            except IndexError:
                logging.exception(f'Failed to infer device class for from USN {usn}')
                continue
        else:
            continue

        try:
            # strip the "uuid:" prefix and anything after the first colon
            identifier = usn.replace("uuid:", "").split(":")[0]
        except IndexError:
            logging.warning(f'Cannot parse USN {usn}. Continuing with raw USN value as identifier')
            identifier = usn

        if identifier in output['peripherals']:
            # ssdp device has already been identified. This entry might simply be another service/class
            # of the same device let's just see if there's an update to the classes and move on
            existing_classes = output['peripherals'][identifier]['classes']
            if device_class in existing_classes:
                continue
            else:
                output['peripherals'][identifier]['classes'].append(device_class)
        else:
            # new device
            location = device.get('location')
            device_from_location = get_ssdp_device_xml_as_json(location)  # always a dict
            alt_name = usn
            if 'x-friendly-name' in device:
                # the friendly name header is base64-encoded; fall back to the USN if undecodable
                try:
                    alt_name = base64.b64decode(device.get('x-friendly-name')).decode()
                except:
                    pass

            name = device_from_location.get('friendlyName', alt_name)
            description = device_from_location.get('modelDescription',
                                                   device.get('server', name))

            output['peripherals'][identifier] = {
                'classes': [device_class],
                'available': True,
                'identifier': identifier,
                'interface': 'SSDP',
                'name': name,
                'description': description
            }

            if location:
                output['peripherals'][identifier]['device-path'] = location

            # Optional metadata only present when the description XML was fetched.
            vendor = device_from_location.get('manufacturer')
            if vendor:
                output['peripherals'][identifier]['vendor'] = vendor

            product = device_from_location.get('modelName')
            if product:
                output['peripherals'][identifier]['product'] = product

            serial = device_from_location.get('serialNumber')
            if serial:
                output['peripherals'][identifier]['serial-number'] = serial

    return output['peripherals']
def wsDiscoveryManager(wsdaemon):
    """Scan for WS-Discovery devices and return them as Nuvla-style peripheral dicts.

    :param wsdaemon: a WSDiscovery daemon exposing start/searchServices/stop
    :return: {identifier: peripheral-dict, ...} keyed by the EPR's last colon-field
    """
    discovered = {}

    wsdaemon.start()
    for service in wsdaemon.searchServices(timeout=6):
        identifier = str(service.getEPR()).split(':')[-1]
        if identifier in discovered:
            continue

        classes = [re.split("/|:", str(svc_type))[-1] for svc_type in service.getTypes()]
        peripheral = {
            "available": True,
            "name": " | ".join(classes),
            "description": f"[wsdiscovery peripheral] {str(service.getEPR())} | Scopes: {', '.join([str(s) for s in service.getScopes()])}",
            "classes": classes,
            "identifier": identifier,
            "interface": 'WS-Discovery',
        }
        addresses = service.getXAddrs()
        if len(addresses) > 0:
            peripheral['device-path'] = ", ".join([str(x) for x in addresses])
        discovered[identifier] = peripheral

    wsdaemon.stop()
    return discovered
class ZeroConfListener:
    """Zeroconf listener that caches ``ServiceInfo`` for every announced service.

    NOTE(review): ``all_info`` and ``listening_to`` are class attributes shared
    by every instance — fine for a single listener, but worth confirming.
    """
    # service name -> zeroconf ServiceInfo for every service currently announced
    all_info = {}
    # service type -> ServiceBrowser for every type currently subscribed to
    listening_to = {}

    def remove_service(self, zeroconf, type, name):
        """Drop a service that went offline from the cache."""
        logging.info(f"[zeroconf] Service {name} removed")
        if name in self.all_info:
            self.all_info.pop(name)

    def add_service(self, zeroconf, type, name):
        """Resolve and cache a newly announced service."""
        info = zeroconf.get_service_info(type, name)
        logging.info(f"[zeroconf] Service {name} added")
        self.all_info[name] = info

    # Zeroconf calls `update_service` on changes; treat updates like additions.
    update_service = add_service
def format_zeroconf_services(services):
    """Format the Zeroconf listener services into Nuvla-compliant peripheral data.

    :param services: list of zeroconf services from the listener,
        i.e. {'service_name': ServiceInfo, ...}
    :return: {identifier: peripheral-dict, ...} keyed by the service's server name
    """
    output = {}
    for service_name, service_data in services.items():
        try:
            identifier = service_data.server

            if identifier not in output:
                output[identifier] = {
                    'name': service_data.server,
                    'description': f'{service_name}:{service_data.port}',
                    'identifier': identifier,
                    'available': True,
                    'interface': "Bonjour/Avahi",
                    'classes': [service_data.type]
                }

            if service_data.type not in output[identifier]['classes']:
                output[identifier]['classes'].append(service_data.type)

            if service_data.parsed_addresses() and 'device-path' not in output[identifier]:
                output[identifier]['device-path'] = service_data.parsed_addresses()[0]

            if service_name not in output[identifier]['description']:
                output[identifier]['description'] += f' | {service_name}:{service_data.port}'

            try:
                properties = service_data.properties
                if properties and isinstance(properties, dict):
                    # decode byte keys/values, ignoring non-ascii garbage
                    dict_properties = dict(map(lambda tup:
                                               map(lambda el: el.decode('ascii', errors="ignore"), tup),
                                               properties.items()))

                    # Try to find a limited and predefined list of known useful attributes:

                    # for the device model name:
                    product_name_known_keys = ['model', 'ModelName', 'am', 'rpMd', 'name']
                    matched_keys = list(product_name_known_keys & dict_properties.keys())
                    if matched_keys:
                        output[identifier]['name'] = output[identifier]['product'] = dict_properties[matched_keys[0]]

                    # for additional description
                    if 'uname' in dict_properties:
                        output[identifier]['description'] += f'. OS: {dict_properties["uname"]}'

                    if 'description' in dict_properties:
                        output[identifier]['description'] += f'. Extra description: {dict_properties["description"]}'

                    # for additional classes
                    if 'class' in dict_properties:
                        # FIX: the peripheral dict's key is 'classes'; the previous
                        # 'class' lookup raised a KeyError that the bare except
                        # silently swallowed, so this value was always lost.
                        output[identifier]['classes'].append(dict_properties['class'])
            except Exception:
                # this is only to get additional info on the peripheral; if it
                # fails, we can live without it (narrowed from a bare `except:`)
                pass
        except Exception:
            logging.exception(f'Unable to categorize Zeroconf peripheral {service_name} with data: {service_data}')
            continue

    return output
def parse_zeroconf_devices(zc, listener):
    """ Manages the Zeroconf listeners and parses the currently broadcasted services

    Compares the service types currently advertised on the network with the
    ones the listener is already subscribed to: starts a ServiceBrowser for
    every new type and cancels the browser of every type that disappeared.

    :param zc: zeroconf object
    :param listener: zeroconf listener instance
    :return: Nuvla formatted peripheral data for all discovered services
    """
    available_types = set(ZeroconfServiceTypes.find())
    known_types = set(listener.listening_to)
    # Subscribe to service types that appeared since the previous scan
    for service_type in available_types - known_types:
        try:
            listener.listening_to[service_type] = ServiceBrowser(zc, service_type, listener)
        except Exception:
            logging.exception(f'Zeroconf exception in ServiceBrowser(zc={zc}, new={service_type}, listener={listener})')
    # Drop the subscriptions of service types that are no longer advertised
    for service_type in known_types - available_types:
        listener.listening_to[service_type].cancel()
        logging.info(f'Removing Zeroconf listener for service type {service_type}: {listener.listening_to.pop(service_type)}')
    return format_zeroconf_services(listener.all_info)
def network_manager(zc_obj, zc_listener, wsdaemon):
    """
    Runs one discovery pass across all supported protocols and collects the outputs.

    :param zc_obj: zeroconf object, or None when zeroconf failed to start
    :param zc_listener: zeroconf listener instance
    :param wsdaemon: WS-Discovery daemon
    :return: dict keyed by protocol name ('ssdp', 'ws-discovery', 'zeroconf')
    """
    # Zeroconf discovery is optional: skip it entirely when the daemon is absent.
    if zc_obj:
        zeroconf_devices = parse_zeroconf_devices(zc_obj, zc_listener)
    else:
        zeroconf_devices = {}
    return {
        'ssdp': ssdpManager(),
        'ws-discovery': wsDiscoveryManager(wsdaemon),
        'zeroconf': zeroconf_devices,
    }
def post_peripheral(api_url: str, body: dict) -> dict:
    """ Posts a new peripheral into Nuvla, via the Agent API

    :param body: content of the peripheral
    :param api_url: URL of the Agent API for peripherals
    :return: Nuvla resource
    :raises Exception: re-raises whatever made the request or JSON decoding fail
    """
    try:
        r = requests.post(api_url, json=body)
        r.raise_for_status()
        return r.json()
    except Exception:
        # Narrowed from a bare 'except:' so that SystemExit/KeyboardInterrupt
        # propagate untouched instead of being logged as an API failure.
        logging.error('Cannot create new peripheral in Nuvla. See agent logs for more details on the problem')
        # this will be caught by the calling block
        raise
def delete_peripheral(api_url: str, identifier: str, resource_id=None) -> dict:
    """ Deletes an existing peripheral from Nuvla, via the Agent API

    :param identifier: peripheral identifier (same as local filename)
    :param api_url: URL of the Agent API for peripherals
    :param resource_id: peripheral resource ID in Nuvla
    :return: Nuvla resource
    :raises Exception: re-raises whatever made the request or JSON decoding fail
    """
    # The Nuvla resource id, when known, is forwarded as a query parameter.
    if resource_id:
        url = f'{api_url}/{identifier}?id={resource_id}'
    else:
        url = f'{api_url}/{identifier}'
    try:
        r = requests.delete(url)
        r.raise_for_status()
        return r.json()
    except Exception:
        # Narrowed from a bare 'except:' so that SystemExit/KeyboardInterrupt
        # propagate untouched instead of being logged as an API failure.
        logging.error(f'Cannot delete peripheral {identifier} from Nuvla. See agent logs for more info about the issue')
        # this will be caught by the calling block
        raise
def remove_legacy_peripherals(api_url: str, peripherals_dir: str, protocols: list):
    """ In previous versions of this component, the peripherals were stored in an incompatible manner.
    To avoid duplicates, before starting this component, we make sure all legacy peripherals are deleted

    :param api_url: agent api url for peripherals
    :param peripherals_dir: path to peripherals dir
    :param protocols: list of protocols to look for
    :return: None
    """
    for proto in protocols:
        if not proto:
            # just to be sure we don't delete the top directory
            continue
        path = f'{peripherals_dir}{proto}'
        if os.path.isdir(path):
            for legacy_peripheral in os.listdir(path):
                # Each legacy file is a JSON document; it may carry the Nuvla resource id.
                with open(f'{path}/{legacy_peripheral}') as lp:
                    nuvla_id = json.load(lp).get("resource_id")
                # if it has a nuvla_id, there it must be removed from Nuvla
                if nuvla_id:
                    try:
                        delete_peripheral(api_url, f"{proto}/{legacy_peripheral}", resource_id=nuvla_id)
                        # NOTE(review): on a successful remote delete we 'continue' and skip the
                        # local os.remove below — presumably the agent deletes the local file as
                        # part of the API call; if it does not, the os.rmdir below will raise
                        # OSError (directory not empty). TODO confirm with the Agent API behavior.
                        continue
                    except:
                        # best-effort: fall through and at least remove the local file
                        pass
                logging.info(f'Removed legacy peripheral {proto}/{legacy_peripheral}. If it still exists, it shall be re-created.')
                os.remove(f'{path}/{legacy_peripheral}')
            # by now, dir must be empty, so this shall work
            os.rmdir(path)
            logging.info(f'Removed all legacy peripherals for interface {proto}: {path}')
if __name__ == "__main__":
    peripheral_path = '/srv/nuvlabox/shared/.peripherals/'
    # KUBERNETES_SERVICE_HOST and namespace are assumed to be module-level globals — TODO confirm
    agent_api_endpoint = 'localhost:5080' if not KUBERNETES_SERVICE_HOST else f'agent.{namespace}'
    base_api_url = f"http://{agent_api_endpoint}/api"
    API_URL = f"{base_api_url}/peripheral"
    # Event used only as an interruptible sleep between scan iterations.
    e = Event()
    logging.info('NETWORK PERIPHERAL MANAGER STARTED')
    wait_bootstrap(base_api_url)
    # Clean up peripherals stored by older, incompatible versions of this component.
    remove_legacy_peripherals(API_URL, peripheral_path, ['ssdp', 'ws-discovery', 'zeroconf'])
    # Seed the in-memory state with the peripherals registered by a previous run.
    old_devices = {'ssdp': get_saved_peripherals(API_URL, 'SSDP'),
                   'ws-discovery': get_saved_peripherals(API_URL, 'WS-Discovery'),
                   'zeroconf': get_saved_peripherals(API_URL, 'Bonjour/Avahi')}
    logging.info(f'Peripherals registered from the previous run: {old_devices}')
    # Zeroconf may fail to start (e.g. no usable network interface); in that case
    # discovery continues with SSDP and WS-Discovery only.
    try:
        zeroconf = Zeroconf()
    except OSError as ex:
        logging.error(f'Zeroconf failed to start and cannot be fixed without a restart: {str(ex)}')
        zeroconf = zeroconf_listener = None
    else:
        zeroconf_listener = ZeroConfListener()
    wsdaemon = WSDiscovery()
    while True:
        current_devices = network_manager(zeroconf, zeroconf_listener, wsdaemon)
        logging.info('CURRENT DEVICES: {}'.format(current_devices))
        for protocol in current_devices:
            if current_devices[protocol] != old_devices[protocol]:
                old_devices_set = set(old_devices[protocol].keys())
                current_devices_set = set(current_devices[protocol].keys())
                # Devices seen now but not before are published; the inverse are removed.
                publishing = current_devices_set - old_devices_set
                removing = old_devices_set - current_devices_set
                for device in publishing:
                    peripheral_already_registered = \
                        network_per_exists_check(API_URL, device, peripheral_path)
                    if not peripheral_already_registered:
                        logging.info('PUBLISHING: {}'.format(current_devices[protocol][device]))
                        try:
                            resource = post_peripheral(API_URL, current_devices[protocol][device])
                        except Exception as ex:
                            logging.error(f'Unable to publish peripheral {device}: {str(ex)}')
                            continue
                    # Publish succeeded or it was already registered: record it as known.
                    old_devices[protocol][device] = current_devices[protocol][device]
                for device in removing:
                    logging.info('REMOVING: {}'.format(old_devices[protocol][device]))
                    peripheral_already_registered = \
                        network_per_exists_check(API_URL, device, peripheral_path)
                    if peripheral_already_registered:
                        try:
                            resource = delete_peripheral(API_URL, device)
                        except:
                            # Keep the device in old_devices so removal is retried next pass.
                            logging.exception(f'Cannot delete {device} from Nuvla')
                            continue
                    else:
                        logging.warning(f'{protocol} peripheral {device} seems to have been removed already')
                    del old_devices[protocol][device]
        # scanning_interval is assumed to be a module-level constant (seconds) — TODO confirm
        e.wait(timeout=scanning_interval)
| 35.47191 | 144 | 0.60738 |
8487c37cbe2341cf6b4d7c4d698b90b91a434123 | 8,207 | py | Python | flumine/controls/tradingcontrols.py | mberk/flumine | 6216bcc233326cf07852fca9c7d39a18cee265ad | [
"MIT"
] | 77 | 2017-12-09T07:10:18.000Z | 2022-03-03T09:50:35.000Z | flumine/controls/tradingcontrols.py | mberk/flumine | 6216bcc233326cf07852fca9c7d39a18cee265ad | [
"MIT"
] | 423 | 2017-01-21T07:26:51.000Z | 2022-03-04T11:13:54.000Z | flumine/controls/tradingcontrols.py | mberk/flumine | 6216bcc233326cf07852fca9c7d39a18cee265ad | [
"MIT"
] | 47 | 2017-04-22T17:31:41.000Z | 2022-01-11T08:52:37.000Z | import logging
from ..clients.clients import ExchangeType
from ..order.ordertype import OrderTypes
from ..order.orderpackage import OrderPackageType, BaseOrder
from . import BaseControl
from .. import utils
logger = logging.getLogger(__name__)
class OrderValidation(BaseControl):
    """
    Validates order price and size is valid for
    exchange.

    Dispatches per order type (LIMIT, LIMIT_ON_CLOSE, MARKET_ON_CLOSE);
    every violation is reported through `self._on_error`.
    """

    NAME = "ORDER_VALIDATION"

    def _validate(self, order: BaseOrder, package_type: OrderPackageType) -> None:
        # Only betfair orders have validation rules implemented here.
        if order.EXCHANGE == ExchangeType.BETFAIR:
            self._validate_betfair_order(order)

    def _validate_betfair_order(self, order) -> None:
        # Each order type requires a different set of fields to be present/valid.
        if order.order_type.ORDER_TYPE == OrderTypes.LIMIT:
            self._validate_betfair_size(order)
            self._validate_betfair_price(order)
            self._validate_betfair_min_size(order, OrderTypes.LIMIT)
        elif order.order_type.ORDER_TYPE == OrderTypes.LIMIT_ON_CLOSE:
            self._validate_betfair_price(order)
            self._validate_betfair_liability(order)
            self._validate_betfair_min_size(order, OrderTypes.LIMIT_ON_CLOSE)
        elif order.order_type.ORDER_TYPE == OrderTypes.MARKET_ON_CLOSE:
            self._validate_betfair_liability(order)
            self._validate_betfair_min_size(order, OrderTypes.MARKET_ON_CLOSE)
        else:
            self._on_error(order, "Unknown orderType")

    def _validate_betfair_size(self, order) -> None:
        # Size must be a positive amount with at most 2 decimal places.
        if order.order_type.size is None:
            self._on_error(order, "Order size is None")
        elif order.order_type.size <= 0:
            self._on_error(order, "Order size is less than 0")
        elif order.order_type.size != round(order.order_type.size, 2):
            self._on_error(order, "Order size has more than 2dp")

    def _validate_betfair_price(self, order) -> None:
        # Price must sit exactly on the betfair price ladder (utils.PRICES).
        if order.order_type.price is None:
            self._on_error(order, "Order price is None")
        elif utils.as_dec(order.order_type.price) not in utils.PRICES:
            self._on_error(order, "Order price is not valid")

    def _validate_betfair_liability(self, order) -> None:
        # Liability must be a positive amount with at most 2 decimal places.
        if order.order_type.liability is None:
            self._on_error(order, "Order liability is None")
        elif order.order_type.liability <= 0:
            self._on_error(order, "Order liability is less than 0")
        elif order.order_type.liability != round(order.order_type.liability, 2):
            self._on_error(order, "Order liability has more than 2dp")

    def _validate_betfair_min_size(self, order, order_type) -> None:
        client = self.flumine.client
        if client.min_bet_validation is False:
            return  # some accounts do not have min bet restrictions
        if order_type == OrderTypes.LIMIT:
            # A LIMIT order is rejected only when BOTH the stake is below the
            # minimum bet size AND the potential payout (price * size) is below
            # the minimum payout — either condition alone makes it acceptable.
            if (
                order.order_type.size < client.min_bet_size
                and (order.order_type.price * order.order_type.size)
                < client.min_bet_payout
            ):
                self._on_error(
                    order,
                    "Order size is less than min bet size ({0}) or payout ({1}) for currency".format(
                        client.min_bet_size, client.min_bet_payout
                    ),
                )
        else:  # todo is this correct?
            # On-close order types are validated on liability instead of size.
            if (
                order.side == "BACK"
                and order.order_type.liability < client.min_bet_size
            ):
                self._on_error(
                    order,
                    "Liability is less than min bet size ({0}) for currency".format(
                        client.min_bet_size
                    ),
                )
            elif (
                order.side == "LAY"
                and order.order_type.liability < client.min_bsp_liability
            ):
                self._on_error(
                    order,
                    "Liability is less than min BSP payout ({0}) for currency".format(
                        client.min_bsp_liability
                    ),
                )
class MarketValidation(BaseControl):
    """Checks that the order's market exists, has a MarketBook and is open for orders."""

    NAME = "MARKET_VALIDATION"

    def _validate(self, order: BaseOrder, package_type: OrderPackageType) -> None:
        # Only betfair markets are validated; other exchanges pass through untouched.
        if order.EXCHANGE != ExchangeType.BETFAIR:
            return
        self._validate_betfair_market_status(order)

    def _validate_betfair_market_status(self, order):
        """Report (at most) the first failed availability check via `_on_error`."""
        market = self.flumine.markets.markets.get(order.market_id)
        if market is None:
            self._on_error(order, "Market is not available")
            return
        if market.market_book is None:
            self._on_error(order, "MarketBook is not available")
            return
        if market.market_book.status != "OPEN":
            self._on_error(order, "Market is not open")
class StrategyExposure(BaseControl):
    """
    Validates:
    - `strategy.validate_order` function
    - `strategy.max_order_exposure` is not violated if order is executed
    - `strategy.max_selection_exposure` is not violated if order is executed
    Exposure calculation includes pending,
    executable and execution complete orders.
    """

    NAME = "STRATEGY_EXPOSURE"

    def _validate(self, order: BaseOrder, package_type: OrderPackageType) -> None:
        if package_type == OrderPackageType.PLACE:
            # strategy.validate_order
            runner_context = order.trade.strategy.get_runner_context(*order.lookup)
            if order.trade.strategy.validate_order(runner_context, order) is False:
                return self._on_error(order, order.violation_msg)
        # Exposure checks apply to new and replaced orders (not cancels/updates).
        if package_type in (
            OrderPackageType.PLACE,
            OrderPackageType.REPLACE,
        ):
            strategy = order.trade.strategy
            # Worst-case loss this single order can add:
            if order.order_type.ORDER_TYPE == OrderTypes.LIMIT:
                if order.side == "BACK":
                    # Backing risks the stake itself.
                    order_exposure = order.order_type.size
                else:
                    # Laying risks the liability: (price - 1) * stake.
                    order_exposure = (
                        order.order_type.price - 1
                    ) * order.order_type.size
            elif order.order_type.ORDER_TYPE == OrderTypes.LIMIT_ON_CLOSE:
                order_exposure = order.order_type.liability  # todo correct?
            elif order.order_type.ORDER_TYPE == OrderTypes.MARKET_ON_CLOSE:
                order_exposure = order.order_type.liability
            else:
                return self._on_error(order, "Unknown order_type")
            # per order
            if order_exposure > strategy.max_order_exposure:
                return self._on_error(
                    order,
                    "Order exposure ({0}) is greater than strategy.max_order_exposure ({1})".format(
                        order_exposure, strategy.max_order_exposure
                    ),
                )
            # per selection
            market = self.flumine.markets.markets[order.market_id]
            # When replacing, the order being replaced must not count towards
            # the existing exposure (it is about to be superseded).
            if package_type == OrderPackageType.REPLACE:
                exclusion = order
            else:
                exclusion = None
            current_exposures = market.blotter.get_exposures(
                strategy, lookup=order.lookup, exclusion=exclusion
            )
            """
            We use -min(...) in the below, as "worst_possible_profit_on_X" will be negative if the position is
            at risk of loss, while exposure values are always atleast zero.
            Exposure refers to the largest potential loss.
            """
            if order.side == "BACK":
                current_selection_exposure = -current_exposures[
                    "worst_possible_profit_on_lose"
                ]
            else:
                current_selection_exposure = -current_exposures[
                    "worst_possible_profit_on_win"
                ]
            potential_exposure = current_selection_exposure + order_exposure
            if potential_exposure > strategy.max_selection_exposure:
                return self._on_error(
                    order,
                    "Potential selection exposure ({0:.2f}) is greater than strategy.max_selection_exposure ({1})".format(
                        potential_exposure,
                        strategy.max_selection_exposure,
                    ),
                )
| 40.428571 | 122 | 0.599245 |
9e6adac0e5c34aa27b823eb1670b00df1aaef7c6 | 648 | py | Python | packages/jet_bridge_base/jet_bridge_base/logger.py | bokal2/jet-bridge | dddc4f55c2d5a28c02ce9515dffc750e3887450f | [
"MIT"
] | 2 | 2021-05-31T19:13:34.000Z | 2022-02-03T13:12:35.000Z | packages/jet_bridge_base/jet_bridge_base/logger.py | bokal2/jet-bridge | dddc4f55c2d5a28c02ce9515dffc750e3887450f | [
"MIT"
] | null | null | null | packages/jet_bridge_base/jet_bridge_base/logger.py | bokal2/jet-bridge | dddc4f55c2d5a28c02ce9515dffc750e3887450f | [
"MIT"
] | null | null | null | import logging
from jet_bridge_base import settings
# Dedicated logger for the jet_bridge package.
logger = logging.getLogger('jet_bridge')
# Verbose output in debug deployments, informational otherwise.
level = logging.DEBUG if settings.DEBUG else logging.INFO
# Single stream handler (stderr by default) for all jet_bridge records.
ch = logging.StreamHandler()
class Formatter(logging.Formatter):
    """Level-aware formatter: INFO records print as the bare message, every
    other level gets a '<LEVEL> - <timestamp>: <message>' line."""

    # Per-level layout overrides; INFO stays clean for user-facing output.
    formats = {logging.INFO: '%(message)s'}
    # Fallback layout for any level without an explicit override.
    default_format = '%(levelname)s - %(asctime)s: %(message)s'

    def formatMessage(self, record):
        """Render *record* with the layout chosen by its level, interpolating
        the record's own attributes."""
        layout = self.formats.get(record.levelno, self.default_format)
        return layout % record.__dict__
# NOTE: the fmt argument is effectively unused because Formatter overrides
# formatMessage(); the datefmt second argument IS still used for %(asctime)s.
formatter = Formatter('%(asctime)s %(levelname)s %(message)s', '%Y-%m-%d %H:%M:%S')
ch.setFormatter(formatter)
ch.setLevel(level)
logger.setLevel(level)
logger.addHandler(ch)
| 22.344828 | 86 | 0.708333 |
c72c3c76373c4d04d6e5b7d42dc30210375a6478 | 12,782 | py | Python | Classes/Mesh.py | Superomeg4/pyleecan | 2b695b5f39e77475a07aa0ea89489fb0a9659337 | [
"Apache-2.0"
] | null | null | null | Classes/Mesh.py | Superomeg4/pyleecan | 2b695b5f39e77475a07aa0ea89489fb0a9659337 | [
"Apache-2.0"
] | null | null | null | Classes/Mesh.py | Superomeg4/pyleecan | 2b695b5f39e77475a07aa0ea89489fb0a9659337 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Warning : this file has been generated, you shouldn't edit it"""
from os import linesep
from pyleecan.Classes.check import check_init_dict, check_var, raise_
from pyleecan.Functions.save import save
from pyleecan.Classes.frozen import FrozenClass
# Import all class method
# Try/catch to remove unnecessary dependencies in unused method
try:
from pyleecan.Methods.Mesh.Mesh.set_submesh import set_submesh
except ImportError as error:
set_submesh = error
try:
from pyleecan.Methods.Mesh.Mesh.get_all_node_coord import get_all_node_coord
except ImportError as error:
get_all_node_coord = error
try:
from pyleecan.Methods.Mesh.Mesh.add_element import add_element
except ImportError as error:
add_element = error
try:
from pyleecan.Methods.Mesh.Mesh.get_all_connectivity import get_all_connectivity
except ImportError as error:
get_all_connectivity = error
try:
from pyleecan.Methods.Mesh.Mesh.get_connectivity import get_connectivity
except ImportError as error:
get_connectivity = error
try:
from pyleecan.Methods.Mesh.Mesh.get_new_tag import get_new_tag
except ImportError as error:
get_new_tag = error
try:
from pyleecan.Methods.Mesh.Mesh.interface import interface
except ImportError as error:
interface = error
try:
from pyleecan.Methods.Mesh.Mesh.get_node_tags import get_node_tags
except ImportError as error:
get_node_tags = error
try:
from pyleecan.Methods.Mesh.Mesh.get_vertice import get_vertice
except ImportError as error:
get_vertice = error
from pyleecan.Classes.check import InitUnKnowClassError
from pyleecan.Classes.Element import Element
from pyleecan.Classes.Node import Node
class Mesh(FrozenClass):
"""Gather the mesh storage format"""
VERSION = 1
# Check ImportError to remove unnecessary dependencies in unused method
# cf Methods.Mesh.Mesh.set_submesh
if isinstance(set_submesh, ImportError):
set_submesh = property(
fget=lambda x: raise_(
ImportError("Can't use Mesh method set_submesh: " + str(set_submesh))
)
)
else:
set_submesh = set_submesh
# cf Methods.Mesh.Mesh.get_all_node_coord
if isinstance(get_all_node_coord, ImportError):
get_all_node_coord = property(
fget=lambda x: raise_(
ImportError(
"Can't use Mesh method get_all_node_coord: "
+ str(get_all_node_coord)
)
)
)
else:
get_all_node_coord = get_all_node_coord
# cf Methods.Mesh.Mesh.add_element
if isinstance(add_element, ImportError):
add_element = property(
fget=lambda x: raise_(
ImportError("Can't use Mesh method add_element: " + str(add_element))
)
)
else:
add_element = add_element
# cf Methods.Mesh.Mesh.get_all_connectivity
if isinstance(get_all_connectivity, ImportError):
get_all_connectivity = property(
fget=lambda x: raise_(
ImportError(
"Can't use Mesh method get_all_connectivity: "
+ str(get_all_connectivity)
)
)
)
else:
get_all_connectivity = get_all_connectivity
# cf Methods.Mesh.Mesh.get_connectivity
if isinstance(get_connectivity, ImportError):
get_connectivity = property(
fget=lambda x: raise_(
ImportError(
"Can't use Mesh method get_connectivity: " + str(get_connectivity)
)
)
)
else:
get_connectivity = get_connectivity
# cf Methods.Mesh.Mesh.get_new_tag
if isinstance(get_new_tag, ImportError):
get_new_tag = property(
fget=lambda x: raise_(
ImportError("Can't use Mesh method get_new_tag: " + str(get_new_tag))
)
)
else:
get_new_tag = get_new_tag
# cf Methods.Mesh.Mesh.interface
if isinstance(interface, ImportError):
interface = property(
fget=lambda x: raise_(
ImportError("Can't use Mesh method interface: " + str(interface))
)
)
else:
interface = interface
# cf Methods.Mesh.Mesh.get_node_tags
if isinstance(get_node_tags, ImportError):
get_node_tags = property(
fget=lambda x: raise_(
ImportError(
"Can't use Mesh method get_node_tags: " + str(get_node_tags)
)
)
)
else:
get_node_tags = get_node_tags
# cf Methods.Mesh.Mesh.get_vertice
if isinstance(get_vertice, ImportError):
get_vertice = property(
fget=lambda x: raise_(
ImportError("Can't use Mesh method get_vertice: " + str(get_vertice))
)
)
else:
get_vertice = get_vertice
# save method is available in all object
save = save
def __init__(self, element=dict(), node=-1, submesh=list(), init_dict=None):
"""Constructor of the class. Can be use in two ways :
- __init__ (arg1 = 1, arg3 = 5) every parameters have name and default values
for Matrix, None will initialise the property with an empty Matrix
for pyleecan type, None will call the default constructor
- __init__ (init_dict = d) d must be a dictionnary wiht every properties as keys
ndarray or list can be given for Vector and Matrix
object or dict can be given for pyleecan Object"""
if node == -1:
node = Node()
if init_dict is not None: # Initialisation by dict
check_init_dict(init_dict, ["element", "node", "submesh"])
# Overwrite default value with init_dict content
if "element" in list(init_dict.keys()):
element = init_dict["element"]
if "node" in list(init_dict.keys()):
node = init_dict["node"]
if "submesh" in list(init_dict.keys()):
submesh = init_dict["submesh"]
# Initialisation by argument
self.parent = None
# element can be None or a list of Element object
self.element = dict()
if type(element) is dict:
for key, obj in element.items():
if isinstance(obj, dict):
# Check that the type is correct (including daughter)
class_name = obj.get("__class__")
if class_name not in ["Element", "ElementMat"]:
raise InitUnKnowClassError(
"Unknow class name "
+ class_name
+ " in init_dict for element"
)
# Dynamic import to call the correct constructor
module = __import__(
"pyleecan.Classes." + class_name, fromlist=[class_name]
)
class_obj = getattr(module, class_name)
self.element[key] = class_obj(init_dict=obj)
else:
element = element # Should raise an error
elif element is None:
self.element = dict()
else:
self.element = element # Should raise an error
# node can be None, a Node object or a dict
if isinstance(node, dict):
# Check that the type is correct (including daughter)
class_name = node.get("__class__")
if class_name not in ["Node", "NodeMat"]:
raise InitUnKnowClassError(
"Unknow class name " + class_name + " in init_dict for node"
)
# Dynamic import to call the correct constructor
module = __import__("pyleecan.Classes." + class_name, fromlist=[class_name])
class_obj = getattr(module, class_name)
self.node = class_obj(init_dict=node)
else:
self.node = node
# submesh can be None or a list of Mesh object
self.submesh = list()
if type(submesh) is list:
for obj in submesh:
if obj is None: # Default value
self.submesh.append(Mesh())
elif isinstance(obj, dict):
self.submesh.append(Mesh(init_dict=obj))
else:
self.submesh.append(obj)
elif submesh is None:
self.submesh = list()
else:
self.submesh = submesh
# The class is frozen, for now it's impossible to add new properties
self._freeze()
def __str__(self):
"""Convert this objet in a readeable string (for print)"""
Mesh_str = ""
if self.parent is None:
Mesh_str += "parent = None " + linesep
else:
Mesh_str += "parent = " + str(type(self.parent)) + " object" + linesep
if len(self.element) == 0:
Mesh_str += "element = []"
for key, obj in self.element.items():
Mesh_str += (
"element["
+ key
+ "] = "
+ str(self.element[key].as_dict())
+ "\n"
+ linesep
+ linesep
)
if self.node is not None:
Mesh_str += "node = " + str(self.node.as_dict()) + linesep + linesep
else:
Mesh_str += "node = None" + linesep + linesep
if len(self.submesh) == 0:
Mesh_str += "submesh = []"
for ii in range(len(self.submesh)):
Mesh_str += (
"submesh[" + str(ii) + "] = " + str(self.submesh[ii].as_dict()) + "\n"
)
return Mesh_str
def __eq__(self, other):
"""Compare two objects (skip parent)"""
if type(other) != type(self):
return False
if other.element != self.element:
return False
if other.node != self.node:
return False
if other.submesh != self.submesh:
return False
return True
def as_dict(self):
"""Convert this objet in a json seriable dict (can be use in __init__)
"""
Mesh_dict = dict()
Mesh_dict["element"] = dict()
for key, obj in self.element.items():
Mesh_dict["element"][key] = obj.as_dict()
if self.node is None:
Mesh_dict["node"] = None
else:
Mesh_dict["node"] = self.node.as_dict()
Mesh_dict["submesh"] = list()
for obj in self.submesh:
Mesh_dict["submesh"].append(obj.as_dict())
# The class name is added to the dict fordeserialisation purpose
Mesh_dict["__class__"] = "Mesh"
return Mesh_dict
def _set_None(self):
"""Set all the properties to None (except pyleecan object)"""
for key, obj in self.element.items():
obj._set_None()
if self.node is not None:
self.node._set_None()
for obj in self.submesh:
obj._set_None()
def _get_element(self):
"""getter of element"""
for key, obj in self._element.items():
if obj is not None:
obj.parent = self
return self._element
def _set_element(self, value):
"""setter of element"""
check_var("element", value, "{Element}")
self._element = value
# Storing connectivity
# Type : {Element}
element = property(
fget=_get_element, fset=_set_element, doc=u"""Storing connectivity"""
)
def _get_node(self):
"""getter of node"""
return self._node
def _set_node(self, value):
"""setter of node"""
check_var("node", value, "Node")
self._node = value
if self._node is not None:
self._node.parent = self
# Storing nodes
# Type : Node
node = property(fget=_get_node, fset=_set_node, doc=u"""Storing nodes""")
def _get_submesh(self):
"""getter of submesh"""
for obj in self._submesh:
if obj is not None:
obj.parent = self
return self._submesh
def _set_submesh(self, value):
"""setter of submesh"""
check_var("submesh", value, "[Mesh]")
self._submesh = value
for obj in self._submesh:
if obj is not None:
obj.parent = self
# Storing submeshes. Node and element numbers/tags or group must be the same.
# Type : [Mesh]
submesh = property(
fget=_get_submesh,
fset=_set_submesh,
doc=u"""Storing submeshes. Node and element numbers/tags or group must be the same.""",
)
| 34.268097 | 95 | 0.577687 |
d79a6947adefe791d3ae73eede4a82f15c8793d2 | 3,692 | py | Python | superset/db_engine_specs/pinot.py | amitmiran137/incubator-superset | 8593a13f00754973117beb0a95e7aca7fec4b00e | [
"Apache-2.0"
] | 1 | 2021-02-01T00:18:23.000Z | 2021-02-01T00:18:23.000Z | superset/db_engine_specs/pinot.py | amitmiran137/incubator-superset | 8593a13f00754973117beb0a95e7aca7fec4b00e | [
"Apache-2.0"
] | 4 | 2021-03-02T01:53:30.000Z | 2021-10-06T22:56:01.000Z | superset/db_engine_specs/pinot.py | amitmiran137/incubator-superset | 8593a13f00754973117beb0a95e7aca7fec4b00e | [
"Apache-2.0"
] | 4 | 2020-09-23T05:48:37.000Z | 2020-10-22T09:31:20.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
from typing import Dict, List, Optional
from sqlalchemy.sql.expression import ColumnClause, ColumnElement
from superset.db_engine_specs.base import BaseEngineSpec, TimestampExpression
class PinotEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
    engine = "pinot"
    engine_name = "Apache Pinot"
    allows_subqueries = False
    allows_joins = False
    allows_column_aliases = False

    # Pinot does its own conversion below
    _time_grain_expressions: Dict[Optional[str], str] = {
        "PT1S": "1:SECONDS",
        "PT1M": "1:MINUTES",
        "PT1H": "1:HOURS",
        "P1D": "1:DAYS",
        "P1W": "1:WEEKS",
        "P1M": "1:MONTHS",
        "P0.25Y": "3:MONTHS",
        "P1Y": "1:YEARS",
    }

    # Translation of Python strftime tokens to Java SimpleDateFormat tokens.
    _python_to_java_time_patterns: Dict[str, str] = {
        "%Y": "yyyy",
        "%m": "MM",
        "%d": "dd",
        "%H": "HH",
        "%M": "mm",
        "%S": "ss",
    }

    @classmethod
    def get_timestamp_expr(
        cls,
        col: ColumnClause,
        pdf: Optional[str],
        time_grain: Optional[str],
        type_: Optional[str] = None,
    ) -> TimestampExpression:
        """Build a Pinot DATETIMECONVERT expression bucketing ``col`` to ``time_grain``.

        :param col: datetime column to bucket
        :param pdf: the column's python datetime format, or ``epoch_s``/``epoch_ms``
        :param time_grain: ISO 8601 duration key into ``_time_grain_expressions``
        :param type_: unused, kept for signature compatibility with the base class
        :raises ValueError: if ``pdf`` is not a valid strftime format
        :raises NotImplementedError: if ``time_grain`` has no Pinot equivalent
        """
        is_epoch = pdf in ("epoch_s", "epoch_ms")
        # The DATETIMECONVERT pinot udf is documented at
        # Per https://github.com/apache/incubator-pinot/wiki/dateTimeConvert-UDF
        # We are not really converting any time units, just bucketing them.
        tf = ""
        if not is_epoch:
            try:
                # Validate the format by rendering today's date with it.
                today = datetime.datetime.today()
                today.strftime(str(pdf))
            except ValueError:
                raise ValueError(f"Invalid column datetime format:{str(pdf)}")
            java_date_format = str(pdf)
            for (
                python_pattern,
                java_pattern,
            ) in cls._python_to_java_time_patterns.items():
                # BUG FIX: str.replace returns a new string; the previous code
                # discarded the result, so strftime tokens were never translated.
                java_date_format = java_date_format.replace(
                    python_pattern, java_pattern
                )
            tf = f"1:SECONDS:SIMPLE_DATE_FORMAT:{java_date_format}"
        else:
            seconds_or_ms = "MILLISECONDS" if pdf == "epoch_ms" else "SECONDS"
            tf = f"1:{seconds_or_ms}:EPOCH"
        if time_grain:
            granularity = cls.get_time_grain_expressions().get(time_grain)
            if not granularity:
                raise NotImplementedError("No pinot grain spec for " + str(time_grain))
        else:
            # No grain requested: emit the raw column template untouched.
            return TimestampExpression(
                f"{{col}}", col  # pylint: disable=f-string-without-interpolation
            )
        # In pinot the output is a string since there is no timestamp column like pg
        time_expr = f"DATETIMECONVERT({{col}}, '{tf}', '{tf}', '{granularity}')"
        return TimestampExpression(time_expr, col)

    @classmethod
    def make_select_compatible(
        cls, groupby_exprs: Dict[str, ColumnElement], select_exprs: List[ColumnElement]
    ) -> List[ColumnElement]:
        """Pinot needs no adjustment; return the select expressions unchanged."""
        return select_exprs
| 36.92 | 87 | 0.631636 |
7b0d12b6b2659304bdc6174401e3d97929ed727d | 1,532 | py | Python | tests/unit/test_keys/test_enums.py | joeblackwaslike/gpgkeyring | 65d595c2db2007eff400f929307fb3a036b7069c | [
"MIT"
] | null | null | null | tests/unit/test_keys/test_enums.py | joeblackwaslike/gpgkeyring | 65d595c2db2007eff400f929307fb3a036b7069c | [
"MIT"
] | null | null | null | tests/unit/test_keys/test_enums.py | joeblackwaslike/gpgkeyring | 65d595c2db2007eff400f929307fb3a036b7069c | [
"MIT"
] | null | null | null | import itertools
import pytest
import gpgkeyring
from ...helpers.unit import testdata
class TestKeyTypes:
    """Checks gpgkeyring.keys.Types members: name lookup, str form and coercion."""

    # (raw value, expected enum) pairs taken from the library's internal map.
    type_values = gpgkeyring.keys._KEYTYPE_MAP.items()

    @pytest.fixture(params=testdata.KEY_TYPES)
    def key_type(self, request):
        # Parametrized fixture: each test below runs once per known key type.
        yield request.param

    def test_key_type_value(self, key_type):
        # Looking up a member by its name must return the same member.
        assert getattr(gpgkeyring.keys.Types, key_type.name) == key_type

    def test_key_type_str(self, key_type):
        # str() of a member is its raw value.
        assert str(key_type) == key_type.value

    def test_key_type_eq_str(self, key_type):
        # Members compare equal to their own string form.
        assert key_type == str(key_type)

    @pytest.mark.parametrize("raw, expected", type_values)
    def test_coerce_keytype(self, raw, expected):
        assert gpgkeyring.keys.coerce_keytype(raw) == expected
class TestKeyValidities:
    """Checks gpgkeyring.keys.Validity members and trust/validity coercion."""

    value_map = gpgkeyring.keys._VALIDITY_MAP
    # Coercion is exercised over both the validity map and the trust map.
    trust_values = list(
        itertools.chain(value_map.items(), gpgkeyring.trust._TRUST_MAP.items())
    )

    @pytest.fixture(params=testdata.KEY_VALIDITIES)
    def validity(self, request):
        # Parametrized fixture: each test below runs once per known validity.
        yield request.param

    def test_validity_value(self, validity):
        # Looking up a member by its name must return the same member.
        assert getattr(gpgkeyring.keys.Validity, validity.name) == validity

    def test_validity_str(self, validity):
        # str() of a member is its raw value.
        assert str(validity) == validity.value

    def test_validity_eq_str(self, validity):
        # Members compare equal to their own string form.
        assert validity == str(validity)

    @pytest.mark.parametrize("raw, expected", trust_values)
    def test_coerce_trust_validity(self, raw, expected):
        assert gpgkeyring.keys.coerce_trust_validity(raw) == expected
| 29.461538 | 79 | 0.719321 |
064f41ccb03b52314ec61bd9671cb25d6cfb7752 | 3,375 | py | Python | obfsproxy/test/test_aes.py | Samdney/obfsproxy | 2bf9d096bb45a4e6c69f1cbdc3d2565f54a44efc | [
"BSD-3-Clause"
] | 101 | 2015-01-24T07:37:03.000Z | 2022-01-22T15:38:44.000Z | obfsproxy/test/test_aes.py | david415/obfsproxy | ea0e1b2b62be9113155f25f53baf5fce4392c430 | [
"BSD-3-Clause"
] | 1 | 2021-04-07T15:09:54.000Z | 2021-12-01T03:23:58.000Z | obfsproxy/test/test_aes.py | david415/obfsproxy | ea0e1b2b62be9113155f25f53baf5fce4392c430 | [
"BSD-3-Clause"
] | 29 | 2015-05-11T09:45:43.000Z | 2020-02-22T17:50:27.000Z | import unittest
from Crypto.Cipher import AES
from Crypto.Util import Counter
import obfsproxy.common.aes as aes
import twisted.trial.unittest
class testAES_CTR_128_NIST(twisted.trial.unittest.TestCase):
    """Checks AES-128-CTR against the NIST SP 800-38A test vectors (F.5.1)."""

    def _helper_test_vector(self, input_block, output_block, plaintext, ciphertext):
        # The counter must equal the expected input block before each encryption.
        self.assertEqual(long(input_block.encode('hex'), 16), self.ctr.next_value())
        self.assertEqual(self.cipher.encrypt(plaintext), ciphertext)
        # XXX how do we extract the keystream out of the AES object?

    def test_nist(self):
        # Prepare the cipher
        key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c"
        iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
        self.ctr = Counter.new(128, initial_value=long(iv.encode('hex'), 16))
        self.cipher = AES.new(key, AES.MODE_CTR, counter=self.ctr)
        # One tuple per NIST vector: (input_block, output_block, plaintext, ciphertext)
        vectors = [
            ("\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
             "\xec\x8c\xdf\x73\x98\x60\x7c\xb0\xf2\xd2\x16\x75\xea\x9e\xa1\xe4",
             "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a",
             "\x87\x4d\x61\x91\xb6\x20\xe3\x26\x1b\xef\x68\x64\x99\x0d\xb6\xce"),
            ("\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xff\x00",
             "\x36\x2b\x7c\x3c\x67\x73\x51\x63\x18\xa0\x77\xd7\xfc\x50\x73\xae",
             "\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51",
             "\x98\x06\xf6\x6b\x79\x70\xfd\xff\x86\x17\x18\x7b\xb9\xff\xfd\xff"),
            ("\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xff\x01",
             "\x6a\x2c\xc3\x78\x78\x89\x37\x4f\xbe\xb4\xc8\x1b\x17\xba\x6c\x44",
             "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11\xe5\xfb\xc1\x19\x1a\x0a\x52\xef",
             "\x5a\xe4\xdf\x3e\xdb\xd5\xd3\x5e\x5b\x4f\x09\x02\x0d\xb0\x3e\xab"),
            ("\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xff\x02",
             "\xe8\x9c\x39\x9f\xf0\xf1\x98\xc6\xd4\x0a\x31\xdb\x15\x6c\xab\xfe",
             "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
             "\x1e\x03\x1d\xda\x2f\xbe\x03\xd1\x79\x21\x70\xa0\xf3\x00\x9c\xee"),
        ]
        for input_block, output_block, plaintext, ciphertext in vectors:
            self._helper_test_vector(input_block, output_block, plaintext, ciphertext)
class testAES_CTR_128_simple(twisted.trial.unittest.TestCase):
def test_encrypt_decrypt_small_ASCII(self):
"""
Validate that decryption and encryption work as intended on a small ASCII string.
"""
self.key = "\xe3\xb0\xc4\x42\x98\xfc\x1c\x14\x9a\xfb\xf4\xc8\x99\x6f\xb9\x24"
self.iv = "\x27\xae\x41\xe4\x64\x9b\x93\x4c\xa4\x95\x99\x1b\x78\x52\xb8\x55"
test_string = "This unittest kills fascists."
cipher1 = aes.AES_CTR_128(self.key, self.iv)
cipher2 = aes.AES_CTR_128(self.key, self.iv)
ct = cipher1.crypt(test_string)
pt = cipher2.crypt(ct)
self.assertEqual(test_string, pt)
if __name__ == '__main__':
unittest.main()
| 44.407895 | 89 | 0.680889 |
1799954bd6babf4d72bfe2bf8a2e40f5f9023747 | 3,664 | py | Python | retrain.py | zishansami102/Recommendation-Engine | 14bb4c010ea9801b429bd8477b211d0d67bad3f1 | [
"Apache-2.0"
] | 17 | 2017-09-08T21:16:19.000Z | 2021-01-18T09:50:55.000Z | retrain.py | zishansami102/Recommendation-Engine | 14bb4c010ea9801b429bd8477b211d0d67bad3f1 | [
"Apache-2.0"
] | null | null | null | retrain.py | zishansami102/Recommendation-Engine | 14bb4c010ea9801b429bd8477b211d0d67bad3f1 | [
"Apache-2.0"
] | 7 | 2017-11-07T16:01:46.000Z | 2020-04-21T13:51:03.000Z | import tensorflow as tf
import time
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from remtime import *
from collections import deque
from remtime import printTime
# from matplotlib import pyplot
# from mpl_toolkits.mplot3d import Axes3D
# top_param
LEARNING_RATE = 0.002
TS_BATCH_SIZE = 1000
N_EPOCHS = 2000
REG_PENALTY = 0.25
NUM_FEAT = 16
# LEARNING_RATE = 0.001
# BATCH_SIZE = 500
# TS_BATCH_SIZE = 1000
# N_EPOCHS = 15
# REG_PENALTY = 0.05
# NUM_FEAT = 50
#w_movie = tf.Variable(tf.random_normal([NUM_MOVIES, NUM_FEAT])/np.sqrt(NUM_MOVIES))
def CollabFilterring(user_batch, movie_batch):
# Access saved Variables directly
# print(sess.run('cost:0'))
# This will print 2, which is the value of bias that we saved
# Now, let's access and create placeholders variables and
# create feed-dict to feed new data
weights_ = np.loadtxt("wmovie.csv",delimiter=',').astype(np.float32)
biases_ = np.loadtxt("bmovie.csv",delimiter=',').astype(np.float32)
w_user = tf.Variable(tf.random_normal([1, NUM_FEAT])/np.sqrt(1))
w_movie = weights_
batch_w_user = tf.nn.embedding_lookup(w_user, user_batch)
batch_w_movie = tf.nn.embedding_lookup(w_movie, movie_batch)
bias = tf.Variable(tf.zeros([1]))
bias_user = tf.Variable(tf.zeros([1]))
#bias_movie = tf.Variable(tf.zeros([NUM_MOVIES]))
bias_movie = biases_
batch_bias_user = tf.nn.embedding_lookup(bias_user, user_batch)
batch_bias_movie = tf.nn.embedding_lookup(bias_movie, movie_batch)
output = tf.reduce_sum(tf.multiply(batch_w_user, batch_w_movie), 1)
output = tf.add(output, bias)
output = tf.add(output, batch_bias_movie)
output = tf.add(output, batch_bias_user, name='output')
cost_reg = REG_PENALTY*tf.add(tf.nn.l2_loss(batch_w_movie), tf.nn.l2_loss(batch_w_user))
# cost_l2 = tf.reduce_mean(tf.pow(output - rating_batch, 2))
# cost_reg = 0
return output, cost_reg
def train_nn(train_data, user_id):
with tf.Session() as sess:
user_batch = tf.placeholder(tf.int32, [None], name='user_batch')
movie_batch = tf.placeholder(tf.int32, [None], name='movie_batch')
rating_batch = tf.placeholder(tf.float32, [None], name='rating_batch')
prediction, cost_reg = CollabFilterring(user_batch, movie_batch)
cost_l2 = tf.nn.l2_loss(tf.subtract(prediction, rating_batch))
cost = tf.add(cost_l2, cost_reg)
#default learning rate = 0.001
optimizer = tf.train.AdamOptimizer(learning_rate=LEARNING_RATE).minimize(cost)
saver2 = tf.train.Saver()
sess.run(tf.global_variables_initializer())
for epoch in range(N_EPOCHS):
stime = time.time()
np.random.shuffle(train_data)
_, c, pred_batch = sess.run([optimizer, cost, prediction], feed_dict = {user_batch: train_data[:,2], movie_batch: train_data[:,0], rating_batch: train_data[:,1]})
pred_batch = np.clip(pred_batch, 0, 5.0)
print("E loss:"+str(round(np.sqrt(np.mean(np.power(pred_batch - train_data[:,1], 2))),2)))
print train_data[:,1]
print np.around(pred_batch*2)/2
pred_batch = sess.run([prediction], feed_dict = {user_batch: test_data[:,1], movie_batch: test_data[:,0]})
pred_batch = np.clip(pred_batch, 0, 5.0)
print np.around(pred_batch*2)/2
saver2.save(sess, str(user_id))
data = np.zeros(shape=(5,3))
user_Id = 0
ratings=np.array([2,4.0,4,5,5])
movieId=np.array([822, 895, 1129, 1176, 1942])
data[:,2] = user_Id
data[:,1] = ratings
data[:,0] = movieId.astype(int)
train_data = data
data = np.zeros(shape=(15,2))
user_Id = 0
movieId=np.array([10389, 11289, 470, 517, 8476, 6, 191, 312, 330, 481, 499, 524, 282, 577, 1331])
data[:,1] = user_Id
data[:,0] = movieId.astype(int)
test_data=data
print("Training starts here....")
train_nn(train_data, 0)
| 32.424779 | 165 | 0.727893 |
3dcc524b72bbf91e1ddda943deb98a736899044e | 9,930 | py | Python | chainer/links/connection/gru.py | disktnk/chainer | 133798db470f6fd95973b882b9ccbd0c9726ac13 | [
"MIT"
] | 90 | 2017-02-23T04:04:47.000Z | 2020-04-09T12:06:50.000Z | chainer/links/connection/gru.py | disktnk/chainer | 133798db470f6fd95973b882b9ccbd0c9726ac13 | [
"MIT"
] | 7 | 2017-07-23T13:38:06.000Z | 2018-07-10T07:09:03.000Z | chainer/links/connection/gru.py | disktnk/chainer | 133798db470f6fd95973b882b9ccbd0c9726ac13 | [
"MIT"
] | 32 | 2017-02-28T07:40:38.000Z | 2021-02-17T11:33:09.000Z | import numpy
from chainer.functions.activation import sigmoid
from chainer.functions.activation import tanh
from chainer.functions.math import linear_interpolate
from chainer import link
from chainer.links.connection import linear
from chainer import variable
class GRUBase(link.Chain):
def __init__(self, in_size, out_size, init=None,
inner_init=None, bias_init=None):
super(GRUBase, self).__init__()
with self.init_scope():
self.W_r = linear.Linear(
in_size, out_size, initialW=init, initial_bias=bias_init)
self.U_r = linear.Linear(
out_size, out_size, initialW=inner_init,
initial_bias=bias_init)
self.W_z = linear.Linear(
in_size, out_size, initialW=init, initial_bias=bias_init)
self.U_z = linear.Linear(
out_size, out_size, initialW=inner_init,
initial_bias=bias_init)
self.W = linear.Linear(
in_size, out_size, initialW=init, initial_bias=bias_init)
self.U = linear.Linear(
out_size, out_size, initialW=inner_init,
initial_bias=bias_init)
class StatelessGRU(GRUBase):
"""Stateless Gated Recurrent Unit function (GRU).
GRU function has six parameters :math:`W_r`, :math:`W_z`, :math:`W`,
:math:`U_r`, :math:`U_z`, and :math:`U`.
The three parameters :math:`W_r`, :math:`W_z`, and :math:`W` are
:math:`n \\times m` matrices, and the others :math:`U_r`, :math:`U_z`,
and :math:`U` are :math:`n \\times n` matrices, where :math:`m` is the
length of input vectors and :math:`n` is the length of hidden vectors.
Given two inputs a previous hidden vector :math:`h` and an input vector
:math:`x`, GRU returns the next hidden vector :math:`h'` defined as
.. math::
r &=& \\sigma(W_r x + U_r h), \\\\
z &=& \\sigma(W_z x + U_z h), \\\\
\\bar{h} &=& \\tanh(W x + U (r \\odot h)), \\\\
h' &=& (1 - z) \\odot h + z \\odot \\bar{h},
where :math:`\\sigma` is the sigmoid function, and :math:`\\odot` is the
element-wise product.
As the name indicates, :class:`~chainer.links.StatelessGRU` is *stateless*,
meaning that it does not hold the value of
hidden vector :math:`h`.
For a *stateful* GRU, use :class:`~chainer.links.StatefulGRU`.
Args:
in_size(int): Dimension of input vector :math:`x`.
If ``None``, parameter initialization will be deferred
until the first forward data pass
at which time the size will be determined.
out_size(int): Dimension of hidden vector :math:`h`,
:math:`\\bar{h}` and :math:`h'`.
See:
- `On the Properties of Neural Machine Translation: Encoder-Decoder
Approaches <https://www.aclweb.org/anthology/W14-4012>`_
[Cho+, SSST2014].
- `Empirical Evaluation of Gated Recurrent Neural Networks on Sequence
Modeling <https://arxiv.org/abs/1412.3555>`_
[Chung+NIPS2014 DLWorkshop].
.. seealso:: :class:`~chainer.links.StatefulGRU`
.. admonition:: Example
There are several ways to make a ``StatelessGRU`` link.
Let ``x`` be a two-dimensional input array:
>>> in_size = 10
>>> out_size = 20
>>> x = np.zeros((1, in_size), dtype=np.float32)
>>> h = np.zeros((1, out_size), dtype=np.float32)
1. Give both ``in_size`` and ``out_size`` arguments:
>>> l = L.StatelessGRU(in_size, out_size)
>>> h_new = l(h, x)
>>> h_new.shape
(1, 20)
2. Omit ``in_size`` argument or fill it with ``None``:
>>> l = L.StatelessGRU(None, out_size)
>>> h_new = l(h, x)
>>> h_new.shape
(1, 20)
"""
def __call__(self, h, x):
r = sigmoid.sigmoid(self.W_r(x) + self.U_r(h))
z = sigmoid.sigmoid(self.W_z(x) + self.U_z(h))
h_bar = tanh.tanh(self.W(x) + self.U(r * h))
h_new = linear_interpolate.linear_interpolate(z, h_bar, h)
return h_new
class StatefulGRU(GRUBase):
"""Stateful Gated Recurrent Unit function (GRU).
Stateful GRU function has six parameters :math:`W_r`, :math:`W_z`,
:math:`W`, :math:`U_r`, :math:`U_z`, and :math:`U`.
The three parameters :math:`W_r`, :math:`W_z`, and :math:`W` are
:math:`n \\times m` matrices, and the others :math:`U_r`, :math:`U_z`,
and :math:`U` are :math:`n \\times n` matrices, where :math:`m` is the
length of input vectors and :math:`n` is the length of hidden vectors.
Given input vector :math:`x`, Stateful GRU returns the next
hidden vector :math:`h'` defined as
.. math::
r &=& \\sigma(W_r x + U_r h), \\\\
z &=& \\sigma(W_z x + U_z h), \\\\
\\bar{h} &=& \\tanh(W x + U (r \\odot h)), \\\\
h' &=& (1 - z) \\odot h + z \\odot \\bar{h},
where :math:`h` is current hidden vector.
As the name indicates, :class:`~chainer.links.StatefulGRU` is *stateful*,
meaning that it also holds the next hidden vector `h'` as a state.
For a *stateless* GRU, use :class:`~chainer.links.StatelessGRU`.
Args:
in_size(int): Dimension of input vector :math:`x`.
out_size(int): Dimension of hidden vector :math:`h`.
init: Initializer for GRU's input units (:math:`W`).
It is a callable that takes ``numpy.ndarray`` or
``cupy.ndarray`` and edits its value.
If it is ``None``, the default initializer is used.
inner_init: Initializer for the GRU's inner
recurrent units (:math:`U`).
It is a callable that takes ``numpy.ndarray`` or
``cupy.ndarray`` and edits its value.
If it is ``None``, the default initializer is used.
bias_init: Bias initializer.
It is a callable that takes ``numpy.ndarray`` or
``cupy.ndarray`` and edits its value.
If ``None``, the bias is set to zero.
Attributes:
h(~chainer.Variable): Hidden vector that indicates the state of
:class:`~chainer.links.StatefulGRU`.
.. seealso::
* :class:`~chainer.links.StatelessGRU`
* :class:`~chainer.links.GRU`: an alias of
:class:`~chainer.links.StatefulGRU`
.. admonition:: Example
There are several ways to make a ``StatefulGRU`` link.
Let ``x`` be a two-dimensional input array:
>>> in_size = 10
>>> out_size = 20
>>> x = np.zeros((1, in_size), dtype=np.float32)
1. Give only ``in_size`` and ``out_size`` arguments:
>>> l = L.StatefulGRU(in_size, out_size)
>>> h_new = l(x)
>>> h_new.shape
(1, 20)
2. Give all optional arguments:
>>> init = np.zeros((out_size, in_size), dtype=np.float32)
>>> inner_init = np.zeros((out_size, out_size), dtype=np.float32)
>>> bias = np.zeros((1, out_size), dtype=np.float32)
>>> l = L.StatefulGRU(in_size, out_size, init=init,
... inner_init=inner_init, bias_init=bias)
>>> h_new = l(x)
>>> h_new.shape
(1, 20)
"""
def __init__(self, in_size, out_size, init=None,
inner_init=None, bias_init=0):
super(StatefulGRU, self).__init__(
in_size, out_size, init, inner_init, bias_init)
self.state_size = out_size
self.reset_state()
def to_cpu(self):
super(StatefulGRU, self).to_cpu()
if self.h is not None:
self.h.to_cpu()
def to_gpu(self, device=None):
super(StatefulGRU, self).to_gpu(device)
if self.h is not None:
self.h.to_gpu(device)
def set_state(self, h):
assert isinstance(h, variable.Variable)
h_ = h
if self.xp == numpy:
h_.to_cpu()
else:
h_.to_gpu(self._device_id)
self.h = h_
def reset_state(self):
self.h = None
def __call__(self, x):
z = self.W_z(x)
h_bar = self.W(x)
if self.h is not None:
r = sigmoid.sigmoid(self.W_r(x) + self.U_r(self.h))
z += self.U_z(self.h)
h_bar += self.U(r * self.h)
z = sigmoid.sigmoid(z)
h_bar = tanh.tanh(h_bar)
if self.h is not None:
h_new = linear_interpolate.linear_interpolate(z, h_bar, self.h)
else:
h_new = z * h_bar
self.h = h_new
return self.h
class GRU(StatefulGRU):
"""Stateful Gated Recurrent Unit function (GRU)
This is an alias of :class:`~chainer.links.StatefulGRU`.
.. warning::
In Chainer v1, ``GRU`` was *stateless*,
as opposed to the current implementation.
To align with LSTM links, we have changed
the naming convention from Chainer v2 so that the shorthand name
points the stateful links.
You can use :class:`~chainer.links.StatelessGRU` for stateless version,
whose implementation is identical to ``GRU`` in v1.
See issue `#2537 <https://github.com/chainer/chainer/issues/2537>`_
for details.
"""
def __call__(self, *args):
"""__call__(self, x)
Does forward propagation.
"""
n_args = len(args)
msg = ("Invalid argument. The length of GRU.__call__ must be 1. "
"But %d is given. " % n_args)
if n_args == 0 or n_args >= 3:
raise ValueError(msg)
elif n_args == 2:
msg += ("In Chainer v2, chainer.links.GRU is changed "
"from stateless to stateful. "
"One possiblity is you assume GRU to be stateless. "
"Use chainer.links.StatelessGRU instead.")
raise ValueError(msg)
return super(GRU, self).__call__(args[0])
| 34.842105 | 79 | 0.576032 |
0878e4e49c50c3dbb198350718a94ae89cb65b3a | 1,215 | py | Python | scripts/hasher.py | rspeer/exquisite-corpus | 3a6201583f40e4f29edb1d913eeb35f164511d78 | [
"MIT"
] | 28 | 2017-03-17T02:48:57.000Z | 2022-02-09T22:37:22.000Z | scripts/hasher.py | rspeer/exquisite-corpus | 3a6201583f40e4f29edb1d913eeb35f164511d78 | [
"MIT"
] | 17 | 2018-05-21T16:00:55.000Z | 2021-07-01T09:41:31.000Z | scripts/hasher.py | rspeer/exquisite-corpus | 3a6201583f40e4f29edb1d913eeb35f164511d78 | [
"MIT"
] | 4 | 2019-06-22T14:51:03.000Z | 2021-06-19T10:00:15.000Z | """
We would like to ignore a list of subreddits that would be sources of very bad
data. So that we don't have to do extensive, subjective research to make the
decision of which subreddits are bad, we exclude only the very worst:
subreddits that have been banned by the Reddit admins. Comments from these
subreddits that appear in the archive (from before they were banned) are
probably bad data.
This list _itself_ is bad. The list of banned subreddit names contains a high
density of hateful ideas, including racial slurs. We don't want to enshrine
this list in our code.
Therefore, what we commit to the code is the list of mmh3 hashes of the
subreddit names. The input is the text from
https://www.reddit.com/r/ListOfSubreddits/wiki/banned (which Reddit
unfortunately does not allow scripts to access), and the output is a list of
hashes suitable for pasting into reddit_ban_data.py.
"""
import mmh3
bad_hashes = set()
for line in open('extra/reddit-ban-list.txt'):
if line.startswith('/r/'):
name = line.strip()[3:].casefold()
name_hash = mmh3.hash(name)
bad_hashes.add(name_hash)
if __name__ == '__main__':
for ahash in sorted(bad_hashes):
print(f' {ahash},')
| 36.818182 | 78 | 0.742387 |
d027e76e36eb188fa47924c67e86a044d17e281d | 3,784 | py | Python | setup.py | mattaustin/django-thummer | 3eb06fd251a31da983417fefc906d30d0af16844 | [
"Apache-2.0"
] | null | null | null | setup.py | mattaustin/django-thummer | 3eb06fd251a31da983417fefc906d30d0af16844 | [
"Apache-2.0"
] | null | null | null | setup.py | mattaustin/django-thummer | 3eb06fd251a31da983417fefc906d30d0af16844 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011-2018 Matt Austin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals
import codecs
from os import path
from distutils.core import Command
from setuptools import find_packages, setup
from thummer import __license__, __title__, __url__, __version__
BASE_DIR = path.dirname(path.abspath(__file__))
# Get the long description from the README file
with codecs.open(path.join(BASE_DIR, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
class DjangoCommand(Command):
django_command_args = []
django_settings = {
'DATABASES': {'default': {'ENGINE': 'django.db.backends.sqlite3'}},
'INSTALLED_APPS': ['thummer'],
'MEDIA_ROOT': '/tmp/django-thummer/media/',
'MIDDLEWARE_CLASSES': [],
'ROOT_URLCONF': 'thummer.urls',
}
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(**self.django_settings)
django.setup()
return call_command(*self.django_command_args, verbosity=3)
class CheckCommand(DjangoCommand):
django_command_args = ['check']
class MakeMigrationsCommand(DjangoCommand):
django_command_args = ['makemigrations', 'thummer']
class TestCommand(DjangoCommand):
django_command_args = ['test', 'thummer.tests']
setup(
name=__title__,
version=__version__,
description='A website screenshot and thumbnailing app for Django.',
long_description=long_description,
url=__url__,
author='Matt Austin',
author_email='devops@mattaustin.com.au',
license=__license__,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.0',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics :: Capture',
],
keywords='thummer django website snapshot screenshot thumbnail',
packages=find_packages(),
cmdclass={
'djangocheck': CheckCommand,
'makemigrations': MakeMigrationsCommand,
'test': TestCommand
},
install_requires=[
'django>=1.8,!=1.9.*,!=1.10.*,<=2.1',
'pillow~=5.0',
'python-dateutil~=2.6',
'selenium~=3.9',
'sorl-thumbnail~=12.4',
],
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4',
extras_require={
'tests': [
'coverage~=4.5',
'freezegun~=0.3',
'mock~=2.0',
'pytz',
],
},
)
| 25.395973 | 75 | 0.635307 |
a7c8062fad5c6dae22c36079f3e889bfe77254c3 | 20,130 | py | Python | lib/spack/spack/architecture.py | carlabguillen/spack | 7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | lib/spack/spack/architecture.py | carlabguillen/spack | 7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | lib/spack/spack/architecture.py | carlabguillen/spack | 7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This module contains all the elements that are required to create an
architecture object. These include, the target processor, the operating system,
and the architecture platform (i.e. cray, darwin, linux, bgq, etc) classes.
On a multiple architecture machine, the architecture spec field can be set to
build a package against any target and operating system that is present on the
platform. On Cray platforms or any other architecture that has different front
and back end environments, the operating system will determine the method of
compiler
detection.
There are two different types of compiler detection:
1. Through the $PATH env variable (front-end detection)
2. Through the tcl module system. (back-end detection)
Depending on which operating system is specified, the compiler will be detected
using one of those methods.
For platforms such as linux and darwin, the operating system is autodetected
and the target is set to be x86_64.
The command line syntax for specifying an architecture is as follows:
target=<Target name> os=<OperatingSystem name>
If the user wishes to use the defaults, either target or os can be left out of
the command line and Spack will concretize using the default. These defaults
are set in the 'platforms/' directory which contains the different subclasses
for platforms. If the machine has multiple architectures, the user can
also enter front-end, or fe or back-end or be. These settings will concretize
to their respective front-end and back-end targets and operating systems.
Additional platforms can be added by creating a subclass of Platform
and adding it inside the platform directory.
Platforms are an abstract class that are extended by subclasses. If the user
wants to add a new type of platform (such as cray_xe), they can create a
subclass and set all the class attributes such as priority, front_target,
back_target, front_os, back_os. Platforms also contain a priority class
attribute. A lower number signifies higher priority. These numbers are
arbitrarily set and can be changed though often there isn't much need unless a
new platform is added and the user wants that to be detected first.
Targets are created inside the platform subclasses. Most architecture
(like linux, and darwin) will have only one target (x86_64) but in the case of
Cray machines, there is both a frontend and backend processor. The user can
specify which targets are present on front-end and back-end architecture
Depending on the platform, operating systems are either auto-detected or are
set. The user can set the front-end and back-end operating setting by the class
attributes front_os and back_os. The operating system as described earlier,
will be responsible for compiler detection.
"""
import functools
import inspect
import warnings
import six
import llnl.util.cpu as cpu
import llnl.util.tty as tty
from llnl.util.lang import memoized, list_modules, key_ordering
import spack.compiler
import spack.paths
import spack.error as serr
import spack.util.executable
import spack.version
from spack.util.naming import mod_to_class
from spack.util.spack_yaml import syaml_dict
class NoPlatformError(serr.SpackError):
def __init__(self):
super(NoPlatformError, self).__init__(
"Could not determine a platform for this machine.")
def _ensure_other_is_target(method):
"""Decorator to be used in dunder methods taking a single argument to
ensure that the argument is an instance of ``Target`` too.
"""
@functools.wraps(method)
def _impl(self, other):
if isinstance(other, six.string_types):
other = Target(other)
if not isinstance(other, Target):
return NotImplemented
return method(self, other)
return _impl
class Target(object):
def __init__(self, name, module_name=None):
"""Target models microarchitectures and their compatibility.
Args:
name (str or Microarchitecture):micro-architecture of the
target
module_name (str): optional module name to get access to the
current target. This is typically used on machines
like Cray (e.g. craype-compiler)
"""
if not isinstance(name, cpu.Microarchitecture):
name = cpu.targets.get(
name, cpu.generic_microarchitecture(name)
)
self.microarchitecture = name
self.module_name = module_name
@property
def name(self):
return self.microarchitecture.name
@_ensure_other_is_target
def __eq__(self, other):
return self.microarchitecture == other.microarchitecture and \
self.module_name == other.module_name
def __ne__(self, other):
# This method is necessary as long as we support Python 2. In Python 3
# __ne__ defaults to the implementation below
return not self == other
@_ensure_other_is_target
def __lt__(self, other):
# TODO: In the future it would be convenient to say
# TODO: `spec.architecture.target < other.architecture.target`
# TODO: and change the semantic of the comparison operators
# This is needed to sort deterministically specs in a list.
# It doesn't implement a total ordering semantic.
return self.microarchitecture.name < other.microarchitecture.name
def __hash__(self):
return hash((self.name, self.module_name))
@staticmethod
def from_dict_or_value(dict_or_value):
# A string here represents a generic target (like x86_64 or ppc64) or
# a custom micro-architecture
if isinstance(dict_or_value, six.string_types):
return Target(dict_or_value)
# TODO: From a dict we actually retrieve much more information than
# TODO: just the name. We can use that information to reconstruct an
# TODO: "old" micro-architecture or check the current definition.
target_info = dict_or_value
return Target(target_info['name'])
def to_dict_or_value(self):
"""Returns a dict or a value representing the current target.
String values are used to keep backward compatibility with generic
targets, like e.g. x86_64 or ppc64. More specific micro-architectures
will return a dictionary which contains information on the name,
features, vendor, generation and parents of the current target.
"""
# Generic targets represent either an architecture
# family (like x86_64) or a custom micro-architecture
if self.microarchitecture.vendor == 'generic':
return str(self)
return syaml_dict(
self.microarchitecture.to_dict(return_list_of_items=True)
)
def __repr__(self):
cls_name = self.__class__.__name__
fmt = cls_name + '({0}, {1})'
return fmt.format(repr(self.microarchitecture),
repr(self.module_name))
def __str__(self):
return str(self.microarchitecture)
def __contains__(self, cpu_flag):
return cpu_flag in self.microarchitecture
def optimization_flags(self, compiler):
"""Returns the flags needed to optimize for this target using
the compiler passed as argument.
Args:
compiler (CompilerSpec or Compiler): object that contains both the
name and the version of the compiler we want to use
"""
# Mixed toolchains are not supported yet
import spack.compilers
if isinstance(compiler, spack.compiler.Compiler):
if spack.compilers.is_mixed_toolchain(compiler):
msg = ('microarchitecture specific optimizations are not '
'supported yet on mixed compiler toolchains [check'
' {0.name}@{0.version} for further details]')
warnings.warn(msg.format(compiler))
return ''
# Try to check if the current compiler comes with a version number or
# has an unexpected suffix. If so, treat it as a compiler with a
# custom spec.
compiler_version = compiler.version
version_number, suffix = cpu.version_components(compiler.version)
if not version_number or suffix not in ('', 'apple'):
# Try to deduce the underlying version of the compiler, regardless
# of its name in compilers.yaml. Depending on where this function
# is called we might get either a CompilerSpec or a fully fledged
# compiler object.
import spack.spec
if isinstance(compiler, spack.spec.CompilerSpec):
compiler = spack.compilers.compilers_for_spec(compiler).pop()
try:
compiler_version = compiler.get_real_version()
except spack.util.executable.ProcessError as e:
# log this and just return compiler.version instead
tty.debug(str(e))
return self.microarchitecture.optimization_flags(
compiler.name, str(compiler_version)
)
@key_ordering
class Platform(object):
""" Abstract class that each type of Platform will subclass.
Will return a instance of it once it is returned.
"""
priority = None # Subclass sets number. Controls detection order
front_end = None
back_end = None
default = None # The default back end target. On cray ivybridge
front_os = None
back_os = None
default_os = None
reserved_targets = ['default_target', 'frontend', 'fe', 'backend', 'be']
reserved_oss = ['default_os', 'frontend', 'fe', 'backend', 'be']
def __init__(self, name):
self.targets = {}
self.operating_sys = {}
self.name = name
def add_target(self, name, target):
"""Used by the platform specific subclass to list available targets.
Raises an error if the platform specifies a name
that is reserved by spack as an alias.
"""
if name in Platform.reserved_targets:
raise ValueError(
"%s is a spack reserved alias "
"and cannot be the name of a target" % name)
self.targets[name] = target
def target(self, name):
"""This is a getter method for the target dictionary
that handles defaulting based on the values provided by default,
front-end, and back-end. This can be overwritten
by a subclass for which we want to provide further aliasing options.
"""
# TODO: Check if we can avoid using strings here
name = str(name)
if name == 'default_target':
name = self.default
elif name == 'frontend' or name == 'fe':
name = self.front_end
elif name == 'backend' or name == 'be':
name = self.back_end
return self.targets.get(name, None)
def add_operating_system(self, name, os_class):
""" Add the operating_system class object into the
platform.operating_sys dictionary
"""
if name in Platform.reserved_oss:
raise ValueError(
"%s is a spack reserved alias "
"and cannot be the name of an OS" % name)
self.operating_sys[name] = os_class
def operating_system(self, name):
if name == 'default_os':
name = self.default_os
if name == 'frontend' or name == "fe":
name = self.front_os
if name == 'backend' or name == 'be':
name = self.back_os
return self.operating_sys.get(name, None)
@classmethod
def setup_platform_environment(cls, pkg, env):
""" Subclass can override this method if it requires any
platform-specific build environment modifications.
"""
@classmethod
def detect(cls):
""" Subclass is responsible for implementing this method.
Returns True if the Platform class detects that
it is the current platform
and False if it's not.
"""
raise NotImplementedError()
def __repr__(self):
return self.__str__()
def __str__(self):
return self.name
def _cmp_key(self):
t_keys = ''.join(str(t._cmp_key()) for t in
sorted(self.targets.values()))
o_keys = ''.join(str(o._cmp_key()) for o in
sorted(self.operating_sys.values()))
return (self.name,
self.default,
self.front_end,
self.back_end,
self.default_os,
self.front_os,
self.back_os,
t_keys,
o_keys)
@key_ordering
class OperatingSystem(object):
""" Operating System will be like a class similar to platform extended
by subclasses for the specifics. Operating System will contain the
compiler finding logic. Instead of calling two separate methods to
find compilers we call find_compilers method for each operating system
"""
def __init__(self, name, version):
self.name = name.replace('-', '_')
self.version = str(version).replace('-', '_')
def __str__(self):
return "%s%s" % (self.name, self.version)
def __repr__(self):
return self.__str__()
def _cmp_key(self):
return (self.name, self.version)
def to_dict(self):
return syaml_dict([
('name', self.name),
('version', self.version)
])
@key_ordering
class Arch(object):
    """Architecture is now a class to help with setting attributes.

    Bundles a platform, an operating system, and a target into one
    comparable object.

    TODO: refactor so that we don't need this class.
    """
    def __init__(self, plat=None, os=None, target=None):
        self.platform = plat
        if plat and os:
            # Resolve the OS name through the platform's lookup.
            os = self.platform.operating_system(os)
        self.os = os
        if plat and target:
            # Likewise resolve the target name through the platform.
            target = self.platform.target(target)
        self.target = target

        # Hooks for parser to use when platform is set after target or os
        self.target_string = None
        self.os_string = None

    @property
    def concrete(self):
        # Concrete only when all three components are present and fully
        # resolved to their respective object types.
        return all((self.platform is not None,
                    isinstance(self.platform, Platform),
                    self.os is not None,
                    isinstance(self.os, OperatingSystem),
                    self.target is not None, isinstance(self.target, Target)))

    def __str__(self):
        if self.platform or self.os or self.target:
            if self.platform.name == 'darwin':
                # On Darwin only the OS *name* is printed (no version).
                os_name = self.os.name if self.os else "None"
            else:
                os_name = str(self.os)

            return (str(self.platform) + "-" +
                    os_name + "-" + str(self.target))
        else:
            return ''

    def __contains__(self, string):
        return string in str(self)

    # TODO: make this unnecessary: don't include an empty arch on *every* spec.
    def __nonzero__(self):
        # Truthy iff at least one component is set (Python 2 protocol;
        # aliased to __bool__ below for Python 3).
        return (self.platform is not None or
                self.os is not None or
                self.target is not None)
    __bool__ = __nonzero__

    def _cmp_key(self):
        # Compare by names/values rather than object identity.
        if isinstance(self.platform, Platform):
            platform = self.platform.name
        else:
            platform = self.platform
        if isinstance(self.os, OperatingSystem):
            os = self.os.name
        else:
            os = self.os
        if isinstance(self.target, Target):
            target = self.target.microarchitecture
        else:
            target = self.target
        return (platform, os, target)

    def to_dict(self):
        # NOTE(review): assumes self.target is set (to_dict_or_value is
        # called unconditionally) — confirm callers only serialize
        # concrete arches.
        str_or_none = lambda v: str(v) if v else None
        d = syaml_dict([
            ('platform', str_or_none(self.platform)),
            ('platform_os', str_or_none(self.os)),
            ('target', self.target.to_dict_or_value())])
        return syaml_dict([('arch', d)])

    def to_spec(self):
        """Convert this Arch to an anonymous Spec with architecture defined."""
        spec = spack.spec.Spec()
        spec.architecture = spack.spec.ArchSpec(str(self))
        return spec

    @staticmethod
    def from_dict(d):
        # Inverse of to_dict: rebuild an Arch from its dict representation.
        spec = spack.spec.ArchSpec.from_dict(d)
        return arch_for_spec(spec)
@memoized
def get_platform(platform_name):
    """Returns a platform object that corresponds to the given name."""
    # Normalize the requested name once, then match case-insensitively
    # against the class names of all known platforms.
    wanted = platform_name.replace("_", "").lower()
    for platform_class in all_platforms():
        if platform_class.__name__.lower() == wanted:
            return platform_class()
def verify_platform(platform_name):
    """Determine whether the platform with the given name is supported in
    Spack, aborting with an error if it is not.

    For more information, see the 'spack.platforms' submodule.
    """
    platform_name = platform_name.replace("_", "").lower()
    supported = [cls.__name__.lower() for cls in all_platforms()]
    if platform_name not in supported:
        tty.die("%s is not a supported platform; supported platforms are %s" %
                (platform_name, supported))
def arch_for_spec(arch_spec):
    """Transforms the given architecture spec into an architecture object."""
    arch_spec = spack.spec.ArchSpec(arch_spec)
    assert arch_spec.concrete

    host_platform = get_platform(arch_spec.platform)
    # Both the OS and the target must be known to the current platform;
    # short-circuit so the target lookup is skipped when the OS is unknown.
    if not (host_platform.operating_system(arch_spec.os) and
            host_platform.target(arch_spec.target)):
        raise ValueError(
            "Can't recreate arch for spec %s on current arch %s; "
            "spec architecture is too different" % (arch_spec, sys_type()))

    return Arch(host_platform, arch_spec.os, arch_spec.target)
@memoized
def all_platforms():
    """Discover every Platform class defined under ``spack.platforms``."""
    parent_module = "spack.platforms"
    discovered = []
    for module_name in list_modules(spack.paths.platform_path):
        qualified_name = '%s.%s' % (parent_module, module_name)
        class_name = mod_to_class(module_name)
        # Import the platform module and pull out its expected class.
        module = __import__(qualified_name, fromlist=[class_name])
        if not hasattr(module, class_name):
            tty.die('No class %s defined in %s' % (class_name, qualified_name))
        platform_class = getattr(module, class_name)
        if not inspect.isclass(platform_class):
            tty.die('%s.%s is not a class' % (qualified_name, class_name))
        discovered.append(platform_class)
    return discovered
@memoized
def platform():
    """Detects the platform for this machine.

    Gathers all available Platform subclasses, orders them by their
    (arbitrarily assigned) priority, and returns an instance of the first
    one whose ``detect()`` succeeds (e.g. via uname or a file path such as
    /opt/cray...).
    """
    # Try to create a Platform object using the config file FIRST
    candidates = all_platforms()
    # Sort in place (matches historical behavior on the memoized list).
    candidates.sort(key=lambda cls: cls.priority)
    for candidate in candidates:
        if candidate.detect():
            return candidate()
@memoized
def default_arch():
    """Default ``Arch`` object for this machine.

    Uses the placeholder 'default_os'/'default_target' names, which the
    platform resolves to its defaults.  See ``sys_type()``.
    """
    return Arch(platform(), 'default_os', 'default_target')
def sys_type():
    """Print out the "default" platform-os-target tuple for this machine.

    On machines with only one target OS/target, prints out the
    platform-os-target for the frontend.  For machines with a frontend
    and a backend, prints the default backend.

    TODO: replace with use of more explicit methods to get *all* the
    backends, as client code should really be aware of cross-compiled
    architectures.
    """
    return str(default_arch())
@memoized
def compatible_sys_types():
    """Returns a list of all the systypes compatible with the current host."""
    # The host microarchitecture plus all of its ancestors are compatible.
    current_host = cpu.host()
    compatible_targets = [current_host] + current_host.ancestors
    return [str(Arch(platform(), 'default_os', target))
            for target in compatible_targets]
| 35.946429 | 79 | 0.652807 |
9317278ac926c727d170dd1df8f458a6cb513845 | 4,325 | py | Python | .history/my_classes/ScopesClosuresAndDecorators/Closures_20210711201101.py | minefarmer/deep-Dive-1 | b0675b853180c5b5781888266ea63a3793b8d855 | [
"Unlicense"
] | null | null | null | .history/my_classes/ScopesClosuresAndDecorators/Closures_20210711201101.py | minefarmer/deep-Dive-1 | b0675b853180c5b5781888266ea63a3793b8d855 | [
"Unlicense"
] | null | null | null | .history/my_classes/ScopesClosuresAndDecorators/Closures_20210711201101.py | minefarmer/deep-Dive-1 | b0675b853180c5b5781888266ea63a3793b8d855 | [
"Unlicense"
] | null | null | null | """ Closures
Free variables and closures
Remember: Functions defined inside another function can access the outer (nonlocal) variables
"""
def outer():
    """Demonstrate a closure: ``inner`` captures the free variable ``x``."""
    # This nonlocal variable x is called a *free* variable from inner's
    # point of view — inner refers to the x in outer's scope.
    x = 'python'

    def inner():
        # When we consider inner, we are really looking at:
        #   - the function inner itself
        #   - the free variable x (with the current value 'python')
        # Together these form a closure; x reaches inner through the
        # print statement below.
        print("{0} rocks!".format(x))

    inner()


outer()  # python rocks!
""" Returning the inner function
What happens if, instead of calling(running) inner from inside outer, we rune it?
def outer():
x = 'python' # x is a free variable in inner, it is bound to the variable x in outer, this happens when outer runs
def inner():
print("{0} rocks!".format(x))
return inner # when returning inner, we are actually 'returning' the closure
We can assign that return value to a variable name: fn = outer()
fn() # python rocks!
When we called fn
at that time Python determined the value of x in the extended scope
But notice that outer had finished running before we called fn - it's scope was gone
Python cells and Multi-Scopped Variables
def outer(): # Here the value of x is shared between two scopes
x = 'python' # outer
def inner(): # inner
print(x)
return inner # The label x is in two different scopes
Python does this by creating a cell as an intermediary object
outer,x ----> # cell 0xA500 / str 0xFF100 # indirect reference
inner.x ----> # OxFF199 / python
# they are pointing to the same cell
# when requesting the value of the variable, Python will "double-hop" to get the final value
Closures
I can think of the closure as a function plus an EXTENDED SCOPE THAT CONTAINS THE FREE VARIABLES
The free variables's value is the object the cell points to - so that could change over time!
def outer():
a = 100
_______________________closure______
|x = 'python' |
|
def inner():
a = 10 # local variable |
print("{0} rocks!".format(x)) |
|___________________________________|
return inner
fn = outer() fn -> inner + extended scope x
Introspection
def outer():
a = 100
x = 'python'
def inner():
a = 10 # local variable
print("{0} rocks!".format(x))
return inner
fn = outer()
fn.__code__.co_freevars ->('x',) (a is not a free variable)
fn.__closure__ -> (<cell at 0xA500: str object at 0xFF199>,)
def outer():
x = 'python'
print(hex(id(x))) -----------------> 0xFF100 indirect reference
def inner():
print(hex(id(x))) -----------------> 0xFF100 indirect reference
print("{0} rocks!".format(x))
return inner
fn = outer()
fn()
def counter(): # closure
------------------------
count = 0 / count is a free variable
/ it is bound to the cell count
def inc(): /
nonlocal count
count += 1
return count
------------------------
return inc
fn -> inc + count -> 0
/
fn = counter() /
fn() => 1 count's (indirect) reference changed from object 0 to the object 1
fn() => 2
Every time we run a function a new scope is created.
If that function generates a closure, a new closure is created every time as well
def counter(): # closure f1 = counter()
------------------------ f2 = counter()
count = 0
f1() # 1
def inc(): f1() # 2 f1 and f2 do not have the same extended scope
nonlocal count f1() # 3
count += 1
return count f2() # 1 they are different instances of the closure
-------------------------
return inc the cells are different
""" | 31.34058 | 132 | 0.541734 |
0245cd631110f3c612a7b236b510d335517de249 | 27,182 | py | Python | mihifepe/simulation/simulation.py | cloudbopper/feature-importance-analysis | 7f03e4413d1e42e9265d790cb64c41f34dbea5ab | [
"MIT"
] | 1 | 2019-11-01T12:31:45.000Z | 2019-11-01T12:31:45.000Z | mihifepe/simulation/simulation.py | cloudbopper/feature-importance-analysis | 7f03e4413d1e42e9265d790cb64c41f34dbea5ab | [
"MIT"
] | 10 | 2018-11-14T17:44:39.000Z | 2020-01-02T03:25:14.000Z | mihifepe/simulation/simulation.py | cloudbopper/mihifepe | 7f03e4413d1e42e9265d790cb64c41f34dbea5ab | [
"MIT"
] | 3 | 2018-11-14T04:17:17.000Z | 2020-01-04T20:32:00.000Z | """Generates simulated data and model to test mihifepe algorithm"""
import argparse
from collections import namedtuple
import csv
import functools
import itertools
import os
import pickle
import sys
from unittest.mock import patch
import anytree
from anytree.importer import JsonImporter
import h5py
import numpy as np
from scipy.cluster.hierarchy import linkage
import sympy
from sympy.utilities.lambdify import lambdify
from sklearn.metrics import precision_recall_fscore_support
from mihifepe import constants, master, utils
from mihifepe.fdr import hierarchical_fdr_control
# TODO maybe: write arguments to separate readme.txt for documentating runs
# Simulation results object
# Simulation results record: FDR/power measured over all hierarchy nodes,
# outer nodes, base features, and interactions.
Results = namedtuple(constants.SIMULATION_RESULTS, [constants.FDR, constants.POWER,
                                                    constants.OUTER_NODES_FDR, constants.OUTER_NODES_POWER,
                                                    constants.BASE_FEATURES_FDR, constants.BASE_FEATURES_POWER,
                                                    constants.INTERACTIONS_FDR, constants.INTERACTIONS_POWER])
def main():
    """Parse command-line arguments, prepare the output directory, RNG and
    logger, then run the simulation pipeline."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-seed", type=int, default=constants.SEED)
    parser.add_argument("-num_instances", type=int, default=10000)
    parser.add_argument("-num_features", type=int, default=100)
    parser.add_argument("-output_dir", help="Name of output directory")
    parser.add_argument("-fraction_relevant_features", type=float, default=.05)
    parser.add_argument("-noise_multiplier", type=float, default=.05,
                        help="Multiplicative factor for noise added to polynomial computation for irrelevant features")
    parser.add_argument("-noise_type", choices=[constants.ADDITIVE_GAUSSIAN, constants.EPSILON_IRRELEVANT, constants.NO_NOISE],
                        default=constants.EPSILON_IRRELEVANT)
    parser.add_argument("-hierarchy_type", help="Choice of hierarchy to generate", default=constants.CLUSTER_FROM_DATA,
                        choices=[constants.CLUSTER_FROM_DATA, constants.RANDOM])
    parser.add_argument("-clustering_instance_count", type=int, help="If provided, uses this number of instances to "
                        "cluster the data to generate a hierarchy, allowing the hierarchy to remain same across multiple "
                        "sets of instances", default=0)
    parser.add_argument("-num_interactions", type=int, default=0, help="number of interaction pairs in model")
    parser.add_argument("-exclude_interaction_only_features", help="exclude interaction-only features in model"
                        " in addition to linear + interaction features (default included)", action="store_false",
                        dest="include_interaction_only_features")
    parser.set_defaults(include_interaction_only_features=True)
    parser.add_argument("-contiguous_node_names", action="store_true", help="enable to change node names in hierarchy "
                        "to be contiguous for better visualization (but creating mismatch between node names and features indices)")

    # Arguments used to qualify output directory, then passed to mihifepe.master
    parser.add_argument("-perturbation", default=constants.SHUFFLING, choices=[constants.ZEROING, constants.SHUFFLING])
    parser.add_argument("-num_shuffling_trials", type=int, default=100, help="Number of shuffling trials to average over, "
                        "when shuffling perturbations are selected")
    parser.add_argument("-analyze_interactions", help="enable analyzing interactions", action="store_true")
    # Unrecognized arguments are forwarded verbatim to mihifepe.master.
    args, pass_args = parser.parse_known_args()
    pass_args = " ".join(pass_args)
    if not args.output_dir:
        # Derive a descriptive output directory name from key settings.
        args.output_dir = ("sim_outputs_inst_%d_feat_%d_noise_%.3f_relfraction_%.3f_pert_%s_shufftrials_%d" %
                           (args.num_instances, args.num_features, args.noise_multiplier,
                            args.fraction_relevant_features, args.perturbation, args.num_shuffling_trials))
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)
    args.rng = np.random.RandomState(args.seed)
    args.logger = utils.get_logger(__name__, "%s/simulation.log" % args.output_dir)
    pipeline(args, pass_args)
def pipeline(args, pass_args):
    """Run the full simulation pipeline: synthesize model and data, invoke
    mihifepe, then compare its output against the known ground truth.

    Args:
        args: parsed command-line arguments (augmented with rng/logger).
        pass_args: unparsed argument string forwarded to mihifepe.master.
    """
    # TODO: Features other than binary
    args.logger.info("Begin mihifepe simulation with args: %s" % args)
    # Synthesize polynomial that generates ground truth
    sym_vars, relevant_feature_map, polynomial_fn = gen_polynomial(args, get_relevant_features(args))
    # Synthesize data
    probs, test_data, clustering_data = synthesize_data(args)
    # Generate hierarchy using clustering
    hierarchy_root, feature_id_map = gen_hierarchy(args, clustering_data)
    # Update hierarchy descriptions for future visualization
    update_hierarchy_relevance(hierarchy_root, relevant_feature_map, probs)
    # Generate targets (ground truth)
    targets = gen_targets(polynomial_fn, test_data)
    # Write outputs - data, gen_model.py, hierarchy
    data_filename = write_data(args, test_data, targets)
    hierarchy_filename = write_hierarchy(args, hierarchy_root)
    gen_model_filename = write_model(args, sym_vars)
    # Invoke feature importance algorithm
    run_mihifepe(args, pass_args, data_filename, hierarchy_filename, gen_model_filename)
    # Compare mihifepe outputs with ground truth outputs
    compare_with_ground_truth(args, hierarchy_root)
    # Evaluate mihifepe outputs - power/FDR for all nodes/outer nodes/base features
    results = evaluate(args, relevant_feature_map, feature_id_map)
    args.logger.info("Results:\n%s" % str(results))
    write_results(args, results)
    args.logger.info("End mihifepe simulation")
def synthesize_data(args):
    """Synthesize binary feature data.

    Each feature i is Bernoulli with its own probability probs[i].  Enough
    rows are drawn to cover both the test instances and (optionally) a
    larger clustering sample, so the hierarchy stays stable across runs.

    Returns:
        (probs, test_data, clustering_data): per-feature probabilities,
        the instance matrix used for evaluation, and the (possibly larger)
        matrix used for clustering.
    """
    # TODO: Correlations between features
    args.logger.info("Begin generating data")
    probs = args.rng.uniform(size=args.num_features)
    num_rows = max(args.num_instances, args.clustering_instance_count)
    data = args.rng.binomial(1, probs, size=(num_rows, args.num_features))
    test_data, clustering_data = data, data
    if args.clustering_instance_count:
        clustering_data = data[:args.clustering_instance_count, :]
        if args.clustering_instance_count > args.num_instances:
            test_data = data[:args.num_instances, :]
    args.logger.info("End generating data")
    return probs, test_data, clustering_data
def gen_hierarchy(args, clustering_data):
    """
    Generate hierarchy over features

    Args:
        args: Command-line arguments
        clustering_data: Data potentially used to cluster features
            (depending on hierarchy generation method)

    Returns:
        hierarchy_root: root of resulting hierarchy over features
        feature_id_map: mapping from visual feature ids to original ids
            (empty unless -contiguous_node_names is enabled)
    """
    # Generate hierarchy
    hierarchy_root = None
    if args.hierarchy_type == constants.CLUSTER_FROM_DATA:
        clusters = cluster_data(args, clustering_data)
        hierarchy_root = gen_hierarchy_from_clusters(args, clusters)
    elif args.hierarchy_type == constants.RANDOM:
        hierarchy_root = gen_random_hierarchy(args)
    else:
        raise NotImplementedError("Need valid hierarchy type")
    # Improve visualization - contiguous feature names
    feature_id_map = {}  # mapping from visual feature ids to original ids
    if args.contiguous_node_names:
        # Renumber nodes in post-order so each internal node spans a
        # contiguous index range; remember each leaf's original index.
        for idx, node in enumerate(anytree.PostOrderIter(hierarchy_root)):
            node.idx = idx
            if node.is_leaf:
                node.min_child_idx = idx
                node.max_child_idx = idx
                node.num_base_features = 1
                node.name = str(idx)
                feature_id_map[idx] = int(node.static_indices)
            else:
                node.min_child_idx = min([child.min_child_idx for child in node.children])
                node.max_child_idx = max([child.idx for child in node.children])
                node.num_base_features = sum([child.num_base_features for child in node.children])
                node.name = "[%d-%d] (size: %d)" % (node.min_child_idx, node.max_child_idx, node.num_base_features)
    return hierarchy_root, feature_id_map
def gen_random_hierarchy(args):
    """Generates balanced random hierarchy"""
    args.logger.info("Begin generating hierarchy")
    nodes = [anytree.Node(str(idx), static_indices=str(idx)) for idx in range(args.num_features)]
    args.rng.shuffle(nodes)
    node_count = len(nodes)
    # Repeatedly pair adjacent nodes under fresh parents until a single
    # root remains; an odd node out gets a parent of its own.
    while len(nodes) > 1:
        parents = []
        for left_idx in range(0, len(nodes), 2):
            parent = anytree.Node(str(node_count))
            node_count += 1
            nodes[left_idx].parent = parent
            right_idx = left_idx + 1
            if right_idx < len(nodes):
                nodes[right_idx].parent = parent
            parents.append(parent)
        nodes = parents
    hierarchy_root = nodes[0]
    args.logger.info("End generating hierarchy")
    return hierarchy_root
def cluster_data(args, data):
    """Hierarchically cluster features (columns of ``data``) using complete
    linkage over Hamming distance; returns the scipy linkage matrix."""
    args.logger.info("Begin clustering data")
    feature_matrix = data.transpose()  # cluster features, not instances
    clusters = linkage(feature_matrix, metric="hamming", method="complete")
    args.logger.info("End clustering data")
    return clusters
def gen_hierarchy_from_clusters(args, clusters):
    """
    Organize clusters into hierarchy

    Args:
        clusters: linkage matrix (num_features-1 X 4)
            rows indicate successive clustering iterations
            columns, respectively: 1st cluster index, 2nd cluster index, distance, sample count

    Returns:
        hierarchy_root: root of resulting hierarchy over features
    """
    # Generate hierarchy from clusters
    # Leaves are the original features; each linkage row merges two earlier
    # clusters (indices >= num_features refer to previously created merges,
    # which is why new cluster nodes are appended to ``nodes``).
    nodes = [anytree.Node(str(idx), static_indices=str(idx)) for idx in range(args.num_features)]
    for idx, cluster in enumerate(clusters):
        cluster_idx = idx + args.num_features
        left_idx, right_idx, _, _ = cluster
        left_idx = int(left_idx)
        right_idx = int(right_idx)
        cluster_node = anytree.Node(str(cluster_idx))
        nodes[left_idx].parent = cluster_node
        nodes[right_idx].parent = cluster_node
        nodes.append(cluster_node)
    hierarchy_root = nodes[-1]
    return hierarchy_root
def gen_polynomial(args, relevant_features):
    """Generate polynomial which decides the ground truth and noisy model

    Returns:
        sym_vars: tuple (sym_features, sym_noise, sym_model_fn) — the
            symbolic features, noise symbol(s), and noisy model expression.
        relevant_feature_map: map of relevant feature sets to coefficients.
        polynomial_fn: callable evaluating the noiseless polynomial.
    """
    # Note: using sympy to build function appears to be 1.5-2x slower than erstwhile raw numpy implementation (for linear terms)
    # TODO: possibly negative coefficients
    sym_features = sympy.symbols(["x%d" % x for x in range(args.num_features)])
    relevant_feature_map = {}  # map of relevant feature sets to coefficients
    # Generate polynomial expression
    # Pairwise interaction terms
    sym_polynomial_fn = 0
    sym_polynomial_fn = update_interaction_terms(args, relevant_features, relevant_feature_map, sym_features, sym_polynomial_fn)
    # Linear terms
    sym_polynomial_fn = update_linear_terms(args, relevant_features, relevant_feature_map, sym_features, sym_polynomial_fn)
    args.logger.info("Ground truth polynomial:\ny = %s" % sym_polynomial_fn)
    # Generate model expression
    polynomial_fn = lambdify([sym_features], sym_polynomial_fn, "numpy")
    # Add noise terms
    sym_noise = []
    sym_model_fn = sym_polynomial_fn
    if args.noise_type == constants.NO_NOISE:
        pass
    elif args.noise_type == constants.EPSILON_IRRELEVANT:
        # Per-feature noise terms applied to irrelevant features only.
        sym_noise = sympy.symbols(["noise%d" % x for x in range(args.num_features)])
        irrelevant_features = np.array([0 if x in relevant_features else 1 for x in range(args.num_features)])
        sym_model_fn = sym_polynomial_fn + (sym_noise * irrelevant_features).dot(sym_features)
    elif args.noise_type == constants.ADDITIVE_GAUSSIAN:
        # Single additive noise symbol.
        sym_noise = sympy.symbols("noise")
        sym_model_fn = sym_polynomial_fn + sym_noise
    else:
        raise NotImplementedError("Unknown noise type")
    sym_vars = (sym_features, sym_noise, sym_model_fn)
    return sym_vars, relevant_feature_map, polynomial_fn
def get_relevant_features(args):
    """Randomly choose which feature indices are relevant.

    Picks max(1, round(num_features * fraction_relevant_features)) indices
    uniformly at random (via an rng shuffle of an indicator vector) and
    returns them as a set.
    """
    num_relevant = max(1, round(args.num_features * args.fraction_relevant_features))
    indicator = np.zeros(args.num_features)
    indicator[:num_relevant] = 1
    args.rng.shuffle(indicator)
    return {idx for idx, flag in enumerate(indicator) if flag}
def update_interaction_terms(args, relevant_features, relevant_feature_map, sym_features, sym_polynomial_fn):
"""Pairwise interaction terms for polynomial"""
# TODO: higher-order interactions
num_relevant_features = len(relevant_features)
num_interactions = min(args.num_interactions, num_relevant_features * (num_relevant_features - 1) / 2)
if not num_interactions:
return sym_polynomial_fn
potential_pairs = list(itertools.combinations(sorted(relevant_features), 2))
potential_pairs_arr = np.empty(len(potential_pairs), dtype=np.object)
potential_pairs_arr[:] = potential_pairs
interaction_pairs = args.rng.choice(potential_pairs_arr, size=num_interactions, replace=False)
for interaction_pair in interaction_pairs:
coefficient = args.rng.uniform()
relevant_feature_map[frozenset(interaction_pair)] = coefficient
sym_polynomial_fn += coefficient * functools.reduce(lambda sym_x, y: sym_x * sym_features[y], interaction_pair, 1)
return sym_polynomial_fn
def update_linear_terms(args, relevant_features, relevant_feature_map, sym_features, sym_polynomial_fn):
"""Order one terms for polynomial"""
interaction_features = set()
for interaction in relevant_feature_map.keys():
interaction_features.update(interaction)
# Let half the interaction features have nonzero interaction coefficients but zero linear coefficients
interaction_only_features = []
if interaction_features and args.include_interaction_only_features:
interaction_only_features = args.rng.choice(sorted(interaction_features),
len(interaction_features) // 2,
replace=False)
linear_features = sorted(relevant_features.difference(interaction_only_features))
coefficients = np.zeros(args.num_features)
coefficients[linear_features] = args.rng.uniform(size=len(linear_features))
for linear_feature in linear_features:
relevant_feature_map[frozenset([linear_feature])] = coefficients[linear_feature]
sym_polynomial_fn += coefficients.dot(sym_features)
return sym_polynomial_fn
def update_hierarchy_relevance(hierarchy_root, relevant_feature_map, probs):
    """
    Add feature relevance information to nodes of hierarchy:
    their probability of being enabled,
    their polynomial coefficient
    """
    # Base features participating in any relevant term (linear or interaction)
    relevant_features = set()
    for key in relevant_feature_map:
        relevant_features.update(key)
    for node in anytree.PostOrderIter(hierarchy_root):
        node.description = constants.IRRELEVANT
        if node.is_leaf:
            idx = int(node.static_indices)
            node.poly_coeff = 0.0
            node.bin_prob = probs[idx]
            coeff = relevant_feature_map.get(frozenset([idx]))
            if coeff:
                node.poly_coeff = coeff
                node.description = ("%s feature:\nPolynomial coefficient: %f\nBinomial probability: %f"
                                    % (constants.RELEVANT, coeff, probs[idx]))
            elif idx in relevant_features:
                # Relevant only through interaction terms (no linear coefficient)
                node.description = ("%s feature\n(Interaction-only)" % constants.RELEVANT)
        else:
            # Post-order guarantees children are labelled before their parent;
            # an internal node is relevant if any child is.
            for child in node.children:
                if child.description != constants.IRRELEVANT:
                    node.description = constants.RELEVANT
def gen_targets(polynomial_fn, data):
    """Apply the ground-truth polynomial to every instance to produce the
    list of target values."""
    return list(map(polynomial_fn, data))
def write_data(args, data, targets):
    """
    Write data in HDF5 format.

    Groups:     /temporal (Group containing temporal data)

    Datasets:   /record_ids (List of record identifiers (strings) of length M = number of records/instances)
                /targets (vector of target values (regression/classification outputs) of length M)
                /static (matrix of static data of size M x L)
                /temporal/<record_id> (One dataset per record_id) (List (of variable length V) of vectors (of fixed length W))

    Returns the path of the written HDF5 file.
    """
    data_filename = "%s/%s" % (args.output_dir, "data.hdf5")
    root = h5py.File(data_filename, "w")
    # Record ids are simply stringified row indices.
    record_ids = [str(idx).encode("utf8") for idx in range(args.num_instances)]
    root.create_dataset(constants.RECORD_IDS, data=record_ids)
    root.create_dataset(constants.TARGETS, data=targets)
    root.create_dataset(constants.STATIC, data=data)
    root.close()
    return data_filename
def write_hierarchy(args, hierarchy_root):
    """
    Write hierarchy in CSV format.

    Columns:    *name*: feature name, must be unique across features
                *parent_name*: name of parent if it exists, else '' (root node)
                *description*: node description
                *static_indices*: [only required for leaf nodes] list of tab-separated indices corresponding to the indices
                    of these features in the static data
                *temporal_indices*: [only required for leaf nodes] list of tab-separated indices corresponding to the indices
                    of these features in the temporal data

    Returns the path of the written CSV file.
    """
    hierarchy_filename = "%s/%s" % (args.output_dir, "hierarchy.csv")
    with open(hierarchy_filename, "w", newline="") as hierarchy_file:
        writer = csv.writer(hierarchy_file, delimiter=",")
        writer.writerow([constants.NODE_NAME, constants.PARENT_NAME,
                         constants.DESCRIPTION, constants.STATIC_INDICES, constants.TEMPORAL_INDICES])
        for node in anytree.PreOrderIter(hierarchy_root):
            static_indices = node.static_indices if node.is_leaf else ""
            parent_name = node.parent.name if node.parent else ""
            # Temporal indices are unused in the simulation (last column empty).
            writer.writerow([node.name, parent_name, node.description, static_indices, ""])
    return hierarchy_filename
def write_model(args, sym_vars):
    """
    Write model to file in output directory.
    Write model_filename to config file in script directory.
    gen_model.py uses config file to load model.

    Args:
        args: command-line arguments (provides output_dir, noise settings).
        sym_vars: tuple (sym_features, sym_noise, sym_model_fn) to pickle.

    Returns:
        Path of the generated gen_model.py script.
    """
    # Write model to file
    model_filename = "%s/%s" % (args.output_dir, constants.MODEL_FILENAME)
    with open(model_filename, "wb") as model_file:
        pickle.dump(sym_vars, model_file)
    # Write model_filename to config
    gen_model_config_filename = "%s/%s" % (args.output_dir, constants.GEN_MODEL_CONFIG_FILENAME)
    with open(gen_model_config_filename, "wb") as gen_model_config_file:
        pickle.dump(model_filename, gen_model_config_file)
        pickle.dump(args.noise_multiplier, gen_model_config_file)
        pickle.dump(args.noise_type, gen_model_config_file)
    # Write gen_model.py to output_dir, substituting the config path into
    # the template.
    gen_model_filename = "%s/%s" % (args.output_dir, constants.GEN_MODEL_FILENAME)
    gen_model_template_filename = "%s/%s" % (os.path.dirname(os.path.abspath(__file__)), constants.GEN_MODEL_TEMPLATE_FILENAME)
    # Use context managers for both files so the handles are closed even if
    # an exception occurs mid-copy (the original leaked gen_model_file on error).
    with open(gen_model_template_filename, "r") as gen_model_template_file, \
            open(gen_model_filename, "w") as gen_model_file:
        for line in gen_model_template_file:
            line = line.replace(constants.GEN_MODEL_CONFIG_FILENAME_PLACEHOLDER, gen_model_config_filename)
            gen_model_file.write(line)
    return gen_model_filename
def run_mihifepe(args, pass_args, data_filename, hierarchy_filename, gen_model_filename):
    """Run mihifepe algorithm on the generated data/hierarchy/model files."""
    args.logger.info("Begin running mihifepe")
    analyze_interactions = "-analyze_interactions" if args.analyze_interactions else ""
    args.logger.info("Passing the following arguments to mihifepe.master without parsing: %s" % pass_args)
    memory_requirement = 1 + (os.stat(data_filename).st_size // (2 ** 30))  # Compute approximate memory requirement in GB
    cmd = ("python -m mihifepe.master -data_filename %s -hierarchy_filename %s -model_generator_filename %s -output_dir %s "
           "-perturbation %s -num_shuffling_trials %d -memory_requirement %d %s %s"
           % (data_filename, hierarchy_filename, gen_model_filename, args.output_dir,
              args.perturbation, args.num_shuffling_trials, memory_requirement, analyze_interactions, pass_args))
    args.logger.info("Running cmd: %s" % cmd)
    # Invoke master.main in-process by patching sys.argv (skipping the
    # "python -m mihifepe.master" prefix) instead of spawning a subprocess.
    pass_args = cmd.split()[2:]
    with patch.object(sys, 'argv', pass_args):
        master.main()
    args.logger.info("End running mihifepe")
def compare_with_ground_truth(args, hierarchy_root):
    """Compare results from mihifepe with ground truth results.

    Synthesizes p-values from the known relevance annotations on the
    hierarchy, runs hierarchical FDR control on them, and logs the paths
    of the two result images for visual comparison.
    """
    # Generate ground truth results
    # Write hierarchical FDR input file for ground truth values
    args.logger.info("Compare mihifepe results to ground truth")
    input_filename = "%s/ground_truth_pvalues.csv" % args.output_dir
    with open(input_filename, "w", newline="") as input_file:
        writer = csv.writer(input_file)
        writer.writerow([constants.NODE_NAME, constants.PARENT_NAME, constants.PVALUE_LOSSES, constants.DESCRIPTION])
        for node in anytree.PostOrderIter(hierarchy_root):
            parent_name = node.parent.name if node.parent else ""
            # Decide p-values based on rough heuristic for relevance
            node.pvalue = 1.0
            if node.description != constants.IRRELEVANT:
                if node.is_leaf:
                    node.pvalue = 0.001
                    if node.poly_coeff:
                        # Stronger coefficients/probabilities get smaller p-values.
                        node.pvalue = min(node.pvalue, 1e-10 / (node.poly_coeff * node.bin_prob) ** 3)
                else:
                    # Internal relevant nodes: slightly smaller than best child
                    # (post-order guarantees children already have p-values).
                    node.pvalue = 0.999 * min([child.pvalue for child in node.children])
            writer.writerow([node.name, parent_name, node.pvalue, node.description])
    # Generate hierarchical FDR results for ground truth values
    ground_truth_dir = "%s/ground_truth_fdr" % args.output_dir
    cmd = ("python -m mihifepe.fdr.hierarchical_fdr_control -output_dir %s -procedure yekutieli "
           "-rectangle_leaves %s" % (ground_truth_dir, input_filename))
    args.logger.info("Running cmd: %s" % cmd)
    # Run the FDR control script in-process via sys.argv patching.
    pass_args = cmd.split()[2:]
    with patch.object(sys, 'argv', pass_args):
        hierarchical_fdr_control.main()
    # Compare results
    ground_truth_outputs_filename = "%s/%s.png" % (ground_truth_dir, constants.TREE)
    args.logger.info("Ground truth results: %s" % ground_truth_outputs_filename)
    mihifepe_outputs_filename = "%s/%s/%s.png" % (args.output_dir, constants.HIERARCHICAL_FDR_DIR, constants.TREE)
    args.logger.info("mihifepe results: %s" % mihifepe_outputs_filename)
def evaluate(args, relevant_feature_map, feature_id_map):
    """
    Evaluate mihifepe results - obtain power/FDR measures for all nodes/outer nodes/base features/interactions

    Returns:
        Results namedtuple with (FDR, power) for each node category.
    """
    # pylint: disable = too-many-locals
    def get_relevant_rejected(nodes, outer=False, leaves=False):
        """Get set of relevant and rejected nodes"""
        assert not (outer and leaves)
        if outer:
            # Outer nodes: rejected nodes none of whose children are rejected.
            nodes = [node for node in nodes if node.rejected and all([not child.rejected for child in node.children])]
        elif leaves:
            nodes = [node for node in nodes if node.is_leaf]
        relevant = [0 if node.description == constants.IRRELEVANT else 1 for node in nodes]
        rejected = [1 if node.rejected else 0 for node in nodes]
        return relevant, rejected

    # Load the FDR output tree produced by the mihifepe run.
    tree_filename = "%s/%s/%s.json" % (args.output_dir, constants.HIERARCHICAL_FDR_DIR, constants.HIERARCHICAL_FDR_OUTPUTS)
    with open(tree_filename, "r") as tree_file:
        tree = JsonImporter().read(tree_file)
    nodes = list(anytree.PreOrderIter(tree))
    # All nodes FDR/power
    relevant, rejected = get_relevant_rejected(nodes)
    precision, recall, _, _ = precision_recall_fscore_support(relevant, rejected, average="binary")
    # Outer nodes FDR/power
    outer_relevant, outer_rejected = get_relevant_rejected(nodes, outer=True)
    outer_precision, outer_recall, _, _ = precision_recall_fscore_support(outer_relevant, outer_rejected, average="binary")
    # Base features FDR/power
    bf_relevant, bf_rejected = get_relevant_rejected(nodes, leaves=True)
    bf_precision, bf_recall, _, _ = precision_recall_fscore_support(bf_relevant, bf_rejected, average="binary")
    # Interactions FDR/power
    interaction_precision, interaction_recall = get_precision_recall_interactions(args, relevant_feature_map, feature_id_map)
    # FDR = 1 - precision; power = recall.
    return Results(1 - precision, recall, 1 - outer_precision, outer_recall,
                   1 - bf_precision, bf_recall, 1 - interaction_precision, interaction_recall)
def get_precision_recall_interactions(args, relevant_feature_map, feature_id_map):
    """Computes precision (1 - FDR) and recall (power) for detecting interactions"""
    # pylint: disable = invalid-name, too-many-locals
    # The set of all possible interactions might be very big, so don't construct label vector for all
    # possible interactions - compute precision/recall from basics
    # TODO: alter to handle higher-order interactions
    if not args.analyze_interactions:
        return (0.0, 0.0)
    true_interactions = {key for key in relevant_feature_map.keys() if len(key) > 1}
    tree_filename = "%s/%s/%s.json" % (args.output_dir, constants.INTERACTIONS_FDR_DIR, constants.HIERARCHICAL_FDR_OUTPUTS)
    # Confusion-matrix counts over the tested interaction pairs.
    tp = 0
    fp = 0
    tn = 0
    fn = 0
    tested = set()
    with open(tree_filename, "r") as tree_file:
        tree = JsonImporter().read(tree_file)
    # Two-level tree with tested interactions on level 2
    for node in tree.children:
        # Node names are "i + j"; map visual ids back to original feature
        # ids when contiguous renaming was used.
        pair = frozenset({int(idx) for idx in node.name.split(" + ")})
        if feature_id_map:
            pair = frozenset({feature_id_map[visual_id] for visual_id in pair})
        tested.add(pair)
        if node.rejected:
            if relevant_feature_map.get(pair):
                tp += 1
            else:
                fp += 1
        else:
            if relevant_feature_map.get(pair):
                fn += 1
            else:
                tn += 1
    if not tp > 0:
        # No true positives => report zero for both (also avoids 0/0 below).
        return (0.0, 0.0)
    # True interactions that were never tested count as misses.
    missed = true_interactions.difference(tested)
    fn += len(missed)
    precision = tp / (tp + fp)
    recall = tp / (tp + fn)
    return precision, recall
def write_results(args, results):
    """Serialize the simulation results (as a plain dict) to the output dir."""
    out_path = "%s/%s" % (args.output_dir, constants.SIMULATION_RESULTS_FILENAME)
    # Convert the namedtuple to a dict so the pickle is self-describing.
    payload = results._asdict()
    with open(out_path, "wb") as handle:
        pickle.dump(payload, handle)
# Script entry point: run the full simulation analysis when executed directly.
if __name__ == "__main__":
    main()
| 49.51184 | 132 | 0.693547 |
a3a400ae42fa33d588a26adf29f2233f8fc60b36 | 251 | py | Python | languages/default/normalizer.py | borisdayma/wav2vec-toolkit | 595500997cc3b71ec05995345fca888ae515b516 | [
"Apache-2.0"
] | 24 | 2021-04-02T20:54:45.000Z | 2021-11-22T05:37:34.000Z | languages/default/normalizer.py | borisdayma/wav2vec-toolkit | 595500997cc3b71ec05995345fca888ae515b516 | [
"Apache-2.0"
] | 2 | 2021-04-04T03:45:38.000Z | 2021-04-19T12:59:37.000Z | languages/default/normalizer.py | borisdayma/wav2vec-toolkit | 595500997cc3b71ec05995345fca888ae515b516 | [
"Apache-2.0"
] | 10 | 2021-04-02T20:57:38.000Z | 2021-11-17T07:35:59.000Z | from typing import Any
from wav2vec_toolkit.text_preprocessing.normalizers import NormalizerOperation
class Normalizer(NormalizerOperation):
    """Default (language-agnostic) text normalizer configuration."""
    # Characters to keep: alphanumerics only; everything else is filtered.
    _whitelist = r"[0-9\w]+"
    # No character/word replacement rules for the default language.
    _dictionary = {}
    # Lowercase transcripts before further processing.
    _do_lowercase = True
    # Name of the field holding the transcript text — presumably the dataset
    # column (e.g. Common Voice uses "sentence"); confirm against callers.
    _text_key_name = "sentence"
| 22.818182 | 78 | 0.756972 |
8b33736f9d91500ec566cf4d428289caa2f8e832 | 4,042 | py | Python | examples/model_compress/pruning/v2/simulated_anealing_pruning_torch.py | dutxubo/nni | c16f4e1c89b54b8b80661ef0072433d255ad2d24 | [
"MIT"
] | 1 | 2022-02-17T06:12:30.000Z | 2022-02-17T06:12:30.000Z | examples/model_compress/pruning/v2/simulated_anealing_pruning_torch.py | dutxubo/nni | c16f4e1c89b54b8b80661ef0072433d255ad2d24 | [
"MIT"
] | null | null | null | examples/model_compress/pruning/v2/simulated_anealing_pruning_torch.py | dutxubo/nni | c16f4e1c89b54b8b80661ef0072433d255ad2d24 | [
"MIT"
] | null | null | null | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
'''
NNI example for simulated anealing pruning algorithm.
In this example, we show the end-to-end iterative pruning process: pre-training -> pruning -> fine-tuning.
'''
import sys
import argparse
from tqdm import tqdm
import torch
from torchvision import datasets, transforms
from nni.algorithms.compression.v2.pytorch.pruning import SimulatedAnnealingPruner
from pathlib import Path
sys.path.append(str(Path(__file__).absolute().parents[2] / 'models'))
from cifar10.vgg import VGG
# Run on GPU when available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Per-channel mean/std used for input normalization (standard CIFAR-10 stats).
normalize = transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
# Training set with light augmentation (flip + padded random crop).
train_loader = torch.utils.data.DataLoader(
    datasets.CIFAR10('./data', train=True, transform=transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.RandomCrop(32, 4),
        transforms.ToTensor(),
        normalize,
    ]), download=True),
    batch_size=128, shuffle=True)
# Test set: no augmentation, deterministic order.
test_loader = torch.utils.data.DataLoader(
    datasets.CIFAR10('./data', train=False, transform=transforms.Compose([
        transforms.ToTensor(),
        normalize,
    ])),
    batch_size=128, shuffle=False)
criterion = torch.nn.CrossEntropyLoss()
def trainer(model, optimizer, criterion, epoch):
    """Run one training epoch over the module-level CIFAR-10 train loader."""
    model.train()
    progress = tqdm(iterable=train_loader, desc='Epoch {}'.format(epoch))
    for inputs, labels in progress:
        inputs = inputs.to(device)
        labels = labels.to(device)
        optimizer.zero_grad()
        batch_loss = criterion(model(inputs), labels)
        batch_loss.backward()
        optimizer.step()
def finetuner(model):
    """One epoch of fine-tuning with a fresh SGD optimizer.

    Matches the single-argument callback signature expected by the NNI
    pruner's ``finetuner`` parameter, so the optimizer and loss are created
    locally on each call.
    """
    model.train()
    opt = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9, weight_decay=5e-4)
    loss_fn = torch.nn.CrossEntropyLoss()
    for inputs, labels in tqdm(iterable=train_loader, desc='Epoch PFs'):
        inputs = inputs.to(device)
        labels = labels.to(device)
        opt.zero_grad()
        batch_loss = loss_fn(model(inputs), labels)
        batch_loss.backward()
        opt.step()
def evaluator(model):
    """Measure and print top-1 accuracy (percent) on the test loader."""
    model.eval()
    num_correct = 0
    with torch.no_grad():
        for inputs, labels in tqdm(iterable=test_loader, desc='Test'):
            inputs = inputs.to(device)
            labels = labels.to(device)
            predictions = model(inputs).argmax(dim=1, keepdim=True)
            num_correct += predictions.eq(labels.view_as(predictions)).sum().item()
    acc = 100 * num_correct / len(test_loader.dataset)
    print('Accuracy: {}%\n'.format(acc))
    return acc
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='PyTorch Iterative Example for model comporession')
    parser.add_argument('--pretrain-epochs', type=int, default=10,
                        help='number of epochs to pretrain the model')
    parser.add_argument('--pruning-algo', type=str, default='l1',
                        choices=['level', 'l1', 'l2', 'fpgm', 'slim', 'apoz',
                                 'mean_activation', 'taylorfo', 'admm'],
                        help='algorithm to evaluate weights to prune')
    parser.add_argument('--cool-down-rate', type=float, default=0.9,
                        help='Cool down rate of the temperature.')
    args = parser.parse_args()
    # Stage 1: pre-train a VGG model on CIFAR-10 from scratch.
    model = VGG().to(device)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9, weight_decay=5e-4)
    criterion = torch.nn.CrossEntropyLoss()
    # pre-train the model
    for i in range(args.pretrain_epochs):
        trainer(model, optimizer, criterion, i)
    evaluator(model)
    # Stage 2: prune all Conv2d layers to 80% total sparsity via simulated
    # annealing, fine-tuning between iterations.
    config_list = [{'op_types': ['Conv2d'], 'total_sparsity': 0.8}]
    # evaluator in 'SimulatedAnnealingPruner' could not be None.
    pruner = SimulatedAnnealingPruner(model, config_list, pruning_algorithm=args.pruning_algo,
                                      evaluator=evaluator, cool_down_rate=args.cool_down_rate, finetuner=finetuner)
    pruner.compress()
    # Stage 3: keep the best pruned model found and report its accuracy.
    _, model, masks, _, _ = pruner.get_best_result()
    evaluator(model)
| 36.745455 | 115 | 0.657595 |
1571af9f8367b4a680241694cbd3e0e82bc0df2c | 26,911 | py | Python | keystone/token/providers/fernet/token_formatters.py | trananhkma/keystone | 1d34614121cbe694bfd107f1ce7a9c402d6a30b4 | [
"Apache-2.0"
] | null | null | null | keystone/token/providers/fernet/token_formatters.py | trananhkma/keystone | 1d34614121cbe694bfd107f1ce7a9c402d6a30b4 | [
"Apache-2.0"
] | null | null | null | keystone/token/providers/fernet/token_formatters.py | trananhkma/keystone | 1d34614121cbe694bfd107f1ce7a9c402d6a30b4 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import datetime
import struct
import uuid
from cryptography import fernet
import msgpack
from oslo_log import log
from oslo_utils import timeutils
import six
from six.moves import map
from six.moves import urllib
from keystone.auth import plugins as auth_plugins
from keystone.common import utils as ks_utils
import keystone.conf
from keystone import exception
from keystone.i18n import _, _LI
from keystone.token import provider
from keystone.token.providers.fernet import utils
CONF = keystone.conf.CONF
LOG = log.getLogger(__name__)
# Fernet byte indexes as computed by pypi/keyless_fernet and defined in
# https://github.com/fernet/spec
# Byte 0 of a decoded token is the format version; bytes 1-8 hold the
# big-endian 64-bit creation timestamp sliced out by creation_time().
TIMESTAMP_START = 1
TIMESTAMP_END = 9
class TokenFormatter(object):
    """Packs and unpacks payloads into tokens for transport."""
    @property
    def crypto(self):
        """Return a cryptography instance.
        You can extend this class with a custom crypto @property to provide
        your own token encoding / decoding. For example, using a different
        cryptography library (e.g. ``python-keyczar``) or to meet arbitrary
        security requirements.
        This @property just needs to return an object that implements
        ``encrypt(plaintext)`` and ``decrypt(ciphertext)``.
        """
        keys = utils.load_keys()
        if not keys:
            raise exception.KeysNotFound()
        # MultiFernet encrypts with the first key but can decrypt with any
        # key in the list, which is what enables key rotation.
        fernet_instances = [fernet.Fernet(key) for key in keys]
        return fernet.MultiFernet(fernet_instances)
    def pack(self, payload):
        """Pack a payload for transport as a token.
        :type payload: six.binary_type
        :rtype: six.text_type
        """
        # base64 padding (if any) is not URL-safe
        return self.crypto.encrypt(payload).rstrip(b'=').decode('utf-8')
    def unpack(self, token):
        """Unpack a token, and validate the payload.
        :type token: six.text_type
        :rtype: six.binary_type
        """
        # TODO(lbragstad): Restore padding on token before decoding it.
        # Initially in Kilo, Fernet tokens were returned to the user with
        # padding appended to the token. Later in Liberty this padding was
        # removed and restored in the Fernet provider. The following if
        # statement ensures that we can validate tokens with and without token
        # padding, in the event of an upgrade and the tokens that are issued
        # throughout the upgrade. Remove this if statement when Mitaka opens
        # for development and exclusively use the restore_padding() class
        # method.
        if token.endswith('%3D'):
            token = urllib.parse.unquote(token)
        else:
            token = TokenFormatter.restore_padding(token)
        try:
            return self.crypto.decrypt(token.encode('utf-8'))
        except fernet.InvalidToken:
            raise exception.ValidationError(
                _('This is not a recognized Fernet token %s') % token)
    @classmethod
    def restore_padding(cls, token):
        """Restore padding based on token size.
        :param token: token to restore padding on
        :type token: six.text_type
        :returns: token with correct padding
        """
        # Re-inflate the padding
        # base64 strings are padded to a multiple of 4 characters with '='.
        mod_returned = len(token) % 4
        if mod_returned:
            missing_padding = 4 - mod_returned
            token += '=' * missing_padding
        return token
    @classmethod
    def creation_time(cls, fernet_token):
        """Return the creation time of a valid Fernet token.
        :type fernet_token: six.text_type
        """
        fernet_token = TokenFormatter.restore_padding(fernet_token)
        # fernet_token is six.text_type
        # Fernet tokens are base64 encoded, so we need to unpack them first
        # urlsafe_b64decode() requires six.binary_type
        token_bytes = base64.urlsafe_b64decode(fernet_token.encode('utf-8'))
        # slice into the byte array to get just the timestamp
        timestamp_bytes = token_bytes[TIMESTAMP_START:TIMESTAMP_END]
        # convert those bytes to an integer
        # (it's a 64-bit "unsigned long long int" in C)
        timestamp_int = struct.unpack(">Q", timestamp_bytes)[0]
        # and with an integer, it's trivial to produce a datetime object
        created_at = datetime.datetime.utcfromtimestamp(timestamp_int)
        return created_at
    def create_token(self, user_id, expires_at, audit_ids, methods=None,
                     domain_id=None, project_id=None, trust_id=None,
                     federated_info=None, access_token_id=None):
        """Given a set of payload attributes, generate a Fernet token."""
        # The first payload class whose create_arguments_apply() accepts the
        # arguments wins; PAYLOAD_CLASSES is ordered most-specific first with
        # UnscopedPayload as the catch-all, so the loop always breaks.
        for payload_class in PAYLOAD_CLASSES:
            if payload_class.create_arguments_apply(
                    project_id=project_id, domain_id=domain_id,
                    trust_id=trust_id, federated_info=federated_info,
                    access_token_id=access_token_id):
                break
        version = payload_class.version
        payload = payload_class.assemble(
            user_id, methods, project_id, domain_id, expires_at, audit_ids,
            trust_id, federated_info, access_token_id
        )
        # The version number is prepended so validate_token() can pick the
        # matching payload class when the token comes back.
        versioned_payload = (version,) + payload
        serialized_payload = msgpack.packb(versioned_payload)
        token = self.pack(serialized_payload)
        # NOTE(lbragstad): We should warn against Fernet tokens that are over
        # 255 characters in length. This is mostly due to persisting the tokens
        # in a backend store of some kind that might have a limit of 255
        # characters. Even though Keystone isn't storing a Fernet token
        # anywhere, we can't say it isn't being stored somewhere else with
        # those kind of backend constraints.
        if len(token) > 255:
            LOG.info(_LI('Fernet token created with length of %d '
                         'characters, which exceeds 255 characters'),
                     len(token))
        return token
    def validate_token(self, token):
        """Validate a Fernet token and returns the payload attributes.
        :type token: six.text_type
        """
        serialized_payload = self.unpack(token)
        versioned_payload = msgpack.unpackb(serialized_payload)
        version, payload = versioned_payload[0], versioned_payload[1:]
        # for/else: the else clause runs only when no payload class matched
        # the version prefix, i.e. the token format is unrecognized.
        for payload_class in PAYLOAD_CLASSES:
            if version == payload_class.version:
                (user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id) = (
                    payload_class.disassemble(payload))
                break
        else:
            # If the token_format is not recognized, raise ValidationError.
            raise exception.ValidationError(_(
                'This is not a recognized Fernet payload version: %s') %
                version)
        # rather than appearing in the payload, the creation time is encoded
        # into the token format itself
        created_at = TokenFormatter.creation_time(token)
        created_at = ks_utils.isotime(at=created_at, subsecond=True)
        expires_at = timeutils.parse_isotime(expires_at)
        expires_at = ks_utils.isotime(at=expires_at, subsecond=True)
        return (user_id, methods, audit_ids, domain_id, project_id, trust_id,
                federated_info, access_token_id, created_at, expires_at)
class BasePayload(object):
    """Common machinery shared by every Fernet payload variant."""
    # Each concrete payload variant declares a unique format version number.
    version = None
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        """Tell whether this payload variant handles the given arguments.

        :returns: True if this payload class should be used for the token
            being created, False otherwise.
        :rtype: bool
        """
        raise NotImplementedError()
    @classmethod
    def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id):
        """Build the serializable payload tuple for a token.

        :param user_id: identifier of the user in the token request
        :param methods: list of authentication methods used
        :param project_id: ID of the project to scope to
        :param domain_id: ID of the domain to scope to
        :param expires_at: datetime of the token's expiration
        :param audit_ids: list of the token's audit IDs
        :param trust_id: ID of the trust in effect
        :param federated_info: dict with group IDs, identity provider ID,
            protocol ID and federated domain ID
        :param access_token_id: ID of the secret in OAuth1 authentication
        :returns: the payload tuple for this token
        """
        raise NotImplementedError()
    @classmethod
    def disassemble(cls, payload):
        """Recover the component data from a payload tuple.

        Returns ``(user_id, methods, project_id, domain_id, expires_at_str,
        audit_ids, trust_id, federated_info, access_token_id)``; fields that
        do not apply to the variant are None.

        :param payload: this variant of payload
        :returns: a tuple of the payload's component data
        """
        raise NotImplementedError()
    @classmethod
    def convert_uuid_hex_to_bytes(cls, uuid_string):
        """Compress a UUID hex string into its 16-byte form.

        :param uuid_string: uuid string to compress to bytes
        :returns: a byte representation of the uuid
        """
        return uuid.UUID(uuid_string).bytes
    @classmethod
    def convert_uuid_bytes_to_hex(cls, uuid_byte_string):
        """Expand a 16-byte UUID back into its hex string form.

        :param uuid_byte_string: uuid bytes to expand
        :returns: uuid hex formatted string
        """
        return uuid.UUID(bytes=uuid_byte_string).hex
    @classmethod
    def _convert_time_string_to_float(cls, time_string):
        """Turn an ISO 8601 time string into a UTC epoch timestamp.

        :param time_string: time formatted string
        :returns: a timestamp as a float
        """
        parsed = timeutils.parse_isotime(time_string)
        epoch = datetime.datetime.utcfromtimestamp(0)
        return (timeutils.normalize_time(parsed) - epoch).total_seconds()
    @classmethod
    def _convert_float_to_time_string(cls, time_float):
        """Turn an epoch timestamp back into an ISO 8601 time string.

        :param time_float: number representing the timestamp
        :returns: a time formatted string
        """
        as_datetime = datetime.datetime.utcfromtimestamp(time_float)
        return ks_utils.isotime(as_datetime, subsecond=True)
    @classmethod
    def attempt_convert_uuid_hex_to_bytes(cls, value):
        """Compress *value* to UUID bytes when possible.

        :param value: value to attempt to compress
        :returns: ``(True, bytes)`` when the value was a UUID hex string,
            otherwise ``(False, original value)`` — e.g. federated user ids
            are not necessarily UUIDs.
        """
        try:
            return (True, cls.convert_uuid_hex_to_bytes(value))
        except ValueError:
            return (False, value)
class UnscopedPayload(BasePayload):
    """Payload for tokens that carry no scope at all (the fallback variant)."""
    version = 0
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        # Catch-all: applies whenever no scoped variant matched earlier in
        # PAYLOAD_CLASSES.
        return True
    @classmethod
    def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id):
        """Pack the unscoped token fields into a compact tuple."""
        packed_user = cls.attempt_convert_uuid_hex_to_bytes(user_id)
        method_int = auth_plugins.convert_method_list_to_integer(methods)
        expiry = cls._convert_time_string_to_float(expires_at)
        packed_audits = [provider.random_urlsafe_str_to_bytes(audit_id)
                         for audit_id in audit_ids]
        return (packed_user, method_int, expiry, packed_audits)
    @classmethod
    def disassemble(cls, payload):
        """Recover the unscoped token fields from the packed tuple."""
        is_bytes, user_id = payload[0]
        if is_bytes:
            user_id = cls.convert_uuid_bytes_to_hex(user_id)
        methods = auth_plugins.convert_integer_to_method_list(payload[1])
        expires_at_str = cls._convert_float_to_time_string(payload[2])
        audit_ids = [provider.base64_encode(audit_id)
                     for audit_id in payload[3]]
        # Unscoped tokens carry none of the scope-related fields.
        return (user_id, methods, None, None, expires_at_str,
                audit_ids, None, None, None)
class DomainScopedPayload(BasePayload):
    """Payload for tokens scoped to a domain (version 1)."""
    version = 1
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        return kwargs['domain_id']
    @classmethod
    def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id):
        """Pack user, methods, domain scope, expiry and audit ids."""
        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
        methods = auth_plugins.convert_method_list_to_integer(methods)
        try:
            b_domain_id = cls.convert_uuid_hex_to_bytes(domain_id)
        except ValueError:
            # the default domain ID is configurable, and probably isn't a UUID
            # so it is stored uncompressed; anything else non-UUID is an error.
            if domain_id == CONF.identity.default_domain_id:
                b_domain_id = domain_id
            else:
                raise
        expires_at_int = cls._convert_time_string_to_float(expires_at)
        b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
                               audit_ids))
        return (b_user_id, methods, b_domain_id, expires_at_int, b_audit_ids)
    @classmethod
    def disassemble(cls, payload):
        """Recover the domain-scoped token fields from the packed tuple."""
        (is_stored_as_bytes, user_id) = payload[0]
        if is_stored_as_bytes:
            user_id = cls.convert_uuid_bytes_to_hex(user_id)
        methods = auth_plugins.convert_integer_to_method_list(payload[1])
        try:
            domain_id = cls.convert_uuid_bytes_to_hex(payload[2])
        except ValueError:
            # the default domain ID is configurable, and probably isn't a UUID
            # — mirror the fallback used in assemble() above.
            if payload[2] == CONF.identity.default_domain_id:
                domain_id = payload[2]
            else:
                raise
        expires_at_str = cls._convert_float_to_time_string(payload[3])
        audit_ids = list(map(provider.base64_encode, payload[4]))
        # Fields that do not apply to a domain-scoped token.
        project_id = None
        trust_id = None
        federated_info = None
        access_token_id = None
        return (user_id, methods, project_id, domain_id, expires_at_str,
                audit_ids, trust_id, federated_info, access_token_id)
class ProjectScopedPayload(BasePayload):
    """Payload for tokens scoped to a project (version 2)."""
    version = 2
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        return kwargs['project_id']
    @classmethod
    def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id):
        """Pack user, methods, project scope, expiry and audit ids."""
        packed_user = cls.attempt_convert_uuid_hex_to_bytes(user_id)
        method_int = auth_plugins.convert_method_list_to_integer(methods)
        packed_project = cls.attempt_convert_uuid_hex_to_bytes(project_id)
        expiry = cls._convert_time_string_to_float(expires_at)
        packed_audits = [provider.random_urlsafe_str_to_bytes(audit_id)
                         for audit_id in audit_ids]
        return (packed_user, method_int, packed_project, expiry,
                packed_audits)
    @classmethod
    def disassemble(cls, payload):
        """Recover the project-scoped token fields from the packed tuple."""
        is_bytes, user_id = payload[0]
        if is_bytes:
            user_id = cls.convert_uuid_bytes_to_hex(user_id)
        methods = auth_plugins.convert_integer_to_method_list(payload[1])
        is_bytes, project_id = payload[2]
        if is_bytes:
            project_id = cls.convert_uuid_bytes_to_hex(project_id)
        expires_at_str = cls._convert_float_to_time_string(payload[3])
        audit_ids = [provider.base64_encode(audit_id)
                     for audit_id in payload[4]]
        # Domain, trust, federation and OAuth fields do not apply here.
        return (user_id, methods, project_id, None, expires_at_str,
                audit_ids, None, None, None)
class TrustScopedPayload(BasePayload):
    """Payload for project-scoped tokens issued via a trust (version 3)."""
    version = 3
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        return kwargs['trust_id']
    @classmethod
    def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id):
        """Pack user, methods, project scope, expiry, audits and trust id."""
        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
        methods = auth_plugins.convert_method_list_to_integer(methods)
        b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id)
        # Trust IDs are always UUIDs, so compression cannot fail here.
        b_trust_id = cls.convert_uuid_hex_to_bytes(trust_id)
        expires_at_int = cls._convert_time_string_to_float(expires_at)
        b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
                               audit_ids))
        return (b_user_id, methods, b_project_id, expires_at_int, b_audit_ids,
                b_trust_id)
    @classmethod
    def disassemble(cls, payload):
        """Recover the trust-scoped token fields from the packed tuple."""
        (is_stored_as_bytes, user_id) = payload[0]
        if is_stored_as_bytes:
            user_id = cls.convert_uuid_bytes_to_hex(user_id)
        methods = auth_plugins.convert_integer_to_method_list(payload[1])
        (is_stored_as_bytes, project_id) = payload[2]
        if is_stored_as_bytes:
            project_id = cls.convert_uuid_bytes_to_hex(project_id)
        expires_at_str = cls._convert_float_to_time_string(payload[3])
        audit_ids = list(map(provider.base64_encode, payload[4]))
        trust_id = cls.convert_uuid_bytes_to_hex(payload[5])
        # Fields that do not apply to a trust-scoped token.
        domain_id = None
        federated_info = None
        access_token_id = None
        return (user_id, methods, project_id, domain_id, expires_at_str,
                audit_ids, trust_id, federated_info, access_token_id)
class FederatedUnscopedPayload(BasePayload):
    """Payload for unscoped federated tokens (version 4)."""
    version = 4
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        return kwargs['federated_info']
    @classmethod
    def pack_group_id(cls, group_dict):
        # Only the 'id' value is kept; the dict shape is restored on unpack.
        return cls.attempt_convert_uuid_hex_to_bytes(group_dict['id'])
    @classmethod
    def unpack_group_id(cls, group_id_in_bytes):
        (is_stored_as_bytes, group_id) = group_id_in_bytes
        if is_stored_as_bytes:
            group_id = cls.convert_uuid_bytes_to_hex(group_id)
        return {'id': group_id}
    @classmethod
    def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id):
        """Pack user, methods, groups, idp/protocol, expiry and audits."""
        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
        methods = auth_plugins.convert_method_list_to_integer(methods)
        b_group_ids = list(map(cls.pack_group_id,
                               federated_info['group_ids']))
        b_idp_id = cls.attempt_convert_uuid_hex_to_bytes(
            federated_info['idp_id'])
        protocol_id = federated_info['protocol_id']
        expires_at_int = cls._convert_time_string_to_float(expires_at)
        b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
                               audit_ids))
        return (b_user_id, methods, b_group_ids, b_idp_id, protocol_id,
                expires_at_int, b_audit_ids)
    @classmethod
    def disassemble(cls, payload):
        """Recover the federated unscoped token fields from the packed tuple."""
        (is_stored_as_bytes, user_id) = payload[0]
        if is_stored_as_bytes:
            user_id = cls.convert_uuid_bytes_to_hex(user_id)
        methods = auth_plugins.convert_integer_to_method_list(payload[1])
        group_ids = list(map(cls.unpack_group_id, payload[2]))
        (is_stored_as_bytes, idp_id) = payload[3]
        if is_stored_as_bytes:
            idp_id = cls.convert_uuid_bytes_to_hex(idp_id)
        else:
            # msgpack hands strings back as bytes on Python 3; decode so
            # callers always receive text.
            idp_id = idp_id.decode('utf-8')
        protocol_id = payload[4]
        if isinstance(protocol_id, six.binary_type):
            protocol_id = protocol_id.decode('utf-8')
        expires_at_str = cls._convert_float_to_time_string(payload[5])
        audit_ids = list(map(provider.base64_encode, payload[6]))
        federated_info = dict(group_ids=group_ids, idp_id=idp_id,
                              protocol_id=protocol_id)
        # Fields that do not apply to an unscoped federated token.
        project_id = None
        domain_id = None
        trust_id = None
        access_token_id = None
        return (user_id, methods, project_id, domain_id, expires_at_str,
                audit_ids, trust_id, federated_info, access_token_id)
class FederatedScopedPayload(FederatedUnscopedPayload):
    """Shared implementation for project- and domain-scoped federated tokens.

    Concrete subclasses only set ``version``; the single scope slot in the
    payload holds either a project id or a domain id depending on which
    subclass is in use.
    """
    version = None
    @classmethod
    def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id):
        """Pack user, methods, scope, groups, idp/protocol, expiry, audits."""
        b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id)
        methods = auth_plugins.convert_method_list_to_integer(methods)
        # Exactly one of project_id/domain_id is set; store it in one slot.
        b_scope_id = cls.attempt_convert_uuid_hex_to_bytes(
            project_id or domain_id)
        b_group_ids = list(map(cls.pack_group_id,
                               federated_info['group_ids']))
        b_idp_id = cls.attempt_convert_uuid_hex_to_bytes(
            federated_info['idp_id'])
        protocol_id = federated_info['protocol_id']
        expires_at_int = cls._convert_time_string_to_float(expires_at)
        b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes,
                               audit_ids))
        return (b_user_id, methods, b_scope_id, b_group_ids, b_idp_id,
                protocol_id, expires_at_int, b_audit_ids)
    @classmethod
    def disassemble(cls, payload):
        """Recover the federated scoped token fields from the packed tuple."""
        (is_stored_as_bytes, user_id) = payload[0]
        if is_stored_as_bytes:
            user_id = cls.convert_uuid_bytes_to_hex(user_id)
        methods = auth_plugins.convert_integer_to_method_list(payload[1])
        (is_stored_as_bytes, scope_id) = payload[2]
        if is_stored_as_bytes:
            scope_id = cls.convert_uuid_bytes_to_hex(scope_id)
        # Map the single scope slot back to project or domain based on the
        # concrete payload version.
        project_id = (
            scope_id
            if cls.version == FederatedProjectScopedPayload.version else None)
        domain_id = (
            scope_id
            if cls.version == FederatedDomainScopedPayload.version else None)
        group_ids = list(map(cls.unpack_group_id, payload[3]))
        (is_stored_as_bytes, idp_id) = payload[4]
        if is_stored_as_bytes:
            idp_id = cls.convert_uuid_bytes_to_hex(idp_id)
        else:
            # msgpack hands strings back as bytes on Python 3; decode so
            # callers always receive text — consistent with
            # FederatedUnscopedPayload.disassemble().
            idp_id = idp_id.decode('utf-8')
        protocol_id = payload[5]
        if isinstance(protocol_id, six.binary_type):
            protocol_id = protocol_id.decode('utf-8')
        expires_at_str = cls._convert_float_to_time_string(payload[6])
        audit_ids = list(map(provider.base64_encode, payload[7]))
        federated_info = dict(idp_id=idp_id, protocol_id=protocol_id,
                              group_ids=group_ids)
        trust_id = None
        access_token_id = None
        return (user_id, methods, project_id, domain_id, expires_at_str,
                audit_ids, trust_id, federated_info, access_token_id)
class FederatedProjectScopedPayload(FederatedScopedPayload):
    """Federated token scoped to a project (version 5)."""
    version = 5
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        # Requires both a project scope and federation info in the request.
        return kwargs['project_id'] and kwargs['federated_info']
class FederatedDomainScopedPayload(FederatedScopedPayload):
    """Federated token scoped to a domain (version 6)."""
    version = 6
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        # Requires both a domain scope and federation info in the request.
        return kwargs['domain_id'] and kwargs['federated_info']
class OauthScopedPayload(BasePayload):
    """Payload for project-scoped tokens issued through OAuth1 (version 7)."""
    version = 7
    @classmethod
    def create_arguments_apply(cls, **kwargs):
        return kwargs['access_token_id']
    @classmethod
    def assemble(cls, user_id, methods, project_id, domain_id, expires_at,
                 audit_ids, trust_id, federated_info, access_token_id):
        """Pack user, methods, project, access token, expiry and audit ids."""
        packed_user = cls.attempt_convert_uuid_hex_to_bytes(user_id)
        method_int = auth_plugins.convert_method_list_to_integer(methods)
        packed_project = cls.attempt_convert_uuid_hex_to_bytes(project_id)
        expiry = cls._convert_time_string_to_float(expires_at)
        packed_audits = [provider.random_urlsafe_str_to_bytes(audit_id)
                         for audit_id in audit_ids]
        packed_access = cls.attempt_convert_uuid_hex_to_bytes(
            access_token_id)
        return (packed_user, method_int, packed_project, packed_access,
                expiry, packed_audits)
    @classmethod
    def disassemble(cls, payload):
        """Recover the OAuth-scoped token fields from the packed tuple."""
        is_bytes, user_id = payload[0]
        if is_bytes:
            user_id = cls.convert_uuid_bytes_to_hex(user_id)
        methods = auth_plugins.convert_integer_to_method_list(payload[1])
        is_bytes, project_id = payload[2]
        if is_bytes:
            project_id = cls.convert_uuid_bytes_to_hex(project_id)
        is_bytes, access_token_id = payload[3]
        if is_bytes:
            access_token_id = cls.convert_uuid_bytes_to_hex(access_token_id)
        expires_at_str = cls._convert_float_to_time_string(payload[4])
        audit_ids = [provider.base64_encode(audit_id)
                     for audit_id in payload[5]]
        # Domain, trust and federation fields do not apply to OAuth tokens.
        return (user_id, methods, project_id, None, expires_at_str,
                audit_ids, None, None, access_token_id)
# For now, the order of the classes in the following list is important. This
# is because the way they test that the payload applies to them in
# the create_arguments_apply method requires that the previous ones rejected
# the payload arguments. For example, UnscopedPayload must be last since it's
# the catch-all after all the other payloads have been checked.
# TODO(blk-u): Clean up the create_arguments_apply methods so that they don't
# depend on the previous classes then these can be in any order.
# TokenFormatter.create_token() picks the FIRST class here whose
# create_arguments_apply() returns truthy.
PAYLOAD_CLASSES = [
    OauthScopedPayload,
    TrustScopedPayload,
    FederatedProjectScopedPayload,
    FederatedDomainScopedPayload,
    FederatedUnscopedPayload,
    ProjectScopedPayload,
    DomainScopedPayload,
    UnscopedPayload,
]
| 39.401171 | 79 | 0.673554 |
1898bf65982781baf51df951598614b614156a8f | 12,361 | py | Python | tools/unit-test-app/tools/UnitTestParser.py | ulfalizer/esp-idf-1 | 6835bfc741bf15e98fb7971293913f770df6081f | [
"Apache-2.0"
] | 2 | 2018-06-27T02:28:03.000Z | 2020-12-08T19:33:44.000Z | tools/unit-test-app/tools/UnitTestParser.py | ulfalizer/esp-idf-1 | 6835bfc741bf15e98fb7971293913f770df6081f | [
"Apache-2.0"
] | 3 | 2020-01-15T16:48:07.000Z | 2020-12-08T14:51:19.000Z | tools/unit-test-app/tools/UnitTestParser.py | ulfalizer/esp-idf-1 | 6835bfc741bf15e98fb7971293913f770df6081f | [
"Apache-2.0"
] | 7 | 2019-11-06T01:29:28.000Z | 2021-08-21T09:51:21.000Z | import yaml
import os
import re
import shutil
import subprocess
import hashlib
from copy import deepcopy
import CreateSectionTable
# Template for one test-case entry in the generated TestCaseAll.yml; the
# fields parsed from the ELF symbol table and tag string overwrite these
# defaults for each case.
TEST_CASE_PATTERN = {
    "initial condition": "UTINIT1",
    "SDK": "ESP32_IDF",
    "level": "Unit",
    "execution time": 0,
    "auto test": "Yes",
    "category": "Function",
    "test point 1": "basic function",
    "version": "v1 (2016-12-06)",
    "test environment": "UT_T1_1",
    "reset": "",
    "expected result": "1. set succeed",
    "cmd set": "test_unit_test_case",
    "Test App": "UT",
}
class Parser(object):
    """ parse unit test cases from build files and create files for test bench """
    # Matches one tag body: "type", "type=" or "type=value".
    TAG_PATTERN = re.compile("([^=]+)(=)?(.+)?")
    # Matches each bracketed tag "[...]" in a case description string.
    DESCRIPTION_PATTERN = re.compile("\[([^]\[]+)\]")
    # file path (relative to idf path)
    TAG_DEF_FILE = os.path.join("tools", "unit-test-app", "tools", "TagDefinition.yml")
    MODULE_DEF_FILE = os.path.join("tools", "unit-test-app", "tools", "ModuleDefinition.yml")
    CONFIG_DEPENDENCY_FILE = os.path.join("tools", "unit-test-app", "tools", "ConfigDependency.yml")
    MODULE_ARTIFACT_FILE = os.path.join("components", "idf_test", "ModuleDefinition.yml")
    TEST_CASE_FILE = os.path.join("components", "idf_test", "unit_test", "TestCaseAll.yml")
    UT_BIN_FOLDER = os.path.join("tools", "unit-test-app", "output")
    # Names of the built unit-test app artifacts inside each config folder.
    ELF_FILE = "unit-test-app.elf"
    SDKCONFIG_FILE = "sdkconfig"
def __init__(self, idf_path=os.getenv("IDF_PATH")):
self.test_env_tags = {}
self.unit_jobs = {}
self.file_name_cache = {}
self.idf_path = idf_path
self.tag_def = yaml.load(open(os.path.join(idf_path, self.TAG_DEF_FILE), "r"))
self.module_map = yaml.load(open(os.path.join(idf_path, self.MODULE_DEF_FILE), "r"))
self.config_dependency = yaml.load(open(os.path.join(idf_path, self.CONFIG_DEPENDENCY_FILE), "r"))
# used to check if duplicated test case names
self.test_case_names = set()
self.parsing_errors = []
    def parse_test_cases_for_one_config(self, config_output_folder, config_name):
        """
        parse test cases from elf and save test cases need to be executed to unit test folder
        :param config_output_folder: build folder of this config
        :param config_name: built unit test config name
        :returns: list of test case dicts that are ready to be executed by CI
        """
        elf_file = os.path.join(config_output_folder, self.ELF_FILE)
        # Dump the symbol table (filtered to test_desc entries) and the raw
        # section contents to temp files for offline parsing.
        subprocess.check_output('xtensa-esp32-elf-objdump -t {} | grep test_desc > case_address.tmp'.format(elf_file),
                                shell=True)
        subprocess.check_output('xtensa-esp32-elf-objdump -s {} > section_table.tmp'.format(elf_file), shell=True)
        table = CreateSectionTable.SectionTable("section_table.tmp")
        tags = self.parse_tags(os.path.join(config_output_folder, self.SDKCONFIG_FILE))
        test_cases = []
        with open("case_address.tmp", "r") as f:
            for line in f:
                # process symbol table like: "3ffb4310 l     O .dram0.data	00000018 test_desc_33$5010"
                line = line.split()
                test_addr = int(line[0], 16)
                section = line[3]
                # Read pointers out of the in-memory test_desc struct; offsets
                # 0/4/12/20 hold name, description, file name and function
                # count (offsets 8 and 16 are presumably other struct fields —
                # confirm against the test_desc definition in unity).
                name_addr = table.get_unsigned_int(section, test_addr, 4)
                desc_addr = table.get_unsigned_int(section, test_addr + 4, 4)
                file_name_addr = table.get_unsigned_int(section, test_addr + 12, 4)
                function_count = table.get_unsigned_int(section, test_addr+20, 4)
                name = table.get_string("any", name_addr)
                desc = table.get_string("any", desc_addr)
                file_name = table.get_string("any", file_name_addr)
                tc = self.parse_one_test_case(name, desc, file_name, config_name, tags)
                # check if duplicated case names
                # we need to use it to select case,
                # if duplicated IDs, Unity could select incorrect case to run
                # and we need to check all cases no matter if it's going te be executed by CI
                # also add app_name here, we allow same case for different apps
                if (tc["summary"] + config_name) in self.test_case_names:
                    self.parsing_errors.append("duplicated test case ID: " + tc["summary"])
                else:
                    self.test_case_names.add(tc["summary"] + config_name)
                if tc["CI ready"] == "Yes":
                    # update test env list and the cases of same env list
                    if tc["test environment"] in self.test_env_tags:
                        self.test_env_tags[tc["test environment"]].append(tc["ID"])
                    else:
                        self.test_env_tags.update({tc["test environment"]: [tc["ID"]]})
                    if function_count > 1:
                        tc.update({"child case num": function_count})
                    # only add cases need to be executed
                    test_cases.append(tc)
        os.remove("section_table.tmp")
        os.remove("case_address.tmp")
        return test_cases
def parse_case_properities(self, tags_raw):
    """Parse a test case's tags (properties) into a property dict.

    Rules:
    * the first tag is always the test case group (module); it is mandatory
    * remaining tags have the form [type=value]
    * [type] alone selects that type's default value
    * a type that never appears gets its omitted value
    (default and omitted values are defined in TagDefinition.yml)

    :param tags_raw: raw tag string
    :return: tag dict
    """
    tags = self.DESCRIPTION_PATTERN.findall(tags_raw)
    assert len(tags) > 0
    # seed every known tag type with its "omitted" value
    parsed = {key: spec["omitted"] for key, spec in self.tag_def.items()}
    # first tag is the module; unknown modules fall back to "misc"
    parsed["module"] = tags[0] if tags[0] in self.module_map else "misc"
    for raw_tag in tags[1:]:
        match = self.TAG_PATTERN.search(raw_tag)
        assert match is not None
        tag_type = match.group(1)
        tag_value = match.group(3)
        if match.group(2) == "=" and tag_value is None:
            # "[type=]" explicitly assigns an empty string
            tag_value = ""
        if tag_type not in parsed:
            # silently skip tag types that are not defined
            continue
        if tag_value is None:
            parsed[tag_type] = self.tag_def[tag_type]["default"]
        else:
            parsed[tag_type] = tag_value
    return parsed
def parse_tags(self, sdkconfig_file):
    """Get the runner tags required by one unit test config.

    Some test configs require specific DUTs: e.g. a config that enables
    CONFIG_SPIRAM_SUPPORT needs a WROVER-Kit to run.  The mapping from
    runner tags to sdkconfig lines comes from ConfigDependency.yml.

    :param sdkconfig_file: sdkconfig file of the unit test config
    :return: list of tags required for the runners
    """
    with open(sdkconfig_file, "r") as f:
        config_lines = f.read().splitlines(False)
    # a tag is required whenever its sdkconfig line is present verbatim
    return [tag for tag in self.config_dependency
            if self.config_dependency[tag] in config_lines]
def parse_one_test_case(self, name, description, file_name, config_name, tags):
    """Parse one test case into the test-bench dict format.

    :param name: test case name (summary)
    :param description: test case description (tag string)
    :param file_name: the file that defines this test case
    :param config_name: built unit test app name
    :param tags: tags to select runners
    :return: parsed test case (dict)
    """
    prop = self.parse_case_properities(description)
    idf_path = os.getenv("IDF_PATH")
    # hash a path relative to IDF_PATH so the ID stays consistent
    # across different checkouts of the repository
    relative_path = os.path.relpath(file_name, idf_path)
    file_name_hash = int(hashlib.sha256(relative_path).hexdigest(), base=16) % 1000
    # count cases coming from the same file to keep generated IDs unique
    self.file_name_cache[file_name_hash] = self.file_name_cache.get(file_name_hash, 0) + 1
    module_info = self.module_map[prop["module"]]
    tc_id = "UT_%s_%s_%03d%02d" % (module_info['module abbr'],
                                   module_info['sub module abbr'],
                                   file_name_hash,
                                   self.file_name_cache[file_name_hash])
    test_case = deepcopy(TEST_CASE_PATTERN)
    test_case.update({"config": config_name,
                      "module": module_info['module'],
                      "CI ready": "No" if prop["ignore"] == "Yes" else "Yes",
                      "ID": tc_id,
                      "test point 2": prop["module"],
                      "steps": name,
                      "test environment": prop["test_env"],
                      "reset": prop["reset"],
                      "sub module": module_info['sub module'],
                      "summary": name,
                      "multi_device": prop["multi_device"],
                      "multi_stage": prop["multi_stage"],
                      "timeout": int(prop["timeout"]),
                      "tags": tags})
    return test_case
def dump_test_cases(self, test_cases):
    """Write parsed test cases to the YAML file consumed by the test bench.

    :param test_cases: parsed test cases
    """
    output_path = os.path.join(self.idf_path, self.TEST_CASE_FILE)
    with open(output_path, "wb+") as output_file:
        yaml.dump({"test cases": test_cases}, output_file,
                  allow_unicode=True, default_flow_style=False)
def copy_module_def_file(self):
    """Copy the module definition file into the artifact path."""
    shutil.copy(os.path.join(self.idf_path, self.MODULE_DEF_FILE),
                os.path.join(self.idf_path, self.MODULE_ARTIFACT_FILE))
def parse_test_cases(self):
    """Collect test cases from every built unit test app, then dump them.

    Walks each config folder under the unit test binary output folder and
    aggregates the cases parsed from each existing one.
    """
    output_folder = os.path.join(self.idf_path, self.UT_BIN_FOLDER)
    test_cases = []
    for config_name in os.listdir(output_folder):
        config_output_folder = os.path.join(output_folder, config_name)
        if os.path.exists(config_output_folder):
            test_cases.extend(
                self.parse_test_cases_for_one_config(config_output_folder, config_name))
    self.dump_test_cases(test_cases)
def test_parser():
    """Sanity checks for Parser tag parsing (run before real parsing)."""
    parser = Parser()
    # parsing module only, with the module present in the module list
    prop = parser.parse_case_properities("[esp32]")
    assert prop["module"] == "esp32"
    # unknown modules fall back to "misc"
    prop = parser.parse_case_properities("[not_in_list]")
    assert prop["module"] == "misc"
    # parsing a default tag, a tag with assigned value; undefined tag types are dropped
    prop = parser.parse_case_properities("[esp32][ignore][test_env=ABCD][not_support1][not_support2=ABCD]")
    assert prop["ignore"] == "Yes" and prop["test_env"] == "ABCD" \
        and "not_support1" not in prop and "not_supported2" not in prop
    # parsing omitted value
    prop = parser.parse_case_properities("[esp32]")
    assert prop["ignore"] == "No" and prop["test_env"] == "UT_T1_1"
    # a description without any tag must raise AssertionError.
    # NOTE: the previous form ("assert False" inside the try block) could
    # never fail: the except clause swallowed the AssertionError raised by
    # the assert itself, so a non-raising parse went undetected.
    raised = False
    try:
        parser.parse_case_properities("abcd")
    except AssertionError:
        raised = True
    assert raised
    # skip invalid data parse, [type=] assigns empty string to type
    prop = parser.parse_case_properities("[esp32]abdc aaaa [ignore=]")
    assert prop["module"] == "esp32" and prop["ignore"] == ""
    # skip mis-paired []
    prop = parser.parse_case_properities("[esp32][[ignore=b]][]][test_env=AAA]]")
    assert prop["module"] == "esp32" and prop["ignore"] == "b" and prop["test_env"] == "AAA"
def main():
    """Parse all unit test cases, dump them for the test bench, and exit
    with a non-zero status if any parsing error was recorded."""
    test_parser()
    idf_path = os.getenv("IDF_PATH")
    parser = Parser(idf_path)
    parser.parse_test_cases()
    parser.copy_module_def_file()
    if parser.parsing_errors:
        for error in parser.parsing_errors:
            # parenthesized form is valid on both Python 2 and Python 3;
            # the original "print error" statement is Python-2 only
            print(error)
        exit(-1)


if __name__ == '__main__':
    main()
| 42.624138 | 118 | 0.598091 |
c8ae76757efa1c9501ba2246a331d69dd128077c | 103,009 | py | Python | google/net/proto2/proto/descriptor_pb2.py | theosp/google_appengine | 9ce87a20684dc99cf5968e6f488c060e1530c159 | [
"Apache-2.0"
] | 3 | 2019-01-28T03:57:20.000Z | 2020-02-20T01:37:33.000Z | google/net/proto2/proto/descriptor_pb2.py | theosp/google_appengine | 9ce87a20684dc99cf5968e6f488c060e1530c159 | [
"Apache-2.0"
] | null | null | null | google/net/proto2/proto/descriptor_pb2.py | theosp/google_appengine | 9ce87a20684dc99cf5968e6f488c060e1530c159 | [
"Apache-2.0"
] | 3 | 2019-01-18T11:33:56.000Z | 2020-01-05T10:44:05.000Z | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
# _b(): identity on Python 2; on Python 3 it encodes text to latin-1 bytes so
# the serialized descriptor payload below can be written as one str literal.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.net.proto2.python.public import descriptor as _descriptor
from google.net.proto2.python.public import message as _message
from google.net.proto2.python.public import reflection as _reflection
from google.net.proto2.python.public import symbol_database as _symbol_database
# Default symbol database; every generated descriptor below registers into it.
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='net/proto2/proto/descriptor.proto',
package='proto2',
serialized_pb=_b('\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\xa5\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t 
\x01(\x05\x12%\n\x07options\x18\x08 \x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 
\x01(\x0b\x32\x15.proto2.StreamOptions\"\xc6\n\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 \x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x34\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10java_mutable_api\x18\x1c \x01(\x08:\x05\x66\x61lse\x12+\n#java_multiple_files_mutable_package\x18\x1d \x01(\t\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x12\x65xperimental_style\x18\x1e \x01(\t\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 \n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe4\x02\n\x0eMessageOptions\x12+\n#experimental_java_message_interface\x18\x04 \x03(\t\x12+\n#experimental_java_builder_interface\x18\x05 \x03(\t\x12+\n#experimental_java_interface_extends\x18\x06 \x03(\t\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x82\x05\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12%\n\x16\x64\x65precated_raw_message\x18\x0c \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"<\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x1c\n\x18\x45XPERIMENTAL_BYTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd3\t\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x1a\n\x12legacy_stream_type\x18\x16 \x01(\t\x12\x1a\n\x12legacy_result_type\x18\x17 \x01(\t\x12(\n\x1clegacy_client_initial_tokens\x18\x18 \x01(\x03:\x02-1\x12(\n\x1clegacy_server_initial_tokens\x18\x19 \x01(\x03:\x02-1\x12^\n\tlog_level\x18\x1b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01\"\x9f\x01\n\x08LogLevel\x12\x0c\n\x08LOG_NONE\x10\x00\x12\x13\n\x0fLOG_HEADER_ONLY\x10\x01\x12/\n+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x10\x02\x12#\n\x1fLOG_HEADER_AND_FILTERED_PAYLOAD\x10\x03\x12\x1a\n\x16LOG_HEADER_AND_PAYLOAD\x10\x04*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe7\x04\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\n \x01(\x08:\x05\x66\x61lse\x12^\n\tlog_level\x18\x0b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB,\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01\xe0\x01\x01')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Generated enum descriptors.  Each EnumDescriptor mirrors an enum declared in
# net/proto2/proto/descriptor.proto; serialized_start/serialized_end are byte
# offsets into the serialized FileDescriptorProto embedded in DESCRIPTOR.
# Each descriptor is registered with the default symbol database immediately
# after creation.  (Machine-generated code -- do not edit by hand.)
# ---------------------------------------------------------------------------

# Field value/wire types (proto2.FieldDescriptorProto.Type).
_FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
  name='Type',
  full_name='proto2.FieldDescriptorProto.Type',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='TYPE_DOUBLE', index=0, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_FLOAT', index=1, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_INT64', index=2, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_UINT64', index=3, number=4,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_INT32', index=4, number=5,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_FIXED64', index=5, number=6,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_FIXED32', index=6, number=7,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_BOOL', index=7, number=8,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_STRING', index=8, number=9,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_GROUP', index=9, number=10,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_MESSAGE', index=10, number=11,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_BYTES', index=11, number=12,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_UINT32', index=12, number=13,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_ENUM', index=13, number=14,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_SFIXED32', index=14, number=15,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_SFIXED64', index=15, number=16,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_SINT32', index=16, number=17,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TYPE_SINT64', index=17, number=18,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=1233,
  serialized_end=1543,
)
_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)

# Field cardinality labels (optional/required/repeated).
_FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
  name='Label',
  full_name='proto2.FieldDescriptorProto.Label',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='LABEL_OPTIONAL', index=0, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LABEL_REQUIRED', index=1, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LABEL_REPEATED', index=2, number=3,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=1545,
  serialized_end=1612,
)
_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)

# File-level proto1 compatibility setting (proto2.FileOptions).
_FILEOPTIONS_COMPATIBILITYLEVEL = _descriptor.EnumDescriptor(
  name='CompatibilityLevel',
  full_name='proto2.FileOptions.CompatibilityLevel',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NO_COMPATIBILITY', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='PROTO1_COMPATIBLE', index=1, number=100,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DEPRECATED_PROTO1_COMPATIBLE', index=2, number=50,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=3492,
  serialized_end=3591,
)
_sym_db.RegisterEnumDescriptor(_FILEOPTIONS_COMPATIBILITYLEVEL)

# Code-generation optimization mode (proto2.FileOptions).
_FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
  name='OptimizeMode',
  full_name='proto2.FileOptions.OptimizeMode',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='SPEED', index=0, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='CODE_SIZE', index=1, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LITE_RUNTIME', index=2, number=3,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=3593,
  serialized_end=3651,
)
_sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)

# C++ representation of string fields (proto2.FieldOptions).
_FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
  name='CType',
  full_name='proto2.FieldOptions.CType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='STRING', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='CORD', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='STRING_PIECE', index=2, number=2,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=4491,
  serialized_end=4538,
)
_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)

# Java representation of fields (proto2.FieldOptions).
_FIELDOPTIONS_JTYPE = _descriptor.EnumDescriptor(
  name='JType',
  full_name='proto2.FieldOptions.JType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NORMAL', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BYTES', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='EXPERIMENTAL_BYTE_BUFFER', index=2, number=2,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=4540,
  serialized_end=4600,
)
_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JTYPE)

# JavaScript representation of 64-bit integer fields (proto2.FieldOptions).
_FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor(
  name='JSType',
  full_name='proto2.FieldOptions.JSType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='JS_NORMAL', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='JS_STRING', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='JS_NUMBER', index=2, number=2,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=4602,
  serialized_end=4655,
)
_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE)

# RPC transport protocol (proto2.MethodOptions).
_METHODOPTIONS_PROTOCOL = _descriptor.EnumDescriptor(
  name='Protocol',
  full_name='proto2.MethodOptions.Protocol',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='TCP', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='UDP', index=1, number=1,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=6009,
  serialized_end=6037,
)
_sym_db.RegisterEnumDescriptor(_METHODOPTIONS_PROTOCOL)

# RPC channel security level (proto2.MethodOptions).
_METHODOPTIONS_SECURITYLEVEL = _descriptor.EnumDescriptor(
  name='SecurityLevel',
  full_name='proto2.MethodOptions.SecurityLevel',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NONE', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='INTEGRITY', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='PRIVACY_AND_INTEGRITY', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='STRONG_PRIVACY_AND_INTEGRITY', index=3, number=3,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=6039,
  serialized_end=6140,
)
_sym_db.RegisterEnumDescriptor(_METHODOPTIONS_SECURITYLEVEL)

# Payload compression format for requests/responses (proto2.MethodOptions).
_METHODOPTIONS_FORMAT = _descriptor.EnumDescriptor(
  name='Format',
  full_name='proto2.MethodOptions.Format',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNCOMPRESSED', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='ZIPPY_COMPRESSED', index=1, number=1,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=6142,
  serialized_end=6190,
)
_sym_db.RegisterEnumDescriptor(_METHODOPTIONS_FORMAT)

# RPC logging verbosity (proto2.MethodOptions).
_METHODOPTIONS_LOGLEVEL = _descriptor.EnumDescriptor(
  name='LogLevel',
  full_name='proto2.MethodOptions.LogLevel',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='LOG_NONE', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LOG_HEADER_ONLY', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LOG_HEADER_AND_FILTERED_PAYLOAD', index=3, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LOG_HEADER_AND_PAYLOAD', index=4, number=4,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=6193,
  serialized_end=6352,
)
_sym_db.RegisterEnumDescriptor(_METHODOPTIONS_LOGLEVEL)

# Unit used for stream flow-control tokens (proto2.StreamOptions).
_STREAMOPTIONS_TOKENUNIT = _descriptor.EnumDescriptor(
  name='TokenUnit',
  full_name='proto2.StreamOptions.TokenUnit',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='MESSAGE', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BYTE', index=1, number=1,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=6936,
  serialized_end=6970,
)
_sym_db.RegisterEnumDescriptor(_STREAMOPTIONS_TOKENUNIT)
# ---------------------------------------------------------------------------
# Generated message descriptors.  Field numbers/types mirror the .proto file;
# serialized_start/serialized_end index into DESCRIPTOR's serialized payload.
# (Machine-generated code -- do not edit by hand.)
# ---------------------------------------------------------------------------

# proto2.FileDescriptorSet: a set of .proto files (repeated FileDescriptorProto).
_FILEDESCRIPTORSET = _descriptor.Descriptor(
  name='FileDescriptorSet',
  full_name='proto2.FileDescriptorSet',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='file', full_name='proto2.FileDescriptorSet.file', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=45,
  serialized_end=107,
)

# proto2.FileDescriptorProto: describes one complete .proto file
# (name, package, dependencies, message/enum/service/extension declarations,
# options, source info, syntax).
_FILEDESCRIPTORPROTO = _descriptor.Descriptor(
  name='FileDescriptorProto',
  full_name='proto2.FileDescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.FileDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='package', full_name='proto2.FileDescriptorProto.package', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='dependency', full_name='proto2.FileDescriptorProto.dependency', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='public_dependency', full_name='proto2.FileDescriptorProto.public_dependency', index=3,
      number=10, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weak_dependency', full_name='proto2.FileDescriptorProto.weak_dependency', index=4,
      number=11, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='message_type', full_name='proto2.FileDescriptorProto.message_type', index=5,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='enum_type', full_name='proto2.FileDescriptorProto.enum_type', index=6,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='service', full_name='proto2.FileDescriptorProto.service', index=7,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='extension', full_name='proto2.FileDescriptorProto.extension', index=8,
      number=7, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='options', full_name='proto2.FileDescriptorProto.options', index=9,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='source_code_info', full_name='proto2.FileDescriptorProto.source_code_info', index=10,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='syntax', full_name='proto2.FileDescriptorProto.syntax', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=110,
  serialized_end=531,
)
# Message descriptor for the nested message proto2.DescriptorProto.ExtensionRange
# (fields: start, end). Protoc-generated data — do not edit by hand; the
# serialized_start/end values are byte offsets into the file's serialized
# descriptor (presumably DESCRIPTOR.serialized_pb — confirm against file header).
_DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
  name='ExtensionRange',
  full_name='proto2.DescriptorProto.ExtensionRange',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='start', full_name='proto2.DescriptorProto.ExtensionRange.start', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='end', full_name='proto2.DescriptorProto.ExtensionRange.end', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=911,
  serialized_end=955,
)
# Message descriptor for proto2.DescriptorProto (fields: name, field, extension,
# nested_type, enum_type, extension_range, oneof_decl, options; nested type:
# ExtensionRange). Protoc-generated data — do not edit by hand.
_DESCRIPTORPROTO = _descriptor.Descriptor(
  name='DescriptorProto',
  full_name='proto2.DescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.DescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='field', full_name='proto2.DescriptorProto.field', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='extension', full_name='proto2.DescriptorProto.extension', index=2,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='nested_type', full_name='proto2.DescriptorProto.nested_type', index=3,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='enum_type', full_name='proto2.DescriptorProto.enum_type', index=4,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='extension_range', full_name='proto2.DescriptorProto.extension_range', index=5,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='oneof_decl', full_name='proto2.DescriptorProto.oneof_decl', index=6,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='options', full_name='proto2.DescriptorProto.options', index=7,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=534,
  serialized_end=955,
)
# Message descriptor for proto2.FieldDescriptorProto (fields: name, number,
# label, type, type_name, extendee, default_value, oneof_index, options).
# References the Type/Label enum descriptors defined earlier in this file.
# Protoc-generated data — do not edit by hand.
_FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
  name='FieldDescriptorProto',
  full_name='proto2.FieldDescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.FieldDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='number', full_name='proto2.FieldDescriptorProto.number', index=1,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='label', full_name='proto2.FieldDescriptorProto.label', index=2,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='type', full_name='proto2.FieldDescriptorProto.type', index=3,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='type_name', full_name='proto2.FieldDescriptorProto.type_name', index=4,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='extendee', full_name='proto2.FieldDescriptorProto.extendee', index=5,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='default_value', full_name='proto2.FieldDescriptorProto.default_value', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='oneof_index', full_name='proto2.FieldDescriptorProto.oneof_index', index=7,
      number=9, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='options', full_name='proto2.FieldDescriptorProto.options', index=8,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _FIELDDESCRIPTORPROTO_TYPE,
    _FIELDDESCRIPTORPROTO_LABEL,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=958,
  serialized_end=1612,
)
# Message descriptor for proto2.OneofDescriptorProto (single field: name).
# Protoc-generated data — do not edit by hand.
_ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
  name='OneofDescriptorProto',
  full_name='proto2.OneofDescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.OneofDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1614,
  serialized_end=1650,
)
# Message descriptor for proto2.EnumDescriptorProto (fields: name, value,
# options). Protoc-generated data — do not edit by hand.
_ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
  name='EnumDescriptorProto',
  full_name='proto2.EnumDescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.EnumDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='proto2.EnumDescriptorProto.value', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='options', full_name='proto2.EnumDescriptorProto.options', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1652,
  serialized_end=1774,
)
# Message descriptor for proto2.EnumValueDescriptorProto (fields: name, number,
# options). Protoc-generated data — do not edit by hand.
_ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
  name='EnumValueDescriptorProto',
  full_name='proto2.EnumValueDescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.EnumValueDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='number', full_name='proto2.EnumValueDescriptorProto.number', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='options', full_name='proto2.EnumValueDescriptorProto.options', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1776,
  serialized_end=1875,
)
# Message descriptor for proto2.ServiceDescriptorProto (fields: name, method,
# stream, options). The 'stream' field is a non-standard extension relative to
# upstream descriptor.proto — presumably a Google-internal streaming RPC
# declaration; confirm against this file's .proto source. Protoc-generated
# data — do not edit by hand.
_SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
  name='ServiceDescriptorProto',
  full_name='proto2.ServiceDescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.ServiceDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='method', full_name='proto2.ServiceDescriptorProto.method', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stream', full_name='proto2.ServiceDescriptorProto.stream', index=2,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='options', full_name='proto2.ServiceDescriptorProto.options', index=3,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1878,
  serialized_end=2051,
)
# Message descriptor for proto2.MethodDescriptorProto (fields: name, input_type,
# output_type, options). Protoc-generated data — do not edit by hand.
_METHODDESCRIPTORPROTO = _descriptor.Descriptor(
  name='MethodDescriptorProto',
  full_name='proto2.MethodDescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.MethodDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input_type', full_name='proto2.MethodDescriptorProto.input_type', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_type', full_name='proto2.MethodDescriptorProto.output_type', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='options', full_name='proto2.MethodDescriptorProto.options', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2053,
  serialized_end=2171,
)
# Message descriptor for proto2.StreamDescriptorProto (fields: name,
# client_message_type, server_message_type, options). Not present in upstream
# descriptor.proto — presumably a Google-internal streaming declaration paired
# with ServiceDescriptorProto.stream; confirm against the .proto source.
# Protoc-generated data — do not edit by hand.
_STREAMDESCRIPTORPROTO = _descriptor.Descriptor(
  name='StreamDescriptorProto',
  full_name='proto2.StreamDescriptorProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.StreamDescriptorProto.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='client_message_type', full_name='proto2.StreamDescriptorProto.client_message_type', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='server_message_type', full_name='proto2.StreamDescriptorProto.server_message_type', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='options', full_name='proto2.StreamDescriptorProto.options', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2174,
  serialized_end=2309,
)
# Message descriptor for proto2.FileOptions: 31 file-level option fields
# (cc_*/java_*/py_* code-generation switches, optimize_for, go_package, etc.)
# plus the standard uninterpreted_option. Extendable in the custom-option
# range (1000, 536870912). Protoc-generated data — do not edit by hand.
_FILEOPTIONS = _descriptor.Descriptor(
  name='FileOptions',
  full_name='proto2.FileOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='cc_api_version', full_name='proto2.FileOptions.cc_api_version', index=0,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cc_api_compatibility', full_name='proto2.FileOptions.cc_api_compatibility', index=1,
      number=15, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cc_proto_array_compatible', full_name='proto2.FileOptions.cc_proto_array_compatible', index=2,
      number=22, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cc_utf8_verification', full_name='proto2.FileOptions.cc_utf8_verification', index=3,
      number=24, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cc_proto1_text_format', full_name='proto2.FileOptions.cc_proto1_text_format', index=4,
      number=25, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_package', full_name='proto2.FileOptions.java_package', index=5,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='py_api_version', full_name='proto2.FileOptions.py_api_version', index=6,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_api_version', full_name='proto2.FileOptions.java_api_version', index=7,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_use_javaproto2', full_name='proto2.FileOptions.java_use_javaproto2', index=8,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_java5_enums', full_name='proto2.FileOptions.java_java5_enums', index=9,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_generate_rpc_baseimpl', full_name='proto2.FileOptions.java_generate_rpc_baseimpl', index=10,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_use_javastrings', full_name='proto2.FileOptions.java_use_javastrings', index=11,
      number=21, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_alt_api_package', full_name='proto2.FileOptions.java_alt_api_package', index=12,
      number=19, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_enable_dual_generate_mutable_api', full_name='proto2.FileOptions.java_enable_dual_generate_mutable_api', index=13,
      number=26, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_outer_classname', full_name='proto2.FileOptions.java_outer_classname', index=14,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_multiple_files', full_name='proto2.FileOptions.java_multiple_files', index=15,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_generate_equals_and_hash', full_name='proto2.FileOptions.java_generate_equals_and_hash', index=16,
      number=20, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_string_check_utf8', full_name='proto2.FileOptions.java_string_check_utf8', index=17,
      number=27, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_mutable_api', full_name='proto2.FileOptions.java_mutable_api', index=18,
      number=28, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_multiple_files_mutable_package', full_name='proto2.FileOptions.java_multiple_files_mutable_package', index=19,
      number=29, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='optimize_for', full_name='proto2.FileOptions.optimize_for', index=20,
      number=9, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='go_package', full_name='proto2.FileOptions.go_package', index=21,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='javascript_package', full_name='proto2.FileOptions.javascript_package', index=22,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='szl_api_version', full_name='proto2.FileOptions.szl_api_version', index=23,
      number=14, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cc_generic_services', full_name='proto2.FileOptions.cc_generic_services', index=24,
      number=16, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='java_generic_services', full_name='proto2.FileOptions.java_generic_services', index=25,
      number=17, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='py_generic_services', full_name='proto2.FileOptions.py_generic_services', index=26,
      number=18, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='proto2.FileOptions.deprecated', index=27,
      number=23, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='experimental_style', full_name='proto2.FileOptions.experimental_style', index=28,
      number=30, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cc_enable_arenas', full_name='proto2.FileOptions.cc_enable_arenas', index=29,
      number=31, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='proto2.FileOptions.uninterpreted_option', index=30,
      number=999, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _FILEOPTIONS_COMPATIBILITYLEVEL,
    _FILEOPTIONS_OPTIMIZEMODE,
  ],
  options=None,
  is_extendable=True,
  extension_ranges=[(1000, 536870912), ],
  oneofs=[
  ],
  serialized_start=2312,
  serialized_end=3662,
)
# Message descriptor for proto2.MessageOptions (per-message option fields,
# including message_set_wire_format, map_entry, deprecated, and the standard
# uninterpreted_option). Extendable in the custom-option range
# (1000, 536870912). Protoc-generated data — do not edit by hand.
_MESSAGEOPTIONS = _descriptor.Descriptor(
  name='MessageOptions',
  full_name='proto2.MessageOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='experimental_java_message_interface', full_name='proto2.MessageOptions.experimental_java_message_interface', index=0,
      number=4, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='experimental_java_builder_interface', full_name='proto2.MessageOptions.experimental_java_builder_interface', index=1,
      number=5, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='experimental_java_interface_extends', full_name='proto2.MessageOptions.experimental_java_interface_extends', index=2,
      number=6, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='message_set_wire_format', full_name='proto2.MessageOptions.message_set_wire_format', index=3,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='no_standard_descriptor_accessor', full_name='proto2.MessageOptions.no_standard_descriptor_accessor', index=4,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='proto2.MessageOptions.deprecated', index=5,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_entry', full_name='proto2.MessageOptions.map_entry', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='proto2.MessageOptions.uninterpreted_option', index=7,
      number=999, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=True,
  extension_ranges=[(1000, 536870912), ],
  oneofs=[
  ],
  serialized_start=3665,
  serialized_end=4021,
)
# Message descriptor for the nested message proto2.FieldOptions.UpgradedOption
# (fields: name, value — a string key/value pair). Protoc-generated data —
# do not edit by hand.
_FIELDOPTIONS_UPGRADEDOPTION = _descriptor.Descriptor(
  name='UpgradedOption',
  full_name='proto2.FieldOptions.UpgradedOption',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.FieldOptions.UpgradedOption.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='proto2.FieldOptions.UpgradedOption.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4444,
  serialized_end=4489,
)
# Descriptor for proto2.FieldOptions: per-field options (ctype, packed,
# lazy, deprecated, weak, etc.).  Extendable in the standard options
# extension range (1000-536870911).
_FIELDOPTIONS = _descriptor.Descriptor(
  name='FieldOptions',
  full_name='proto2.FieldOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='ctype', full_name='proto2.FieldOptions.ctype', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='packed', full_name='proto2.FieldOptions.packed', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='jtype', full_name='proto2.FieldOptions.jtype', index=2,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='jstype', full_name='proto2.FieldOptions.jstype', index=3,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lazy', full_name='proto2.FieldOptions.lazy', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='proto2.FieldOptions.deprecated', index=5,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weak', full_name='proto2.FieldOptions.weak', index=6,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='upgraded_option', full_name='proto2.FieldOptions.upgraded_option', index=7,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deprecated_raw_message', full_name='proto2.FieldOptions.deprecated_raw_message', index=8,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='proto2.FieldOptions.uninterpreted_option', index=9,
      number=999, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_FIELDOPTIONS_UPGRADEDOPTION, ],
  # Nested enums (CType/JType/JSType); their descriptors are defined
  # earlier in this generated module.
  enum_types=[
    _FIELDOPTIONS_CTYPE,
    _FIELDOPTIONS_JTYPE,
    _FIELDOPTIONS_JSTYPE,
  ],
  options=None,
  is_extendable=True,
  extension_ranges=[(1000, 536870912), ],
  oneofs=[
  ],
  serialized_start=4024,
  serialized_end=4666,
)
# Descriptor for proto2.EnumOptions: options attached to an enum
# definition (proto1_name, allow_alias, deprecated).  Extendable.
_ENUMOPTIONS = _descriptor.Descriptor(
  name='EnumOptions',
  full_name='proto2.EnumOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='proto1_name', full_name='proto2.EnumOptions.proto1_name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='allow_alias', full_name='proto2.EnumOptions.allow_alias', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='proto2.EnumOptions.deprecated', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='proto2.EnumOptions.uninterpreted_option', index=3,
      number=999, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=True,
  extension_ranges=[(1000, 536870912), ],
  oneofs=[
  ],
  serialized_start=4669,
  serialized_end=4822,
)
# Descriptor for proto2.EnumValueOptions: options attached to a single
# enum value (deprecated flag plus uninterpreted options).  Extendable.
_ENUMVALUEOPTIONS = _descriptor.Descriptor(
  name='EnumValueOptions',
  full_name='proto2.EnumValueOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='proto2.EnumValueOptions.deprecated', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='proto2.EnumValueOptions.uninterpreted_option', index=1,
      number=999, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=True,
  extension_ranges=[(1000, 536870912), ],
  oneofs=[
  ],
  serialized_start=4824,
  serialized_end=4940,
)
# Descriptor for proto2.ServiceOptions: service-level RPC options
# (multicast_stub, failure_detection_delay, deprecated).  Extendable.
_SERVICEOPTIONS = _descriptor.Descriptor(
  name='ServiceOptions',
  full_name='proto2.ServiceOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='multicast_stub', full_name='proto2.ServiceOptions.multicast_stub', index=0,
      number=20, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='failure_detection_delay', full_name='proto2.ServiceOptions.failure_detection_delay', index=1,
      number=16, type=1, cpp_type=5, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='proto2.ServiceOptions.deprecated', index=2,
      number=33, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='proto2.ServiceOptions.uninterpreted_option', index=3,
      number=999, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=True,
  extension_ranges=[(1000, 536870912), ],
  oneofs=[
  ],
  serialized_start=4943,
  serialized_end=5125,
)
# Descriptor for proto2.MethodOptions: per-RPC-method options covering
# transport protocol, deadlines, logging verbosity, security level/label,
# streaming flags, and legacy token settings.  Extendable.
_METHODOPTIONS = _descriptor.Descriptor(
  name='MethodOptions',
  full_name='proto2.MethodOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='protocol', full_name='proto2.MethodOptions.protocol', index=0,
      number=7, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deadline', full_name='proto2.MethodOptions.deadline', index=1,
      number=8, type=1, cpp_type=5, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='duplicate_suppression', full_name='proto2.MethodOptions.duplicate_suppression', index=2,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='fail_fast', full_name='proto2.MethodOptions.fail_fast', index=3,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='end_user_creds_requested', full_name='proto2.MethodOptions.end_user_creds_requested', index=4,
      number=26, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='client_logging', full_name='proto2.MethodOptions.client_logging', index=5,
      number=11, type=17, cpp_type=1, label=1,
      has_default_value=True, default_value=256,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='server_logging', full_name='proto2.MethodOptions.server_logging', index=6,
      number=12, type=17, cpp_type=1, label=1,
      has_default_value=True, default_value=256,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='security_level', full_name='proto2.MethodOptions.security_level', index=7,
      number=13, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='response_format', full_name='proto2.MethodOptions.response_format', index=8,
      number=15, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='request_format', full_name='proto2.MethodOptions.request_format', index=9,
      number=17, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stream_type', full_name='proto2.MethodOptions.stream_type', index=10,
      number=18, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='security_label', full_name='proto2.MethodOptions.security_label', index=11,
      number=19, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='client_streaming', full_name='proto2.MethodOptions.client_streaming', index=12,
      number=20, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='server_streaming', full_name='proto2.MethodOptions.server_streaming', index=13,
      number=21, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='legacy_stream_type', full_name='proto2.MethodOptions.legacy_stream_type', index=14,
      number=22, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='legacy_result_type', full_name='proto2.MethodOptions.legacy_result_type', index=15,
      number=23, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='legacy_client_initial_tokens', full_name='proto2.MethodOptions.legacy_client_initial_tokens', index=16,
      number=24, type=3, cpp_type=2, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='legacy_server_initial_tokens', full_name='proto2.MethodOptions.legacy_server_initial_tokens', index=17,
      number=25, type=3, cpp_type=2, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='log_level', full_name='proto2.MethodOptions.log_level', index=18,
      number=27, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='proto2.MethodOptions.deprecated', index=19,
      number=33, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='proto2.MethodOptions.uninterpreted_option', index=20,
      number=999, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  # Nested enums (Protocol/SecurityLevel/Format/LogLevel); their
  # descriptors are defined earlier in this generated module.
  enum_types=[
    _METHODOPTIONS_PROTOCOL,
    _METHODOPTIONS_SECURITYLEVEL,
    _METHODOPTIONS_FORMAT,
    _METHODOPTIONS_LOGLEVEL,
  ],
  options=None,
  is_extendable=True,
  extension_ranges=[(1000, 536870912), ],
  oneofs=[
  ],
  serialized_start=5128,
  serialized_end=6363,
)
# Descriptor for proto2.StreamOptions: options for a streaming RPC
# declaration (flow-control tokens, security, logging, deadline).
# Mirrors many MethodOptions fields and reuses MethodOptions enums
# (wired up after all descriptors are constructed).  Extendable.
_STREAMOPTIONS = _descriptor.Descriptor(
  name='StreamOptions',
  full_name='proto2.StreamOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='client_initial_tokens', full_name='proto2.StreamOptions.client_initial_tokens', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='server_initial_tokens', full_name='proto2.StreamOptions.server_initial_tokens', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='token_unit', full_name='proto2.StreamOptions.token_unit', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='security_level', full_name='proto2.StreamOptions.security_level', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='security_label', full_name='proto2.StreamOptions.security_label', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='client_logging', full_name='proto2.StreamOptions.client_logging', index=5,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=256,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='server_logging', full_name='proto2.StreamOptions.server_logging', index=6,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=256,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deadline', full_name='proto2.StreamOptions.deadline', index=7,
      number=8, type=1, cpp_type=5, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='fail_fast', full_name='proto2.StreamOptions.fail_fast', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='end_user_creds_requested', full_name='proto2.StreamOptions.end_user_creds_requested', index=9,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='log_level', full_name='proto2.StreamOptions.log_level', index=10,
      number=11, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='proto2.StreamOptions.deprecated', index=11,
      number=33, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uninterpreted_option', full_name='proto2.StreamOptions.uninterpreted_option', index=12,
      number=999, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _STREAMOPTIONS_TOKENUNIT,
  ],
  options=None,
  is_extendable=True,
  extension_ranges=[(1000, 536870912), ],
  oneofs=[
  ],
  serialized_start=6366,
  serialized_end=6981,
)
# Descriptor for the nested message proto2.UninterpretedOption.NamePart:
# one dotted-name segment of an option name, with a flag marking whether
# the segment refers to an extension.  Both fields are required (label=2).
_UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
  name='NamePart',
  full_name='proto2.UninterpretedOption.NamePart',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name_part', full_name='proto2.UninterpretedOption.NamePart.name_part', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_extension', full_name='proto2.UninterpretedOption.NamePart.is_extension', index=1,
      number=2, type=8, cpp_type=7, label=2,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7210,
  serialized_end=7261,
)
# Descriptor for proto2.UninterpretedOption: an option the parser could
# not resolve, carrying the raw value in whichever one of the typed value
# fields (identifier/int/double/string/aggregate) applies.
_UNINTERPRETEDOPTION = _descriptor.Descriptor(
  name='UninterpretedOption',
  full_name='proto2.UninterpretedOption',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='proto2.UninterpretedOption.name', index=0,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='identifier_value', full_name='proto2.UninterpretedOption.identifier_value', index=1,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='positive_int_value', full_name='proto2.UninterpretedOption.positive_int_value', index=2,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='negative_int_value', full_name='proto2.UninterpretedOption.negative_int_value', index=3,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='double_value', full_name='proto2.UninterpretedOption.double_value', index=4,
      number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='string_value', full_name='proto2.UninterpretedOption.string_value', index=5,
      number=7, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='aggregate_value', full_name='proto2.UninterpretedOption.aggregate_value', index=6,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6984,
  serialized_end=7261,
)
# Descriptor for the nested message proto2.SourceCodeInfo.Location:
# a path into the descriptor tree plus source span and attached comments.
_SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
  name='Location',
  full_name='proto2.SourceCodeInfo.Location',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='path', full_name='proto2.SourceCodeInfo.Location.path', index=0,
      number=1, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='span', full_name='proto2.SourceCodeInfo.Location.span', index=1,
      number=2, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='leading_comments', full_name='proto2.SourceCodeInfo.Location.leading_comments', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='trailing_comments', full_name='proto2.SourceCodeInfo.Location.trailing_comments', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7333,
  serialized_end=7432,
)
# Descriptor for proto2.SourceCodeInfo: a repeated list of Location
# entries mapping descriptor paths back to .proto source positions.
_SOURCECODEINFO = _descriptor.Descriptor(
  name='SourceCodeInfo',
  full_name='proto2.SourceCodeInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='location', full_name='proto2.SourceCodeInfo.location', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_SOURCECODEINFO_LOCATION, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7264,
  serialized_end=7432,
)
# ---------------------------------------------------------------------------
# Post-construction wiring.  The Descriptor objects above are built with
# message_type/enum_type set to None because of forward references; these
# statements resolve every cross-reference (field -> message/enum type,
# nested type -> containing type) and then register each top-level message
# type on the file DESCRIPTOR by name.
# ---------------------------------------------------------------------------
_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
_FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
_DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
_DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
_FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
_FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
_SERVICEDESCRIPTORPROTO.fields_by_name['stream'].message_type = _STREAMDESCRIPTORPROTO
_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
_STREAMDESCRIPTORPROTO.fields_by_name['options'].message_type = _STREAMOPTIONS
_FILEOPTIONS.fields_by_name['cc_api_compatibility'].enum_type = _FILEOPTIONS_COMPATIBILITYLEVEL
_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_FILEOPTIONS_COMPATIBILITYLEVEL.containing_type = _FILEOPTIONS
_FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_FIELDOPTIONS_UPGRADEDOPTION.containing_type = _FIELDOPTIONS
_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
_FIELDOPTIONS.fields_by_name['jtype'].enum_type = _FIELDOPTIONS_JTYPE
_FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE
_FIELDOPTIONS.fields_by_name['upgraded_option'].message_type = _FIELDOPTIONS_UPGRADEDOPTION
_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
_FIELDOPTIONS_JTYPE.containing_type = _FIELDOPTIONS
_FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS
_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_METHODOPTIONS.fields_by_name['protocol'].enum_type = _METHODOPTIONS_PROTOCOL
_METHODOPTIONS.fields_by_name['security_level'].enum_type = _METHODOPTIONS_SECURITYLEVEL
_METHODOPTIONS.fields_by_name['response_format'].enum_type = _METHODOPTIONS_FORMAT
_METHODOPTIONS.fields_by_name['request_format'].enum_type = _METHODOPTIONS_FORMAT
_METHODOPTIONS.fields_by_name['log_level'].enum_type = _METHODOPTIONS_LOGLEVEL
_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_METHODOPTIONS_PROTOCOL.containing_type = _METHODOPTIONS
_METHODOPTIONS_SECURITYLEVEL.containing_type = _METHODOPTIONS
_METHODOPTIONS_FORMAT.containing_type = _METHODOPTIONS
_METHODOPTIONS_LOGLEVEL.containing_type = _METHODOPTIONS
_STREAMOPTIONS.fields_by_name['token_unit'].enum_type = _STREAMOPTIONS_TOKENUNIT
# StreamOptions reuses the SecurityLevel and LogLevel enums declared
# inside MethodOptions.
_STREAMOPTIONS.fields_by_name['security_level'].enum_type = _METHODOPTIONS_SECURITYLEVEL
_STREAMOPTIONS.fields_by_name['log_level'].enum_type = _METHODOPTIONS_LOGLEVEL
_STREAMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
_STREAMOPTIONS_TOKENUNIT.containing_type = _STREAMOPTIONS
_UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
_SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
_SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
# Register every top-level message descriptor on the file descriptor.
DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['StreamDescriptorProto'] = _STREAMDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
DESCRIPTOR.message_types_by_name['StreamOptions'] = _STREAMOPTIONS
DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
# ---------------------------------------------------------------------------
# Concrete message classes.  For each descriptor above, build the Python
# message class via the reflection metaclass and register it with the
# symbol database (_sym_db) so it can be looked up by full name.
# ---------------------------------------------------------------------------
FileDescriptorSet = _reflection.GeneratedProtocolMessageType('FileDescriptorSet', (_message.Message,), dict(
  DESCRIPTOR = _FILEDESCRIPTORSET,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(FileDescriptorSet)
FileDescriptorProto = _reflection.GeneratedProtocolMessageType('FileDescriptorProto', (_message.Message,), dict(
  DESCRIPTOR = _FILEDESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(FileDescriptorProto)
# DescriptorProto carries its nested ExtensionRange class, built inline
# and registered separately below.
DescriptorProto = _reflection.GeneratedProtocolMessageType('DescriptorProto', (_message.Message,), dict(
  ExtensionRange = _reflection.GeneratedProtocolMessageType('ExtensionRange', (_message.Message,), dict(
    DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE,
    __module__ = 'google.net.proto2.proto.descriptor_pb2'
    ))
  ,
  DESCRIPTOR = _DESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(DescriptorProto)
_sym_db.RegisterMessage(DescriptorProto.ExtensionRange)
FieldDescriptorProto = _reflection.GeneratedProtocolMessageType('FieldDescriptorProto', (_message.Message,), dict(
  DESCRIPTOR = _FIELDDESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(FieldDescriptorProto)
OneofDescriptorProto = _reflection.GeneratedProtocolMessageType('OneofDescriptorProto', (_message.Message,), dict(
  DESCRIPTOR = _ONEOFDESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(OneofDescriptorProto)
EnumDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumDescriptorProto', (_message.Message,), dict(
  DESCRIPTOR = _ENUMDESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(EnumDescriptorProto)
EnumValueDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumValueDescriptorProto', (_message.Message,), dict(
  DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(EnumValueDescriptorProto)
ServiceDescriptorProto = _reflection.GeneratedProtocolMessageType('ServiceDescriptorProto', (_message.Message,), dict(
  DESCRIPTOR = _SERVICEDESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(ServiceDescriptorProto)
MethodDescriptorProto = _reflection.GeneratedProtocolMessageType('MethodDescriptorProto', (_message.Message,), dict(
  DESCRIPTOR = _METHODDESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(MethodDescriptorProto)
StreamDescriptorProto = _reflection.GeneratedProtocolMessageType('StreamDescriptorProto', (_message.Message,), dict(
  DESCRIPTOR = _STREAMDESCRIPTORPROTO,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(StreamDescriptorProto)
FileOptions = _reflection.GeneratedProtocolMessageType('FileOptions', (_message.Message,), dict(
  DESCRIPTOR = _FILEOPTIONS,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(FileOptions)
MessageOptions = _reflection.GeneratedProtocolMessageType('MessageOptions', (_message.Message,), dict(
  DESCRIPTOR = _MESSAGEOPTIONS,
  __module__ = 'google.net.proto2.proto.descriptor_pb2'
  ))
_sym_db.RegisterMessage(MessageOptions)
FieldOptions = _reflection.GeneratedProtocolMessageType('FieldOptions', (_message.Message,), dict(
UpgradedOption = _reflection.GeneratedProtocolMessageType('UpgradedOption', (_message.Message,), dict(
DESCRIPTOR = _FIELDOPTIONS_UPGRADEDOPTION,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
,
DESCRIPTOR = _FIELDOPTIONS,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
_sym_db.RegisterMessage(FieldOptions)
_sym_db.RegisterMessage(FieldOptions.UpgradedOption)
EnumOptions = _reflection.GeneratedProtocolMessageType('EnumOptions', (_message.Message,), dict(
DESCRIPTOR = _ENUMOPTIONS,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
_sym_db.RegisterMessage(EnumOptions)
EnumValueOptions = _reflection.GeneratedProtocolMessageType('EnumValueOptions', (_message.Message,), dict(
DESCRIPTOR = _ENUMVALUEOPTIONS,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
_sym_db.RegisterMessage(EnumValueOptions)
ServiceOptions = _reflection.GeneratedProtocolMessageType('ServiceOptions', (_message.Message,), dict(
DESCRIPTOR = _SERVICEOPTIONS,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
_sym_db.RegisterMessage(ServiceOptions)
MethodOptions = _reflection.GeneratedProtocolMessageType('MethodOptions', (_message.Message,), dict(
DESCRIPTOR = _METHODOPTIONS,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
_sym_db.RegisterMessage(MethodOptions)
StreamOptions = _reflection.GeneratedProtocolMessageType('StreamOptions', (_message.Message,), dict(
DESCRIPTOR = _STREAMOPTIONS,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
_sym_db.RegisterMessage(StreamOptions)
UninterpretedOption = _reflection.GeneratedProtocolMessageType('UninterpretedOption', (_message.Message,), dict(
NamePart = _reflection.GeneratedProtocolMessageType('NamePart', (_message.Message,), dict(
DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
,
DESCRIPTOR = _UNINTERPRETEDOPTION,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
_sym_db.RegisterMessage(UninterpretedOption)
_sym_db.RegisterMessage(UninterpretedOption.NamePart)
SourceCodeInfo = _reflection.GeneratedProtocolMessageType('SourceCodeInfo', (_message.Message,), dict(
Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), dict(
DESCRIPTOR = _SOURCECODEINFO_LOCATION,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
,
DESCRIPTOR = _SOURCECODEINFO,
__module__ = 'google.net.proto2.proto.descriptor_pb2'
))
_sym_db.RegisterMessage(SourceCodeInfo)
_sym_db.RegisterMessage(SourceCodeInfo.Location)
| 43.759133 | 12,863 | 0.743032 |
50ec4f2e039ceef742cadaa73777ccd97588517d | 6,528 | py | Python | tensorflow_probability/python/distributions/chi.py | brianwa84/probability | 6f8e78d859ac41170be5147c8c7bde54cc5aa83e | [
"Apache-2.0"
] | 1 | 2021-07-21T15:54:17.000Z | 2021-07-21T15:54:17.000Z | tensorflow_probability/python/distributions/chi.py | brianwa84/probability | 6f8e78d859ac41170be5147c8c7bde54cc5aa83e | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/distributions/chi.py | brianwa84/probability | 6f8e78d859ac41170be5147c8c7bde54cc5aa83e | [
"Apache-2.0"
] | 1 | 2020-10-19T11:24:40.000Z | 2020-10-19T11:24:40.000Z | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The Chi distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import math as tfp_math
from tensorflow_probability.python.bijectors import invert as invert_bijector
from tensorflow_probability.python.bijectors import softplus as softplus_bijector
from tensorflow_probability.python.bijectors import square as square_bijector
from tensorflow_probability.python.distributions import chi2
from tensorflow_probability.python.distributions import kullback_leibler
from tensorflow_probability.python.distributions import transformed_distribution
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import parameter_properties
from tensorflow_probability.python.internal import tensor_util
class Chi(transformed_distribution.TransformedDistribution):
  """Chi distribution.
  The Chi distribution is defined over nonnegative real numbers and uses a
  degrees of freedom ('df') parameter.
  #### Mathematical Details
  The probability density function (pdf) is,
  ```none
  pdf(x; df, x >= 0) = x**(df - 1) exp(-0.5 x**2) / Z
  Z = 2**(0.5 df - 1) Gamma(0.5 df)
  ```
  where:
  * `df` denotes the degrees of freedom,
  * `Z` is the normalization constant, and,
  * `Gamma` is the [gamma function](
    https://en.wikipedia.org/wiki/Gamma_function).
  The Chi distribution is a transformation of the Chi2 distribution; it is the
  distribution of the positive square root of a variable obeying a Chi2
  distribution.
  """
  def __init__(self,
               df,
               validate_args=False,
               allow_nan_stats=True,
               name='Chi'):
    """Construct Chi distributions with parameter `df`.
    Args:
      df: Floating point tensor, the degrees of freedom of the
        distribution(s). `df` must contain only positive values.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value `NaN` to indicate the result
        is undefined. When `False`, an exception is raised if one or more of the
        statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
        Default value: `'Chi'`.
    """
    parameters = dict(locals())
    with tf.name_scope(name) as name:
      dtype = dtype_util.common_dtype([df], dtype_hint=tf.float32)
      # Keep a deferred (non-ref-converting) handle so tf.Variable `df`
      # stays live and re-read on each use.
      self._df = tensor_util.convert_nonref_to_tensor(
          df, name='df', dtype=dtype)
      # Chi = sqrt(Chi2): implemented as Chi2 pushed through the inverse of
      # the Square bijector.
      super(Chi, self).__init__(
          distribution=chi2.Chi2(df=self._df,
                                 validate_args=validate_args,
                                 allow_nan_stats=allow_nan_stats),
          bijector=invert_bijector.Invert(
              square_bijector.Square(validate_args=validate_args)),
          validate_args=validate_args,
          parameters=parameters,
          name=name)
  @classmethod
  def _parameter_properties(cls, dtype, num_classes=None):
    """Describes `df` for TFP's parameter machinery; `df` is constrained
    positive via a Softplus bounded below by machine epsilon."""
    # pylint: disable=g-long-lambda
    return dict(
        df=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype)))))
    # pylint: enable=g-long-lambda
  @property
  def df(self):
    """Distribution parameter for degrees of freedom."""
    return self._df
  # NOTE(review): flag read by TFP's experimental sharding support; this
  # distribution does not declare sharded parameters.
  experimental_is_sharded = False
  def _mean(self, df=None):
    """Mean: sqrt(2) * Gamma((df + 1) / 2) / Gamma(df / 2), evaluated via a
    log-gamma difference for numerical stability at large `df`."""
    df = tf.convert_to_tensor(self.df if df is None else df)
    return np.sqrt(2.) * tf.exp(
        -tfp_math.log_gamma_difference(0.5, 0.5 * df))
  def _variance(self):
    # Var = E[X^2] - E[X]^2; for Chi, E[X^2] = df.
    df = tf.convert_to_tensor(self.df)
    return df - self._mean(df) ** 2
  def _entropy(self):
    """Differential entropy:
    lgamma(df/2) + (df - ln 2 - (df - 1) * digamma(df/2)) / 2."""
    df = tf.convert_to_tensor(self.df)
    return (tf.math.lgamma(0.5 * df) +
            0.5 * (df - np.log(2.) -
                   (df - 1.) * tf.math.digamma(0.5 * df)))
  def _default_event_space_bijector(self):
    # Softplus maps the unconstrained reals onto the positive support.
    return softplus_bijector.Softplus(validate_args=self.validate_args)
  def _parameter_control_dependencies(self, is_init):
    """Returns assertions that `df > 0`, only when `validate_args` is set."""
    if not self.validate_args:
      return []
    assertions = []
    # Only re-assert when the (possibly variable) `df` may have changed.
    if is_init != tensor_util.is_ref(self._df):
      assertions.append(assert_util.assert_positive(
          self._df, message='Argument `df` must be positive.'))
    return assertions
  def _sample_control_dependencies(self, x):
    """Returns assertions that samples lie in the support (x >= 0)."""
    assertions = []
    if not self.validate_args:
      return assertions
    assertions.append(assert_util.assert_non_negative(
        x, message='Sample must be non-negative.'))
    return assertions
@kullback_leibler.RegisterKL(Chi, Chi)
def _kl_chi_chi(a, b, name=None):
  """Calculate the batched KL divergence KL(a || b) with a and b Chi.

  Args:
    a: instance of a Chi distribution object.
    b: instance of a Chi distribution object.
    name: (optional) Name to use for created operations.
      default is 'kl_chi_chi'.

  Returns:
    Batchwise KL(a || b)
  """
  with tf.name_scope(name or 'kl_chi_chi'):
    # Materialise once so variable-backed `df` parameters are read a single
    # time for the whole expression.
    a_df = tf.convert_to_tensor(a.df)
    b_df = tf.convert_to_tensor(b.df)
    # Consistent with
    # https://mast.queensu.ca/~communications/Papers/gil-msc11.pdf, page 118
    # The paper introduces an additional scaling parameter; setting that
    # parameter to 1 and simplifying yields the expression we use here.
    return (0.5 * tf.math.digamma(0.5 * a_df) * (a_df - b_df) +
            tf.math.lgamma(0.5 * b_df) - tf.math.lgamma(0.5 * a_df))
a7b8f0d71bc7b8ea915f8295c3d4d34f9b0ef55b | 1,624 | py | Python | aliyun-python-sdk-ddoscoo/aliyunsdkddoscoo/request/v20200101/DescribeAutoCcListCountRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 1,001 | 2015-07-24T01:32:41.000Z | 2022-03-25T01:28:18.000Z | aliyun-python-sdk-ddoscoo/aliyunsdkddoscoo/request/v20200101/DescribeAutoCcListCountRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 363 | 2015-10-20T03:15:00.000Z | 2022-03-08T12:26:19.000Z | aliyun-python-sdk-ddoscoo/aliyunsdkddoscoo/request/v20200101/DescribeAutoCcListCountRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 682 | 2015-09-22T07:19:02.000Z | 2022-03-22T09:51:46.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkddoscoo.endpoint import endpoint_data
class DescribeAutoCcListCountRequest(RpcRequest):
	"""RPC request for the ddoscoo ``DescribeAutoCcListCount`` API
	(version 2020-01-01), issued as an HTTP POST.

	Query parameters are exposed through the get_*/set_* accessors below.
	"""

	def __init__(self):
		RpcRequest.__init__(self, 'ddoscoo', '2020-01-01', 'DescribeAutoCcListCount')
		self.set_method('POST')
		# Endpoint attributes only exist on newer core SDKs; guard with
		# hasattr so older cores keep working.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_InstanceId(self):
		"""Return the ``InstanceId`` query parameter (or None if unset)."""
		return self.get_query_params().get('InstanceId')

	def set_InstanceId(self, InstanceId):
		"""Set the target Anti-DDoS instance ID."""
		self.add_query_param('InstanceId', InstanceId)

	def get_QueryType(self):
		"""Return the ``QueryType`` query parameter (or None if unset)."""
		return self.get_query_params().get('QueryType')

	def set_QueryType(self, QueryType):
		"""Set the type of auto-CC list to count."""
		self.add_query_param('QueryType', QueryType)
41efd253edde5bc0c2ff08ab9b23c62852edf46d | 1,949 | py | Python | test.py | vasudevgupta7/tf-lightning | a4ec2f0877f3461acf8a5759b6b3017a81754207 | [
"Apache-2.0"
] | null | null | null | test.py | vasudevgupta7/tf-lightning | a4ec2f0877f3461acf8a5759b6b3017a81754207 | [
"Apache-2.0"
] | null | null | null | test.py | vasudevgupta7/tf-lightning | a4ec2f0877f3461acf8a5759b6b3017a81754207 | [
"Apache-2.0"
] | 1 | 2020-08-29T15:07:57.000Z | 2020-08-29T15:07:57.000Z | # __author__ = 'Vasudev Gupta'
import tf_lightning as tl
import tensorflow as tf
class TestModel(tl.LightningModule):
    """Minimal two-layer dense model used to exercise the tf-lightning loop."""

    def __init__(self):
        super().__init__()
        # Tiny stack of dense layers; weights start random, data is random too.
        self.model = tf.keras.Sequential([
            tf.keras.layers.Dense(5),
            tf.keras.layers.Dense(2)
        ])

    def call(self, dataset):
        # Forward pass just delegates to the wrapped keras model.
        return self.model(dataset)

    def configure_optimizers(self):
        # Single Adam optimizer, returned as a 1-tuple as the trainer expects.
        return tf.keras.optimizers.Adam(0.1),

    def training_step(self, batch, batch_idx, optimizer_idx):
        outputs = self(batch)
        tr_loss = tf.reduce_mean(outputs)
        return tl.TrainResult(
            tr_loss,
            self.model.trainable_variables,
            log={'batch_idx': batch_idx, 'tr_loss': tr_loss})

    def validation_step(self, batch, batch_idx, optimizer_idx):
        outputs = self(batch)
        val_loss = tf.reduce_mean(outputs)
        return tl.EvalResult(
            val_loss, log={'batch_idx': batch_idx, 'val_loss': val_loss})

    def checkpointer(self):
        # optimizer_0 is presumably attached by the trainer from
        # configure_optimizers() -- confirm against tf_lightning internals.
        return tf.train.Checkpoint(m=self.model,
                                   opt0=self.optimizer_0)
class TestDataLoader(tl.LightningDataModule):
    """Random-tensor data module: 256 training rows, 64 validation rows,
    7 features each, served in batches of 32."""

    def __init__(self):
        self.batch_size = 32

    def setup(self):
        # Materialise the raw tensors once; batching happens in the loaders.
        self.tr_dataset = tf.random.normal((256, 7))
        self.val_dataset = tf.random.normal((64, 7))

    def _batched(self, tensor):
        # Shared helper: slice the tensor row-wise and batch it.
        return tf.data.Dataset.from_tensor_slices(tensor).batch(self.batch_size)

    def train_dataloader(self):
        return self._batched(self.tr_dataset)

    def val_dataloader(self):
        return self._batched(self.val_dataset)
if __name__ == '__main__':
    # Smoke test: wire the random model and data module into a default
    # Trainer and run a fit to exercise the train/validation loops end-to-end.
    model = TestModel()
    dataloader = TestDataLoader()
    trainer = tl.Trainer()
    trainer.fit(model, dataloader)
| 24.061728 | 63 | 0.623397 |
034d68b341358301e201a56c082815d9b74c0e47 | 1,932 | py | Python | Util.py | jakedoesgithub/prog_langs_project_scheme_interpreter | 87bc03ab291abd7691db067ab8d5c72282b13c1a | [
"MIT"
] | null | null | null | Util.py | jakedoesgithub/prog_langs_project_scheme_interpreter | 87bc03ab291abd7691db067ab8d5c72282b13c1a | [
"MIT"
] | null | null | null | Util.py | jakedoesgithub/prog_langs_project_scheme_interpreter | 87bc03ab291abd7691db067ab8d5c72282b13c1a | [
"MIT"
] | null | null | null | # Util -- Utility functions
from Tree import Nil
from Tree import Cons
from Special import Begin
from Special import Cond
from Special import Define
from Special import If
from Special import Lambda
from Special import Let
from Special import Quote
from Special import Set
from Special import Regular
class Util:
    """Stateless helper routines shared by the Scheme interpreter."""

    # Maps the head symbol of a list form to the Special subclass that
    # implements it; anything not listed is an ordinary application.
    _SPECIAL_FORMS = {
        "quote": Quote,
        "lambda": Lambda,
        "begin": Begin,
        "if": If,
        "let": Let,
        "cond": Cond,
        "define": Define,
        "set!": Set,
    }

    # parseList selects the Special node to attach to a Cons node
    def parseList(self, t):
        """Return a fresh Special node for the list `t`, chosen by its head."""
        car = t.getCar()
        if not car.isSymbol():
            return Regular()
        # Instantiate a new node each call, matching the original behaviour.
        return self._SPECIAL_FORMS.get(car.getName(), Regular)()

    # length returns the length of a well-formed list exp and -1 otherwise
    def length(self, exp):
        """Length of a proper list `exp`; -1 for improper (dotted) lists."""
        # Iterative walk avoids Python's recursion limit on long lists;
        # results are identical to the recursive definition.
        n = 0
        while not exp.isNull():
            if not exp.isPair():
                return -1  # improper (dotted) list
            exp = exp.getCdr()
            n += 1
        return n

    # mapeval calls eval on every list element of exp
    def mapeval(self, exp, env):
        """Evaluate each element of `exp` in `env`, returning a new list."""
        if exp.isNull():
            return Nil.getInstance()
        return Cons(exp.getCar().eval(env), self.mapeval(exp.getCdr(), env))

    # begin calls eval on all list elements and returns the last value
    def begin(self, exp, env):
        """Evaluate elements of `exp` left to right; return the last value."""
        # Elements before the last are evaluated purely for side effects.
        res = exp.getCar().eval(env)
        cdr = exp.getCdr()
        while not cdr.isNull():
            res = cdr.getCar().eval(env)
            cdr = cdr.getCdr()
        return res
| 29.723077 | 77 | 0.520186 |
a9e14d769dcb8603f06831a3379ca54f07315fe3 | 1,124 | py | Python | tests/test_spatstat_basic_usage.py | For-a-few-DPPs-more/spatstat-interface | cdcaf39c3dcd4d66abdbd78c502109987a8749f8 | [
"MIT"
] | 2 | 2021-07-12T16:32:47.000Z | 2022-03-01T16:11:40.000Z | tests/test_spatstat_basic_usage.py | For-a-few-DPPs-more/spatstat-interface | cdcaf39c3dcd4d66abdbd78c502109987a8749f8 | [
"MIT"
] | 1 | 2021-07-12T21:43:11.000Z | 2021-10-06T12:27:37.000Z | tests/test_spatstat_basic_usage.py | For-a-few-DPPs-more/spatstat-interface | cdcaf39c3dcd4d66abdbd78c502109987a8749f8 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import pytest
import rpy2.robjects as robjects
from spatstat_interface.interface import SpatstatInterface
from spatstat_interface.utils import to_pandas_data_frame
@pytest.fixture
def spatstat():
    # Fresh interface per test; update=True installs/refreshes the R packages.
    spatstat = SpatstatInterface(update=True)
    # Only the 'core' and 'geom' subpackages are needed by these tests.
    spatstat.import_package("core", "geom", update=True)
    return spatstat
def test_spatstat_ppp_to_pandas_df(spatstat):
    """Pair-correlation output of an R ppp converts to a pandas DataFrame."""
    lo, hi = 0, 1
    bounds = robjects.FloatVector([lo, hi])
    window = spatstat.geom.owin(xrange=bounds, yrange=bounds)
    # 100 uniform coordinates in [lo, hi), reused for both x and y.
    coords = robjects.FloatVector(list(lo + (hi - lo) * np.random.rand(100)))
    point_pattern = spatstat.geom.ppp(coords, coords, window=window)
    pair_correlation = spatstat.core.pcf_ppp(point_pattern)
    assert isinstance(to_pandas_data_frame(pair_correlation), pd.DataFrame)
def test_simulate_dpp_gaussian(spatstat):
    """Simulating a Gaussian-kernel DPP on the unit square runs cleanly."""
    # 'lambda' is an R argument name, so the kwargs must go through a dict.
    gauss_dpp = spatstat.core.dppGauss(**{"lambda": 100, "alpha": 0.05, "d": 2})
    unit = robjects.FloatVector([0, 1])
    square = spatstat.geom.owin(xrange=unit, yrange=unit)
    # No return-value contract; the test passes if the R call does not raise.
    spatstat.core.simulate_dppm(gauss_dpp, W=square)
    assert True
| 28.820513 | 63 | 0.725979 |
6f99f4cf4c30fd552cbcad8a251609fb4590b38d | 1,865 | py | Python | setup.py | vituocgia/boxme-users | 0e8238b7852e697643a35c8e306d63446f27a6d2 | [
"BSD-3-Clause"
] | null | null | null | setup.py | vituocgia/boxme-users | 0e8238b7852e697643a35c8e306d63446f27a6d2 | [
"BSD-3-Clause"
] | null | null | null | setup.py | vituocgia/boxme-users | 0e8238b7852e697643a35c8e306d63446f27a6d2 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import find_packages, setup
import sys
# Convenience release flow: ``python setup.py publish`` bumps the minor
# version, builds sdist + wheel, uploads via twine, then reminds the
# maintainer to push the commit and tags.
if sys.argv[-1] == 'publish':
    print("Remember to update the Changelog's version and date in README.rst and stage the changes")
    input("Press Enter to continue...")
    os.system("bumpversion --allow-dirty minor")
    os.system("python setup.py sdist bdist_wheel")
    os.system("twine upload dist/*")
    print("You probably want to update the repo now:")
    print(" git push origin master")
    print(" git push --tags")
    sys.exit()

# Read the long description inside a context manager; the previous bare
# ``open('README.rst').read()`` left the file handle open.
with open('README.rst') as readme_file:
    readme = readme_file.read()

setup(
    name='boxme-users',
    version='0.1',
    description="""Custom user model for Django >= 1.5 with the same behaviour as Django's default User but with email instead of username.""",
    long_description=readme,
    author='DiepDT',
    author_email='diepdt@peacesoft.net',
    url='https://github.com/vituocgia/boxme-users',
    packages=find_packages('src'),
    package_dir={'': 'src'},
    include_package_data=True,
    install_requires=[
        "Django >= 1.5",
    ],
    license='BSD License',
    zip_safe=False,
    keywords='django custom user auth model email without username',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
)
| 33.909091 | 143 | 0.627882 |
ab8ab0bee7bdc9677450360c5d6c60f646709da6 | 705 | py | Python | taiga/projects/migrations/0048_auto_20160615_1508.py | threefoldtech/Threefold-Circles | cbc433796b25cf7af9a295af65d665a4a279e2d6 | [
"Apache-2.0"
] | 1 | 2017-05-29T19:01:06.000Z | 2017-05-29T19:01:06.000Z | docker-images/taigav2/taiga-back/taiga/projects/migrations/0048_auto_20160615_1508.py | mattcongy/itshop | 6be025a9eaa7fe7f495b5777d1f0e5a3184121c9 | [
"MIT"
] | 12 | 2019-11-25T14:08:32.000Z | 2021-06-24T10:35:51.000Z | taiga/projects/migrations/0048_auto_20160615_1508.py | threefoldtech/Threefold-Circles | cbc433796b25cf7af9a295af65d665a4a279e2d6 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-06-15 15:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an integer ``order`` field to ProjectTemplate and sort templates
    by (order, name).  Auto-generated by Django 1.9.2; do not edit logic."""
    dependencies = [
        ('projects', '0047_auto_20160614_1201'),
    ]
    operations = [
        # Default model ordering becomes ['order', 'name']; verbose names set.
        migrations.AlterModelOptions(
            name='projecttemplate',
            options={'ordering': ['order', 'name'], 'verbose_name': 'project template', 'verbose_name_plural': 'project templates'},
        ),
        # New column; existing rows are backfilled with the default 10000.
        migrations.AddField(
            model_name='projecttemplate',
            name='order',
            field=models.IntegerField(default=10000, verbose_name='user order'),
        ),
    ]
| 28.2 | 132 | 0.624113 |
479cbaf6cbf98ff6de69c97ee90cde5782536b63 | 975 | py | Python | fire/test_components_py3.py | adamruth/python-fire | 6912ccd56f50e0f4bb30a0725d95858ef29f3bde | [
"Apache-2.0"
] | 1 | 2020-02-05T04:43:03.000Z | 2020-02-05T04:43:03.000Z | fire/test_components_py3.py | chesnjak/python-fire | 72604f40314008e562ba47936dcc183b51166b72 | [
"Apache-2.0"
] | null | null | null | fire/test_components_py3.py | chesnjak/python-fire | 72604f40314008e562ba47936dcc183b51166b72 | [
"Apache-2.0"
] | 1 | 2020-07-15T22:58:25.000Z | 2020-07-15T22:58:25.000Z | # Copyright (C) 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module has components that use Python 3 specific syntax."""
def identity(arg1, arg2: int, arg3=10, arg4: int = 20, *arg5,
             arg6, arg7: int, arg8=30, arg9: int = 40, **arg10):
  """Return every received argument; exercises Python 3-only signature
  features (annotations mixed with defaults, keyword-only args)."""
  return arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10
class KeywordOnly(object):
  """Component whose methods require ``count`` as a keyword argument."""

  def double(self, *, count):
    """Return ``count`` multiplied by two."""
    result = count * 2
    return result

  def triple(self, *, count):
    """Return ``count`` multiplied by three."""
    result = count * 3
    return result
| 32.5 | 74 | 0.708718 |
f0d3a6260a12fd1226d48ba2da678d55ce1baa5f | 4,885 | py | Python | tests/queries/test_query.py | beshrkayali/django | 84633905273fc916e3d17883810d9969c03f73c2 | [
"PSF-2.0",
"BSD-3-Clause"
] | 7 | 2020-01-13T18:26:41.000Z | 2021-04-20T04:22:26.000Z | tests/queries/test_query.py | beshrkayali/django | 84633905273fc916e3d17883810d9969c03f73c2 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | tests/queries/test_query.py | beshrkayali/django | 84633905273fc916e3d17883810d9969c03f73c2 | [
"PSF-2.0",
"BSD-3-Clause"
] | 4 | 2019-11-07T01:22:16.000Z | 2020-09-16T22:02:16.000Z | from datetime import datetime
from django.core.exceptions import FieldError
from django.db.models import CharField, F, Q
from django.db.models.expressions import SimpleCol
from django.db.models.fields.related_lookups import RelatedIsNull
from django.db.models.functions import Lower
from django.db.models.lookups import Exact, GreaterThan, IsNull, LessThan
from django.db.models.sql.query import Query
from django.db.models.sql.where import OR
from django.test import SimpleTestCase
from django.test.utils import register_lookup
from .models import Author, Item, ObjectC, Ranking
class TestQuery(SimpleTestCase):
    """Unit tests for Query.build_where(): asserts on the WHERE-node tree
    (lookup classes, operands, connectors) built from Q objects, without
    touching the database."""

    def test_simple_query(self):
        """A single Q(num__gt=2) becomes one GreaterThan lookup on Author.num."""
        query = Query(Author)
        where = query.build_where(Q(num__gt=2))
        lookup = where.children[0]
        self.assertIsInstance(lookup, GreaterThan)
        self.assertEqual(lookup.rhs, 2)
        self.assertEqual(lookup.lhs.target, Author._meta.get_field('num'))

    def test_simplecol_query(self):
        """Mixed AND/OR conditions resolve each side to SimpleCol operands."""
        query = Query(Author)
        where = query.build_where(Q(num__gt=2, name__isnull=False) | Q(num__lt=F('id')))
        name_isnull_lookup, num_gt_lookup = where.children[0].children
        self.assertIsInstance(num_gt_lookup, GreaterThan)
        self.assertIsInstance(num_gt_lookup.lhs, SimpleCol)
        self.assertIsInstance(name_isnull_lookup, IsNull)
        self.assertIsInstance(name_isnull_lookup.lhs, SimpleCol)
        num_lt_lookup = where.children[1]
        self.assertIsInstance(num_lt_lookup, LessThan)
        self.assertIsInstance(num_lt_lookup.rhs, SimpleCol)
        self.assertIsInstance(num_lt_lookup.lhs, SimpleCol)

    def test_complex_query(self):
        """OR-ed Q objects produce a WhereNode with the OR connector."""
        query = Query(Author)
        where = query.build_where(Q(num__gt=2) | Q(num__lt=0))
        self.assertEqual(where.connector, OR)
        lookup = where.children[0]
        self.assertIsInstance(lookup, GreaterThan)
        self.assertEqual(lookup.rhs, 2)
        self.assertEqual(lookup.lhs.target, Author._meta.get_field('num'))
        lookup = where.children[1]
        self.assertIsInstance(lookup, LessThan)
        self.assertEqual(lookup.rhs, 0)
        self.assertEqual(lookup.lhs.target, Author._meta.get_field('num'))

    def test_multiple_fields(self):
        """F() on the right-hand side resolves to a column reference."""
        query = Query(Item)
        where = query.build_where(Q(modified__gt=F('created')))
        lookup = where.children[0]
        self.assertIsInstance(lookup, GreaterThan)
        self.assertIsInstance(lookup.rhs, SimpleCol)
        self.assertIsInstance(lookup.lhs, SimpleCol)
        self.assertEqual(lookup.rhs.target, Item._meta.get_field('created'))
        self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified'))

    def test_transform(self):
        """A registered transform (Lower) wraps the column in the lookup lhs."""
        query = Query(Author)
        with register_lookup(CharField, Lower):
            where = query.build_where(~Q(name__lower='foo'))
        lookup = where.children[0]
        self.assertIsInstance(lookup, Exact)
        self.assertIsInstance(lookup.lhs, Lower)
        self.assertIsInstance(lookup.lhs.lhs, SimpleCol)
        self.assertEqual(lookup.lhs.lhs.target, Author._meta.get_field('name'))

    def test_negated_nullable(self):
        """Negating a condition on a nullable field adds an IS NULL branch."""
        query = Query(Item)
        where = query.build_where(~Q(modified__lt=datetime(2017, 1, 1)))
        self.assertTrue(where.negated)
        lookup = where.children[0]
        self.assertIsInstance(lookup, LessThan)
        self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified'))
        lookup = where.children[1]
        self.assertIsInstance(lookup, IsNull)
        self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified'))

    def test_foreign_key(self):
        """Joined (related-field traversal) references are rejected."""
        query = Query(Item)
        msg = 'Joined field references are not permitted in this query'
        with self.assertRaisesMessage(FieldError, msg):
            query.build_where(Q(creator__num__gt=2))

    def test_foreign_key_f(self):
        """F() expressions crossing a relation are rejected as well."""
        query = Query(Ranking)
        with self.assertRaises(FieldError):
            query.build_where(Q(rank__gt=F('author__num')))

    def test_foreign_key_exclusive(self):
        """isnull on a local FK column itself is allowed (no join needed)."""
        query = Query(ObjectC)
        where = query.build_where(Q(objecta=None) | Q(objectb=None))
        a_isnull = where.children[0]
        self.assertIsInstance(a_isnull, RelatedIsNull)
        self.assertIsInstance(a_isnull.lhs, SimpleCol)
        self.assertEqual(a_isnull.lhs.target, ObjectC._meta.get_field('objecta'))
        b_isnull = where.children[1]
        self.assertIsInstance(b_isnull, RelatedIsNull)
        self.assertIsInstance(b_isnull.lhs, SimpleCol)
        self.assertEqual(b_isnull.lhs.target, ObjectC._meta.get_field('objectb'))

    def test_clone_select_related(self):
        """clone() deep-copies select_related; mutating the clone leaves the
        original query untouched."""
        query = Query(Item)
        query.add_select_related(['creator'])
        clone = query.clone()
        clone.add_select_related(['note', 'creator__extra'])
        self.assertEqual(query.select_related, {'creator': {}})
5394b2dad4ced26ea593398527c64194440b6e50 | 16,885 | py | Python | venv/lib/python3.6/site-packages/ansible_collections/cisco/aci/plugins/modules/aci_static_binding_to_epg.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 1 | 2020-01-22T13:11:23.000Z | 2020-01-22T13:11:23.000Z | venv/lib/python3.6/site-packages/ansible_collections/cisco/aci/plugins/modules/aci_static_binding_to_epg.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | venv/lib/python3.6/site-packages/ansible_collections/cisco/aci/plugins/modules/aci_static_binding_to_epg.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Collection metadata consumed by Ansible tooling (support level / maturity).
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_static_binding_to_epg
short_description: Bind static paths to EPGs (fv:RsPathAtt)
description:
- Bind static paths to EPGs on Cisco ACI fabrics.
options:
tenant:
description:
- Name of an existing tenant.
type: str
aliases: [ tenant_name ]
ap:
description:
- Name of an existing application network profile, that will contain the EPGs.
type: str
aliases: [ app_profile, app_profile_name ]
epg:
description:
- The name of the end point group.
type: str
aliases: [ epg_name ]
description:
description:
- Description for the static path to EPG binding.
type: str
aliases: [ descr ]
encap_id:
description:
- The encapsulation ID associating the C(epg) with the interface path.
- This acts as the secondary C(encap_id) when using micro-segmentation.
- Accepted values are any valid encap ID for specified encap, currently ranges between C(1) and C(4096).
type: int
aliases: [ vlan, vlan_id ]
primary_encap_id:
description:
- Determines the primary encapsulation ID associating the C(epg)
with the interface path when using micro-segmentation.
- Accepted values are any valid encap ID for specified encap, currently ranges between C(1) and C(4096) and C(unknown.
- C(unknown) is the default value and using C(unknown) disables the Micro-Segmentation.
type: str
aliases: [ primary_vlan, primary_vlan_id ]
deploy_immediacy:
description:
- The Deployment Immediacy of Static EPG on PC, VPC or Interface.
- The APIC defaults to C(lazy) when unset during creation.
type: str
choices: [ immediate, lazy ]
interface_mode:
description:
- Determines how layer 2 tags will be read from and added to frames.
- Values C(802.1p) and C(native) are identical.
- Values C(access) and C(untagged) are identical.
- Values C(regular), C(tagged) and C(trunk) are identical.
- The APIC defaults to C(trunk) when unset during creation.
type: str
choices: [ 802.1p, access, native, regular, tagged, trunk, untagged ]
aliases: [ interface_mode_name, mode ]
interface_type:
description:
- The type of interface for the static EPG deployment.
type: str
choices: [ fex, port_channel, switch_port, vpc, fex_port_channel, fex_vpc ]
default: switch_port
pod_id:
description:
- The pod number part of the tDn.
- C(pod_id) is usually an integer below C(10).
type: int
aliases: [ pod, pod_number ]
leafs:
description:
- The switch ID(s) that the C(interface) belongs to.
- When C(interface_type) is C(switch_port), C(port_channel), or C(fex), then C(leafs) is a string of the leaf ID.
- When C(interface_type) is C(vpc), then C(leafs) is a list with both leaf IDs.
- The C(leafs) value is usually something like '101' or '101-102' depending on C(connection_type).
type: list
elements: str
aliases: [ leaves, nodes, paths, switches ]
interface:
description:
- The C(interface) string value part of the tDn.
- Usually a policy group like C(test-IntPolGrp) or an interface of the following format C(1/7) depending on C(interface_type).
type: str
extpaths:
description:
- The C(extpaths) integer value part of the tDn.
- C(extpaths) is only used if C(interface_type) is C(fex), C(fex_vpc) or C(fex_port_channel).
- When C(interface_type) is C(fex_vpc), then C(extpaths) is a list with both fex IDs.
- Usually something like C(1011).
type: list
elements: str
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
extends_documentation_fragment:
- cisco.aci.aci
notes:
- The C(tenant), C(ap), C(epg) used must exist before using this module in your playbook.
The M(cisco.aci.aci_tenant), M(cisco.aci.aci_ap), M(cisco.aci.aci_epg) modules can be used for this.
seealso:
- module: cisco.aci.aci_tenant
- module: cisco.aci.aci_ap
- module: cisco.aci.aci_epg
- name: APIC Management Information Model reference
description: More information about the internal APIC class B(fv:RsPathAtt).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Bruno Calogero (@brunocalogero)
- Marcel Zehnder (@maercu)
'''
EXAMPLES = r'''
- name: Deploy Static Path binding for given EPG
cisco.aci.aci_static_binding_to_epg:
host: apic
username: admin
password: SomeSecretPassword
tenant: accessport-code-cert
ap: accessport_code_app
epg: accessport_epg1
encap_id: 222
deploy_immediacy: lazy
interface_mode: untagged
interface_type: switch_port
pod_id: 1
leafs: 101
interface: '1/7'
state: present
delegate_to: localhost
- name: Remove Static Path binding for given EPG
cisco.aci.aci_static_binding_to_epg:
host: apic
username: admin
password: SomeSecretPassword
tenant: accessport-code-cert
ap: accessport_code_app
epg: accessport_epg1
interface_type: switch_port
pod: 1
leafs: 101
interface: '1/7'
state: absent
delegate_to: localhost
- name: Get specific Static Path binding for given EPG
cisco.aci.aci_static_binding_to_epg:
host: apic
username: admin
password: SomeSecretPassword
tenant: accessport-code-cert
ap: accessport_code_app
epg: accessport_epg1
interface_type: switch_port
pod: 1
leafs: 101
interface: '1/7'
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.aci.plugins.module_utils.aci import ACIModule, aci_argument_spec
# Normalise the user-facing interface_mode aliases onto the three values the
# APIC actually accepts (native / regular / untagged).
INTERFACE_MODE_MAPPING = {
    '802.1p': 'native',
    'access': 'untagged',
    'native': 'native',
    'regular': 'regular',
    'tagged': 'regular',
    'trunk': 'regular',
    'untagged': 'untagged',
}
# tDn path templates, one per interface type; placeholders are filled with
# pod/leaf/fex/port identifiers in main() before the request is built.
INTERFACE_TYPE_MAPPING = dict(
    fex='topology/pod-{pod_id}/paths-{leafs}/extpaths-{extpaths}/pathep-[eth{interface}]',
    fex_port_channel='topology/pod-{pod_id}/paths-{leafs}/extpaths-{extpaths}/pathep-[{interface}]',
    fex_vpc='topology/pod-{pod_id}/protpaths-{leafs}/extprotpaths-{extpaths}/pathep-[{interface}]',
    port_channel='topology/pod-{pod_id}/paths-{leafs}/pathep-[{interface}]',
    switch_port='topology/pod-{pod_id}/paths-{leafs}/pathep-[eth{interface}]',
    vpc='topology/pod-{pod_id}/protpaths-{leafs}/pathep-[{interface}]',
)
# TODO: change 'deploy_immediacy' to 'resolution_immediacy' (as seen in aci_epg_to_domain)?
def main():
    """Ansible module entry point for fv:RsPathAtt (static path to EPG binding).

    Builds the argument spec, validates/normalises the path-related
    parameters (leafs, extpaths, VLAN encaps), constructs the APIC URL for
    the binding, then creates, deletes or queries it depending on `state`.
    """
    argument_spec = aci_argument_spec()
    argument_spec.update(
        tenant=dict(type='str', aliases=['tenant_name']),  # Not required for querying all objects
        ap=dict(type='str', aliases=['app_profile', 'app_profile_name']),  # Not required for querying all objects
        epg=dict(type='str', aliases=['epg_name']),  # Not required for querying all objects
        description=dict(type='str', aliases=['descr']),
        encap_id=dict(type='int', aliases=['vlan', 'vlan_id']),
        primary_encap_id=dict(type='str', aliases=['primary_vlan', 'primary_vlan_id']),
        deploy_immediacy=dict(type='str', choices=['immediate', 'lazy']),
        interface_mode=dict(type='str', choices=['802.1p', 'access', 'native', 'regular', 'tagged', 'trunk', 'untagged'],
                            aliases=['interface_mode_name', 'mode']),
        interface_type=dict(type='str', default='switch_port', choices=['fex', 'port_channel', 'switch_port', 'vpc', 'fex_port_channel', 'fex_vpc']),
        pod_id=dict(type='int', aliases=['pod', 'pod_number']),  # Not required for querying all objects
        leafs=dict(type='list', elements='str', aliases=['leaves', 'nodes', 'paths', 'switches']),  # Not required for querying all objects
        interface=dict(type='str'),  # Not required for querying all objects
        extpaths=dict(type='list', elements='str'),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )
    # required_if enforces: fex-style types need extpaths; absent/present
    # need the full path identity (query may omit it to list all bindings).
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['interface_type', 'fex', ['extpaths']],
            ['interface_type', 'fex_vpc', ['extpaths']],
            ['interface_type', 'fex_port_channel', ['extpaths']],
            ['state', 'absent', ['ap', 'epg', 'interface', 'leafs', 'pod_id', 'tenant']],
            ['state', 'present', ['ap', 'encap_id', 'epg', 'interface', 'leafs', 'pod_id', 'tenant']],
        ],
    )
    tenant = module.params.get('tenant')
    ap = module.params.get('ap')
    epg = module.params.get('epg')
    description = module.params.get('description')
    encap_id = module.params.get('encap_id')
    primary_encap_id = module.params.get('primary_encap_id')
    deploy_immediacy = module.params.get('deploy_immediacy')
    interface_mode = module.params.get('interface_mode')
    interface_type = module.params.get('interface_type')
    pod_id = module.params.get('pod_id')
    leafs = module.params.get('leafs')
    interface = module.params.get('interface')
    extpaths = module.params.get('extpaths')
    state = module.params.get('state')
    aci = ACIModule(module)
    if leafs is not None:
        # Process leafs, and support dash-delimited leafs
        leafs = []
        for leaf in module.params.get('leafs'):
            # Users are likely to use integers for leaf IDs, which would raise an exception when using the join method
            leafs.extend(str(leaf).split('-'))
        # One leaf -> plain string; two leafs -> "a-b" (only valid for VPC
        # types); anything else is rejected.
        if len(leafs) == 1:
            if interface_type in ['vpc', 'fex_vpc']:
                aci.fail_json(msg='A interface_type of "vpc" requires 2 leafs')
            leafs = leafs[0]
        elif len(leafs) == 2:
            if interface_type not in ['vpc', 'fex_vpc']:
                aci.fail_json(msg='The interface_types "switch_port", "port_channel", and "fex" \
                    do not support using multiple leafs for a single binding')
            leafs = "-".join(leafs)
        else:
            aci.fail_json(msg='The "leafs" parameter must not have more than 2 entries')
    if extpaths is not None:
        # Process extpaths, and support dash-delimited extpaths
        extpaths = []
        for extpath in module.params.get('extpaths'):
            # Users are likely to use integers for extpaths IDs, which would raise an exception when using the join method
            extpaths.extend(str(extpath).split('-'))
        # Same normalisation as leafs: a pair is only valid for fex_vpc.
        if len(extpaths) == 1:
            if interface_type == 'fex_vpc':
                aci.fail_json(msg='A interface_type of "fex_vpc" requires 2 extpaths')
            extpaths = extpaths[0]
        elif len(extpaths) == 2:
            if interface_type != 'fex_vpc':
                aci.fail_json(msg='The interface_types "fex" \
                    and "fex_port_channel" do not support using multiple extpaths for a single binding')
            extpaths = "-".join(extpaths)
        else:
            aci.fail_json(msg='The "extpaths" parameter must not have more than 2 entries')
    if encap_id is not None:
        if encap_id not in range(1, 4097):
            aci.fail_json(msg='Valid VLAN assignments are from 1 to 4096')
        # APIC expects the 'vlan-N' textual form.
        encap_id = 'vlan-{0}'.format(encap_id)
    if primary_encap_id is not None:
        # primary_encap_id is declared as str: either a VLAN number (converted
        # to 'vlan-N') or the literal 'unknown' (micro-segmentation disabled).
        # A non-numeric value raises in int() and falls through to the check.
        try:
            primary_encap_id = int(primary_encap_id)
            if isinstance(primary_encap_id, int) and primary_encap_id in range(1, 4097):
                primary_encap_id = 'vlan-{0}'.format(primary_encap_id)
            else:
                aci.fail_json(msg='Valid VLAN assignments are from 1 to 4096 or unknown.')
        except Exception as e:
            if isinstance(primary_encap_id, str) and primary_encap_id != 'unknown':
                aci.fail_json(msg='Valid VLAN assignments are from 1 to 4096 or unknown. %s' % e)
    # Fill the tDn template for the chosen interface type.
    static_path = INTERFACE_TYPE_MAPPING[interface_type].format(pod_id=pod_id, leafs=leafs, extpaths=extpaths, interface=interface)
    path_target_filter = {}
    # Only filter on tDn when the full path identity was supplied (queries
    # may legitimately omit parts of it).
    if pod_id is not None and leafs is not None and interface is not None and (interface_type != 'fex' or extpaths is not None):
        path_target_filter = {'tDn': static_path}
    if interface_mode is not None:
        interface_mode = INTERFACE_MODE_MAPPING[interface_mode]
    aci.construct_url(
        root_class=dict(
            aci_class='fvTenant',
            aci_rn='tn-{0}'.format(tenant),
            module_object=tenant,
            target_filter={'name': tenant},
        ),
        subclass_1=dict(
            aci_class='fvAp',
            aci_rn='ap-{0}'.format(ap),
            module_object=ap,
            target_filter={'name': ap},
        ),
        subclass_2=dict(
            aci_class='fvAEPg',
            aci_rn='epg-{0}'.format(epg),
            module_object=epg,
            target_filter={'name': epg},
        ),
        subclass_3=dict(
            aci_class='fvRsPathAtt',
            aci_rn='rspathAtt-[{0}]'.format(static_path),
            module_object=static_path,
            target_filter=path_target_filter,
        ),
    )
    aci.get_existing()
    if state == 'present':
        aci.payload(
            aci_class='fvRsPathAtt',
            class_config=dict(
                descr=description,
                encap=encap_id,
                primaryEncap=primary_encap_id,
                instrImedcy=deploy_immediacy,
                mode=interface_mode,
                tDn=static_path,
            ),
        )
        aci.get_diff(aci_class='fvRsPathAtt')
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    aci.exit_json()
# Run only when invoked as a module/script, not on import.
if __name__ == "__main__":
    main()
| 35.472689 | 149 | 0.635475 |
ab3e073223720ec40f3b539fd81cd54fbda54fcb | 3,713 | py | Python | src/Lossfunctions/LeastSquares.py | Yutong-Dai/ML-Algorithm-Pytorch | ab0eb8fa0c9c44dbf6847f6c44feaf4f3a18ec2d | [
"MIT"
] | null | null | null | src/Lossfunctions/LeastSquares.py | Yutong-Dai/ML-Algorithm-Pytorch | ab0eb8fa0c9c44dbf6847f6c44feaf4f3a18ec2d | [
"MIT"
] | null | null | null | src/Lossfunctions/LeastSquares.py | Yutong-Dai/ML-Algorithm-Pytorch | ab0eb8fa0c9c44dbf6847f6c44feaf4f3a18ec2d | [
"MIT"
] | null | null | null | '''
File: LeastSquares.py
Author: Yutong Dai (yutongdai95@gmail.com)
File Created: 2021-03-10 00:33
Last Modified: 2021-03-11 15:28
--------------------------------------------
Description:
'''
import torch
class LeastSquares:
    """Least-squares objective f(x) = ||Ax - b||^2 / (2m) with autograd-based
    gradients, supporting both full-batch and mini-batch evaluation.

    forward() caches the point/mini-batch it was last evaluated at so that
    grad() can re-run it transparently when called at a different point.
    """
    def __init__(self, A, b):
        """
        @input:
            A: torch.tensor of shape (m, n)
            b: torch.tensor of shape (m, 1)
        """
        self.m, self.n = A.shape
        self.A, self.b = A, b
        # Sentinel "last evaluated point"; placed on A's device so that
        # torch.equal comparisons in grad() are valid.
        self.x = torch.zeros(1)
        if A.device.type == 'cuda':
            self.x = self.x.cuda()
        self.minibatch = None
    def forward(self, x, minibatch=None):
        """
        @input:
            @ x: torch tensor of shape (n, 1)
            @ minibatch: optional list of row indices; None means full batch
        @return: torch tensor; the function value evaluated a the point x
        """
        # Remember where/how the loss was computed for grad()'s cache check.
        self.x = x
        self.minibatch = minibatch
        if minibatch is None:
            self.AxMinusb = torch.matmul(self.A, x) - self.b
            self.loss = torch.sum(self.AxMinusb ** 2) / (self.m * 2)
            self.loss_on_minibacth = False
            # NOTE(review): minibatch_id is only reset on the full-batch
            # path; the mini-batch branch leaves any old value in place —
            # confirm whether that is intentional.
            self.minibatch_id = None
        else:
            self.AxMinusb = torch.matmul(self.A[minibatch, :], x) - self.b[minibatch, :]
            self.loss = torch.sum(self.AxMinusb ** 2) / (len(minibatch) * 2)
            self.loss_on_minibacth = True
        return self.loss
    def grad(self, x, minibatch=None):
        """
        @input:
            @ x: torch tensor of shape (n, 1)
        @return: torch tensor; the gradient value evaluated a the point x
        """
        # Re-run forward() if the cached loss was computed at a different
        # point or on a different mini-batch.
        if (not torch.equal(self.x, x)) or (self.minibatch != minibatch):
            self.forward(x, minibatch)
        # clear gradient
        if x.grad is not None:
            x.grad.data.zero_()
        # The cached loss must match the requested batch mode; otherwise
        # backward() would return the gradient of the wrong objective.
        if (minibatch is None) and (self.loss_on_minibacth == False):
            self.loss.backward()
        elif (minibatch is not None) and (self.loss_on_minibacth == True):
            self.loss.backward()
        else:
            if self.loss_on_minibacth:
                fplace = 'mini-batch'
            else:
                fplace = 'full-batch'
            if minibatch is None:
                gplace = 'full-bacth'
            else:
                gplace = 'mini-batch'
            raise ValueError(f'Inconsistency: function is evaluated on {fplace} while attempting to evaluate gradient on {gplace}!')
        return x.grad
    def _mgrad(self, x, minibatch=None):
        """
        Just for sanity check purpose. Won't be used later.
        Closed-form gradient A^T(Ax - b)/m used to cross-check autograd.
        @input:
            @ x: torch tensor of shape (n, 1)
        @return: torch tensor; the gradient value evaluated a the point x
        """
        if minibatch is None:
            return torch.matmul(self.A.T, self.AxMinusb) / self.m
        else:
            return torch.matmul(self.A[minibatch, :].T, self.AxMinusb) / len(minibatch)
if __name__ == "__main__":
    # Smoke test: compare autograd gradients against the closed-form ones,
    # on GPU when available, for full-batch then mini-batch evaluation.
    torch.manual_seed(0)
    n_rows, n_cols = 5, 3
    if torch.cuda.is_available():
        A = torch.randn(n_rows, n_cols).cuda()
        b = torch.randn(n_rows, 1).cuda()
        x = torch.randn(n_cols, 1).cuda().requires_grad_()
    else:
        A = torch.randn(n_rows, n_cols)
        b = torch.randn(n_rows, 1)
        x = torch.randn(n_cols, 1).requires_grad_()
    problem = LeastSquares(A, b)
    auto_grad = problem.grad(x)
    manual_grad = problem._mgrad(x)
    print(f'autodiff:{auto_grad.detach().view(1,-1)} | manualdiff:{manual_grad.detach().view(1,-1)}')
    print(f' test minibatch ...')
    batch_indices = [0, 1, 4]
    auto_grad_mb = problem.grad(x, batch_indices)
    manual_grad_mb = problem._mgrad(x, batch_indices)
    print(f'autodiff:{auto_grad_mb.detach().view(1,-1)} | manualdiff:{manual_grad_mb.detach().view(1,-1)}')
7456f6159e5d7c0a1df336ac038062a02feb678c | 918 | py | Python | testing/motor_dose.py | cameron-woodard/PiDose | 0cbdcac2f22293d810bff4686148766f154e2ab9 | [
"MIT"
] | 1 | 2022-02-21T14:17:11.000Z | 2022-02-21T14:17:11.000Z | testing/motor_dose.py | cameron-woodard/PiDose | 0cbdcac2f22293d810bff4686148766f154e2ab9 | [
"MIT"
] | null | null | null | testing/motor_dose.py | cameron-woodard/PiDose | 0cbdcac2f22293d810bff4686148766f154e2ab9 | [
"MIT"
] | 1 | 2022-02-21T14:17:24.000Z | 2022-02-21T14:17:24.000Z | """Script for dispensing test drop from the syringe pump.
Written by Cameron Woodard
"""
import RPi.GPIO as GPIO
from time import sleep
# BCM pin assignments for the stepper-motor driver board.
PIN_MOTOR_STEP = 12  # step pulse input
PIN_MOTOR_DIR = 16   # rotation direction
PIN_MOTOR_MS1 = 23   # microstep select 1
PIN_MOTOR_MS2 = 24   # microstep select 2
GPIO.setmode(GPIO.BCM)
GPIO.setup(PIN_MOTOR_STEP, GPIO.OUT)
GPIO.setup(PIN_MOTOR_DIR, GPIO.OUT)
GPIO.setup(PIN_MOTOR_MS1, GPIO.OUT)
GPIO.setup(PIN_MOTOR_MS2, GPIO.OUT)
GPIO.output(PIN_MOTOR_STEP, False)
GPIO.output(PIN_MOTOR_DIR, False)
# MS1=MS2=high selects the driver's finest microstep mode — presumably
# quarter/sixteenth-step depending on the driver chip; verify for the board.
GPIO.output(PIN_MOTOR_MS1,True)
GPIO.output(PIN_MOTOR_MS2,True)
def spin_motor(num_steps):
    """Advance the stepper by `num_steps` steps (1 ms high / 1 ms low pulses)."""
    for _ in range(num_steps):
        for level in (True, False):
            GPIO.output(PIN_MOTOR_STEP, level)
            sleep(0.001)
num_steps = int(input('Please enter the number of steps: '))
try:
    # Dispense one drop per <Enter> until Ctrl-C, then release the pins.
    while True:
        spin_motor(num_steps)
        input("Press enter to dispense another drop.")
except KeyboardInterrupt:
    GPIO.cleanup()
| 23.538462 | 60 | 0.735294 |
2be7ab2d078730534f3088c73a51cf40d22e2ea4 | 273 | py | Python | utils/gurl.py | eigenphi/gcommon | bce1ee422874fa904d90afee03fd703a06dc7a4d | [
"MIT"
] | 3 | 2021-11-09T09:43:21.000Z | 2021-12-16T18:15:43.000Z | utils/gurl.py | eigenphi/gcommon | bce1ee422874fa904d90afee03fd703a06dc7a4d | [
"MIT"
] | null | null | null | utils/gurl.py | eigenphi/gcommon | bce1ee422874fa904d90afee03fd703a06dc7a4d | [
"MIT"
] | 2 | 2022-03-10T11:24:46.000Z | 2022-03-25T06:39:17.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
# created: 2015-10-22
def remove_trailing_slash(path):
    """Return `path` without its final '/' (at most one is stripped)."""
    return path[:-1] if path.endswith('/') else path
def ensure_trailing_slash(path):
    """Return `path`, appending a '/' only when one is not already present."""
    if not path.endswith("/"):
        path = path + "/"
    return path
| 15.166667 | 32 | 0.586081 |
6c650cca675bc2d1ef587961ebdc0da77c40f354 | 2,835 | py | Python | instance_segmentation/ADE20K/dataset.py | orestis-z/Mask_RCNN | d590e0f5085f8cbe895a6698e284426fd0116aa4 | [
"MIT"
] | 13 | 2021-01-21T23:28:02.000Z | 2021-12-16T13:40:01.000Z | instance_segmentation/ADE20K/dataset.py | orestis-z/Mask_RCNN | d590e0f5085f8cbe895a6698e284426fd0116aa4 | [
"MIT"
] | 4 | 2021-05-28T00:48:58.000Z | 2022-02-10T08:38:28.000Z | instance_segmentation/ADE20K/dataset.py | orestis-z/Mask_RCNN | d590e0f5085f8cbe895a6698e284426fd0116aa4 | [
"MIT"
] | 3 | 2021-06-17T08:33:08.000Z | 2022-02-23T11:29:07.000Z | import os, sys
import numpy as np
import scipy
from scipy.io import loadmat
from PIL import Image
# Root directory of the project
ROOT_DIR = os.path.abspath("../..")
if ROOT_DIR not in sys.path:
sys.path.insert(0, ROOT_DIR)
from instance_segmentation.objects_config import ObjectsConfig
from instance_segmentation.objects_dataset import ObjectsDataset
NAME = "ADE20K"
class Config(ObjectsConfig):
    # Only overrides the experiment/dataset name; everything else comes from
    # the shared instance-segmentation config.
    NAME = NAME
class Dataset(ObjectsDataset):
    def load(self, dataset_dir, subset):
        """Register all ADE20K images belonging to `subset`.

        Args:
            dataset_dir: root directory containing index_ade20k.mat and the
                images tree.
            subset: 'training' or 'validation'.
        """
        assert(subset == 'training' or subset == 'validation')
        # The official MATLAB index lists every image's folder and filename.
        index = loadmat(os.path.join(dataset_dir, 'index_ade20k.mat'))['index']
        # All images
        image_ids = range(len(index['folder'][0][0][0]))
        # Single generic class: every annotated instance is "object".
        self.add_class(NAME, 1, "object")
        # Add images
        folders = index['folder'][0][0][0]
        for i in image_ids:
            # Drop the leading path component stored in the .mat index.
            folder = '/'.join(folders[i][0].split('/')[1:])
            if subset in folder.split('/'):
                file_name = index['filename'][0][0][0][i][0]
                path = os.path.join(dataset_dir, folder, file_name)
                # Open only to record the image dimensions.
                im = Image.open(path)
                width, height = im.size
                self.add_image(
                    NAME,
                    image_id=i,
                    path=path,
                    width=width,
                    height=height)
    def load_mask(self, image_id):
        """Load instance masks for the given image.

        Returns:
            masks: A bool array of shape [height, width, instance count] with
                one mask per instance.
            class_ids: a 1D array of class IDs of the instance masks.

        Raises:
            ValueError: if the segmentation image contains no instances.
        """
        image_info = self.image_info[image_id]
        seg_path = image_info['path'][:-4] + '_seg.png'
        seg = scipy.misc.imread(seg_path)
        # Instance indices are encoded in the blue channel of the *_seg.png
        # companion file; 0 marks background. Ported from:
        # http://groups.csail.mit.edu/vision/datasets/ADE20K/code/loadAde20K.m
        B = seg[:, :, 2]
        instances = np.unique(B.flatten()).tolist()
        if 0 in instances:
            instances.remove(0)
        n_instances = len(instances)
        if not n_instances:
            # Bug fix: the original only raised after building the (empty)
            # mask array, and referenced the undefined name `instance_path`.
            raise ValueError("No instances for image {}".format(seg_path))
        # Bug fix: the original indexed the undefined name `img`; the mask
        # stack must match the segmentation image's spatial size.
        masks = np.zeros((seg.shape[0], seg.shape[1], n_instances))
        for i, instance in enumerate(instances):
            masks[:, :, i] = (B == instance).astype(np.uint8)
        class_ids = np.array([1] * n_instances, dtype=np.int32)
        return masks, class_ids
if __name__ == '__main__':
    # Manual smoke test against a local copy of the dataset.
    ade20k = Dataset()
    ade20k.load('/home/orestisz/data/ADE20K_2016_07_26', 'validation')
    masks, class_ids = ade20k.load_mask(0)
| 32.965116 | 85 | 0.585538 |
7587caa89d00e26bd48556f9d820fba27fdb323b | 8,767 | py | Python | apps/facenet/infer.py | UphillD/edgebench | eae2587ccce4866414304cd1841234625be4817b | [
"MIT"
] | 3 | 2021-06-03T01:26:32.000Z | 2021-11-12T14:57:37.000Z | apps/facenet/infer.py | UphillD/edgebench | eae2587ccce4866414304cd1841234625be4817b | [
"MIT"
] | null | null | null | apps/facenet/infer.py | UphillD/edgebench | eae2587ccce4866414304cd1841234625be4817b | [
"MIT"
] | 1 | 2021-06-03T01:26:33.000Z | 2021-06-03T01:26:33.000Z | # MIT License
#
# Copyright (c) 2017 PXL University College
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Clusters similar faces from input folder together in folders based on euclidean distance matrix
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from pathlib import Path
from scipy import misc
from shutil import copyfile
from sklearn.cluster import DBSCAN
import tensorflow as tf
import numpy as np
import os
import sys
import argparse
import facenet
import detect_face
import warnings
import time
def main(args):
    """Run the face-identification service loop.

    Loads the FaceNet model once, then repeatedly waits for a payload image
    in args.data_dir, copies the face database next to it, embeds every face
    with FaceNet and clusters the embeddings with DBSCAN; the payload is
    reported as a match when it lands in a database person's cluster.
    """
    payloaddir = '/app/data/payloads'
    workdir = '/app/data/workdir'
    # Get rid of unnecessary warnings
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    # MTCNN stage networks used for face detection/alignment.
    pnet, rnet, onet = create_network_face_detection(args.gpu_memory_fraction)
    with tf.Graph().as_default():
        with tf.Session() as sess:
            facenet.load_model(args.model)
            print("Ready to infer!")
            if args.loop == 'True':
                # Benchmark mode: seed a fixed payload so the loop runs forever.
                copyfile(payloaddir + '/facenet/payload.jpg', args.data_dir + '/payload.jpg')
            while True:
                if not os.path.exists(args.data_dir + "/payload.jpg"):
                    time.sleep(0.01)
                else:
                    t_start = time.time()
                    # Place the database faces alongside the payload so they
                    # are embedded and clustered together.
                    for filename in os.listdir(workdir + '/face_database/'):
                        copyfile(workdir + '/face_database/' + filename, args.data_dir + "/" + filename)
                    image_list = load_images_from_folder(args.data_dir)
                    # Marker file signalling that processing has started.
                    Path(args.temp_file).touch()
                    # NOTE(review): align_data returns None when no face is
                    # detected; that would make sess.run below fail — confirm
                    # payloads are guaranteed to contain a face.
                    images = align_data(image_list, args.image_size, args.margin, pnet, rnet, onet)
                    images_placeholder = sess.graph.get_tensor_by_name("input:0")
                    embeddings = sess.graph.get_tensor_by_name("embeddings:0")
                    phase_train_placeholder = sess.graph.get_tensor_by_name("phase_train:0")
                    feed_dict = {images_placeholder: images, phase_train_placeholder: False}
                    emb = sess.run(embeddings, feed_dict=feed_dict)
                    # Pairwise Euclidean distance matrix between embeddings.
                    nrof_images = len(images)
                    matrix = np.zeros((nrof_images, nrof_images))
                    print('')
                    for i in range(nrof_images):
                        for j in range(nrof_images):
                            dist = np.sqrt(np.sum(np.square(np.subtract(emb[i, :], emb[j, :]))))
                            matrix[i][j] = dist
                    # DBSCAN is the only algorithm that doesn't require the number of clusters to be defined.
                    db = DBSCAN(eps=args.cluster_threshold, min_samples=args.min_cluster_size, metric='precomputed')
                    db.fit(matrix)
                    labels = db.labels_
                    # get number of clusters
                    no_clusters = len(set(labels)) - (1 if -1 in labels else 0)
                    largest_cluster = 0
                    for i in range(no_clusters):
                        if len(np.nonzero(labels == i)[0]) > len(np.nonzero(labels == largest_cluster)[0]):
                            largest_cluster = i
                    t_total = time.time() - t_start
                    print('Image inferred in: {:.5f}s'.format(t_total))
                    # NOTE(review): cluster id 0 is treated as "no match",
                    # so a genuine largest cluster labelled 0 is reported as
                    # not found — confirm this convention is intended.
                    if largest_cluster > 0:
                        print('Match found! Person identified as ID: {}!'.format(largest_cluster))
                    else:
                        print('Match not found!')
                    print('')
                    if args.loop == 'False':
                        # One-shot mode: clear the work directory and wait
                        # for the next payload.
                        for filename in os.listdir(args.data_dir):
                            os.remove(args.data_dir + "/" + filename)
                        print('Ready to infer!')
def align_data(image_list, image_size, margin, pnet, rnet, onet):
    """Detect faces in each image with MTCNN, crop each confident detection
    with a margin, resize to `image_size` and prewhiten.

    Returns a stacked ndarray of aligned faces, or None when no detection
    scored above 0.95.
    """
    minsize = 20  # minimum face size in pixels
    threshold = [0.6, 0.7, 0.7]  # per-stage MTCNN score thresholds
    factor = 0.709  # image pyramid scale factor
    aligned_faces = []
    for img in image_list:
        img_size = np.asarray(img.shape)[0:2]
        bounding_boxes, _ = detect_face.detect_face(img, minsize, pnet, rnet, onet, threshold, factor)
        for box in bounding_boxes:
            if box[4] <= 0.95:  # skip low-confidence detections
                continue
            det = np.squeeze(box[0:4])
            bb = np.zeros(4, dtype=np.int32)
            # Expand the box by margin/2 on every side, clamped to the image.
            bb[0] = np.maximum(det[0] - margin / 2, 0)
            bb[1] = np.maximum(det[1] - margin / 2, 0)
            bb[2] = np.minimum(det[2] + margin / 2, img_size[1])
            bb[3] = np.minimum(det[3] + margin / 2, img_size[0])
            cropped = img[bb[1]:bb[3], bb[0]:bb[2], :]
            resized = misc.imresize(cropped, (image_size, image_size), interp='bilinear')
            aligned_faces.append(facenet.prewhiten(resized))
    if aligned_faces:
        return np.stack(aligned_faces)
    return None
def create_network_face_detection(gpu_memory_fraction):
    """Build the three MTCNN stage networks (P-Net, R-Net, O-Net) in their
    own graph and session, capping GPU memory at `gpu_memory_fraction`."""
    graph = tf.Graph()
    with graph.as_default():
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_memory_fraction)
        session = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options, log_device_placement=False))
        with session.as_default():
            return detect_face.create_mtcnn(session, None)
def load_images_from_folder(folder):
    """Read every file in `folder` as an image, skipping the exec.tmp marker.

    Entries for which imread yields None are silently dropped.
    """
    images = []
    for entry in os.listdir(folder):
        if 'exec.tmp' in entry:
            continue
        loaded = misc.imread(os.path.join(folder, entry))
        if loaded is not None:
            images.append(loaded)
    return images
def parse_arguments(argv):
    """Build the CLI parser for the inference service and apply it to argv."""
    arg_parser = argparse.ArgumentParser()
    # Positional arguments.
    arg_parser.add_argument(
        'model', type=str,
        help='Either a directory containing the meta_file and ckpt_file or a model protobuf (.pb) file')
    arg_parser.add_argument(
        'data_dir', type=str,
        help='The directory containing the images to cluster into folders.')
    arg_parser.add_argument(
        'out_dir', type=str,
        help='The output directory where the image clusters will be saved.')
    arg_parser.add_argument(
        'temp_file', type=str,
        help='The temporary file.')
    arg_parser.add_argument(
        'loop', type=str,
        help='Set to True for infinite operation.')
    # Optional tuning knobs.
    arg_parser.add_argument(
        '--image_size', type=int, default=160,
        help='Image size (height, width) in pixels.')
    arg_parser.add_argument(
        '--margin', type=int, default=44,
        help='Margin for the crop around the bounding box (height, width) in pixels.')
    arg_parser.add_argument(
        '--min_cluster_size', type=int, default=1,
        help='The minimum amount of pictures required for a cluster.')
    arg_parser.add_argument(
        '--cluster_threshold', type=float, default=1.0,
        help='The minimum distance for faces to be in the same cluster')
    arg_parser.add_argument(
        '--largest_cluster_only', action='store_true',
        help='This argument will make that only the biggest cluster is saved.')
    arg_parser.add_argument(
        '--gpu_memory_fraction', type=float, default=1.0,
        help='Upper bound on the amount of GPU memory that will be used by the process.')
    return arg_parser.parse_args(argv)
# CLI entry point: parse argv (minus the program name) and run the service.
if __name__ == '__main__':
    main(parse_arguments(sys.argv[1:]))
| 42.765854 | 120 | 0.610927 |
26880e6be5d17988a1ba298d5a9eb5426c8e37bc | 628 | py | Python | 2021/day2/part1.py | trongbq/adventofcode | 7ab692a91304a00a7843460052238b98c6f44963 | [
"MIT"
] | null | null | null | 2021/day2/part1.py | trongbq/adventofcode | 7ab692a91304a00a7843460052238b98c6f44963 | [
"MIT"
] | null | null | null | 2021/day2/part1.py | trongbq/adventofcode | 7ab692a91304a00a7843460052238b98c6f44963 | [
"MIT"
] | null | null | null | # https://adventofcode.com/2021/day/2
# What do you get if you multiply your final horizontal position by your final depth?
def solve(commands):
    """Follow the planned course and return horizontal position * depth.

    Each command is a string like 'forward 5', 'down 3' or 'up 2';
    unrecognised directions are reported and skipped.
    """
    horizontal = 0
    depth = 0
    for line in commands:
        direction, amount = line.split()
        amount = int(amount)
        if direction == 'forward':
            horizontal += amount
        elif direction == 'down':
            depth += amount
        elif direction == 'up':
            depth -= amount
        else:
            print("Invalid command")
    return horizontal * depth
if __name__ == '__main__':
    # Bug fix: the original opened input.txt without ever closing the handle;
    # the context manager guarantees it is released.
    with open('input.txt') as f:
        planned_course = f.readlines()
    res = solve(planned_course)
    print(res)
| 20.933333 | 85 | 0.633758 |
2fb95e220c57747bb80510727e9784a02bd37271 | 8,337 | py | Python | depth/models/decode_heads/decode_head.py | zhyever/Monocular-Depth-Estimation-Toolbox | c591b9711321450387ffa7322ec1db9a340347c2 | [
"Apache-2.0"
] | 21 | 2022-03-12T01:42:05.000Z | 2022-03-31T17:01:45.000Z | depth/models/decode_heads/decode_head.py | zhyever/Monocular-Depth-Estimation-Toolbox | c591b9711321450387ffa7322ec1db9a340347c2 | [
"Apache-2.0"
] | 2 | 2022-03-29T10:50:33.000Z | 2022-03-30T10:40:53.000Z | depth/models/decode_heads/decode_head.py | zhyever/Monocular-Depth-Estimation-Toolbox | c591b9711321450387ffa7322ec1db9a340347c2 | [
"Apache-2.0"
] | 3 | 2022-03-26T11:52:44.000Z | 2022-03-30T21:24:16.000Z |
import mmcv
import copy
import torch
import numpy as np
import torch.nn as nn
from abc import ABCMeta, abstractmethod
from mmcv.runner import BaseModule, auto_fp16, force_fp32
from depth.ops import resize
from depth.models.builder import build_loss
class DepthBaseDecodeHead(BaseModule, metaclass=ABCMeta):
    """Base class for BaseDecodeHead.

    Args:
        in_channels (List): Input channels.
        channels (int): Channels after modules, before conv_depth.
        conv_cfg (dict|None): Config of conv layers. Default: None.
        act_cfg (dict): Config of activation layers.
            Default: dict(type='ReLU')
        loss_decode (dict): Config of decode loss.
            Default: dict(type='SigLoss').
        sampler (dict|None): The config of depth map sampler.
            Default: None.
        align_corners (bool): align_corners argument of F.interpolate.
            Default: False.
        min_depth (int): Min depth in dataset setting.
            Default: 1e-3.
        max_depth (int): Max depth in dataset setting.
            Default: None.
        norm_cfg (dict|None): Config of norm layers.
            Default: None.
        classify (bool): Whether predict depth in a cls.-reg. manner.
            Default: False.
        n_bins (int): The number of bins used in cls. step.
            Default: 256.
        bins_strategy (str): The discrete strategy used in cls. step.
            Default: 'UD'.
        norm_strategy (str): The norm strategy on cls. probability
            distribution. Default: 'linear'
        scale_up (str): Whether predict depth in a scale-up manner.
            Default: False.
    """

    def __init__(self,
                 in_channels,
                 channels=96,
                 conv_cfg=None,
                 act_cfg=dict(type='ReLU'),
                 loss_decode=dict(
                     type='SigLoss',
                     valid_mask=True,
                     loss_weight=10),
                 sampler=None,
                 align_corners=False,
                 min_depth=1e-3,
                 max_depth=None,
                 norm_cfg=None,
                 classify=False,
                 n_bins=256,
                 bins_strategy='UD',
                 norm_strategy='linear',
                 scale_up=False,
                 ):
        super(DepthBaseDecodeHead, self).__init__()

        self.in_channels = in_channels
        self.channels = channels
        self.conv_cfg = conv_cfg
        self.act_cfg = act_cfg
        self.loss_decode = build_loss(loss_decode)
        self.align_corners = align_corners
        self.min_depth = min_depth
        self.max_depth = max_depth
        self.norm_cfg = norm_cfg
        self.classify = classify
        self.n_bins = n_bins
        self.scale_up = scale_up

        if self.classify:
            assert bins_strategy in ["UD", "SID"], "Support bins_strategy: UD, SID"
            assert norm_strategy in ["linear", "softmax", "sigmoid"], "Support norm_strategy: linear, softmax, sigmoid"

            self.bins_strategy = bins_strategy
            self.norm_strategy = norm_strategy
            self.softmax = nn.Softmax(dim=1)
            # Classification head: one logit per depth bin.
            self.conv_depth = nn.Conv2d(channels, n_bins, kernel_size=3, padding=1, stride=1)
        else:
            # Regression head: a single depth channel.
            self.conv_depth = nn.Conv2d(channels, 1, kernel_size=3, padding=1, stride=1)

        self.fp16_enabled = False
        self.relu = nn.ReLU()
        self.sigmoid = nn.Sigmoid()

    def extra_repr(self):
        """Extra repr."""
        s = f'align_corners={self.align_corners}'
        return s

    # NOTE: the original class body declared an abstract ``forward(self, inputs)``
    # immediately above this one; Python keeps only the last binding of a name
    # in a class body, so that first definition was dead code and was removed.
    @auto_fp16()
    @abstractmethod
    def forward(self, inputs, img_metas):
        """Placeholder of forward function."""
        pass

    def forward_train(self, img, inputs, img_metas, depth_gt, train_cfg):
        """Forward function for training.

        Args:
            inputs (list[Tensor]): List of multi-level img features.
            img_metas (list[dict]): List of image info dict where each dict
                has: 'img_shape', 'scale_factor', 'flip', and may also contain
                'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
                For details on the values of these keys see
                `depth/datasets/pipelines/formatting.py:Collect`.
            depth_gt (Tensor): GT depth
            train_cfg (dict): The training config.

        Returns:
            dict[str, Tensor]: a dictionary of loss components
        """
        depth_pred = self.forward(inputs, img_metas)
        losses = self.losses(depth_pred, depth_gt)

        # Log the first sample of the batch for visualization alongside losses.
        log_imgs = self.log_images(img[0], depth_pred[0], depth_gt[0], img_metas[0])
        losses.update(**log_imgs)

        return losses

    def forward_test(self, inputs, img_metas, test_cfg):
        """Forward function for testing.

        Args:
            inputs (list[Tensor]): List of multi-level img features.
            img_metas (list[dict]): List of image info dict where each dict
                has: 'img_shape', 'scale_factor', 'flip', and may also contain
                'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
                For details on the values of these keys see
                `depth/datasets/pipelines/formatting.py:Collect`.
            test_cfg (dict): The testing config.

        Returns:
            Tensor: Output depth map.
        """
        return self.forward(inputs, img_metas)

    def depth_pred(self, feat):
        """Prediction each pixel."""
        if self.classify:
            logit = self.conv_depth(feat)

            if self.bins_strategy == 'UD':
                bins = torch.linspace(self.min_depth, self.max_depth, self.n_bins, device=feat.device)
            elif self.bins_strategy == 'SID':
                # NOTE(review): torch.logspace treats min/max as exponents of
                # 10; with min_depth=1e-3 the resulting range looks unusual —
                # kept as-is, presumably intentional per the SID formulation.
                bins = torch.logspace(self.min_depth, self.max_depth, self.n_bins, device=feat.device)

            # following Adabins, default linear
            if self.norm_strategy == 'linear':
                logit = torch.relu(logit)
                eps = 0.1
                logit = logit + eps
                logit = logit / logit.sum(dim=1, keepdim=True)
            elif self.norm_strategy == 'softmax':
                logit = torch.softmax(logit, dim=1)
            elif self.norm_strategy == 'sigmoid':
                logit = torch.sigmoid(logit)
                logit = logit / logit.sum(dim=1, keepdim=True)

            # Expected depth: probability-weighted sum over the bin centers.
            output = torch.einsum('ikmn,k->imn', [logit, bins]).unsqueeze(dim=1)
        else:
            if self.scale_up:
                output = self.sigmoid(self.conv_depth(feat)) * self.max_depth
            else:
                output = self.relu(self.conv_depth(feat)) + self.min_depth

        return output

    @force_fp32(apply_to=('depth_pred', ))
    def losses(self, depth_pred, depth_gt):
        """Compute depth loss."""
        loss = dict()
        # Upsample the prediction to the ground-truth resolution before
        # computing the loss.
        depth_pred = resize(
            input=depth_pred,
            size=depth_gt.shape[2:],
            mode='bilinear',
            align_corners=self.align_corners,
            warning=False)
        loss['loss_depth'] = self.loss_decode(
            depth_pred,
            depth_gt)
        return loss

    def log_images(self, img_path, depth_pred, depth_gt, img_meta):
        """Build de-normalized RGB and max-normalized depth images for logging."""
        show_img = copy.deepcopy(img_path.detach().cpu().permute(1, 2, 0))
        show_img = show_img.numpy().astype(np.float32)
        show_img = mmcv.imdenormalize(show_img,
                                      img_meta['img_norm_cfg']['mean'],
                                      img_meta['img_norm_cfg']['std'],
                                      img_meta['img_norm_cfg']['to_rgb'])
        show_img = np.clip(show_img, 0, 255)
        show_img = show_img.astype(np.uint8)
        # BGR -> RGB, then HWC -> CHW via two transposes.
        show_img = show_img[:, :, ::-1]
        show_img = show_img.transpose(0, 2, 1)
        show_img = show_img.transpose(1, 0, 2)

        # Normalize depths to [0, 1] by their maximum for visualization.
        depth_pred = depth_pred / torch.max(depth_pred)
        depth_gt = depth_gt / torch.max(depth_gt)

        depth_pred_color = copy.deepcopy(depth_pred.detach().cpu())
        depth_gt_color = copy.deepcopy(depth_gt.detach().cpu())

        return {"img_rgb": show_img, "img_depth_pred": depth_pred_color, "img_depth_gt": depth_gt_color}
0c47abae1de6896b639ba6cf4f9db767abde3433 | 22,338 | py | Python | salt/states/pkgrepo.py | dmyerscough/salt | d7b19ab64f0695568f78c12b4ba209e033903804 | [
"Apache-2.0"
] | null | null | null | salt/states/pkgrepo.py | dmyerscough/salt | d7b19ab64f0695568f78c12b4ba209e033903804 | [
"Apache-2.0"
] | null | null | null | salt/states/pkgrepo.py | dmyerscough/salt | d7b19ab64f0695568f78c12b4ba209e033903804 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Management of APT/RPM package repos
===================================
Package repositories for APT-based and RPM-based distros(openSUSE/SUSE, CentOS/Fedora/Redhat) can be managed with
these states. Here is some example SLS:
.. code-block:: yaml
base:
pkgrepo.managed:
- humanname: CentOS-$releasever - Base
- mirrorlist: http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=os
- comments:
- 'http://mirror.centos.org/centos/$releasever/os/$basearch/'
- gpgcheck: 1
- gpgkey: file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-6
.. code-block:: yaml
base:
pkgrepo.managed:
- humanname: Logstash PPA
- name: deb http://ppa.launchpad.net/wolfnet/logstash/ubuntu precise main
- dist: precise
- file: /etc/apt/sources.list.d/logstash.list
- keyid: 28B04E4A
- keyserver: keyserver.ubuntu.com
- require_in:
- pkg: logstash
pkg.latest:
- name: logstash
- refresh: True
.. code-block:: yaml
base:
pkgrepo.managed:
- humanname: deb-multimedia
- name: deb http://www.deb-multimedia.org stable main
- file: /etc/apt/sources.list.d/deb-multimedia.list
- key_url: salt://deb-multimedia/files/marillat.pub
.. code-block:: yaml
base:
pkgrepo.managed:
- humanname: Google Chrome
- name: deb http://dl.google.com/linux/chrome/deb/ stable main
- dist: stable
- file: /etc/apt/sources.list.d/chrome-browser.list
- require_in:
- pkg: google-chrome-stable
- gpgcheck: 1
- key_url: https://dl-ssl.google.com/linux/linux_signing_key.pub
.. code-block:: yaml
base:
pkgrepo.managed:
- ppa: wolfnet/logstash
pkg.latest:
- name: logstash
- refresh: True
.. _bug: https://bugs.launchpad.net/ubuntu/+source/software-properties/+bug/1249080
.. note::
On Ubuntu systems, the ``python-software-properties`` package should be
installed for better support of PPA repositories. To check if this package
is installed, run ``dpkg -l python-software-properties``.
Also, some Ubuntu releases have a bug_ in their
``python-software-properties`` package, a missing dependency on pycurl, so
``python-pycurl`` will need to be manually installed if it is not present
once ``python-software-properties`` is installed.
On Ubuntu & Debian systems, the ``python-apt`` package is required to be
installed. To check if this package is installed, run ``dpkg -l python-apt``.
``python-apt`` will need to be manually installed if it is not present.
'''
# Import Python libs
from __future__ import absolute_import
import sys
# Import salt libs
from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt.modules.aptpkg import _strip_uri
from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS
import salt.utils
import salt.utils.files
import salt.utils.pkg.deb
import salt.utils.pkg.rpm
import salt.utils.versions
def __virtual__():
    '''
    Only load this state module when the active package provider implements
    repository modification (``pkg.mod_repo``).
    '''
    has_mod_repo = 'pkg.mod_repo' in __salt__
    return has_mod_repo
def managed(name, ppa=None, **kwargs):
    '''
    This state manages software package repositories. Currently, :mod:`yum
    <salt.modules.yumpkg>`, :mod:`apt <salt.modules.aptpkg>`, and :mod:`zypper
    <salt.modules.zypper>` repositories are supported.

    **YUM/DNF/ZYPPER-BASED SYSTEMS**

    .. note::
        One of ``baseurl`` or ``mirrorlist`` below is required. Additionally,
        note that this state is not presently capable of managing more than one
        repo in a single repo file, so each instance of this state will manage
        a single repo file containing the configuration for a single repo.

    name
        This value will be used in two ways: Firstly, it will be the repo ID,
        as seen in the entry in square brackets (e.g. ``[foo]``) for a given
        repo. Secondly, it will be the name of the file as stored in
        /etc/yum.repos.d (e.g. ``/etc/yum.repos.d/foo.conf``).

    enabled : True
        Whether or not the repo is enabled. Can be specified as True/False or
        1/0.

    disabled : False
        Included to reduce confusion due to APT's use of the ``disabled``
        argument. If this is passed for a YUM/DNF/Zypper-based distro, then the
        reverse will be passed as ``enabled``. For example passing
        ``disabled=True`` will assume ``enabled=False``.

    humanname
        This is used as the "name" value in the repo file in
        ``/etc/yum.repos.d/`` (or ``/etc/zypp/repos.d`` for SUSE distros).

    baseurl
        The URL to a yum repository

    mirrorlist
        A URL which points to a file containing a collection of baseurls

    comments
        Sometimes you want to supply additional information, but not as
        enabled configuration. Anything supplied for this list will be saved
        in the repo configuration with a comment marker (#) in front.

    Additional configuration values seen in repo files, such as ``gpgkey`` or
    ``gpgcheck``, will be used directly as key-value pairs. For example:

    .. code-block:: yaml

        foo:
          pkgrepo.managed:
            - humanname: Personal repo for foo
            - baseurl: https://mydomain.tld/repo/foo/$releasever/$basearch
            - gpgkey: file:///etc/pki/rpm-gpg/foo-signing-key
            - gpgcheck: 1

    **APT-BASED SYSTEMS**

    ppa
        On Ubuntu, you can take advantage of Personal Package Archives on
        Launchpad simply by specifying the user and archive name. The keyid
        will be queried from launchpad and everything else is set
        automatically. You can override any of the below settings by simply
        setting them as you would normally. For example:

        .. code-block:: yaml

            logstash-ppa:
              pkgrepo.managed:
                - ppa: wolfnet/logstash

    ppa_auth
        For Ubuntu PPAs there can be private PPAs that require authentication
        to access. For these PPAs the username/password can be passed as an
        HTTP Basic style username/password combination.

        .. code-block:: yaml

            logstash-ppa:
              pkgrepo.managed:
                - ppa: wolfnet/logstash
                - ppa_auth: username:password

    name
        On apt-based systems this must be the complete entry as it would be
        seen in the sources.list file. This can have a limited subset of
        components (i.e. 'main') which can be added/modified with the
        ``comps`` option.

        .. code-block:: yaml

            precise-repo:
              pkgrepo.managed:
                - name: deb http://us.archive.ubuntu.com/ubuntu precise main

        .. note::

            The above example is intended as a more readable way of configuring
            the SLS, it is equivalent to the following:

            .. code-block:: yaml

                'deb http://us.archive.ubuntu.com/ubuntu precise main':
                  pkgrepo.managed

    disabled : False
        Toggles whether or not the repo is used for resolving dependencies
        and/or installing packages.

    enabled : True
        Included to reduce confusion due to yum/dnf/zypper's use of the
        ``enabled`` argument. If this is passed for an APT-based distro, then
        the reverse will be passed as ``disabled``. For example, passing
        ``enabled=False`` will assume ``disabled=False``.

    architectures
        On apt-based systems, architectures can restrict the available
        architectures that the repository provides (e.g. only amd64).
        architectures should be a comma-separated list.

    comps
        On apt-based systems, comps dictate the types of packages to be
        installed from the repository (e.g. main, nonfree, ...). For
        purposes of this, comps should be a comma-separated list.

    file
        The filename for the .list that the repository is configured in.
        It is important to include the full-path AND make sure it is in
        a directory that APT will look in when handling packages

    dist
        This dictates the release of the distro the packages should be built
        for. (e.g. unstable). This option is rarely needed.

    keyid
        The KeyID of the GPG key to install. This option also requires
        the ``keyserver`` option to be set.

    keyserver
        This is the name of the keyserver to retrieve gpg keys from. The
        ``keyid`` option must also be set for this option to work.

    key_url
        URL to retrieve a GPG key from. Allows the usage of ``http://``,
        ``https://`` as well as ``salt://``.

        .. note::

            Use either ``keyid``/``keyserver`` or ``key_url``, but not both.

    consolidate : False
        If set to ``True``, this will consolidate all sources definitions to the
        sources.list file, cleanup the now unused files, consolidate components
        (e.g. main) for the same URI, type, and architecture to a single line,
        and finally remove comments from the sources.list file. The consolidate
        will run every time the state is processed. The option only needs to be
        set on one repo managed by salt to take effect.

    clean_file : False
        If set to ``True``, empty the file before config repo

        .. note::
            Use with care. This can be dangerous if multiple sources are
            configured in the same file.

        .. versionadded:: 2015.8.0

    refresh : True
        If set to ``False`` this will skip refreshing the apt package database
        on debian based systems.

    refresh_db : True
        .. deprecated:: Oxygen
            Use ``refresh`` instead.

    require_in
        Set this to a list of pkg.installed or pkg.latest to trigger the
        running of apt-get update prior to attempting to install these
        packages. Setting a require in the pkg state will not work for this.
    '''
    if 'refresh_db' in kwargs:
        salt.utils.versions.warn_until(
            'Neon',
            'The \'refresh_db\' argument to \'pkg.mod_repo\' has been '
            'renamed to \'refresh\'. Support for using \'refresh_db\' will be '
            'removed in the Neon release of Salt.'
        )
        kwargs['refresh'] = kwargs.pop('refresh_db')

    ret = {'name': name,
           'changes': {},
           'result': None,
           'comment': ''}

    if 'pkg.get_repo' not in __salt__:
        ret['result'] = False
        ret['comment'] = 'Repo management not implemented on this platform'
        return ret

    if 'key_url' in kwargs and ('keyid' in kwargs or 'keyserver' in kwargs):
        ret['result'] = False
        ret['comment'] = 'You may not use both "keyid"/"keyserver" and ' \
                         '"key_url" argument.'
        # Fix: return immediately. Previously execution fell through here, so
        # the repo was still configured (and this error message overwritten)
        # despite the mutually-exclusive arguments.
        return ret

    if 'repo' in kwargs:
        ret['result'] = False
        ret['comment'] = ('\'repo\' is not a supported argument for this '
                          'state. The \'name\' argument is probably what was '
                          'intended.')
        return ret

    enabled = kwargs.pop('enabled', None)
    disabled = kwargs.pop('disabled', None)

    if enabled is not None and disabled is not None:
        ret['result'] = False
        ret['comment'] = 'Only one of enabled/disabled is allowed'
        return ret
    elif enabled is None and disabled is None:
        # If neither argument was passed we assume the repo will be enabled
        enabled = True

    repo = name
    os_family = __grains__['os_family'].lower()
    if __grains__['os'] in ('Ubuntu', 'Mint'):
        if ppa is not None:
            # overload the name/repo value for PPAs cleanly
            # this allows us to have one code-path for PPAs
            try:
                repo = ':'.join(('ppa', ppa))
            except TypeError:
                repo = ':'.join(('ppa', str(ppa)))

        kwargs['disabled'] = not salt.utils.is_true(enabled) \
            if enabled is not None \
            else salt.utils.is_true(disabled)

    elif os_family in ('redhat', 'suse'):
        if 'humanname' in kwargs:
            kwargs['name'] = kwargs.pop('humanname')
        if 'name' not in kwargs:
            # Fall back to the repo name if humanname not provided
            kwargs['name'] = repo

        kwargs['enabled'] = not salt.utils.is_true(disabled) \
            if disabled is not None \
            else salt.utils.is_true(enabled)

    elif os_family == 'nilinuxrt':
        # opkg is the pkg virtual
        kwargs['enabled'] = not salt.utils.is_true(disabled) \
            if disabled is not None \
            else salt.utils.is_true(enabled)

    for kwarg in _STATE_INTERNAL_KEYWORDS:
        kwargs.pop(kwarg, None)

    try:
        pre = __salt__['pkg.get_repo'](
            repo,
            ppa_auth=kwargs.get('ppa_auth', None)
        )
    except CommandExecutionError as exc:
        ret['result'] = False
        ret['comment'] = \
            'Failed to examine repo \'{0}\': {1}'.format(name, exc)
        return ret

    # This is because of how apt-sources works. This pushes distro logic
    # out of the state itself and into a module that it makes more sense
    # to use. Most package providers will simply return the data provided
    # it doesn't require any "specialized" data massaging.
    if 'pkg.expand_repo_def' in __salt__:
        sanitizedkwargs = __salt__['pkg.expand_repo_def'](repo=repo, **kwargs)
    else:
        sanitizedkwargs = kwargs

    if os_family == 'debian':
        repo = _strip_uri(repo)

    if pre:
        # Compare the existing repo definition against the desired one.  Any
        # ``break`` below means a difference was found and the repo must be
        # (re)configured; the for/else branch runs only when no difference
        # was detected.
        for kwarg in sanitizedkwargs:
            if kwarg not in pre:
                if kwarg == 'enabled':
                    # On a RedHat-based OS, 'enabled' is assumed to be true if
                    # not explicitly set, so we don't need to update the repo
                    # if it's desired to be enabled and the 'enabled' key is
                    # missing from the repo definition
                    if os_family == 'redhat':
                        if not salt.utils.is_true(sanitizedkwargs[kwarg]):
                            break
                    else:
                        break
                else:
                    break
            elif kwarg == 'comps':
                if sorted(sanitizedkwargs[kwarg]) != sorted(pre[kwarg]):
                    break
            elif kwarg == 'line' and os_family == 'debian':
                # split the line and sort everything after the URL
                sanitizedsplit = sanitizedkwargs[kwarg].split()
                sanitizedsplit[3:] = sorted(sanitizedsplit[3:])
                reposplit, _, pre_comments = \
                    [x.strip() for x in pre[kwarg].partition('#')]
                reposplit = reposplit.split()
                reposplit[3:] = sorted(reposplit[3:])
                if sanitizedsplit != reposplit:
                    break
                if 'comments' in kwargs:
                    post_comments = \
                        salt.utils.pkg.deb.combine_comments(kwargs['comments'])
                    if pre_comments != post_comments:
                        break
            elif kwarg == 'comments' and os_family == 'redhat':
                precomments = salt.utils.pkg.rpm.combine_comments(pre[kwarg])
                kwargcomments = salt.utils.pkg.rpm.combine_comments(
                    sanitizedkwargs[kwarg])
                if precomments != kwargcomments:
                    break
            else:
                if os_family in ('redhat', 'suse') \
                        and any(isinstance(x, bool) for x in
                                (sanitizedkwargs[kwarg], pre[kwarg])):
                    # This check disambiguates 1/0 from True/False
                    if salt.utils.is_true(sanitizedkwargs[kwarg]) != \
                            salt.utils.is_true(pre[kwarg]):
                        break
                else:
                    if str(sanitizedkwargs[kwarg]) != str(pre[kwarg]):
                        break
        else:
            ret['result'] = True
            ret['comment'] = ('Package repo \'{0}\' already configured'
                              .format(name))
            return ret

    if __opts__['test']:
        ret['comment'] = (
            'Package repo \'{0}\' will be configured. This may cause pkg '
            'states to behave differently than stated if this action is '
            'repeated without test=True, due to the differences in the '
            'configured repositories.'.format(name)
        )
        return ret

    # empty file before configure
    if kwargs.get('clean_file', False):
        with salt.utils.files.fopen(kwargs['file'], 'w'):
            pass

    try:
        if os_family == 'debian':
            __salt__['pkg.mod_repo'](repo, saltenv=__env__, **kwargs)
        else:
            __salt__['pkg.mod_repo'](repo, **kwargs)
    except Exception as exc:
        # This is another way to pass information back from the mod_repo
        # function.
        ret['result'] = False
        ret['comment'] = \
            'Failed to configure repo \'{0}\': {1}'.format(name, exc)
        return ret

    try:
        post = __salt__['pkg.get_repo'](
            repo,
            ppa_auth=kwargs.get('ppa_auth', None)
        )
        if pre:
            for kwarg in sanitizedkwargs:
                if post.get(kwarg) != pre.get(kwarg):
                    # Fix: use .get() for the 'new' value as well; a key that
                    # existed before but is absent after the update would
                    # otherwise raise KeyError here.
                    change = {'new': post.get(kwarg),
                              'old': pre.get(kwarg)}
                    ret['changes'][kwarg] = change
        else:
            ret['changes'] = {'repo': repo}

        ret['result'] = True
        ret['comment'] = 'Configured package repo \'{0}\''.format(name)
    except Exception as exc:
        ret['result'] = False
        ret['comment'] = \
            'Failed to confirm config of repo \'{0}\': {1}'.format(name, exc)

    # Clear cache of available packages, if present, since changes to the
    # repositories may change the packages that are available.
    if ret['changes']:
        sys.modules[
            __salt__['test.ping'].__module__
        ].__context__.pop('pkg._avail', None)

    return ret
def absent(name, **kwargs):
    '''
    This function deletes the specified repo on the system, if it exists. It
    is essentially a wrapper around pkg.del_repo.

    name
        The name of the package repo, as it would be referred to when running
        the regular package manager commands.

    **UBUNTU-SPECIFIC OPTIONS**

    ppa
        On Ubuntu, you can take advantage of Personal Package Archives on
        Launchpad simply by specifying the user and archive name.

        .. code-block:: yaml

            logstash-ppa:
              pkgrepo.absent:
                - ppa: wolfnet/logstash

    ppa_auth
        For Ubuntu PPAs there can be private PPAs that require authentication
        to access. For these PPAs the username/password can be specified. This
        is required for matching if the name format uses the ``ppa:`` specifier
        and is private (requires username/password to access, which is encoded
        in the URI).

        .. code-block:: yaml

            logstash-ppa:
              pkgrepo.absent:
                - ppa: wolfnet/logstash
                - ppa_auth: username:password

    keyid
        If passed, then the GPG key corresponding to the passed KeyID will also
        be removed.

    keyid_ppa : False
        If set to ``True``, the GPG key's ID will be looked up from
        ppa.launchpad.net and removed, and the ``keyid`` argument will be
        ignored.

        .. note::
            This option will be disregarded unless the ``ppa`` argument is
            present.
    '''
    ret = {'name': name,
           'changes': {},
           'result': None,
           'comment': ''}

    # Normalize a bare "user/archive" PPA name into the "ppa:" URI form on
    # Ubuntu-like distros.
    if 'ppa' in kwargs and __grains__['os'] in ('Ubuntu', 'Mint'):
        name = kwargs.pop('ppa')
        if not name.startswith('ppa:'):
            name = 'ppa:' + name

    # Key removal is requested when either keyid or keyid_ppa was passed.
    remove_key = any(kwargs.get(x) is not None
                     for x in ('keyid', 'keyid_ppa'))
    if remove_key and 'pkg.del_repo_key' not in __salt__:
        ret['result'] = False
        ret['comment'] = \
            'Repo key management is not implemented for this platform'
        return ret

    try:
        repo = __salt__['pkg.get_repo'](
            name, ppa_auth=kwargs.get('ppa_auth', None)
        )
    except CommandExecutionError as exc:
        ret['result'] = False
        ret['comment'] = \
            'Failed to configure repo \'{0}\': {1}'.format(name, exc)
        return ret

    # Repo does not exist: nothing to do, the state is already satisfied.
    if not repo:
        ret['comment'] = 'Package repo {0} is absent'.format(name)
        ret['result'] = True
        return ret

    # Dry run: report what would happen without touching anything.
    if __opts__['test']:
        ret['comment'] = ('Package repo \'{0}\' will be removed. This may '
                          'cause pkg states to behave differently than stated '
                          'if this action is repeated without test=True, due '
                          'to the differences in the configured repositories.'
                          .format(name))
        return ret

    try:
        __salt__['pkg.del_repo'](repo=name, **kwargs)
    except (CommandExecutionError, SaltInvocationError) as exc:
        ret['result'] = False
        ret['comment'] = exc.strerror
        return ret

    # Deletion succeeded only if the repo no longer appears in the provider's
    # repo list.
    repos = __salt__['pkg.list_repos']()
    if name not in repos:
        ret['changes']['repo'] = name
        ret['comment'] = 'Removed repo {0}'.format(name)

        if not remove_key:
            ret['result'] = True
        else:
            # Also remove the associated GPG key; a failure here leaves the
            # overall state failed even though the repo itself was removed.
            try:
                removed_keyid = __salt__['pkg.del_repo_key'](name, **kwargs)
            except (CommandExecutionError, SaltInvocationError) as exc:
                ret['result'] = False
                ret['comment'] += ', but failed to remove key: {0}'.format(exc)
            else:
                ret['result'] = True
                ret['changes']['keyid'] = removed_keyid
                ret['comment'] += ', and keyid {0}'.format(removed_keyid)
    else:
        ret['result'] = False
        ret['comment'] = 'Failed to remove repo {0}'.format(name)

    return ret
| 35.855538 | 113 | 0.587206 |
c332defb94592349973d1f5758f26a5c40713b66 | 34,772 | py | Python | scrapydd/main.py | zanachka/scrapydd | ba7854a69e756e5d0e6b5f835d8f36fe57f7f7c2 | [
"Apache-2.0"
] | 5 | 2017-06-13T05:07:57.000Z | 2021-02-26T16:16:49.000Z | scrapydd/main.py | zanachka/scrapydd | ba7854a69e756e5d0e6b5f835d8f36fe57f7f7c2 | [
"Apache-2.0"
] | 7 | 2019-04-15T01:34:30.000Z | 2020-09-16T02:41:00.000Z | scrapydd/main.py | zanachka/scrapydd | ba7854a69e756e5d0e6b5f835d8f36fe57f7f7c2 | [
"Apache-2.0"
] | 3 | 2017-06-28T09:58:28.000Z | 2020-07-09T08:57:57.000Z | # -*-coding:utf8-*-
"""
Main entrypoint of scrapydd server
"""
# pylint: disable=missing-module-docstring
# pylint: disable=missing-class-docstring
# pylint: disable=missing-function-docstring
import ssl
import os.path
import sys
import logging
import subprocess
import signal
import json
# pylint: disable=deprecated-module
from optparse import OptionParser
import chardet
import tornado.ioloop
import tornado.web
import tornado.template
import tornado.httpserver
import tornado.netutil
from tornado.web import authenticated
from sqlalchemy import desc
from .models import Session, Project, Spider, SpiderExecutionQueue
from .models import init_database, HistoricalJob
from .models import session_scope, SpiderSettings, WebhookJob
from .models import SpiderParameter, SpiderFigure
from .schedule import SchedulerManager, build_scheduler
from .nodes import NodeManager
from .config import Config
from .process import fork_processes
from .project import ProjectManager
from .exceptions import SpiderNotFound, ProjectNotFound
from .exceptions import InvalidCronExpression
from .webhook import WebhookDaemon
from .daemonize import daemonize
from .workspace import RunnerFactory
from .cluster import ClusterNode
from .spiderplugin import SpiderPluginManager
from .ssl_gen import SSLCertificateGenerator
from .settting import SpiderSettingLoader
from .security import NoAuthenticationProvider
from .handlers.auth import LogoutHandler, SigninHandler
from .handlers.base import AppBaseHandler, RestBaseHandler
from .handlers import admin, profile, rest
from .handlers.node import NodesHandler, ExecuteNextHandler
from .handlers.node import ExecuteCompleteHandler, NodeHeartbeatHandler
from .handlers.node import JobStartHandler, RegisterNodeHandler, JobEggHandler
from .handlers import webui
from .handlers import node as node_handlers
from .storage import ProjectStorage
from .scripts.upgrade_filestorage import upgrade as upgrade_project_storage
from .scripts.upgrade_projectpackage import upgrade as upgrade_project_package
from .handlers.api import apply as api_apply
from .grpcservice.server import start as start_grpc_server
LOGGER = logging.getLogger(__name__)
BASE_DIR = os.path.dirname(__file__)
class ProjectList(AppBaseHandler):
    """Return the projects visible to the signed-in user as JSON."""
    # pylint: disable=arguments-differ
    @authenticated
    def get(self):
        with session_scope() as session:
            visible = self.project_manager.get_projects(session,
                                                        self.current_user)
            payload = {'projects': [{'id': project.id} for project in visible]}
            self.write(payload)
class SpiderInstanceHandler2(AppBaseHandler):
    """Render the detail page of a single spider: recent jobs, running
    jobs, settings, parameters and webhook jobs."""
    # pylint: disable=arguments-differ
    @authenticated
    def get(self, project_id, spider_id):
        session = self.session
        try:
            spider = self.project_manager.get_spider(
                session, self.current_user, project_id, spider_id)
            project = spider.project
            # Latest 100 finished jobs, newest first.
            jobs = session.query(HistoricalJob) \
                .filter(HistoricalJob.spider_id == spider.id) \
                .order_by(desc(HistoricalJob.start_time)) \
                .slice(0, 100)
            running_jobs = session.query(SpiderExecutionQueue) \
                .filter(SpiderExecutionQueue.spider_id == spider.id) \
                .order_by(desc(SpiderExecutionQueue.update_time))
            jobs_count = session.query(HistoricalJob) \
                .filter(HistoricalJob.spider_id == spider.id).count()
            webhook_jobs = session.query(WebhookJob) \
                .filter_by(spider_id=spider.id)
            context = {}
            context['spider'] = spider
            context['project'] = project
            context['jobs'] = jobs
            context['running_jobs'] = running_jobs
            context['settings'] = session.query(SpiderSettings) \
                .filter_by(spider_id=spider.id) \
                .order_by(SpiderSettings.setting_key)
            context['webhook_jobs'] = webhook_jobs
            spider_parameters = session.query(SpiderParameter) \
                .filter_by(spider_id=spider.id) \
                .order_by(SpiderParameter.parameter_key)
            context['spider_parameters'] = {
                parameter.parameter_key: parameter.value for parameter in
                spider_parameters}
            self.render("spider.html", jobs_count=jobs_count, **context)
        finally:
            # Fix: the session previously leaked whenever a query or the
            # render raised; close it unconditionally.
            session.close()
class SpiderEggHandler(AppBaseHandler):
    """Serve the packaged egg of the project a spider belongs to."""
    # pylint: disable=arguments-differ
    @authenticated
    def get(self, spider_id):
        # Fix: use session_scope() (as the sibling handlers do) so the
        # session is always released; the original created a Session() that
        # leaked whenever an exception was raised before close().
        with session_scope() as session:
            spider = session.query(Spider).filter_by(id=spider_id).first()
            project_storage = ProjectStorage(
                self.settings.get('project_storage_dir'),
                project=spider.project)
            _, f_egg = project_storage.get_egg()
            self.write(f_egg.read())
class ProjectSpiderEggHandler(AppBaseHandler):
    """Serve the packaged egg of a project, addressed by project + spider."""
    # pylint: disable=arguments-differ
    @authenticated
    def get(self, project_id, spider_id):
        with session_scope() as session:
            spider = self.project_manager.get_spider(
                session, self.current_user, project_id, spider_id)
            project = spider.project
            project_storage = ProjectStorage(
                self.settings.get('project_storage_dir'),
                project)
            _, f_egg = project_storage.get_egg()
            self.write(f_egg.read())
        # Fix: removed the stray ``session.close()`` that ran after the
        # ``session_scope`` context manager had already disposed of the
        # session.
class SpiderListHandler(AppBaseHandler):
    """Render a flat list of every spider the user can see."""
    # pylint: disable=arguments-differ
    @authenticated
    def get(self):
        # Fix: use session_scope() so the session is released even when a
        # query or the render raises; the original Session() leaked on error.
        with session_scope() as session:
            projects = self.project_manager.get_projects(session,
                                                         self.current_user)
            spiders = [spider
                       for project in projects
                       for spider in project.spiders]
            self.render("spiderlist.html", spiders=spiders)
class SpiderTriggersHandler(AppBaseHandler):
    """Show the trigger-creation form and create cron triggers for a spider."""
    # pylint: disable=arguments-differ
    scheduler_manager = None

    def initialize(self, scheduler_manager=None):
        assert isinstance(scheduler_manager, SchedulerManager)
        assert scheduler_manager
        super(SpiderTriggersHandler, self).initialize()
        self.scheduler_manager = scheduler_manager

    @authenticated
    def get(self, project_id, spider_id):
        with session_scope() as session:
            spider = self.project_manager.get_spider(
                session, self.current_user, project_id, spider_id)
            self.render("spidercreatetrigger.html",
                        spider=spider, errormsg=None)

    @authenticated
    def post(self, project_id, spider_id):
        cron = self.get_argument('cron')
        with session_scope() as session:
            spider = self.project_manager.get_spider(
                session, self.current_user, project_id, spider_id)
            project = spider.project
            try:
                self.scheduler_manager.add_schedule(project, spider, cron)
            except InvalidCronExpression:
                # Re-show the form with an error message.
                return self.render("spidercreatetrigger.html",
                                   spider=spider,
                                   errormsg='Invalid cron expression.')
            return self.redirect('/projects/%s/spiders/%s' % (
                project.id, spider.id))
class DeleteSpiderTriggersHandler(AppBaseHandler):
    """Remove one cron trigger from a spider, then redirect to its page."""
    # pylint: disable=arguments-differ
    scheduler_manager = None

    def initialize(self, scheduler_manager=None):
        super(DeleteSpiderTriggersHandler, self).initialize()
        self.scheduler_manager = scheduler_manager

    @authenticated
    def post(self, project_id, spider_id, trigger_id):
        with session_scope() as session:
            spider = self.project_manager.get_spider(
                session, self.current_user, project_id, spider_id)
            self.scheduler_manager.remove_schedule(spider, trigger_id)
        # NOTE(review): spider attributes are read after the session scope has
        # exited, as in the original — presumably the session does not expire
        # loaded objects on close; confirm against session_scope().
        self.redirect('/projects/%s/spiders/%s' % (spider.project.id, spider.id))
class DeleteSpiderJobHandler(AppBaseHandler):
    """Delete one historical job record together with its stored data."""
    # pylint: disable=arguments-differ
    @authenticated
    def post(self, project_id, spider_id, job_id):
        with session_scope() as session:
            spider = self.project_manager.get_spider(
                session, self.current_user, project_id, spider_id)
            historical_job = session.query(HistoricalJob) \
                .filter_by(spider_id=spider.id, id=job_id) \
                .first()
            if not historical_job:
                # Unknown job id for this spider.
                return self.set_status(404, 'job not found.')
            storage = ProjectStorage(
                self.settings.get('project_storage_dir'), spider.project)
            # Remove the on-disk artifacts first, then the database row.
            storage.delete_job_data(historical_job)
            session.delete(historical_job)
            session.commit()
            return self.write('Success.')
class JobsHandler(AppBaseHandler):
    """Shows the pending / running / finished job queues."""
    # pylint: disable=arguments-differ

    def initialize(self, scheduler_manager):
        super(JobsHandler, self).initialize()
        self.scheduler_manager = scheduler_manager

    @authenticated
    def get(self):
        with session_scope() as session:
            pending, running, finished = self.scheduler_manager.jobs(session)
            context = {
                'pending': pending,
                'running': running,
                'finished': finished,
            }
            self.render("jobs.html", **context)
class SpiderWebhookHandler(AppBaseHandler):
    """CRUD endpoints for a spider's 'webhook_payload' setting."""
    # pylint: disable=arguments-differ

    @authenticated
    def get(self, project_id, spider_id):
        """Return the configured webhook payload URL (empty body when unset)."""
        # Fix: use session_scope() like the other verbs here; the previous
        # bare Session() was never closed and leaked a connection per request.
        with session_scope() as session:
            spider = self.project_manager.get_spider(session, self.current_user,
                                                     project_id, spider_id)
            webhook_setting = session.query(SpiderSettings) \
                .filter_by(spider_id=spider.id,
                           setting_key='webhook_payload').first()
            if webhook_setting:
                self.write(webhook_setting.value)

    @authenticated
    def post(self, project_id, spider_id):
        """Create or update the webhook payload URL."""
        payload_url = self.get_argument('payload_url')
        with session_scope() as session:
            spider = self.project_manager.get_spider(session, self.current_user,
                                                     project_id, spider_id)
            webhook_setting = session.query(SpiderSettings) \
                .filter_by(spider_id=spider.id,
                           setting_key='webhook_payload').first()
            if webhook_setting is None:
                # no existing row, create one
                webhook_setting = SpiderSettings()
                webhook_setting.spider_id = spider.id
                webhook_setting.setting_key = 'webhook_payload'
            webhook_setting.value = payload_url
            session.add(webhook_setting)
            session.commit()

    @authenticated
    def put(self, project_id, spider_id):
        # PUT is an alias of POST for this resource.
        self.post(project_id, spider_id)

    @authenticated
    def delete(self, project_id, spider_id):
        """Remove the webhook payload URL setting."""
        with session_scope() as session:
            spider = self.project_manager.get_spider(session, self.current_user,
                                                     project_id, spider_id)
            session.query(SpiderSettings) \
                .filter_by(spider_id=spider.id,
                           setting_key='webhook_payload').delete()
            session.commit()
class SpiderSettingsHandler(AppBaseHandler):
    """Displays and persists per-spider settings, parameters and the figure JSON."""
    # pylint: disable=arguments-differ

    # Editable setting keys and the regex each submitted value should match.
    available_settings = {
        'concurrency': r'\d+',
        'timeout': r'\d+',
        'webhook_payload': '.*',
        'webhook_batch_size': r'\d+',
        'tag': '.*',
        'extra_requirements': '.*'
    }

    @authenticated
    def get(self, project_id, spider_id):
        """Render the settings page with stored values and defaults filled in."""
        session = self.session
        spider = self.project_manager.get_spider(session, self.current_user,
                                                 project_id, spider_id)
        project = spider.project
        job_settings = {
            setting.setting_key: setting.value for setting in
            session.query(SpiderSettings).filter_by(spider_id=spider.id)}
        # default setting values
        if 'concurrency' not in job_settings:
            job_settings['concurrency'] = 1
        if 'timeout' not in job_settings:
            job_settings['timeout'] = 3600
        if 'tag' not in job_settings or job_settings['tag'] is None:
            job_settings['tag'] = ''
        context = {}
        context['settings'] = job_settings
        context['project'] = project
        context['spider'] = spider
        spider_parameters = session.query(SpiderParameter) \
            .filter_by(spider_id=spider.id) \
            .order_by(SpiderParameter.parameter_key)
        context['spider_parameters'] = spider_parameters
        # Figure JSON defaults to an empty object when none is stored.
        figure_json = '{}'
        if spider.figure and spider.figure.text:
            figure_json = spider.figure.text
        context['figure_json'] = figure_json
        return self.render('spidersettings.html', **context)

    @authenticated
    def post(self, project_id, spider_id):
        """Upsert each known setting, replace all parameters and store the figure.

        Each setting below follows the same upsert pattern: query the row,
        create it if missing, overwrite its value, re-add to the session.
        """
        session = self.session
        spider = self.project_manager.get_spider(session, self.current_user,
                                                 project_id, spider_id)
        project = spider.project
        setting_concurrency_value = self.get_body_argument('concurrency',
                                                           '1')
        setting_concurrency = session.query(SpiderSettings).filter_by(
            spider_id=spider.id,
            setting_key='concurrency').first()
        if not setting_concurrency:
            setting_concurrency = SpiderSettings()
            setting_concurrency.spider_id = spider.id
            setting_concurrency.setting_key = 'concurrency'
        setting_concurrency.value = setting_concurrency_value
        session.add(setting_concurrency)
        setting_timeout_value = self.get_body_argument('timeout', '3600')
        setting_timeout = session.query(SpiderSettings).filter_by(
            spider_id=spider.id,
            setting_key='timeout').first()
        if not setting_timeout:
            setting_timeout = SpiderSettings()
            setting_timeout.spider_id = spider.id
            setting_timeout.setting_key = 'timeout'
        setting_timeout.value = setting_timeout_value
        session.add(setting_timeout)
        setting_webhook_payload_value = self.get_body_argument(
            'webhook_payload', '')
        setting_webhook_payload = session.query(SpiderSettings) \
            .filter_by(spider_id=spider.id,
                       setting_key='webhook_payload').first()
        if not setting_webhook_payload:
            setting_webhook_payload = SpiderSettings()
            setting_webhook_payload.spider_id = spider.id
            setting_webhook_payload.setting_key = 'webhook_payload'
        setting_webhook_payload.value = setting_webhook_payload_value
        session.add(setting_webhook_payload)
        setting_webhook_batch_size_value = self.get_body_argument(
            'webhook_batch_size', '')
        setting_webhook_batch_size = session.query(SpiderSettings) \
            .filter_by(spider_id=spider.id,
                       setting_key='webhook_batch_size').first()
        if not setting_webhook_batch_size:
            setting_webhook_batch_size = SpiderSettings()
            setting_webhook_batch_size.spider_id = spider.id
            setting_webhook_batch_size.setting_key = 'webhook_batch_size'
        setting_webhook_batch_size.value = setting_webhook_batch_size_value
        session.add(setting_webhook_batch_size)
        # Empty tag / extra_requirements are stored as NULL, not ''.
        setting_tag_value = self.get_body_argument('tag', '').strip()
        setting_tag_value = None if setting_tag_value == '' else \
            setting_tag_value
        setting_tag = session.query(SpiderSettings).filter_by(
            spider_id=spider.id,
            setting_key='tag').first()
        if not setting_tag:
            setting_tag = SpiderSettings()
            setting_tag.spider_id = spider.id
            setting_tag.setting_key = 'tag'
        setting_tag.value = setting_tag_value
        session.add(setting_tag)
        setting_extra_requirements_value = self.get_body_argument(
            'extra_requirements', '').strip()
        setting_extra_requirements_value = None if \
            setting_extra_requirements_value == '' else \
            setting_extra_requirements_value
        setting_extra_requirements = session.query(SpiderSettings) \
            .filter_by(spider_id=spider.id,
                       setting_key='extra_requirements').first()
        if not setting_extra_requirements:
            setting_extra_requirements = SpiderSettings()
            setting_extra_requirements.spider_id = spider.id
            setting_extra_requirements.setting_key = 'extra_requirements'
        setting_extra_requirements.value = setting_extra_requirements_value
        session.add(setting_extra_requirements)
        # Parameters are replaced wholesale: delete all rows, then re-insert
        # from the submitted key/value lists.
        spider_parameter_keys = self.get_body_arguments(
            'SpiderParameterKey')
        spider_parameter_values = self.get_body_arguments(
            'SpiderParameterValue')
        session.query(SpiderParameter) \
            .filter_by(spider_id=spider.id) \
            .delete()
        figure_json_text = self.get_body_argument('figure_json')
        if figure_json_text:
            figure = spider.figure or SpiderFigure(spider_id=spider.id)
            try:
                # parsed_figure is only computed to validate the JSON;
                # the raw text is what gets stored.
                parsed_figure = json.loads(figure_json_text)
                figure.text = figure_json_text
                session.add(figure)
            except Exception as ex:
                # Invalid figure JSON is logged and ignored; other settings
                # are still saved.
                LOGGER.error(ex)
        for i, spider_parameter_key in enumerate(spider_parameter_keys):
            spider_parameter_key = spider_parameter_key.strip()
            if spider_parameter_key == '':
                continue
            spider_parameter_value = spider_parameter_values[i]
            spider_parameter = session.query(SpiderParameter).filter_by(
                spider_id=spider.id,
                parameter_key=spider_parameter_key).first()
            if not spider_parameter:
                spider_parameter = SpiderParameter()
                spider_parameter.spider_id = spider.id
                spider_parameter.parameter_key = spider_parameter_key
            spider_parameter.value = spider_parameter_value
            session.add(spider_parameter)
        session.commit()
        self.redirect(webui.spider_url(self, spider))
class CACertHandler(RestBaseHandler):
    """Serves the cluster CA certificate so agent nodes can trust the server."""
    # pylint: disable=arguments-differ

    def get(self):
        # Fix: use a context manager so the file descriptor is closed (the
        # previous bare open() leaked it), and set the content type before
        # writing the body.
        self.set_header('Content-Type', 'application/cert')
        with open('keys/ca.crt', 'rb') as cert_file:
            self.write(cert_file.read())
class ListProjectVersionsHandler(RestBaseHandler):
    """scrapyd-compatible API: lists the stored egg versions of a project."""
    # pylint: disable=arguments-differ

    @authenticated
    def get(self):
        try:
            project = self.get_argument('project')
        except tornado.web.MissingArgumentError as ex:
            # Mirror scrapyd's JSON error format instead of raising a 400.
            return self.write({'status': 'error', 'message': ex.arg_name})
        project_storage = ProjectStorage(
            self.settings.get('project_storage_dir'), project)
        versions = project_storage.list_egg_versions()
        return self.write({'status': 'ok', 'versions': versions})
def make_app(scheduler_manager, node_manager, webhook_daemon=None,
             authentication_providers=None,
             debug=False,
             enable_authentication=False,
             secret_key='',
             enable_node_registration=False,
             project_storage_dir='.',
             default_project_storage_version=2,
             runner_factory=None,
             project_manager=None,
             spider_plugin_manager=None):
    """
    Build the tornado web Application with all routes and shared settings.

    @type scheduler_manager SchedulerManager
    @type node_manager NodeManager
    @type webhook_daemon: WebhookDaemon
    @type authentication_provider: AuthenticationProvider
    :return: tornado.web.Application
    """
    # Construct a default project manager when the caller did not supply one.
    if project_manager is None:
        project_manager = ProjectManager(runner_factory, project_storage_dir,
                                         scheduler_manager,
                                         default_project_storage_version)
    settings = dict(cookie_secret=secret_key,
                    login_url="/signin",
                    static_path=os.path.join(BASE_DIR, 'static'),
                    template_path=os.path.join(BASE_DIR, 'templates'),
                    xsrf_cookies=True,
                    debug=debug,
                    enable_authentication=enable_authentication,
                    scheduler_manager=scheduler_manager,
                    enable_node_registration=enable_node_registration,
                    project_storage_dir=project_storage_dir,
                    default_project_storage_version=
                    default_project_storage_version,
                    runner_factory=runner_factory,
                    project_manager=project_manager,
                    spider_plugin_manager=spider_plugin_manager,
                    )
    # Fall back to open access when no authentication provider is configured.
    if authentication_providers is None:
        authentication_providers = []
    if len(authentication_providers) == 0:
        authentication_providers.append(NoAuthenticationProvider())
    settings['authentication_providers'] = authentication_providers
    settings['node_manager'] = node_manager
    settings['ui_methods'] = {
        'spider_url': webui.spider_url
    }
    application = tornado.web.Application([
        (r"/", webui.MainHandler),
        (r'/signin', SigninHandler),
        (r'/logout', LogoutHandler),
        (r'/uploadproject', webui.UploadProject),
        # scrapyd apis
        (r'/addversion.json', rest.AddVersionHandler),
        (r'/delproject.json', rest.DeleteProjectHandler,
         {'scheduler_manager': scheduler_manager}),
        (r'/listversions.json', ListProjectVersionsHandler),
        (r'/schedule.json', rest.ScheduleHandler,
         {'scheduler_manager': scheduler_manager}),
        (r'/add_schedule.json', rest.AddScheduleHandler,
         {'scheduler_manager': scheduler_manager}),
        (r'/projects', ProjectList),
        (r'/spiders', SpiderListHandler),
        (r'/spiders/(\d+)/egg', SpiderEggHandler),
        (r'/projects/(\w+)/spiders/(\w+)', SpiderInstanceHandler2),
        (r'/projects/(\w+)/spiders/(\w+)/triggers', SpiderTriggersHandler,
         {'scheduler_manager': scheduler_manager}),
        (r'/projects/(\w+)/spiders/(\w+)/triggers/(\w+)/delete',
         DeleteSpiderTriggersHandler,
         {'scheduler_manager': scheduler_manager}),
        (r'/projects/(\w+)/spiders/(\w+)/jobs/(\w+)/delete',
         DeleteSpiderJobHandler),
        (r'/projects/(\w+)/spiders/(\w+)/settings', SpiderSettingsHandler),
        (r'/projects/(\w+)/spiders/(\w+)/webhook', SpiderWebhookHandler),
        (r'^/projects/(\w+)/spiders/(\w+)/run$', webui.RunSpiderHandler,
         {'scheduler_manager': scheduler_manager}),
        (r'/projects/(\w+)/spiders/(\w+)/egg', ProjectSpiderEggHandler),
        (r'/projects/(\w+)/delete$', webui.DeleteProjectHandler),
        (r'/projects/(\w+)/settings$', webui.ProjectSettingsHandler),
        (r'/projects/(\w+)/package$', webui.ProjectPackageHandler),
        (r'/new/project$', webui.NewProject),
        (r'/projects/(\w+)$', webui.ProjectInfoHandler),
        (r'/profile$', profile.ProfileHomeHandler),
        (r'/profile/keys$', profile.ProfileKeysHandler),
        (r'/profile/change_password$', profile.ProfileChangepasswordHandler),
        (r'/admin$', admin.AdminHomeHandler),
        (r'^/admin/nodes$', admin.AdminNodesHandler),
        (r'^/admin/nodes/(\w+)/delete$', admin.AdminNodesDeleteHandler),
        (r'^/admin/spiderplugins$', admin.AdminPluginsHandler),
        (r'^/admin/users$', admin.AdminUsersHandler),
        (r'^/admin/users/disable$', admin.AdminDisableUserAjaxHandler),
        (r'^/admin/users/enable$', admin.AdminEnableUserAjaxHandler),
        (r'^/admin/users/new$', admin.AdminNewUserHandler),
        # rest apis
        (r'^/api/projects/(\w+)/spiders/(\w+)/jobs/(\w+)', rest.GetProjectJob),
        (r'^/api/projects/(\w+)/spiders/(\w+)/jobs/(\w+)/items',
         rest.GetProjectJobItems),
        (r'^/api/jobs/(\w+)', rest.GetJobHandler),
        (r'^/api/jobs/(\w+)/items', rest.GetJobItemsHandler),
        # agent node ysing handlers
        (r'/executing/next_task', ExecuteNextHandler,
         {'scheduler_manager': scheduler_manager}),
        (r'/executing/complete', ExecuteCompleteHandler,
         {'webhook_daemon': webhook_daemon,
          'scheduler_manager': scheduler_manager}),
        (r'/nodes', NodesHandler, {'node_manager': node_manager}),
        (r'/nodes/register', RegisterNodeHandler),
        #(r'/v1/nodes/(\w+)', node_handlers.GetNodeHandler),
        (r'/nodes/(\d+)/heartbeat', NodeHeartbeatHandler,
         {'node_manager': node_manager,
          'scheduler_manager': scheduler_manager}),
        (r'/jobs', JobsHandler, {'scheduler_manager': scheduler_manager}),
        (r'/jobs/(\w+)/start', JobStartHandler,
         {'scheduler_manager': scheduler_manager}),
        (r'/jobs/(\w+)/egg', JobEggHandler),
        (r'/logs/(\w+)/(\w+)/(\w+).log', webui.LogsHandler),
        (r'/items/(\w+)/(\w+)/(\w+).jl', webui.ItemsFileHandler),
        (r'/ca.crt', CACertHandler),
        (r'/static/(.*)', tornado.web.StaticFileHandler,
         {'path': os.path.join(os.path.dirname(__file__), 'static')}),
    ] + node_handlers.url_patterns, **settings)
    api_apply(application)
    return application
def check_and_gen_ssl_keys(config):
    """Ensure CA and server certificates exist under ./keys, creating them on demand."""
    server_name = config.get('server_name')
    if not server_name:
        raise Exception('Must specify a server name')
    ssl_gen = SSLCertificateGenerator('keys')
    try:
        # Probing the CA key/cert raises IOError when they do not exist yet.
        ssl_gen.get_ca_key()
        ssl_gen.get_ca_cert()
    except IOError:
        LOGGER.info('ca cert not exist, creating new cert and key.')
        ssl_gen.gen_ca('scrapydd', 'scrapydd')
    host_name = config.get('server_name')
    if not os.path.exists(os.path.join('keys', '%s.crt' % host_name)):
        LOGGER.info('server cert not exist, creating new.')
        alt_names = [s.strip() for s in config.get('dns_alt_names').split(',')]
        if '' in alt_names:
            alt_names.remove('')
        # NOTE(review): usage=2 presumably selects a server-auth certificate —
        # confirm against SSLCertificateGenerator.
        ssl_gen.gen_cert(host_name, usage=2, alt_names=alt_names)
def start_server(argv=None):
    """Initialize all subsystems and run the scrapydd master server until interrupted."""
    config = Config()
    logging.debug('starting server with argv : %s', argv)
    is_debug = config.getboolean('debug')
    init_database()
    upgrade_project_storage()
    # upgrade_project_package()
    bind_address = config.get('bind_address')
    bind_port = config.getint('bind_port')
    try:
        https_port = int(config.get('https_port')) if config.get(
            'https_port') else None
    except ValueError:
        # A non-numeric https_port disables HTTPS rather than aborting startup.
        LOGGER.warning(
            'https_port is configured, but it is not int, %s',
            config.get('https_port'))
        https_port = None
    # Bind sockets before forking so worker processes share them.
    if https_port:
        https_sockets = tornado.netutil.bind_sockets(https_port, bind_address)
    sockets = tornado.netutil.bind_sockets(bind_port, bind_address)
    # task_id is current process identifier when forked processes, start with 0
    task_id = None
    if not is_debug:
        if not sys.platform.startswith('win'):
            task_id = fork_processes(config.getint('fork_proc_count'))
        else:
            LOGGER.warning((
                'Windows platform does not support forking process,'
                'running in single process mode.'))
    cluster_sync_obj = None
    if task_id is not None and config.get('cluster_bind_address'):
        cluster_node = ClusterNode(task_id, config)
        cluster_sync_obj = cluster_node.sync_obj
    enable_authentication = config.getboolean('enable_authentication')
    scheduler = build_scheduler()
    scheduler_manager = SchedulerManager(config=config,
                                         syncobj=cluster_sync_obj,
                                         scheduler=scheduler)
    scheduler_manager.init()
    node_manager = NodeManager(scheduler_manager,
                               enable_authentication=enable_authentication)
    node_manager.init()
    webhook_daemon = WebhookDaemon(config, SpiderSettingLoader(), scheduler_manager)
    webhook_daemon.init()
    runner_factory = RunnerFactory(config)
    project_storage_dir = config.get('project_storage_dir')
    default_project_storage_version = config.getint(
        'default_project_storage_version')
    spider_plugin_manager = SpiderPluginManager()
    project_manager = ProjectManager(runner_factory, project_storage_dir,
                                     scheduler_manager,
                                     default_project_storage_version)
    secret_key = config.get('secret_key')
    app = make_app(scheduler_manager, node_manager, webhook_daemon,
                   debug=is_debug,
                   enable_authentication=enable_authentication,
                   secret_key=secret_key,
                   enable_node_registration=config.getboolean(
                       'enable_node_registration', False),
                   project_storage_dir=project_storage_dir,
                   default_project_storage_version=config.getint(
                       'default_project_storage_version'),
                   runner_factory=runner_factory,
                   spider_plugin_manager=spider_plugin_manager,
                   project_manager=project_manager)
    server = tornado.httpserver.HTTPServer(app)
    server.add_sockets(sockets)
    check_and_gen_ssl_keys(config)
    if https_port:
        # NOTE(review): Purpose.SERVER_AUTH yields a context that verifies the
        # peer's certificate (client validation); confirm intent vs CLIENT_AUTH.
        if config.getboolean('client_validation'):
            ssl_ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
        else:
            ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        ssl_ctx.load_cert_chain(
            os.path.join('keys', "%s.crt" % config.get('server_name')),
            os.path.join('keys', "%s.key" % config.get('server_name')))
        ssl_ctx.load_verify_locations(cafile='keys/ca.crt')
        ssl_ctx.check_hostname = False
        httpsserver = tornado.httpserver.HTTPServer(app, ssl_options=ssl_ctx)
        httpsserver.add_sockets(https_sockets)
        LOGGER.info('starting https server on %s:%s', bind_address, https_port)
    ioloop = tornado.ioloop.IOLoop.current()
    try:
        grpc_server = start_grpc_server(node_manager,
                                        scheduler_manager,
                                        project_manager)
        scheduler.start()
        ioloop.start()
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl+C.
        grpc_server.stop(True)
        ioloop.stop()
def run(argv=None):
    """CLI entry point: parse options and start the server, optionally daemonized."""
    if argv is None:
        argv = sys.argv
    import asyncio
    if sys.platform == 'win32':
        # Tornado needs the selector event loop on Windows (the default
        # proactor loop lacks add_reader support).
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    parser = OptionParser(prog='scrapydd server')
    parser.add_option('--daemon', action='store_true',
                      help='run scrapydd server in daemon mode')
    parser.add_option('--pidfile',
                      help='pid file will be created when daemon started')
    opts, _ = parser.parse_args(argv)
    pidfile = opts.pidfile or 'scrapydd-server.pid'
    config = Config()
    init_logging(config)
    if opts.daemon:
        print('starting daemon.')
        daemon = Daemon(pidfile=pidfile)
        daemon.start()
        sys.exit(0)
    else:
        start_server()
def init_logging(config):
    """Configure root, error and tornado access loggers with daily-rotating files."""
    log_dir = 'logs'
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    logger = logging.getLogger()
    file_handler = logging.handlers.TimedRotatingFileHandler(
        os.path.join(log_dir, 'scrapydd-server.log'), when='D',
        backupCount=7)
    error_handler = logging.handlers.TimedRotatingFileHandler(
        os.path.join(log_dir, 'scrapydd-error.log'), when='D',
        backupCount=30)
    console_handler = logging.StreamHandler()
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    file_handler.setFormatter(formatter)
    console_handler.setFormatter(formatter)
    error_handler.setFormatter(formatter)
    # Only ERROR and above go to the dedicated error log.
    error_handler.setLevel(logging.ERROR)
    logger.addHandler(file_handler)
    logger.addHandler(console_handler)
    logger.addHandler(error_handler)
    if config.getboolean('debug'):
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)
    # Tornado's access log gets its own rotating file.
    access_log_logger = logging.getLogger('tornado.access')
    access_log_fh = logging.handlers.TimedRotatingFileHandler(
        os.path.join(log_dir, 'scrapydd-access.log'), when='D',
        backupCount=30)
    access_log_logger.addHandler(access_log_fh)
    access_log_logger.setLevel(logging.INFO)
class Daemon():
    """Runs the server as a background daemon and manages its pid file."""

    def __init__(self, pidfile):
        self.pidfile = pidfile       # path of the pid file to create/remove
        self.subprocess_p = None     # child process handle (subprocess mode)
        self.pid = 0

    def start_subprocess(self):
        """Re-exec the current command line (minus --daemon) as a child and wait."""
        argv = sys.argv
        argv.remove('--daemon')
        pargs = argv
        env = os.environ.copy()
        self.subprocess_p = subprocess.Popen(pargs, env=env)
        signal.signal(signal.SIGINT, self.on_signal)
        signal.signal(signal.SIGTERM, self.on_signal)
        self.subprocess_p.wait()

    def read_pidfile(self):
        """Return the pid stored in the pid file, or None when unreadable."""
        try:
            with open(self.pidfile, 'r') as f_pid:
                return int(f_pid.readline())
        except IOError:
            return None

    def try_remove_pidfile(self):
        if os.path.exists(self.pidfile):
            os.remove(self.pidfile)

    def on_signal(self, signum, frame):
        """Terminate the child (if any), clean up the pid file, stop the IO loop."""
        LOGGER.info('receive signal %d closing, frame: %s', signum, frame)
        if self.subprocess_p:
            self.subprocess_p.terminate()
        self.try_remove_pidfile()
        tornado.ioloop.IOLoop.instance().stop()

    def start(self):
        """Daemonize the current process and run the server inside it."""
        # signal.signal(signal.SIGINT, self.on_signal)
        # signal.signal(signal.SIGTERM, self.on_signal)
        daemonize(pidfile=self.pidfile)
        # self.start_subprocess()
        start_server()
        self.try_remove_pidfile()
# Allow direct execution of this module as a script.
if __name__ == "__main__":
    run()
| 40.669006 | 104 | 0.629414 |
bb0dc8e667d2bc81824173fd7a6adf6924a46ead | 1,504 | py | Python | script/popolamento_database/fill_pubblica_e_contiene1.py | dariocurreri/bdm | 04f1bf9e6f100d0952c5d1c5734976158f04cc1f | [
"MIT"
] | 1 | 2022-02-17T20:35:04.000Z | 2022-02-17T20:35:04.000Z | script/popolamento_database/fill_pubblica_e_contiene1.py | dariocurreri/bdm | 04f1bf9e6f100d0952c5d1c5734976158f04cc1f | [
"MIT"
] | null | null | null | script/popolamento_database/fill_pubblica_e_contiene1.py | dariocurreri/bdm | 04f1bf9e6f100d0952c5d1c5734976158f04cc1f | [
"MIT"
] | 1 | 2021-05-31T11:28:47.000Z | 2021-05-31T11:28:47.000Z | from database_connection import database_connection
import numpy as np
def creazione_pubblica():
    """Populate the `pubblica` and `contiene_1` tables with synthetic links.

    For each publication, pick 1-3 distinct research-staff members as authors
    (`pubblica`) and link the publication to each author's department
    (`contiene_1`).
    """
    # "bdm_unipa", "bdm_unina", "bdm_unito""
    for db in ["bdm_unimi"]:
        sql = database_connection(db)
        personale_ricerca = list(sql.execute_query("SELECT id_personale_ricerca, anagrafica_personale FROM personale_ricerca"))
        pubblicazione = list(sql.execute_query("SELECT id_pubblicazione FROM pubblicazione"))
        for pubb in pubblicazione:
            # Draw up to 3 random staff members, dropping duplicates.
            personale = list()
            for _ in range(0, np.random.randint(1, 4)):
                persona = personale_ricerca[np.random.randint(0, len(personale_ricerca))]
                if persona not in personale:
                    personale.append(persona)
            for persona in personale:
                # NOTE(review): values are interpolated directly into SQL
                # strings; acceptable for this synthetic-data script but
                # unsafe for untrusted input.
                dipartimento = str(sql.execute_query("SELECT dipartimento FROM personale WHERE cf =\'" + str(persona[1]) + "\'" ))
                # Extract the first column of the stringified result set.
                dipartimento = dipartimento[2:dipartimento.find(",")]
                pubblica = (pubb[0], persona[0])
                query = "INSERT INTO pubblica VALUES ("
                for value in pubblica:
                    query += "'" + str(value) + "',"
                sql.execute_query(query[:-1] + ")")
                contiene = (int(dipartimento), pubb[0])
                query = "INSERT INTO contiene_1 VALUES ("
                for value in contiene:
                    query += "'" + str(value) + "',"
                sql.execute_query(query[:-1] + ")")

# Run the population step when the script is executed.
creazione_pubblica()
87521c8cbf321163e46d4eef4e947e083956f439 | 2,591 | py | Python | test/core/gen2/local/test_transformer.py | bcdev/xcube | 9d275ef3baef8fbcea5c1fbbfb84c3d0164aecd3 | [
"MIT"
] | 97 | 2018-06-26T13:02:55.000Z | 2022-03-26T21:03:13.000Z | test/core/gen2/local/test_transformer.py | bcdev/xcube | 9d275ef3baef8fbcea5c1fbbfb84c3d0164aecd3 | [
"MIT"
] | 524 | 2018-11-09T12:00:08.000Z | 2022-03-31T17:00:13.000Z | test/core/gen2/local/test_transformer.py | bcdev/xcube | 9d275ef3baef8fbcea5c1fbbfb84c3d0164aecd3 | [
"MIT"
] | 15 | 2019-07-09T08:46:03.000Z | 2022-02-07T18:47:34.000Z | import unittest
import xarray as xr
from xcube.core.gen2 import CubeConfig
from xcube.core.gen2.local.transformer import CubeIdentity
from xcube.core.gen2.local.transformer import CubeTransformer
from xcube.core.gen2.local.transformer import TransformedCube
from xcube.core.gen2.local.transformer import transform_cube
from xcube.core.gridmapping import GridMapping
from xcube.core.new import new_cube
CALLBACK_MOCK_URL = 'https://xcube-gen.test/api/v1/jobs/tomtom/iamajob/callback'
class CubeIdentityTest(unittest.TestCase):
    """CubeIdentity must return its inputs unchanged (a no-op transformer)."""

    def test_it(self):
        cube = new_cube(variables=dict(a=0.5))
        gm = GridMapping.from_dataset(cube)
        cube_config = CubeConfig()
        identity = CubeIdentity()
        t_cube = identity.transform_cube(cube,
                                         gm,
                                         cube_config)
        self.assertIsInstance(t_cube, tuple)
        self.assertEqual(3, len(t_cube))
        # Identity: the very same objects come back.
        self.assertIs(cube, t_cube[0])
        self.assertIs(gm, t_cube[1])
        self.assertIs(cube_config, t_cube[2])
class MyTiler(CubeTransformer):
    """Test transformer: re-chunks the cube to the configured tile size."""

    def transform_cube(self,
                       cube: xr.Dataset,
                       gm: GridMapping,
                       cube_config: CubeConfig) -> TransformedCube:
        cube = cube.chunk(dict(lon=cube_config.tile_size[0],
                               lat=cube_config.tile_size[1]))
        # Consume the tile_size property so downstream steps see it handled.
        cube_config = cube_config.drop_props('tile_size')
        return cube, gm, cube_config
class TransformCubeTest(unittest.TestCase):
    """transform_cube() should apply the transformer, passing empty cubes through."""

    def test_non_empty_cube(self):
        cube = new_cube(variables=dict(a=0.5))
        gm = GridMapping.from_dataset(cube)
        cube_config = CubeConfig(tile_size=180)
        t_cube = transform_cube((cube, gm, cube_config), MyTiler())
        self.assertIsInstance(t_cube, tuple)
        self.assertEqual(3, len(t_cube))
        cube2, gm2, cc2 = t_cube
        # The tiler produced a new, re-chunked cube and consumed tile_size.
        self.assertIsNot(cube, cube2)
        self.assertEqual(((5,), (180,), (180, 180)), cube2.a.chunks)
        self.assertIs(gm, gm2)
        self.assertEqual(None, cc2.tile_size)

    def test_empty_cube(self):
        cube = new_cube()
        gm = GridMapping.from_dataset(cube)
        cube_config = CubeConfig(tile_size=180)
        t_cube = transform_cube((cube, gm, cube_config), MyTiler())
        self.assertIsInstance(t_cube, tuple)
        self.assertEqual(3, len(t_cube))
        cube2, gm2, cc2 = t_cube
        # A cube without data variables is passed through untouched.
        self.assertIs(cube, cube2)
        self.assertIs(gm, gm2)
        self.assertIs(cube_config, cc2)
        self.assertEqual((180, 180), cc2.tile_size)
| 34.092105 | 80 | 0.641837 |
6387ae9114bd089cc040b1492a4c3a0c74f1045d | 1,039 | py | Python | {{cookiecutter.project_name}}/config/common/logging.py | pyfs/cc_django | 19aa1b8c3d5f5f71c7c676349df42a141330a313 | [
"BSD-3-Clause"
] | 3 | 2020-12-07T00:40:53.000Z | 2021-01-26T00:46:45.000Z | {{cookiecutter.project_name}}/config/common/logging.py | pyfs/cc_django | 19aa1b8c3d5f5f71c7c676349df42a141330a313 | [
"BSD-3-Clause"
] | 2 | 2020-12-07T08:41:46.000Z | 2020-12-08T03:37:43.000Z | {{cookiecutter.project_name}}/config/common/logging.py | pyfs/cc_django | 19aa1b8c3d5f5f71c7c676349df42a141330a313 | [
"BSD-3-Clause"
] | null | null | null | """
推荐容器化运行,所以不建议日志落盘,直接输出到 stdout 即可;
如果有日志落盘要求,可直接取消如下注释即可;
"""
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s [%(asctime)s] %(pathname)s %(lineno)d %(funcName)s %(process)d %(thread)d \n \t %(message)s \n',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
'simple': {
'format': '%(levelname)s %(message)s \n'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': 'INFO',
'propagate': True,
},
'django.request': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
'root': {
'handlers': ['console'],
'level': 'INFO',
'propagate': True,
},
}
}
| 24.162791 | 133 | 0.419634 |
bed9af05038405cc2109561be6cf418029df9a97 | 1,626 | py | Python | test/bench_lazy.py | facebookresearch/loop_tool | 8ee5a63f21e261a017daa91663db2011d3db4bc9 | [
"MIT"
] | 57 | 2021-07-08T17:00:20.000Z | 2022-03-18T13:44:31.000Z | test/bench_lazy.py | facebookresearch/loop_tool | 8ee5a63f21e261a017daa91663db2011d3db4bc9 | [
"MIT"
] | 6 | 2021-07-08T17:43:29.000Z | 2022-02-21T17:02:57.000Z | test/bench_lazy.py | facebookresearch/loop_tool | 8ee5a63f21e261a017daa91663db2011d3db4bc9 | [
"MIT"
] | 7 | 2021-07-09T18:19:09.000Z | 2022-03-18T13:39:26.000Z | import loop_tool_py as lt
import numpy as np
import time
# Problem size; scaled up when running on the CUDA backend.
L = 1024
if "cuda" in lt.backends():
    lt.set_default_hardware("cuda")
    lt.set_default_backend("cuda")
    L *= 1024
X = lt.Tensor(L)
Y = lt.Tensor(L)
X.set(np.random.randn(L))
Y.set(np.random.randn(L))
N = lt.Symbol("N")
Z = X.to(N) + Y.to(N)
# Sanity check: the lazy elementwise add must match the numpy result.
assert np.allclose(Z.numpy(), X.numpy() + Y.numpy(), atol=0.0001, rtol=0.0001)
def bench(loop_tree, warmup, iters):
    """Time `iters` executions of the elementwise add scheduled by `loop_tree`."""
    X = lt.Tensor(L)
    Y = lt.Tensor(L)
    X.set(np.random.randn(L))
    Y.set(np.random.randn(L))
    N = lt.Symbol("N")
    Z = X.to(N) + Y.to(N)
    Z.set(loop_tree)
    # Warm-up iterations are excluded from the timing below.
    for i in range(warmup):
        Z = X.to(N) + Y.to(N)
        Z.resolve()
    t1 = time.time()
    for i in range(iters):
        Z = X.to(N) + Y.to(N)
        Z.resolve()
    t2 = time.time()
    print(f"{iters / (t2 - t1):.2f} iters/sec")
def split(loop, parallel_size, inner_size):
    """Split `loop` into an (outer, parallel, inner) nest over the same variable.

    The outer extent carries any remainder as its tail; the loop must not
    already have a tail.
    """
    assert loop.tail == 0
    chunk = parallel_size * inner_size
    outer, tail = divmod(loop.size, chunk)
    var = loop.var
    return [
        (var, (outer, tail)),
        (var, (parallel_size, 0)),
        (var, (inner_size, 0)),
    ]
# Rewrite the schedule: split every trivially-parallel loop into an
# (outer, parallel=128, inner=4) nest and disable reuse at depth 2.
loop_tree = Z.loop_tree
ir = loop_tree.ir
for l in loop_tree.loops:
    if loop_tree.trivially_parallel(l):
        loop = loop_tree.loop(l)
        for n in ir.nodes:
            ir.set_order(n, split(loop, 128, 4))
            ir.disable_reuse(n, 2)
loop_tree = lt.LoopTree(ir)
# parallelize the outermost loops
loop_tree.annotate(loop_tree.loops[0], "parallel")
loop_tree.annotate(loop_tree.loops[1], "parallel")
Z.set(loop_tree)
print(Z.loop_tree)
bench(loop_tree, 10, 1000)
33c9d9caa650b1e5957bd0273f25686e75bf6c4b | 1,483 | py | Python | python/getLog.py | ruz76/lora | 264e0fe14beba70e7e8ed4f4a66524f887a182d8 | [
"MIT"
] | null | null | null | python/getLog.py | ruz76/lora | 264e0fe14beba70e7e8ed4f4a66524f887a182d8 | [
"MIT"
] | 4 | 2020-07-19T15:16:33.000Z | 2022-02-26T23:57:38.000Z | python/getLog.py | ruz76/lora | 264e0fe14beba70e7e8ed4f4a66524f887a182d8 | [
"MIT"
] | null | null | null | import math
import datetime
import sys
import mysql.connector
import random
from config import *
import pyproj
import ast
def getPayload(payload):
    """Build a ';'-joined string of values reported by known gateways.

    `payload` is a '&'-separated list of '<gateway-eui>;<value>' entries.
    Entries are processed in sorted order so the output column order is
    stable; entries from unknown gateways are skipped.
    """
    # Known gateway EUIs; a set gives O(1) membership tests (the original
    # scanned a list and split each entry twice).
    known_gateways = {
        'eui-b827ebfffe998292',
        'eui-b827ebfffed3b23f',
        'eui-b827ebfffe411ace',
        'eui-b827ebfffe13b290',
        'eui-b827ebfffe71f386',
    }
    parts = []
    for entry in sorted(payload.split("&")):
        fields = entry.split(";")
        if fields[0] in known_gateways:
            # Like the original, a known-gateway entry without a value
            # raises IndexError here.
            parts.append(";" + fields[1])
    return "".join(parts)
def getDeltas(deltas):
    """Turn a stringified {gateway: timedelta} dict into ';'-joined values.

    Example input:
    "{eui-b827ebfffe13b290: datetime.timedelta(0, 0, 120), eui-b827ebfffe71f386: datetime.timedelta(0, 0, 102)}"
    """
    # Strip the dict/timedelta syntax, leaving "key:value" pairs. Note that
    # only timedeltas of the exact form (0, 0, usec) are matched.
    cleaned = deltas
    for token in ("{", "}", " datetime.timedelta(0, 0, ", ")", " "):
        cleaned = cleaned.replace(token, "")
    entries = sorted(cleaned.split(","))
    return "".join(";" + entry.split(":")[1] for entry in entries)
def printLog():
    """Dump the sensor log table as semicolon-separated lines to stdout."""
    last = "SELECT * FROM sensor_id4_log"
    # print(last)
    mycursor = mydb.cursor()
    mycursor.execute(last)
    records = mycursor.fetchall()
    for row in records:
        # Columns 5 and 6 hold the gateway payload and the per-gateway
        # timedelta map, respectively.
        payload = getPayload(row[5])
        deltas = getDeltas(row[6])
        print(str(row[0]) + ";" + str(row[1]) + ";" + str(row[2]) + ";" + str(row[3]) + ";" + str(row[4]) + payload + deltas)
# Script entry: open the database connection, dump the log, close.
mydb = getConnection()
printLog()
mydb.close()
| 30.265306 | 221 | 0.597438 |
f9da5f75a9dc94dddb55c689e910f51ea361f045 | 7,537 | py | Python | plugins/image.py | FlashTek/data-viewer | f81bec33afae182decbb2b6f083b5551f965ec41 | [
"MIT"
] | 1 | 2018-12-19T19:30:36.000Z | 2018-12-19T19:30:36.000Z | plugins/image.py | FlashTek/data-viewer | f81bec33afae182decbb2b6f083b5551f965ec41 | [
"MIT"
] | null | null | null | plugins/image.py | FlashTek/data-viewer | f81bec33afae182decbb2b6f083b5551f965ec41 | [
"MIT"
] | null | null | null | # Copyright (c) 2018 Roland Zimmermann
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from core.plugins import Visualizer, PluginRegistry, Parser, ParsedData
import numpy as np
import cv2
from PyQt5 import QtGui, QtWidgets
@PluginRegistry.visualizer
class ImageVisualizer(Visualizer):
    """Renders array/list data as a Qt table.

    NOTE(review): despite the "Image" UI name this class builds a QTableWidget,
    and it is shadowed by a second ImageVisualizer class defined right below
    (same name, also registered) — looks like a copy/paste slip; verify which
    registration is intended.
    """

    def __init__(self):
        self._autoresize_checkbox = None

    @staticmethod
    def get_ui_name():
        return "Image"

    @staticmethod
    def validate_input_format(parsed_data_type):
        # Accepts any parsed data type (no restriction).
        pass

    def show_settings(self, container_widget):
        # This visualizer exposes no settings UI.
        return False

    def _show_data(self, container_widget):
        """Convert self._data to a string table and add it to the container.

        Returns True on success, False when the data cannot be displayed.
        """
        data = None
        try:
            if isinstance(self._data, list):
                try:
                    self._data = np.array(self._data)
                except:
                    pass
            if isinstance(self._data, np.ndarray):
                self._data = np.squeeze(self._data)
                if len(self._data.shape) < 2:
                    # Stringify every element for display in the table.
                    data = np.empty_like(self._data, dtype=object)
                    for i in range(data.size):
                        data.itemset(i, str(self._data.item(i)))
                else:
                    # NOTE(review): the condition above rejects rank-2 arrays
                    # while this message mentions rank > 2 — verify intent.
                    raise ValueError("Array has rank > 2. This cannot be displayed.")
                # to make vectors appear as (-1, 1) matrices
                data = data.reshape(len(data), -1)
            else:
                return False
        except:
            return False
        table = QtWidgets.QTableWidget()
        table.setContentsMargins(0,0,0,0)
        table.setRowCount(data.shape[0])
        table.setColumnCount(data.shape[1])
        # table.horizontalHeader().setSectionResizeMode(QtWidgets.QHeaderView.Stretch)
        # table.horizontalHeader().setStretchLastSection(True)
        for i, row in enumerate(data):
            for j, val in enumerate(row):
                table.setItem(i, j, QtWidgets.QTableWidgetItem(val))
        # no resize, scroll viewer
        container_widget.layout().addWidget(table, 0,0,1,1)
        size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        size_policy.setHorizontalStretch(1)
        size_policy.setVerticalStretch(1)
        table.setSizePolicy(size_policy)
        table.show()
        return True

    def visualize_data(self, data, container_widget):
        """Store the data/container and attempt to render; returns success flag."""
        self._data = data
        self._container_widget = container_widget
        return self._show_data(container_widget)
@PluginRegistry.visualizer
class ImageVisualizer(Visualizer):
    """Display a ParsedDataImage as a QLabel, optionally auto-resized.

    NOTE(review): this definition shadows the earlier class of the same name.
    """
    def __init__(self):
        pass
    @staticmethod
    def get_ui_name():
        # Label shown in the visualizer chooser.
        return "Image"
    @staticmethod
    def validate_input_format(parsed_data_type):
        # Only accepts data parsed by ImageParser.
        return parsed_data_type == ParsedDataImage
    def _show_data(self, container_widget):
        """Render the stored image into *container_widget*; return success."""
        if self._parsed_data is None:
            return False
        cv_image = self._parsed_data.get_data()
        # Row stride is width*3: assumes a tightly packed 3-channel RGB array
        # (ImageParser converts everything to RGB before wrapping it).
        image = QtGui.QImage(cv_image, cv_image.shape[1], cv_image.shape[0], cv_image.shape[1]*3, QtGui.QImage.Format_RGB888)
        pixmap = QtGui.QPixmap(image)
        label = QtWidgets.QLabel()
        label.setPixmap(pixmap)
        label.setContentsMargins(0,0,0,0)
        # NOTE(review): _autoresize_checkbox is never assigned in this class
        # (__init__ is empty); the checkbox is created by ImageParser.show_settings.
        # Confirm how the attribute is injected, otherwise this raises AttributeError.
        if self._autoresize_checkbox.isChecked():
            # auto resize, no scroll viewer
            container_widget.layout().addWidget(label, 0,0,1,1)
            size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
            size_policy.setHorizontalStretch(1)
            size_policy.setVerticalStretch(1)
            label.setSizePolicy(size_policy)
            label.setScaledContents(True)
            label.show()
        else:
            # no resize, scroll viewer
            scroll_viewer = QtWidgets.QScrollArea()
            scroll_viewer.setWidget(label)
            scroll_viewer.show()
            container_widget.layout().addWidget(scroll_viewer, 0,0,1,1)
            size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
            size_policy.setHorizontalStretch(1)
            size_policy.setVerticalStretch(1)
            scroll_viewer.setSizePolicy(size_policy)
            label.setSizePolicy(size_policy)
            label.setScaledContents(True)
            label.show()
        return True
    def visualize_data(self, parsed_data, container_widget):
        """Entry point called by the host application; returns success flag."""
        self._parsed_data = parsed_data
        self._container_widget = container_widget
        return self._show_data(container_widget)
@PluginRegistry.parser
class ImageParser(Parser):
    """Decode raw bytes / uint8 arrays into RGB images via OpenCV."""
    def __init__(self):
        # Settings widget; created lazily in show_settings().
        self._autoresize_checkbox = None
    def validate_input_format(self, shape, size, dtype):
        # Accept any input; parse() decides whether it is decodable.
        return True
    def parse(self, data):
        """Try to decode *data* as an image; return ParsedDataImage or None."""
        value = None
        try:
            if isinstance(data, np.ndarray):
                # NOTE(review): np.fromstring is deprecated — np.frombuffer
                # is the modern equivalent.
                if data.dtype.type is np.string_:
                    value = np.fromstring(data, dtype=np.uint8)
                if data.dtype.type is np.uint8:
                    value = data
            elif isinstance(data, bytes):
                value = np.fromstring(data, dtype=np.uint8)
        finally:
            # NOTE(review): a `return` inside `finally` swallows any exception
            # raised above, silently mapping failures to None.
            if value is None:
                return None
        # decode the image
        image = cv2.imdecode(value, -1)
        if image is None:
            return None
        image_shape = np.asarray(image).shape
        if len(image_shape) == 3 and image_shape[-1] == 3:
            # convert BGR to RGB
            image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        else:
            # Everything else is treated as grayscale and expanded to RGB.
            # NOTE(review): 4-channel (BGRA) inputs would hit this branch and
            # fail in GRAY2RGB — confirm whether alpha images can occur.
            image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)
        return ParsedDataImage(image)
    def show_settings(self, container_widget):
        """Create the 'Resize image' checkbox inside *container_widget*."""
        if self._autoresize_checkbox:
            del self._autoresize_checkbox
        self._autoresize_checkbox = QtWidgets.QCheckBox()
        self._autoresize_checkbox.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
        container_widget.layout().addWidget(self._autoresize_checkbox)
        self._autoresize_checkbox.setText("Resize image")
        # self._autoresize_checkbox.toggled.connect(lambda _: self._show_data(container_widget))
        return True
    @staticmethod
    def get_ui_name():
        # Label shown in the parser chooser.
        return "Image"
class ParsedDataImage(ParsedData):
    """Parsed-data wrapper holding a decoded image as a numpy array."""
    def __init__(self, raw_value):
        # Bug fix: the original assigned the undefined name `image`, raising
        # NameError whenever constructed with an ndarray; store `raw_value`.
        if isinstance(raw_value, np.ndarray):
            self._value = raw_value
        else:
            # Anything that is not an ndarray is rejected.
            self._value = None
    def get_data(self):
        """Return the stored image array, or None if the input was invalid."""
        return self._value
| 34.893519 | 125 | 0.644952 |
7553d58c82d635c2cf53e6374a8bf9ff4917eff3 | 814 | py | Python | linked_list/0021_merge_two_sorted_lists/0021_merge_two_sorted_lists.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | 6 | 2019-09-16T01:50:44.000Z | 2020-09-17T08:52:25.000Z | linked_list/0021_merge_two_sorted_lists/0021_merge_two_sorted_lists.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | null | null | null | linked_list/0021_merge_two_sorted_lists/0021_merge_two_sorted_lists.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | 4 | 2020-02-07T12:43:16.000Z | 2021-04-11T06:38:55.000Z | # Definition for singly-linked list.
class ListNode(object):
    """A single node of a singly linked list."""
    def __init__(self, x):
        self.val = x      # payload value
        self.next = None  # successor node; None marks the tail
class Solution(object):
    """LeetCode 21: merge two sorted singly linked lists."""
    def mergeTwoLists(self, l1, l2):
        """Splice l1 and l2 into one sorted list and return its head.

        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        """
        sentinel = ListNode(0)
        tail = sentinel
        # Repeatedly steal the smaller head node; on ties l2 wins,
        # matching the strict `<` comparison of the original.
        while l1 and l2:
            if l1.val < l2.val:
                tail.next, l1 = l1, l1.next
            else:
                tail.next, l2 = l2, l2.next
            tail = tail.next
        # At most one list still has nodes; append it wholesale.
        tail.next = l1 if l1 else l2
        return sentinel.next
# Ad-hoc smoke test: merge 1->2->4 with 1->3->4.
l1 = ListNode(1)
l1.next = ListNode(2)
l1.next.next = ListNode(4)
l2 = ListNode(1)
l2.next = ListNode(3)
l2.next.next = ListNode(4)
head = Solution().mergeTwoLists(l1, l2)
# Merged order is 1,1,2,3,4,4 — the third value printed here is 2.
print(head.next.next.val)
092656c817ac6f79f949ce10695aa8ebe0e47635 | 3,300 | py | Python | kivymd/slidingpanel.py | AndreMiras/KivyMD | dfd69f6da73718e2a596d29c33c9a257e58343c8 | [
"MIT"
] | 64 | 2018-09-17T20:14:39.000Z | 2022-02-19T21:39:33.000Z | kivymd/slidingpanel.py | AndreMiras/KivyMD | dfd69f6da73718e2a596d29c33c9a257e58343c8 | [
"MIT"
] | 13 | 2018-09-22T17:09:22.000Z | 2020-09-02T14:11:17.000Z | kivymd/slidingpanel.py | AndreMiras/KivyMD | dfd69f6da73718e2a596d29c33c9a257e58343c8 | [
"MIT"
] | 34 | 2018-09-20T20:19:47.000Z | 2022-02-20T10:35:18.000Z | # -*- coding: utf-8 -*-
from kivy.animation import Animation
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.lang import Builder
from kivy.metrics import dp
from kivy.properties import (BooleanProperty, ListProperty, NumericProperty,
OptionProperty, StringProperty)
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.relativelayout import RelativeLayout
Builder.load_string("""
#: import Window kivy.core.window.Window
<SlidingPanel>
orientation: 'vertical'
size_hint_x: None
width: dp(320)
x: -1 * self.width if self.side == 'left' else Window.width
<PanelShadow>
canvas:
Color:
rgba: root.color
Rectangle:
size: root.size
""")
class PanelShadow(BoxLayout):
    """Dimming overlay drawn behind the sliding panel (rgba animated by it)."""
    color = ListProperty([0, 0, 0, 0])
class SlidingPanel(BoxLayout):
    """Panel that slides in from the 'left' or 'right' window edge.

    The panel and its dimming PanelShadow are added directly to the Window
    (shadow stacked just below the panel) and animated by toggle().
    """
    # Durations (seconds) and easing function names for each direction.
    anim_length_close = NumericProperty(0.3)
    anim_length_open = NumericProperty(0.3)
    animation_t_open = StringProperty('out_sine')
    animation_t_close = StringProperty('out_sine')
    # Which window edge the panel docks to.
    side = OptionProperty('left', options=['left', 'right'])
    _open = False
    def __init__(self, **kwargs):
        super(SlidingPanel, self).__init__(**kwargs)
        self.shadow = PanelShadow()
        # Deferred one frame so the Window exists; indices 89/90 keep the
        # shadow directly beneath the panel in the window widget stack.
        Clock.schedule_once(lambda x: Window.add_widget(self.shadow, 89), 0)
        Clock.schedule_once(lambda x: Window.add_widget(self, 90), 0)
    def toggle(self):
        """Slide the panel in or out, fading the shadow in sync."""
        Animation.stop_all(self, 'x')
        Animation.stop_all(self.shadow, 'color')
        if self._open:
            if self.side == 'left':
                target_x = -1 * self.width
            else:
                target_x = Window.width
            # Bug fix: the closing shadow fade previously used the *open*
            # duration/easing, so it could finish out of step with the
            # panel; use the close settings like the main animation does.
            sh_anim = Animation(duration=self.anim_length_close,
                                t=self.animation_t_close,
                                color=[0, 0, 0, 0])
            sh_anim.start(self.shadow)
            self._get_main_animation(duration=self.anim_length_close,
                                     t=self.animation_t_close,
                                     x=target_x,
                                     is_closing=True).start(self)
            self._open = False
        else:
            if self.side == 'left':
                target_x = 0
            else:
                target_x = Window.width - self.width
            Animation(duration=self.anim_length_open, t=self.animation_t_open,
                      color=[0, 0, 0, 0.5]).start(self.shadow)
            self._get_main_animation(duration=self.anim_length_open,
                                     t=self.animation_t_open,
                                     x=target_x,
                                     is_closing=False).start(self)
            self._open = True
    def _get_main_animation(self, duration, t, x, is_closing):
        # is_closing is unused here; the parameter exists so subclasses can
        # build direction-aware animations without changing call sites.
        return Animation(duration=duration, t=t, x=x)
    def on_touch_down(self, touch):
        # Prevents touch events from propagating to anything below the widget.
        super(SlidingPanel, self).on_touch_down(touch)
        if self.collide_point(*touch.pos) or self._open:
            return True
    def on_touch_up(self, touch):
        # Tapping outside an open panel closes it.
        super(SlidingPanel, self).on_touch_up(touch)
        if not self.collide_point(touch.x, touch.y) and self._open:
            self.toggle()
            return True
09842da799b2c478bb7391002048a0b400f02ea4 | 731 | py | Python | lektor_hidden_attachments.py | nyergler/lektor-hidden-attachments | fa25f7fa443716e084a8d406bdae9574bc2f6be7 | [
"MIT"
] | null | null | null | lektor_hidden_attachments.py | nyergler/lektor-hidden-attachments | fa25f7fa443716e084a8d406bdae9574bc2f6be7 | [
"MIT"
] | 1 | 2018-05-15T17:00:24.000Z | 2018-05-15T17:00:24.000Z | lektor_hidden_attachments.py | nyergler/lektor-hidden-attachments | fa25f7fa443716e084a8d406bdae9574bc2f6be7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os.path
from lektor.pluginsystem import Plugin
from jinja2 import is_undefined
class HiddenAttachmentsPlugin(Plugin):
    """Lektor plugin that hides attachments by extension unless set explicitly."""
    name = u'hidden-attachments'
    description = u'Hide attachments by default based on file extension.'
    def on_before_build(self, source, **extra):
        """Set _hidden from config (`hidden.<ext>`) for unconfigured attachments."""
        config = self.get_config()
        # if we're building an attachment & hidden isn't explicitly set
        if (getattr(source, 'is_attachment', False) and
            is_undefined(source._data['_hidden'])
        ):
            # see if this extension is hidden by default
            # (extension without the leading dot is the config key suffix)
            ext = os.path.splitext(source.path)[1][1:]
            source._data['_hidden'] = config.get('hidden.{}'.format(ext), False)
79286bbff19853f6a43a7ecd79c07ee5f0a8bdfa | 1,974 | py | Python | advanced/mathematical_optimization/examples/compare_optimizers_plot.py | negm/scipy-lecture-notes | 7a4decc7127471c269ab64340261e22b08ddd226 | [
"CC-BY-4.0"
] | 419 | 2016-03-05T08:50:48.000Z | 2022-03-24T15:16:46.000Z | advanced/mathematical_optimization/examples/compare_optimizers_plot.py | techeye220/scipy-lecture-notes-zh-CN | cc87204fcc4bd2f4702f7c29c83cb8ed5c94b7d6 | [
"CC-BY-4.0"
] | 5 | 2016-05-21T14:21:12.000Z | 2017-10-06T11:09:48.000Z | advanced/mathematical_optimization/examples/compare_optimizers_plot.py | techeye220/scipy-lecture-notes-zh-CN | cc87204fcc4bd2f4702f7c29c83cb8ed5c94b7d6 | [
"CC-BY-4.0"
] | 233 | 2016-02-13T09:22:57.000Z | 2021-11-11T17:58:44.000Z | """
Plotting the comparison of optimizers
======================================
Plots the results from the comparison of optimizers.
"""
import pickle
import numpy as np
import pylab as pl
# NOTE(review): this script is Python 2 only — `file()` and list-like
# dict .values()/.keys() with .sort() were removed in Python 3.
results = pickle.load(file('compare_optimizers.pkl'))
#results = pickle.load(file('compare_optimizers_gradients.pkl'))
# `results` maps dimensionality -> {cost name -> {method name -> samples}};
# the 'Rosenbrock ' entry is used only to enumerate the method names.
n_methods = len(results.values()[0]['Rosenbrock '])
n_dims = len(results)
symbols = 'o>*Ds'
pl.figure(1, figsize=(10, 4))
pl.clf()
# One color per problem dimensionality (alpha channel dropped).
colors = pl.cm.Spectral(np.linspace(0, 1, n_dims))[:, :3]
method_names = results.values()[0]['Rosenbrock '].keys()
# Sort by reversed name so related method variants group together on the axis.
method_names.sort(key=lambda x: x[::-1], reverse=True)
for n_dim_index, ((n_dim, n_dim_bench), color) in enumerate(
            zip(sorted(results.items()), colors)):
    for (cost_name, cost_bench), symbol in zip(sorted(n_dim_bench.items()),
                    symbols):
        for method_index, method_name, in enumerate(method_names):
            this_bench = cost_bench[method_name]
            bench = np.mean(this_bench)
            # Small per-dimension x offset keeps markers from overlapping.
            pl.semilogy([method_index + .1*n_dim_index, ], [bench, ],
                        marker=symbol, color=color)
    # Create a legend for the problem type
    # (dummy off-screen points at x=-10 only contribute legend entries).
    for cost_name, symbol in zip(sorted(n_dim_bench.keys()),
                    symbols):
        pl.semilogy([-10, ], [0, ], symbol, color='.5',
                    label=cost_name)
pl.xticks(np.arange(n_methods), method_names, size=11)
pl.xlim(-.2, n_methods - .5)
pl.legend(loc='best', numpoints=1, handletextpad=0, prop=dict(size=12),
          frameon=False)
pl.ylabel('# function calls (a.u.)')
# Create a second legend for the problem dimensionality
pl.twinx()
for n_dim, color in zip(sorted(results.keys()), colors):
    pl.plot([-10, ], [0, ], 'o', color=color,
            label='# dim: %i' % n_dim)
pl.legend(loc=(.47, .07), numpoints=1, handletextpad=0, prop=dict(size=12),
          frameon=False, ncol=2)
pl.xlim(-.2, n_methods - .5)
pl.xticks(np.arange(n_methods), method_names)
pl.yticks(())
pl.tight_layout()
pl.show()
5cc32e1a2ffca7764455b51f51914f0b1a05ec7a | 2,189 | py | Python | tests_tf/test_attack_bundling.py | posenhuang/cleverhans | 5ec82b2b7d594258df5cf1915013797f30ed5983 | [
"MIT"
] | 1 | 2020-08-10T01:49:45.000Z | 2020-08-10T01:49:45.000Z | tests_tf/test_attack_bundling.py | shreyashankar/cleverhans | 0eba0d3a226a022aa3b9090fa17ddcf1cb99d105 | [
"MIT"
] | null | null | null | tests_tf/test_attack_bundling.py | shreyashankar/cleverhans | 0eba0d3a226a022aa3b9090fa17ddcf1cb99d105 | [
"MIT"
] | 1 | 2019-02-26T06:30:31.000Z | 2019-02-26T06:30:31.000Z | import numpy as np
from cleverhans.attack_bundling import AttackConfig
from cleverhans.attack_bundling import Misclassify
from cleverhans.attack_bundling import unfinished_attack_configs
def test_unfinished_attack_configs():
    """unfinished_attack_configs should flag configs that still need new work."""
    new_work_goal = {}
    work_before = {}
    run_counts = {}
    expected_unfinished = []
    expected_finished = []
    # Goal of 1 new run per example, none before, one run since: finished.
    easy_finished = AttackConfig(None, None)
    new_work_goal[easy_finished] = 1
    work_before[easy_finished] = np.array([0, 0])
    run_counts[easy_finished] = np.array([1, 1])
    expected_finished.append(easy_finished)
    # No runs at all yet: unfinished.
    easy_unfinished = AttackConfig(None, None)
    new_work_goal[easy_unfinished] = 1
    work_before[easy_unfinished] = np.array([0, 0])
    run_counts[easy_unfinished] = np.array([0, 0])
    expected_unfinished.append(easy_unfinished)
    # Only one of the two examples has been run: still unfinished.
    only_partly_finished = AttackConfig(None, None)
    new_work_goal[only_partly_finished] = 1
    work_before[only_partly_finished] = np.array([0, 0])
    run_counts[only_partly_finished] = np.array([1, 0])
    expected_unfinished.append(only_partly_finished)
    # All runs happened *before* this round, so no new work was done: unfinished.
    work_not_new = AttackConfig(None, None)
    new_work_goal[work_not_new] = 1
    work_before[work_not_new] = np.array([1, 1])
    run_counts[work_not_new] = np.array([1, 1])
    expected_unfinished.append(work_not_new)
    actual_unfinished = unfinished_attack_configs(new_work_goal, work_before,
                                                  run_counts)
    assert all(e in actual_unfinished for e in expected_unfinished)
    assert all(e not in actual_unfinished for e in expected_finished)
def test_misclassify_request_examples():
    """Misclassify.request_examples should pick correctly-classified, unrun examples."""
    cfg = AttackConfig(None, None)
    goal = Misclassify(new_work_goal={cfg: 1})
    # correctness[i] == 1 means example i is still classified correctly
    # (i.e. not yet successfully attacked).
    correctness = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1], dtype=np.bool)
    # run_counts[i] == 1 means the attack already ran on example i.
    run_counts = np.array([1, 1, 1, 0, 0, 0, 1, 1, 1, 0], dtype=np.int64)
    criteria = {'correctness': correctness}
    batch_size = 3
    idxs = goal.request_examples(cfg, criteria, {cfg: run_counts}, batch_size)
    assert idxs.shape == (batch_size,)
    idxs = list(idxs)
    # Already-misclassified examples need no further attack.
    for already_misclassified in [0, 2, 4, 6, 8]:
        assert already_misclassified not in idxs
    # Examples that met the per-config run goal are skipped.
    for already_run in [1, 7]:
        assert already_run not in idxs
    # Correct, not-yet-run examples are exactly what should be requested.
    for needed in [3, 5, 9]:
        assert needed in idxs
| 34.746032 | 76 | 0.728186 |
55a7f77b306d1e95ce435ff5c7ca3f15a05e58d8 | 408 | py | Python | backend/product/migrations/0007_alter_product_image.py | Qlas/ztziwb | 24d9ec804e93d2137c94d5af1fd6e5a67fa0eb03 | [
"MIT"
] | null | null | null | backend/product/migrations/0007_alter_product_image.py | Qlas/ztziwb | 24d9ec804e93d2137c94d5af1fd6e5a67fa0eb03 | [
"MIT"
] | null | null | null | backend/product/migrations/0007_alter_product_image.py | Qlas/ztziwb | 24d9ec804e93d2137c94d5af1fd6e5a67fa0eb03 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.11 on 2022-01-17 19:36
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: makes Product.image optional
    # (blank=True) and stores uploads under 'uploads/'. Avoid hand-editing
    # generated migrations; create a new one instead.
    dependencies = [
        ('product', '0006_alter_product_image'),
    ]
    operations = [
        migrations.AlterField(
            model_name='product',
            name='image',
            field=models.ImageField(blank=True, upload_to='uploads/'),
        ),
    ]
| 21.473684 | 70 | 0.605392 |
6928363791566af4a6fbd6401688ef3393f21e98 | 5,777 | py | Python | src/main.py | nbsp1221/everytimeless-bot | 5c0c0311088362761bb740d3e285751ac41fb266 | [
"MIT"
] | 3 | 2020-10-02T17:40:14.000Z | 2022-01-08T14:26:40.000Z | src/main.py | nbsp1221/everytimeless-bot | 5c0c0311088362761bb740d3e285751ac41fb266 | [
"MIT"
] | null | null | null | src/main.py | nbsp1221/everytimeless-bot | 5c0c0311088362761bb740d3e285751ac41fb266 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import json
import os
import re
import settings
import threading
import time
from everytime import Everytime
from telegrambot import TelegramBot
# Module-level state shared by the polling loop and the Telegram handlers.
boards = []    # Everytime board ids to watch
keywords = []  # lower-cased keywords matched against article title+content
last = {}      # board id -> {'title': ..., 'last_article': newest article id seen}
def load_datas():
    """Restore the saved boards/keywords lists from datas.json (no-op if absent)."""
    global boards
    global keywords
    if not os.path.isfile('datas.json'):
        return
    # Fix: use a context manager so the handle is closed even if the JSON
    # is malformed and parsing raises (the original leaked the handle then).
    with open('datas.json', 'r', encoding='utf8') as f:
        datas = json.load(f)
    boards = datas['boards']
    keywords = datas['keywords']
def save_datas():
    """Persist the current boards/keywords lists to datas.json."""
    # Fix: context manager guarantees the file is flushed and closed even if
    # serialization raises; json.dump writes directly to the handle.
    with open('datas.json', 'w', encoding='utf8') as f:
        json.dump({ 'boards': boards, 'keywords': keywords }, f)
def get_article_info_with_keywords(number):
    """Fetch article *number* and list which configured keywords it contains."""
    result = et.show_article(number)
    # Match against lower-cased title+content; keywords are stored lower-cased.
    target = (result['title'] + result['content']).lower()
    # Strip anchor tags so link URLs don't produce false keyword hits.
    # NOTE(review): '<a.*>' is greedy and can eat everything between the first
    # '<a' and the last '>' on a line — confirm this is acceptable.
    target = re.sub(re.compile('<a.*>'), '', target)
    return {
        'title': result['title'],
        'content': result['content'],
        'keywords': [ x for x in keywords if x in target ]
    }
def core():
    """Poll every watched board once and send one Telegram message if any
    new article matches a keyword. Errors are reported via Telegram too."""
    try:
        article_group = []   # one formatted section per board with matches
        article_list = []    # matching-article lines for the current board
        for board in boards:
            result = et.show_board(board)
            # First time seeing this board: just record the newest article id
            # as the baseline and skip notification.
            if not board in last:
                last[board] = { 'title': result['title'], 'last_article': result['articles'][0] }
                continue
            for article in result['articles']:
                # Article ids are compared numerically; anything at or below
                # the recorded watermark was already processed.
                if article <= last[board]['last_article']:
                    continue
                article_info = get_article_info_with_keywords(article)
                if len(article_info['keywords']) == 0:
                    continue
                article_list.append('* https://everytime.kr/%d/v/%d - [%s]' % (board, article, ', '.join(article_info['keywords'])))
                # Throttle article fetches to avoid hammering the site.
                time.sleep(2)
            if len(article_list) > 0:
                article_group.append('%s (%d)\n%s' % (last[board]['title'], board, '\n'.join(article_list)))
                article_list.clear()
            # Advance the watermark to the newest article of this poll.
            last[board]['last_article'] = result['articles'][0]
            time.sleep(2)
        if len(article_group) == 0:
            return
        message = '[ 키워드가 감지되었습니다. ]\n'
        message += '\n'
        message += '\n\n'.join(article_group)
        tb.send_message(message)
    except Exception as e:
        # Surface any failure to the chat instead of crashing the loop thread.
        tb.send_message(str(e))
def loop():
    """Run core() forever with a 60-second pause between polls (daemon thread)."""
    while True:
        core()
        time.sleep(60)
def handle_help(update, context):
    """Reply to /help with the bot's command reference."""
    lines = [
        '/help - 명령어 확인',
        '/add [board | keyword] <value> - 게시판 또는 키워드 추가',
        '/remove [board | keyword] <value> - 게시판 또는 키워드 삭제',
        '/show - 추가된 목록 확인',
    ]
    update.message.reply_text('\n'.join(lines))
def handle_add(update, context):
    """Handle /add: register a new board id or keyword and persist the lists."""
    try:
        if context.args[0] == 'board':
            # Board ids are numeric; int() raising falls through to usage help.
            board = int(context.args[1])
            if board in boards:
                update.message.reply_text('해당 게시판이 이미 존재합니다.')
            else:
                update.message.reply_text('게시판이 정상적으로 추가되었습니다.')
                boards.append(board)
                save_datas()
        elif context.args[0] == 'keyword':
            # Keywords are stored lower-cased so matching is case-insensitive.
            keyword = context.args[1].lower()
            if keyword in keywords:
                update.message.reply_text('해당 키워드가 이미 존재합니다.')
            else:
                update.message.reply_text('키워드가 정상적으로 추가되었습니다.')
                keywords.append(keyword)
                keywords.sort()
                save_datas()
        else:
            update.message.reply_text('/add [board | keyword] <value> - 게시판 또는 키워드 추가')
    except:
        # Missing/invalid arguments: show usage.
        update.message.reply_text('/add [board | keyword] <value> - 게시판 또는 키워드 추가')
def handle_remove(update, context):
    """Handle /remove: delete a board id or keyword and persist the lists."""
    try:
        if context.args[0] == 'board':
            board = int(context.args[1])
            if board in boards:
                update.message.reply_text('게시판이 정상적으로 제거되었습니다.')
                boards.remove(board)
                # Drop the watermark too so re-adding starts fresh.
                del last[board]
                save_datas()
            else:
                update.message.reply_text('해당 게시판이 존재하지 않습니다.')
        elif context.args[0] == 'keyword':
            keyword = context.args[1].lower()
            if keyword in keywords:
                update.message.reply_text('키워드가 정상적으로 제거되었습니다.')
                keywords.remove(keyword)
                save_datas()
            else:
                update.message.reply_text('해당 키워드가 존재하지 않습니다.')
        else:
            update.message.reply_text('/remove [board | keyword] <value> - 게시판 또는 키워드 삭제')
    except:
        # Missing/invalid arguments (or board not yet in `last`): show usage.
        update.message.reply_text('/remove [board | keyword] <value> - 게시판 또는 키워드 삭제')
def handle_show(update, context):
    """Handle /show: report the configured boards (with titles) and keywords."""
    # Fetch titles/watermarks for boards that the loop hasn't polled yet.
    for board in boards:
        if board in last:
            continue
        result = et.show_board(board)
        last[board] = { 'title': result['title'], 'last_article': result['articles'][0] }
    message = '현재 설정된 게시판 목록\n'
    message += '* ' + '\n* '.join([ '%s (%s) / last: %d' % (last[x]['title'], x, last[x]['last_article']) for x in last ])
    message += '\n\n'
    message += '현재 설정된 키워드 목록\n'
    message += '[ ' + ', '.join(keywords) + ' ]'
    update.message.reply_text(message)
def main():
    """Log into Everytime, start the polling thread, then run the Telegram bot."""
    if not et.login(settings.everytime['id'], settings.everytime['password']):
        tb.send_message('로그인 실패! 아이디와 비밀번호를 확인해 주세요.')
        return
    load_datas()
    # Daemon thread so the process can exit when the bot stops.
    thread_loop = threading.Thread(target=loop)
    thread_loop.daemon = True
    thread_loop.start()
    tb.send_message('로그인 성공! 주기적으로 게시글을 키워드로 감지해 알림을 보내드리도록 하겠습니다.')
    tb.add_command_handler('help', handle_help)
    tb.add_command_handler('add', handle_add)
    tb.add_command_handler('remove', handle_remove)
    tb.add_command_handler('show', handle_show)
    # Blocks until the bot is stopped.
    tb.start()
if __name__ == '__main__':
    # Module-level clients used by the handlers and the polling loop above.
    et = Everytime()
    tb = TelegramBot(settings.telegram_bot['token'], settings.telegram_bot['chat_id'])
    # Start
    main()
| 29.030151 | 132 | 0.561191 |
02bc3c95e6f61632e26ff2dfc3f3cfe94950de9f | 7,650 | py | Python | memegen/domain/template.py | jacebrowning/memegen-flask | e4e67e76f061fa4e418901031b6086966376b8f3 | [
"MIT"
] | 3 | 2020-09-02T13:11:11.000Z | 2020-12-24T00:41:56.000Z | memegen/domain/template.py | jacebrowning/memegen-flask | e4e67e76f061fa4e418901031b6086966376b8f3 | [
"MIT"
] | 13 | 2020-08-30T21:38:53.000Z | 2020-09-05T03:19:17.000Z | memegen/domain/template.py | jacebrowning/memegen-flask | e4e67e76f061fa4e418901031b6086966376b8f3 | [
"MIT"
] | null | null | null | import os
import hashlib
import shutil
from pathlib import Path
from contextlib import suppress
import tempfile
import requests
from PIL import Image
import log
from .text import Text
DEFAULT_REQUEST_HEADERS = {
'User-Agent': "Googlebot/2.1 (+http://www.googlebot.com/bot.html)",
}
class Template:
    """Blank image to generate a meme."""
    DEFAULT = 'default'                       # base style filename (no extension)
    EXTENSIONS = ('.png', '.jpg')             # recognized image extensions
    SAMPLE_LINES = ["YOUR TEXT", "GOES HERE"] # placeholder text for previews
    VALID_LINK_FLAG = '.valid_link.tmp'       # cache marker for a checked link
    MIN_HEIGHT = 240
    MIN_WIDTH = 240
    def __init__(self, key, name=None, lines=None, aliases=None, link=None,
                 root=None):
        # key: directory name of the template under `root`
        self.key = key
        self.name = name or ""
        self.lines = lines or [""]
        self.aliases = aliases or []
        self.link = link or ""
        self.root = root or ""
    def __str__(self):
        return self.key
    def __eq__(self, other):
        # Identity is the key; name is only used for ordering.
        return self.key == other.key
    def __ne__(self, other):
        return self.key != other.key
    def __lt__(self, other):
        return self.name < other.name
    @property
    def dirpath(self):
        # Directory holding this template's image files.
        return os.path.join(self.root, self.key)
    @property
    def path(self):
        # Default style image path (or None if missing).
        return self.get_path()
    @property
    def default_text(self):
        return Text(self.lines)
    @property
    def default_path(self):
        return self.default_text.path or Text.EMPTY
    @property
    def sample_text(self):
        # Falls back to placeholder lines when the template has no default text.
        return self.default_text or Text(self.SAMPLE_LINES)
    @property
    def sample_path(self):
        return self.sample_text.path
    @property
    def aliases_lowercase(self):
        return [self.strip(a, keep_special=True) for a in self.aliases]
    @property
    def aliases_stripped(self):
        return [self.strip(a, keep_special=False) for a in self.aliases]
    @property
    def styles(self):
        # Sorted alternate style names (excludes the default style).
        return sorted(self._styles())
    def _styles(self):
        """Yield all template style names."""
        for filename in os.listdir(self.dirpath):
            name, ext = os.path.splitext(filename.lower())
            if ext in self.EXTENSIONS and name != self.DEFAULT:
                yield name
    @property
    def keywords(self):
        # Unique lower-cased words from key, name, aliases, and default lines.
        words = set()
        for fields in [self.key, self.name] + self.aliases + self.lines:
            for word in fields.lower().replace(Text.SPACE, ' ').split(' '):
                if word:
                    words.add(word)
        return words
    @staticmethod
    def strip(text, keep_special=False):
        # Normalize: lower-case, trim, spaces -> hyphens; optionally drop
        # the few special characters allowed in aliases.
        text = text.lower().strip().replace(' ', '-')
        if not keep_special:
            for char in ('-', '_', '!', "'"):
                text = text.replace(char, '')
        return text
    def get_path(self, style_or_url=None, *, download=True):
        """Resolve a style name or image URL to a local file path (or None)."""
        path = None
        if style_or_url and '://' in style_or_url:
            # Custom background: fetch it (unless download is disabled),
            # falling back to the default style on failure.
            if download:
                path = download_image(style_or_url)
            if path is None:
                path = self._find_path_for_style(self.DEFAULT)
        else:
            # Try the requested style first, then the default.
            names = [n.lower() for n in [style_or_url, self.DEFAULT] if n]
            path = self._find_path_for_style(*names)
        return path
    def _find_path_for_style(self, *names):
        # First existing <dirpath>/<name><ext> combination wins.
        for name in names:
            for extension in self.EXTENSIONS:
                path = Path(self.dirpath, name + extension)
                with suppress(OSError):
                    if path.is_file():
                        return path
        return None
    def search(self, query):
        """Count the number of times a query exists in relevant fields."""
        if query is None:
            return -1
        count = 0
        for field in [self.key, self.name] + self.aliases + self.lines:
            count += field.lower().count(query.lower())
        return count
    def validate(self, validators=None):
        """Run all (or the given) validators; False on the first failure."""
        if validators is None:
            validators = [
                self.validate_meta,
                self.validate_link,
                self.validate_size,
            ]
        for validator in validators:
            if not validator():
                return False
        return True
    def validate_meta(self):
        # Structural checks: text lines, name, and a default image must exist.
        if not self.lines:
            self._error("has no default lines of text")
            return False
        if not self.name:
            self._error("has no name")
            return False
        if not self.name[0].isalnum():
            self._error(f"name '{self.name}' should start with alphanumeric")
            return False
        if not self.path:
            self._error("has no default image")
            return False
        return True
    def validate_link(self):
        """HEAD-check the source link, caching success in a flag file."""
        if not self.link:
            return True
        flag = Path(self.dirpath, self.VALID_LINK_FLAG)
        # Skip the network round-trip if this exact link was validated before.
        with suppress(IOError):
            with flag.open() as f:
                if f.read() == self.link:
                    log.info(f"Link already checked: {self.link}")
                    return True
        log.info(f"Checking link {self.link}")
        try:
            response = requests.head(self.link, timeout=5,
                                     headers=DEFAULT_REQUEST_HEADERS)
        except requests.exceptions.ReadTimeout:
            log.warning("Connection timed out")
            return True  # assume URL is OK; it will be checked again
        # 403/429/503 are treated as transient (warn only, don't cache);
        # other 4xx/5xx codes fail validation outright.
        if response.status_code in [403, 429, 503]:
            self._warn(f"link is unavailable ({response.status_code})")
        elif response.status_code >= 400:
            self._error(f"link is invalid ({response.status_code})")
            return False
        with flag.open('w') as f:
            f.write(self.link)
        return True
    def validate_size(self):
        # Enforce a minimum resolution for the default image.
        im = Image.open(self.path)
        w, h = im.size
        if w < self.MIN_WIDTH or h < self.MIN_HEIGHT:
            log.error("Image must be at least "
                      f"{self.MIN_WIDTH}x{self.MIN_HEIGHT} (is {w}x{h})")
            return False
        return True
    def _warn(self, message):
        log.warning(f"Template '{self}' " + message)
    def _error(self, message):
        log.error(f"Template '{self}' " + message)
class Placeholder:
    """Default image for missing templates."""
    # Bundled fallback image shipped with the app.
    FALLBACK_PATH = str(Path(__file__)
                        .parents[1]
                        .joinpath('static', 'images', 'missing.png'))
    path = None
    def __init__(self, key):
        self.key = key
    @classmethod
    def get_path(cls, url=None, download=True):
        """Return a local image path: the downloaded URL if possible,
        otherwise the bundled fallback image."""
        path = None
        if url and download:
            path = download_image(url)
        if path is None:
            path = cls.FALLBACK_PATH
        return path
def download_image(url):
    """Download *url* into a temp file keyed by the URL's MD5; return the
    local Path, or None on any network/HTTP failure.

    Raises ValueError only for arguments that are not URLs at all.
    """
    if not url or '://' not in url:
        raise ValueError(f"Not a URL: {url!r}")
    # Cache file name is the MD5 of the URL, so repeat requests are free.
    path = Path(tempfile.gettempdir(),
                hashlib.md5(url.encode('utf-8')).hexdigest())
    if path.is_file():
        log.debug(f"Already downloaded: {url}")
        return path
    try:
        response = requests.get(url, stream=True, timeout=5,
                                headers=DEFAULT_REQUEST_HEADERS)
    except ValueError:
        # requests raises ValueError subclasses (e.g. missing schema) for
        # malformed URLs that passed the cheap check above.
        log.error(f"Invalid link: {url}")
        return None
    except requests.exceptions.RequestException:
        log.error(f"Bad connection: {url}")
        return None
    if response.status_code == 200:
        log.info(f"Downloading {url}")
        with open(str(path), 'wb') as outfile:
            # decode_content makes urllib3 gunzip the stream for us.
            response.raw.decode_content = True
            shutil.copyfileobj(response.raw, outfile)
        return path
    log.error(f"Unable to download: {url}")
    return None
| 27.617329 | 77 | 0.564706 |
e8eea6267b97ba42a45c27e4e411f1b359663ae8 | 1,575 | py | Python | codes/datasets/loader/build_loader.py | dreamerlin/MVFNet | ed336228ad88821ffe407a4355017acb416e4670 | [
"Apache-2.0"
] | 102 | 2020-12-16T03:55:21.000Z | 2022-03-11T03:46:03.000Z | codes/datasets/loader/build_loader.py | dreamerlin/MVFNet | ed336228ad88821ffe407a4355017acb416e4670 | [
"Apache-2.0"
] | 4 | 2021-06-14T18:43:53.000Z | 2022-03-28T16:28:51.000Z | codes/datasets/loader/build_loader.py | dreamerlin/MVFNet | ed336228ad88821ffe407a4355017acb416e4670 | [
"Apache-2.0"
] | 8 | 2021-03-03T00:18:58.000Z | 2022-02-18T07:20:06.000Z | """ https://github.com/pytorch/pytorch/issues/973"""
import resource
from torch.distributed import get_rank, get_world_size
# from mmcv.parallel import collate
from torch.utils.data import DataLoader # , DistributedSampler
from .sampler import DistributedSampler
# from functools import partial
# Raise the soft open-file limit to 4096 (keeping the hard limit): DataLoader
# workers hold many file descriptors and the default soft limit is easy to
# exhaust — see pytorch/pytorch#973 referenced at the top of this file.
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (4096, rlimit[1]))
def build_dataloader(dataset,
                     videos_per_gpu,
                     workers_per_gpu,
                     num_gpus=1,
                     dist=True,
                     shuffle=True,
                     pin_memory=True,
                     **kwargs):
    """Build a DataLoader for *dataset*.

    In distributed mode each process serves one GPU, so the per-process
    batch/worker counts equal the per-GPU counts and a DistributedSampler
    shards (and shuffles) the data. In single-process mode the loader is
    scaled up by ``num_gpus`` and shuffles itself.
    Extra ``kwargs`` are forwarded to ``torch.utils.data.DataLoader``.
    """
    if dist:
        # The sampler owns shuffling across ranks; the loader itself must
        # not reshuffle (DataLoader forbids sampler + shuffle=True anyway).
        sampler = DistributedSampler(
            dataset, get_world_size(), get_rank(), shuffle=shuffle)
        shuffle = False
        batch_size, num_workers = videos_per_gpu, workers_per_gpu
    else:
        sampler = None
        batch_size = num_gpus * videos_per_gpu
        num_workers = num_gpus * workers_per_gpu
    return DataLoader(
        dataset,
        batch_size=batch_size,
        sampler=sampler,
        num_workers=num_workers,
        pin_memory=pin_memory,
        shuffle=shuffle,
        **kwargs)
| 29.716981 | 70 | 0.616508 |
8fb70cdf8b460d09db49d848449348fa37cb4b00 | 2,287 | py | Python | watchman/python/pywatchman/load.py | istiak101/watchman | 8bede2333411b4cafc43c08ed21866dc100f3bd2 | [
"MIT"
] | 1 | 2022-03-04T14:09:05.000Z | 2022-03-04T14:09:05.000Z | watchman/python/pywatchman/load.py | Siyabonga-Gregory/watchman | 4c2a9ee8bc01f16be5be81c6734c0a00f8548770 | [
"MIT"
] | null | null | null | watchman/python/pywatchman/load.py | Siyabonga-Gregory/watchman | 4c2a9ee8bc01f16be5be81c6734c0a00f8548770 | [
"MIT"
] | null | null | null | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import ctypes
try:
from . import bser
except ImportError:
from . import pybser as bser
EMPTY_HEADER = b"\x00\x01\x05\x00\x00\x00\x00"
def _read_bytes(fp, buf):
"""Read bytes from a file-like object
@param fp: File-like object that implements read(int)
@type fp: file
@param buf: Buffer to read into
@type buf: bytes
@return: buf
"""
# Do the first read without resizing the input buffer
offset = 0
remaining = len(buf)
while remaining > 0:
l = fp.readinto((ctypes.c_char * remaining).from_buffer(buf, offset))
if l is None or l == 0:
return offset
offset += l
remaining -= l
return offset
def load(fp, mutable=True, value_encoding=None, value_errors=None):
    """Deserialize a BSER-encoded blob.

    @param fp: The file-object to deserialize.
    @type file:
    @param mutable: Whether to return mutable results.
    @type mutable: bool
    @param value_encoding: Optional codec to use to decode values. If
                           unspecified or None, return values as bytestrings.
    @type value_encoding: str
    @param value_errors: Optional error handler for codec. 'strict' by default.
                         The other most common argument is 'surrogateescape' on
                         Python 3. If value_encoding is None, this is ignored.
    @type value_errors: str

    Returns None if the stream ends before a full header could be read.
    Raises RuntimeError if the stream ends mid-PDU.
    """
    buf = ctypes.create_string_buffer(8192)
    # Read just enough of the PDU to determine its total length.
    SNIFF_BUFFER_SIZE = len(EMPTY_HEADER)
    header = (ctypes.c_char * SNIFF_BUFFER_SIZE).from_buffer(buf)
    read_len = _read_bytes(fp, header)
    if read_len < len(header):
        return None
    # pdu_len decodes the length field from the sniffed header.
    total_len = bser.pdu_len(buf)
    if total_len > len(buf):
        # Grow the buffer in place to hold the whole PDU.
        ctypes.resize(buf, total_len)
    # Read the remainder of the PDU directly after the header bytes.
    body = (ctypes.c_char * (total_len - len(header))).from_buffer(buf, len(header))
    read_len = _read_bytes(fp, body)
    if read_len < len(body):
        raise RuntimeError("bser data ended early")
    return bser.loads(
        (ctypes.c_char * total_len).from_buffer(buf, 0),
        mutable,
        value_encoding,
        value_errors,
    )
4ab9c1e7010db63e8d58ae45531e56810d40c70a | 2,576 | py | Python | spacy/tests/parser/test_add_label.py | gandersen101/spaCy | 109849bd311490f17a29b320cb032e43d153f36f | [
"MIT"
] | 10 | 2021-05-31T07:18:08.000Z | 2022-03-19T09:20:11.000Z | spacy/tests/parser/test_add_label.py | gandersen101/spaCy | 109849bd311490f17a29b320cb032e43d153f36f | [
"MIT"
] | 4 | 2021-06-02T00:49:27.000Z | 2022-01-13T01:59:34.000Z | spacy/tests/parser/test_add_label.py | gandersen101/spaCy | 109849bd311490f17a29b320cb032e43d153f36f | [
"MIT"
] | 2 | 2020-02-15T18:33:35.000Z | 2022-02-13T14:11:41.000Z | # coding: utf8
from __future__ import unicode_literals
import pytest
from thinc.neural.optimizers import Adam
from thinc.neural.ops import NumpyOps
from spacy.attrs import NORM
from spacy.gold import GoldParse
from spacy.vocab import Vocab
from spacy.tokens import Doc
from spacy.pipeline import DependencyParser, EntityRecognizer
from spacy.util import fix_random_seed
@pytest.fixture
def vocab():
    """Minimal Vocab whose NORM lexical attribute is the identity function."""
    return Vocab(lex_attr_getters={NORM: lambda s: s})
@pytest.fixture
def parser(vocab):
    """Untrained DependencyParser built on the shared ``vocab`` fixture."""
    parser = DependencyParser(vocab)
    return parser
def test_init_parser(parser):
    # Smoke test: merely constructing the ``parser`` fixture must not raise.
    pass
def _train_parser(parser):
    """Train ``parser`` on a tiny 4-token corpus with a single "left" label.

    Returns the same parser instance, now fitted, so tests can add further
    labels afterwards. The random seed is fixed for reproducibility.
    """
    fix_random_seed(1)
    parser.add_label("left")
    parser.begin_training([], **parser.cfg)
    sgd = Adam(NumpyOps(), 0.001)
    for i in range(5):
        losses = {}
        doc = Doc(parser.vocab, words=["a", "b", "c", "d"])
        gold = GoldParse(doc, heads=[1, 1, 3, 3], deps=["left", "ROOT", "left", "ROOT"])
        parser.update([doc], [gold], sgd=sgd, losses=losses)
    return parser
def test_add_label(parser):
    """A label added after initial training should still be learnable."""
    parser = _train_parser(parser)
    parser.add_label("right")
    sgd = Adam(NumpyOps(), 0.001)
    for i in range(10):
        losses = {}
        doc = Doc(parser.vocab, words=["a", "b", "c", "d"])
        gold = GoldParse(
            doc, heads=[1, 1, 3, 3], deps=["right", "ROOT", "left", "ROOT"]
        )
        parser.update([doc], [gold], sgd=sgd, losses=losses)
    # After further training, the parser should predict both the old and
    # the newly added label on a fresh document.
    doc = Doc(parser.vocab, words=["a", "b", "c", "d"])
    doc = parser(doc)
    assert doc[0].dep_ == "right"
    assert doc[2].dep_ == "left"
def test_add_label_deserializes_correctly():
    """Labels added before training must survive a to_bytes/from_bytes round trip."""
    ner1 = EntityRecognizer(Vocab())
    ner1.add_label("C")
    ner1.add_label("B")
    ner1.add_label("A")
    ner1.begin_training([])
    ner2 = EntityRecognizer(Vocab()).from_bytes(ner1.to_bytes())
    # Both models must expose the same transition moves, in the same order.
    assert ner1.moves.n_moves == ner2.moves.n_moves
    for i in range(ner1.moves.n_moves):
        assert ner1.moves.get_class_name(i) == ner2.moves.get_class_name(i)
@pytest.mark.parametrize(
    "pipe_cls,n_moves", [(DependencyParser, 5), (EntityRecognizer, 4)]
)
def test_add_label_get_label(pipe_cls, n_moves):
    """Test that added labels are returned correctly. This test was added to
    test for a bug in DependencyParser.labels that'd cause it to fail when
    splitting the move names.
    """
    labels = ["A", "B", "C"]
    pipe = pipe_cls(Vocab())
    for label in labels:
        pipe.add_label(label)
    # Each label expands into ``n_moves`` transition names on the move model.
    assert len(pipe.move_names) == len(labels) * n_moves
    pipe_labels = sorted(list(pipe.labels))
    assert pipe_labels == labels
| 29.272727 | 88 | 0.657609 |
d8bb8209aafdb2257a903f3b45f0e2f13dcf21a9 | 107 | py | Python | packages/infrastructure/lib/authtest/emailReceiver.py | chessdbai/Hercules | b9edf053f45039b9560e11791b3e19a67023c3b1 | [
"MIT"
] | null | null | null | packages/infrastructure/lib/authtest/emailReceiver.py | chessdbai/Hercules | b9edf053f45039b9560e11791b3e19a67023c3b1 | [
"MIT"
] | null | null | null | packages/infrastructure/lib/authtest/emailReceiver.py | chessdbai/Hercules | b9edf053f45039b9560e11791b3e19a67023c3b1 | [
"MIT"
] | null | null | null | import boto3
import json
def handle(event, context):
    """AWS Lambda entry point: log the incoming event as JSON.

    Args:
        event: The Lambda invocation payload; must be JSON-serializable.
        context: The Lambda runtime context object (unused).

    Returns:
        None. Output goes to stdout (captured by CloudWatch Logs).
    """
    print('Received event:')
    print(json.dumps(event))
78a355293b8da6b640eb10cdc6de52efcfd3be1d | 2,544 | py | Python | wrappers/python/wirepas_messaging/gateway/api/status.py | vvalkonen/backend-apis | 769a45e6a90a87ab5af78e9a50ebde12f4821b99 | [
"Apache-2.0"
] | 9 | 2019-12-20T06:41:37.000Z | 2020-09-21T03:34:47.000Z | wrappers/python/wirepas_messaging/gateway/api/status.py | vvalkonen/backend-apis | 769a45e6a90a87ab5af78e9a50ebde12f4821b99 | [
"Apache-2.0"
] | 38 | 2019-05-09T09:55:01.000Z | 2022-01-04T10:52:46.000Z | wrappers/python/wirepas_messaging/gateway/api/status.py | vvalkonen/backend-apis | 769a45e6a90a87ab5af78e9a50ebde12f4821b99 | [
"Apache-2.0"
] | 13 | 2019-10-29T19:51:08.000Z | 2021-11-25T15:08:02.000Z | """
Status
======
.. Copyright:
Copyright 2019 Wirepas Ltd under Apache License, Version 2.0.
See file LICENSE for full license details.
"""
import enum
import wirepas_messaging
from .event import Event
from .wirepas_exceptions import GatewayAPIParsingException
# This API should never be changed in the future (purpose of protobuf)
API_VERSION = 1
class GatewayState(enum.Enum):
    """
    GatewayState

    Enum providing the possible
    states for the gateway

    ONLINE or OFFLINE
    """

    ONLINE = 0
    OFFLINE = 1
class StatusEvent(Event):
    """
    StatusEvent: Event generated by the gateway to set its status (ONLINE/OFFLINE)

    Attributes:
        gw_id (str): gateway unique identifier
        state (GatewayState): state of the gateway
        version (int): API version for gateway. Should be always 1
        event_id (int): event unique id (random value generated if None)
    """

    def __init__(self, gw_id, state, version=API_VERSION, event_id=None, **kwargs):
        super(StatusEvent, self).__init__(gw_id, event_id=event_id, **kwargs)
        self.state = state
        self.version = version

    @classmethod
    def from_payload(cls, payload):
        """Converts a protobuff message into a python object.

        Raises:
            GatewayAPIParsingException: if the payload cannot be parsed.
            RuntimeError: if the encoded version differs from API_VERSION.
        """
        message = wirepas_messaging.gateway.GenericMessage()
        try:
            message.ParseFromString(payload)
        except Exception:
            # Any Exception is promoted to Generic API exception
            raise GatewayAPIParsingException("Cannot parse StatusEvent payload")

        event = message.wirepas.status_event
        if event.state == wirepas_messaging.gateway.ON:
            online = GatewayState.ONLINE
        else:
            online = GatewayState.OFFLINE

        if event.version != API_VERSION:
            raise RuntimeError("Wrong API version")

        d = Event._parse_event_header(event.header)
        return cls(d["gw_id"], online, event_id=d["event_id"])

    @property
    def payload(self):
        """ Returns a proto serialization of itself """
        message = wirepas_messaging.gateway.GenericMessage()

        # Fill the request header
        status = message.wirepas.status_event
        status.header.CopyFrom(self._make_event_header())
        status.version = API_VERSION

        if self.state == GatewayState.ONLINE:
            status.state = wirepas_messaging.gateway.ON
        else:
            status.state = wirepas_messaging.gateway.OFF

        return message.SerializeToString()
| 28.266667 | 83 | 0.658412 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.