| text (stringlengths 4 to 1.02M) | meta (dict) |
|---|---|
import unittest
from airflow import configuration
from airflow.utils import db
from airflow import models
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
from airflow.contrib.hooks.ssh_hook import SSHHook
HELLO_SERVER_CMD = """
import socket, sys
listener = socket.socket()
listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
listener.bind(('localhost', 2134))
listener.listen(1)
sys.stdout.write('ready')
sys.stdout.flush()
conn = listener.accept()[0]
conn.sendall(b'hello')
"""
class SSHHookTest(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
@mock.patch('airflow.contrib.hooks.ssh_hook.paramiko.SSHClient')
def test_ssh_connection_with_password(self, ssh_mock):
hook = SSHHook(remote_host='remote_host',
port='port',
username='username',
password='password',
timeout=10,
key_file='fake.file')
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
hostname='remote_host',
username='username',
password='password',
key_filename='fake.file',
timeout=10,
compress=True,
port='port',
sock=None
)
@mock.patch('airflow.contrib.hooks.ssh_hook.paramiko.SSHClient')
def test_ssh_connection_without_password(self, ssh_mock):
hook = SSHHook(remote_host='remote_host',
port='port',
username='username',
timeout=10,
key_file='fake.file')
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
hostname='remote_host',
username='username',
key_filename='fake.file',
timeout=10,
compress=True,
port='port',
sock=None
)
@mock.patch('airflow.contrib.hooks.ssh_hook.SSHTunnelForwarder')
def test_tunnel_with_password(self, ssh_mock):
hook = SSHHook(remote_host='remote_host',
port='port',
username='username',
password='password',
timeout=10,
key_file='fake.file')
with hook.get_tunnel(1234):
ssh_mock.assert_called_once_with('remote_host',
ssh_port='port',
ssh_username='username',
ssh_password='password',
ssh_pkey='fake.file',
ssh_proxy=None,
local_bind_address=('localhost', ),
remote_bind_address=('localhost', 1234),
logger=hook.log)
@mock.patch('airflow.contrib.hooks.ssh_hook.SSHTunnelForwarder')
def test_tunnel_without_password(self, ssh_mock):
hook = SSHHook(remote_host='remote_host',
port='port',
username='username',
timeout=10,
key_file='fake.file')
with hook.get_tunnel(1234):
ssh_mock.assert_called_once_with('remote_host',
ssh_port='port',
ssh_username='username',
ssh_pkey='fake.file',
ssh_proxy=None,
local_bind_address=('localhost', ),
remote_bind_address=('localhost', 1234),
host_pkey_directories=[],
logger=hook.log)
def test_conn_with_extra_parameters(self):
db.merge_conn(
models.Connection(conn_id='ssh_with_extra',
host='localhost',
conn_type='ssh',
extra='{"compress" : true, "no_host_key_check" : "true"}'
)
)
ssh_hook = SSHHook(ssh_conn_id='ssh_with_extra')
self.assertEqual(ssh_hook.compress, True)
self.assertEqual(ssh_hook.no_host_key_check, True)
def test_ssh_connection(self):
hook = SSHHook(ssh_conn_id='ssh_default')
with hook.get_conn() as client:
(_, stdout, _) = client.exec_command('ls')
self.assertIsNotNone(stdout.read())
def test_ssh_connection_old_cm(self):
with SSHHook(ssh_conn_id='ssh_default') as hook:
client = hook.get_conn()
(_, stdout, _) = client.exec_command('ls')
self.assertIsNotNone(stdout.read())
def test_tunnel(self):
hook = SSHHook(ssh_conn_id='ssh_default')
import subprocess
import socket
server_handle = subprocess.Popen(["python", "-c", HELLO_SERVER_CMD],
stdout=subprocess.PIPE)
with hook.create_tunnel(2135, 2134):
server_output = server_handle.stdout.read(5)
self.assertEqual(server_output, b"ready")
s = socket.socket()
s.connect(("localhost", 2135))
response = s.recv(5)
self.assertEqual(response, b"hello")
s.close()
output, _ = server_handle.communicate()
self.assertEqual(server_handle.returncode, 0)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "764779fe11e9c41739c98af62c0410a9",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 87,
"avg_line_length": 37.125786163522015,
"alnum_prop": 0.4846688124682365,
"repo_name": "gtoonstra/airflow",
"id": "ad5621fe92357d189aed293cd6f4e25b3b3bad6f",
"size": "6715",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tests/contrib/hooks/test_ssh_hook.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "68968"
},
{
"name": "Dockerfile",
"bytes": "2001"
},
{
"name": "HTML",
"bytes": "280689"
},
{
"name": "JavaScript",
"bytes": "1385403"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "4340511"
},
{
"name": "Shell",
"bytes": "47989"
}
],
"symlink_target": ""
}
|
import six
import os
import time
import itertools
import tempfile
import requests
from quantrocket.master import download_master_file
from quantrocket.exceptions import ParameterError, NoHistoricalData, NoRealtimeData
from quantrocket.history import (
download_history_file,
get_db_config as get_history_db_config,
list_databases as list_history_databases)
from quantrocket.realtime import (
download_market_data_file,
get_db_config as get_realtime_db_config,
list_databases as list_realtime_databases)
from quantrocket.zipline import (
list_bundles,
get_bundle_config,
download_bundle_file)
__all__ = [
"get_prices",
"get_prices_reindexed_like",
]
TMP_DIR = os.environ.get("QUANTROCKET_TMP_DIR", tempfile.gettempdir())
def get_prices(codes, start_date=None, end_date=None,
universes=None, sids=None,
exclude_universes=None, exclude_sids=None,
times=None, fields=None,
timezone=None, infer_timezone=None,
cont_fut=None, data_frequency=None):
"""
Query one or more history databases, real-time aggregate databases,
and/or Zipline bundles and load prices into a DataFrame.
For bar sizes smaller than 1-day, the resulting DataFrame will have a MultiIndex
with levels (Field, Date, Time). For bar sizes of 1-day or larger, the MultiIndex
will have levels (Field, Date).
Parameters
----------
codes : str or list of str, required
the code(s) of one or more databases to query. If multiple databases
are specified, they must have the same bar size. List databases in order of
priority (highest priority first). If multiple databases provide the same
field for the same sid on the same datetime, the first database's value will
be used.
start_date : str (YYYY-MM-DD), optional
limit to data on or after this date
end_date : str (YYYY-MM-DD), optional
limit to data on or before this date
universes : list of str, optional
limit to these universes (default is to return all securities in database)
sids : list of str, optional
limit to these sids
exclude_universes : list of str, optional
exclude these universes
exclude_sids : list of str, optional
exclude these sids
    times : list of str (HH:MM:SS), optional
limit to these times, specified in the timezone of the relevant exchange. See
additional information in the Notes section regarding the timezone to use.
fields : list of str, optional
only return these fields. (If querying multiple databases that have different fields,
provide the complete list of desired fields; only the supported fields for each
database will be queried.)
timezone : str, optional
convert timestamps to this timezone, for example America/New_York (see
`pytz.all_timezones` for choices); ignored for non-intraday bar sizes
infer_timezone : bool
infer the timezone from the securities master Timezone field; defaults to
True if using intraday bars and no `timezone` specified; ignored for
non-intraday bars, or if `timezone` is passed
cont_fut : str
stitch futures into continuous contracts using this method (default is not
to stitch together). Only applicable to history databases. Possible choices:
concat
data_frequency : str
for Zipline bundles, whether to query minute or daily data. If omitted,
defaults to minute data for minute bundles and to daily data for daily bundles.
This parameter only needs to be set to request daily data from a minute bundle.
Possible choices: daily, minute (or aliases d, m).
Returns
-------
DataFrame
a MultiIndex (Field, Date) or (Field, Date, Time) DataFrame of prices
Notes
-----
The `times` parameter, if provided, is applied differently for history databases and
Zipline bundles vs real-time aggregate databases. For history databases and Zipline
bundles, the parameter is applied when querying the database. For real-time aggregate
databases, the parameter is not applied when querying the database; rather, all available
times are retrieved and the `times` filter is applied to the resulting DataFrame after
casting it to the appropriate timezone (as inferred from the securities master Timezone
field or as explicitly specified with the `timezone` parameter). The rationale for this
behavior is that history databases and Zipline bundles store intraday data in the timezone
of the relevant exchange whereas real-time aggregate databases store data in UTC. By
applying the `times` filter as described, users can specify the times in the timezone of
the relevant exchange for both types of databases.
Examples
--------
Load intraday prices:
>>> prices = get_prices('stk-sample-5min', fields=["Close", "Volume"])
>>> prices.head()
Sid FIBBG1 FIBBG2
Field Date Time
Close 2017-07-26 09:30:00 153.62 2715.0
09:35:00 153.46 2730.0
09:40:00 153.21 2725.0
09:45:00 153.28 2725.0
09:50:00 153.18 2725.0
Isolate the closes:
>>> closes = prices.loc["Close"]
>>> closes.head()
Sid FIBBG1 FIBBG2
Date Time
2017-07-26 09:30:00 153.62 2715.0
09:35:00 153.46 2730.0
09:40:00 153.21 2725.0
09:45:00 153.28 2725.0
09:50:00 153.18 2725.0
Isolate the 15:45:00 prices:
>>> session_closes = closes.xs("15:45:00", level="Time")
>>> session_closes.head()
Sid FIBBG1 FIBBG2
Date
2017-07-26 153.29 2700.00
2017-07-27 150.10 2660.00
2017-07-28 149.43 2650.02
2017-07-31 148.99 2650.34
2017-08-01 149.72 2675.50
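    Limit an intraday query to specific times of day (the database code and
    times here are illustrative; per the Notes section, times are interpreted
    in the timezone of the relevant exchange):
    >>> prices = get_prices('stk-sample-5min', times=["09:30:00", "15:45:00"], fields=["Close"])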
"""
# Import pandas lazily since it can take a moment to import
try:
import pandas as pd
except ImportError:
raise ImportError("pandas must be installed to use this function")
try:
import pytz
except ImportError:
raise ImportError("pytz must be installed to use this function")
if timezone and timezone not in pytz.all_timezones:
raise ParameterError(
"invalid timezone: {0} (see `pytz.all_timezones` for choices)".format(
timezone))
dbs = codes
if not isinstance(dbs, (list, tuple)):
dbs = [dbs]
fields = fields or []
if not isinstance(fields, (list, tuple)):
fields = [fields]
# separate history dbs from Zipline bundles from realtime dbs; in case one or
# more of the services is not running, we print a warning and try the other
# services
try:
history_dbs = set(list_history_databases())
except requests.HTTPError as e:
if e.response.status_code == 502:
import warnings
warnings.warn(
f"Error while checking if {', '.join(dbs)} is a history database, "
f"will assume it's not. Error was: {e}", RuntimeWarning)
history_dbs = set()
else:
raise
try:
realtime_dbs = list_realtime_databases()
except requests.HTTPError as e:
if e.response.status_code == 502:
import warnings
warnings.warn(
f"Error while checking if {', '.join(dbs)} is a realtime database, "
f"will assume it's not. Error was: {e}", RuntimeWarning)
realtime_dbs = {}
realtime_agg_dbs = set()
else:
raise
else:
realtime_agg_dbs = set(itertools.chain(*realtime_dbs.values()))
try:
zipline_bundles = set(list_bundles())
except requests.HTTPError as e:
if e.response.status_code == 502:
import warnings
warnings.warn(
f"Error while checking if {', '.join(dbs)} is a Zipline bundle, "
f"will assume it's not. Error was: {e}", RuntimeWarning)
zipline_bundles = set()
else:
raise
history_dbs.intersection_update(set(dbs))
realtime_agg_dbs.intersection_update(set(dbs))
zipline_bundles.intersection_update(set(dbs))
unknown_dbs = set(dbs) - history_dbs - realtime_agg_dbs - zipline_bundles
if unknown_dbs:
tick_dbs = set(realtime_dbs.keys()).intersection(unknown_dbs)
# Improve error message if possible
if tick_dbs:
raise ParameterError("{} is a real-time tick database, only history databases or "
"real-time aggregate databases are supported".format(
", ".join(tick_dbs)))
raise ParameterError(
"no history or real-time aggregate databases or Zipline bundles called {}".format(
", ".join(unknown_dbs)))
db_bar_sizes = set()
db_bar_sizes_parsed = set()
history_db_fields = {}
realtime_db_fields = {}
zipline_bundle_fields = {}
for db in history_dbs:
db_config = get_history_db_config(db)
bar_size = db_config.get("bar_size")
db_bar_sizes.add(bar_size)
# to validate uniform bar sizes, we need to parse them in case dbs
# store different but equivalent timedelta strings. History db
# strings may need massaging to be parsable.
if bar_size.endswith("s"):
# strip s from secs, mins, hours to get valid pandas timedelta
bar_size = bar_size[:-1]
elif bar_size == "1 week":
bar_size = "7 day"
elif bar_size == "1 month":
bar_size = "30 day"
db_bar_sizes_parsed.add(pd.Timedelta(bar_size))
history_db_fields[db] = db_config.get("fields", [])
for db in realtime_agg_dbs:
db_config = get_realtime_db_config(db)
bar_size = db_config.get("bar_size")
db_bar_sizes.add(bar_size)
db_bar_sizes_parsed.add(pd.Timedelta(bar_size))
realtime_db_fields[db] = db_config.get("fields", [])
for db in zipline_bundles:
# look up bundle data_frequency if not specified
if not data_frequency:
bundle_config = get_bundle_config(db)
data_frequency = bundle_config["data_frequency"]
if data_frequency in ("daily", "d"):
db_bar_sizes.add("1 day")
db_bar_sizes_parsed.add(pd.Timedelta("1 day"))
elif data_frequency in ("minute", "m"):
db_bar_sizes.add("1 min")
db_bar_sizes_parsed.add(pd.Timedelta("1 min"))
else:
raise ParameterError("invalid data_frequency: {}".format(data_frequency))
zipline_bundle_fields[db] = ["Open", "High", "Low", "Close", "Volume"]
if len(db_bar_sizes_parsed) > 1:
raise ParameterError(
"all databases must contain same bar size but {0} have different "
"bar sizes: {1}".format(", ".join(dbs), ", ".join(db_bar_sizes))
)
all_prices = []
for db in dbs:
if db in history_dbs:
# different DBs might support different fields so only request the
# subset of supported fields
fields_for_db = set(fields).intersection(set(history_db_fields[db]))
kwargs = dict(
start_date=start_date,
end_date=end_date,
universes=universes,
sids=sids,
exclude_universes=exclude_universes,
exclude_sids=exclude_sids,
times=times,
cont_fut=cont_fut,
fields=list(fields_for_db),
)
tmp_filepath = "{dir}{sep}history.{db}.{pid}.{time}.csv".format(
dir=TMP_DIR, sep=os.path.sep, db=db, pid=os.getpid(), time=time.time())
try:
download_history_file(db, tmp_filepath, **kwargs)
except NoHistoricalData:
# don't complain about NoHistoricalData if we're checking
# multiple databases, unless none of them have data
if len(dbs) == 1:
raise
else:
continue
prices = pd.read_csv(tmp_filepath)
prices = prices.pivot(index="Sid", columns="Date").T
prices.index.set_names(["Field", "Date"], inplace=True)
all_prices.append(prices)
os.remove(tmp_filepath)
if db in realtime_agg_dbs:
fields_for_db = set(fields).intersection(set(realtime_db_fields[db]))
kwargs = dict(
start_date=start_date,
end_date=end_date,
universes=universes,
sids=sids,
exclude_universes=exclude_universes,
exclude_sids=exclude_sids,
fields=list(fields_for_db))
# pass timezone to realtime service if provided, for interpreting start/end
# dates
if timezone and start_date:
kwargs["start_date"] = f"{start_date} {timezone}"
if timezone and end_date:
kwargs["end_date"] = f"{end_date} {timezone}"
tmp_filepath = "{dir}{sep}realtime.{db}.{pid}.{time}.csv".format(
dir=TMP_DIR, sep=os.path.sep, db=db, pid=os.getpid(), time=time.time())
try:
download_market_data_file(db, tmp_filepath, **kwargs)
except NoRealtimeData as e:
# don't complain about NoRealtimeData if we're checking
# multiple databases, unless none of them have data
if len(dbs) == 1:
raise
else:
continue
prices = pd.read_csv(tmp_filepath)
prices = prices.pivot(index="Sid", columns="Date").T
prices.index.set_names(["Field", "Date"], inplace=True)
all_prices.append(prices)
os.remove(tmp_filepath)
if db in zipline_bundles:
fields_for_db = set(fields).intersection(set(zipline_bundle_fields[db]))
kwargs = dict(
start_date=start_date,
end_date=end_date,
universes=universes,
sids=sids,
exclude_universes=exclude_universes,
exclude_sids=exclude_sids,
times=times,
data_frequency=data_frequency,
fields=list(fields_for_db))
tmp_filepath = "{dir}{sep}zipline.{db}.{pid}.{time}.csv".format(
dir=TMP_DIR, sep=os.path.sep, db=db, pid=os.getpid(), time=time.time())
try:
download_bundle_file(db, tmp_filepath, **kwargs)
except NoHistoricalData as e:
# don't complain about NoHistoricalData if we're checking
# multiple databases, unless none of them have data
if len(dbs) == 1:
raise
else:
continue
prices = pd.read_csv(tmp_filepath, index_col=["Field", "Date"])
prices.columns.name = "Sid"
all_prices.append(prices)
os.remove(tmp_filepath)
# complain if multiple dbs and none had data
if len(dbs) > 1 and not all_prices:
raise NoHistoricalData("no price data matches the query parameters in any of {0}".format(
", ".join(dbs)
))
prices = None
for _prices in all_prices:
if prices is None:
prices = _prices
else:
prices = prices.combine_first(_prices)
is_intraday = list(db_bar_sizes_parsed)[0] < pd.Timedelta("1 day")
# For intraday dbs, infer timezone from securities master
if is_intraday and not timezone and infer_timezone is not False:
infer_timezone = True
sids = list(prices.columns)
f = six.StringIO()
download_master_file(
f,
sids=sids,
fields="Timezone")
securities = pd.read_csv(f, index_col="Sid")
timezones = securities.Timezone.unique()
if len(timezones) > 1:
raise ParameterError(
"cannot infer timezone because multiple timezones are present "
"in data, please specify timezone explicitly (timezones: {0})".format(
", ".join(timezones)))
timezone = timezones[0]
if is_intraday:
dates = pd.to_datetime(prices.index.get_level_values("Date"), utc=True)
if timezone:
dates = dates.tz_convert(timezone)
else:
# use .str[:10] because the format might be 2020-04-05 (history dbs)
# or 2020-04-05T00:00:00-00 (realtime aggregate dbs)
dates = pd.to_datetime(
prices.index.get_level_values("Date").str[:10])
prices.index = pd.MultiIndex.from_arrays((
prices.index.get_level_values("Field"),
dates
), names=("Field", "Date"))
# Split date and time
dts = prices.index.get_level_values("Date")
dates = pd.to_datetime(dts.date).tz_localize(None) # drop tz-aware in Date index
prices.index = pd.MultiIndex.from_arrays(
(prices.index.get_level_values("Field"),
dates,
dts.strftime("%H:%M:%S")),
names=["Field", "Date", "Time"]
)
# Align dates if there are any duplicate. Explanation: Suppose there are
# two timezones represented in the data (e.g. history db in security
# timezone vs real-time db in UTC). After parsing these dates into a
# common timezone, they will align properly, but we pivoted before
# parsing the dates (for performance reasons), so they may not be
# aligned. Thus we need to dedupe the index. NOTE: first() is slow
# when prices contains categorical/object dtypes (which can happen
# with custom fundamental databases, for example), so only do this
# if we have to. nth(0) would be an alternative to first() but it
# doesn't ignore nans (nth()'s dropna param doesn't help here), so
# a universal solution is as yet elusive.
if prices.index.duplicated().any():
prices = prices.groupby(prices.index).first()
prices.index = pd.MultiIndex.from_tuples(prices.index)
prices.index.set_names(["Field", "Date", "Time"], inplace=True)
# Fill missing dates and times so that each field has the
# same set of dates and times, for easier vectorized operations.
# Example implications for intraday dbs:
# - if history is retrieved intraday, this ensures that today will have NaN
# entries for future times
# - early close dates will have a full set of times, with NaNs after the
# early close
unique_fields = prices.index.get_level_values("Field").unique()
unique_dates = prices.index.get_level_values("Date").unique()
unique_times = prices.index.get_level_values("Time").unique()
interpolated_index = None
for field in sorted(unique_fields):
field_idx = pd.MultiIndex.from_product([[field], unique_dates, unique_times]).sort_values()
if interpolated_index is None:
interpolated_index = field_idx
else:
interpolated_index = interpolated_index.append(field_idx)
prices = prices.reindex(interpolated_index)
prices.index.set_names(["Field", "Date", "Time"], inplace=True)
# Drop time if not intraday
if not is_intraday:
prices.index = prices.index.droplevel("Time")
return prices
# Apply times filter if needed (see Notes in docstring)
if times and realtime_agg_dbs:
if not isinstance(times, (list, tuple)):
times = [times]
prices = prices.loc[prices.index.get_level_values("Time").isin(times)]
return prices
def get_prices_reindexed_like(reindex_like, codes, fields=None,
shift=1, ffill=True, lookback_window=10,
agg="last",
timezone=None, infer_timezone=None,
times=None, cont_fut=None,
data_frequency=None):
"""
Return a multiindex (Field, Date) DataFrame of prices for one or more history
databases, real-time aggregate databases, or Zipline bundles, reindexed to match
the index (dates) and columns (sids) of `reindex_like`.
Prices are loaded with `quantrocket.get_prices` and shifted forward one period
(configurable with the `shift` parameter) to avoid lookahead bias. In the case
of sparse data, values are then forward-filled by default (configurable with the
`ffill` parameter).
The queried databases need not contain price data. This function can be used to query
custom history databases containing any kind of data.
Pay attention to the `lookback_window` parameter, which controls how much back data
in advance of the input DataFrame's start date to load. For example, if the input
DataFrame contains daily data and you are querying quarterly fundamental data,
the `lookback_window` must extend far enough back in time to access the most recent
quarterly value prior to the input DataFrame's start date. Setting `lookback_window`
too low will result in leading NaNs in the resulting DataFrame. Setting it too high
is okay but will load a larger amount of data into memory.
If you query an intraday database, it will be treated as a daily database. Specifically,
the intraday values will be aggregated (using the method specified by the `agg`
parameter) to produce a single value per day.
If the input DataFrame has levels in the index other than Date (for example, Time, in
the case of an intraday input DataFrame), the queried values will be broadcast across
the additional levels of the index.
Parameters
----------
reindex_like : DataFrame, required
a DataFrame with dates for the index (or, if DataFrame has a MultiIndex, with
dates for one level of the index) and sids for the columns, to which the shape
of the resulting DataFrame will be conformed.
codes : str or list of str, required
the code(s) of one or more databases to query. If multiple databases
are specified, they must have the same bar size. List databases in order of
priority (highest priority first). If multiple databases provide the same
field for the same sid on the same datetime, the first database's value will
be used.
fields : list of str, optional
only return these fields. (If querying multiple databases that have different fields,
provide the complete list of desired fields; only the supported fields for each
database will be queried.)
shift : int, optional
number of periods (in the date index) to shift the resulting data forward to
avoid lookahead bias. Default is 1. Shifting one period implies that data
timestamped to a particular date is available and actionable on the following
date.
ffill : bool
forward-fill values in the resulting DataFrame so that each date reflects
the latest available value as of that date. If False, values appear only
on the first date they were available, followed by NaNs. Default True.
lookback_window : int, optional
how many calendar days of back data prior to the reindex_like start date
should be loaded, to ensure an adequate cushion of data is available before
shifting. Default is 10. Sparse data such as fundamentals will require a
higher value.
    times : list of str (HH:MM:SS), optional
limit to these times, specified in the timezone of the relevant exchange. Only
applicable to querying intraday databases. See additional information in the
Notes section of `quantrocket.get_prices` regarding the timezone to use.
timezone : str, optional
convert timestamps to this timezone, for example America/New_York (see
`pytz.all_timezones` for choices); ignored for non-intraday databases
infer_timezone : bool
infer the timezone from the securities master Timezone field; defaults to
True if querying intraday databases and no `timezone` specified; ignored for
non-intraday databases, or if `timezone` is passed
agg : str or function, optional
when querying intraday databases, how to aggregate each day's intraday values to
produce a single value per day. Default is "last", meaning use the last non-null
value of each day. This parameter is passed directly to the pandas `agg` function.
Example choices include "last", "first", "min", "max", "mean", "sum", etc. See
https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.core.groupby.DataFrameGroupBy.aggregate.html
for more info. Note that aggregation occurs after the `times` filters are applied.
cont_fut : str
stitch futures into continuous contracts using this method (default is not
to stitch together). Only applicable to history databases. Possible choices:
concat
data_frequency : str
for Zipline bundles, whether to query minute or daily data. If omitted,
defaults to minute data for minute bundles and to daily data for daily bundles.
This parameter only needs to be set to request daily data from a minute bundle.
Possible choices: daily, minute (or aliases d, m).
Returns
-------
DataFrame
a MultiIndex DataFrame of prices shaped like reindex_like
Examples
--------
Use a DataFrame of closing prices to query a DataFrame of fundamentals in a
database called "custom-fundamentals". Since the fundamental data is sparse,
we specify a lookback window of 180 days to ensure that a previous value can
be forward-filled into the resulting DataFrame:
>>> closes = prices.loc["Close"]
>>> fundamentals = get_prices_reindexed_like(
closes, "custom-fundamentals", fields="Revenue",
lookback_window=180)
>>> revenues = fundamentals.loc["Revenue"]
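    Query an intraday database against a daily DataFrame, aggregating each day's
    intraday values to a single daily value (the database code is illustrative;
    here intraday volumes are summed into daily totals):
    >>> volumes = get_prices_reindexed_like(
            closes, "custom-intraday-db", fields="Volume",
            agg="sum", lookback_window=5)
    >>> daily_volumes = volumes.loc["Volume"]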
"""
try:
import pandas as pd
except ImportError:
raise ImportError("pandas must be installed to use this function")
index_levels = reindex_like.index.names
if "Date" not in index_levels:
raise ParameterError(
"reindex_like must have index called 'Date', but has {0}".format(
",".join([str(name) for name in index_levels])))
reindex_like_dates = reindex_like.index.get_level_values("Date")
is_multiindex = len(reindex_like.index.names) > 1
if not hasattr(reindex_like_dates, "date"):
raise ParameterError("reindex_like must have a DatetimeIndex")
sids = list(reindex_like.columns)
start_date = reindex_like_dates.min().date()
start_date -= pd.Timedelta(days=lookback_window)
start_date = start_date.isoformat()
end_date = reindex_like_dates.max().date().isoformat()
prices = get_prices(
codes,
sids=sids,
fields=fields,
start_date=start_date,
end_date=end_date,
timezone=timezone,
infer_timezone=infer_timezone,
times=times,
cont_fut=cont_fut,
data_frequency=data_frequency)
all_fields = {}
# default values are set in the signature for docstring purposes, but we
# also set them here in case Nones were passed
if shift is None:
shift = 1
if lookback_window is None:
lookback_window = 10
if agg is None:
agg = "last"
fields = prices.index.get_level_values("Field").unique()
for field in fields:
prices_for_field = prices.loc[field]
# For intraday databases, drop Time and aggregate per date
if "Time" in prices_for_field.index.names:
prices_for_field.index = prices_for_field.index.get_level_values("Date")
prices_for_field = prices_for_field.groupby(prices_for_field.index).agg(agg)
# get_prices returns tz-naive dates, localize to match reindex_like
prices_for_field = prices_for_field.tz_localize(reindex_like_dates.tz)
# union indexes and reindex in case there are any price dates not in
# reindex_like or vice versa
unioned_idx = reindex_like_dates.union(prices_for_field.index).drop_duplicates()
prices_for_field = prices_for_field.reindex(index=unioned_idx)
if ffill:
prices_for_field = prices_for_field.fillna(method="ffill")
# shift forward to avoid lookahead bias
prices_for_field = prices_for_field.shift(shift)
# Reindex to requested shape
if is_multiindex:
prices_for_field = prices_for_field.reindex(
index=reindex_like.index, level="Date").reindex(
columns=reindex_like.columns)
else:
prices_for_field = prices_for_field.reindex(
index=reindex_like.index, columns=reindex_like.columns)
all_fields[field] = prices_for_field
names = ["Field"] + reindex_like.index.names
prices = pd.concat(all_fields, names=names)
return prices
|
{
"content_hash": "4cb41587a597bc8350735084b95bb890",
"timestamp": "",
"source": "github",
"line_count": 729,
"max_line_length": 118,
"avg_line_length": 40.344307270233195,
"alnum_prop": 0.6277923226003876,
"repo_name": "quantrocket-llc/quantrocket-client",
"id": "90c33a31d27924d0d336beb06e3281183729e39c",
"size": "30014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quantrocket/price.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1138449"
},
{
"name": "Shell",
"bytes": "340"
}
],
"symlink_target": ""
}
|
"""Aruba OS support"""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
delay_factor = self.select_delay_factor(delay_factor=0)
time.sleep(1 * delay_factor)
self._test_channel_read()
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = re.escape(self.base_prompt[:16])
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
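# A minimal usage sketch (not part of the driver): netmiko dispatches to this
# class via ConnectHandler with device_type "aruba_os". The host and
# credentials below are placeholders.
#
#     from netmiko import ConnectHandler
#     conn = ConnectHandler(device_type="aruba_os", host="198.51.100.1",
#                           username="admin", password="password")
#     print(conn.find_prompt())
#     conn.disconnect()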
|
{
"content_hash": "0b3cbfdfa1a7800db88e871b85afe5ee",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 81,
"avg_line_length": 36.064516129032256,
"alnum_prop": 0.610912343470483,
"repo_name": "fooelisa/netmiko",
"id": "98e929b2998e41b26888a7233cabedaf9dfe9067",
"size": "1118",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "netmiko/aruba/aruba_ssh.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "314964"
},
{
"name": "Shell",
"bytes": "13478"
}
],
"symlink_target": ""
}
|
"""Common code for working with ceph object stores
"""
from keystoneauth1 import exceptions
from oslo_config import cfg
from oslo_log import log
from oslo_utils import timeutils
import six.moves.urllib.parse as urlparse
from ceilometer.agent import plugin_base
from ceilometer import keystone_client
from ceilometer import sample
LOG = log.getLogger(__name__)
SERVICE_OPTS = [
cfg.StrOpt('radosgw',
default='object-store',
help='Radosgw service type.'),
]
CREDENTIAL_OPTS = [
cfg.StrOpt('access_key',
secret=True,
help='Access key for Radosgw Admin.'),
cfg.StrOpt('secret_key',
secret=True,
help='Secret key for Radosgw Admin.')
]
cfg.CONF.register_opts(SERVICE_OPTS, group='service_types')
cfg.CONF.register_opts(CREDENTIAL_OPTS, group='rgw_admin_credentials')
cfg.CONF.import_group('rgw_admin_credentials', 'ceilometer.service')
class _Base(plugin_base.PollsterBase):
METHOD = 'bucket'
_ENDPOINT = None
def __init__(self):
self.access_key = cfg.CONF.rgw_admin_credentials.access_key
self.secret = cfg.CONF.rgw_admin_credentials.secret_key
@property
def default_discovery(self):
return 'tenant'
@property
def CACHE_KEY_METHOD(self):
return 'rgw.get_%s' % self.METHOD
@staticmethod
def _get_endpoint(ksclient):
        # We store the endpoint as a base class attribute so keystone is only
        # ever called once. We also assume that a single deployment deploys
        # either `radosgw` or `swift` as the object-store.
if _Base._ENDPOINT is None:
try:
conf = cfg.CONF.service_credentials
rgw_url = keystone_client.get_service_catalog(
ksclient).url_for(
service_type=cfg.CONF.service_types.radosgw,
interface=conf.interface)
_Base._ENDPOINT = urlparse.urljoin(rgw_url, '/admin')
except exceptions.EndpointNotFound:
LOG.debug("Radosgw endpoint not found")
return _Base._ENDPOINT
def _iter_accounts(self, ksclient, cache, tenants):
if self.CACHE_KEY_METHOD not in cache:
cache[self.CACHE_KEY_METHOD] = list(self._get_account_info(
ksclient, tenants))
return iter(cache[self.CACHE_KEY_METHOD])
def _get_account_info(self, ksclient, tenants):
endpoint = self._get_endpoint(ksclient)
if not endpoint:
            return  # PEP 479: don't raise StopIteration inside a generator
try:
from ceilometer.objectstore.rgw_client import RGWAdminClient
rgw_client = RGWAdminClient(endpoint, self.access_key, self.secret)
except ImportError:
raise plugin_base.PollsterPermanentError(tenants)
for t in tenants:
api_method = 'get_%s' % self.METHOD
yield t.id, getattr(rgw_client, api_method)(t.id)
class ContainersObjectsPollster(_Base):
"""Get info about object counts in a container using RGW Admin APIs."""
def get_samples(self, manager, cache, resources):
for tenant, bucket_info in self._iter_accounts(manager.keystone,
cache, resources):
for it in bucket_info['buckets']:
yield sample.Sample(
name='radosgw.containers.objects',
type=sample.TYPE_GAUGE,
volume=int(it.num_objects),
unit='object',
user_id=None,
project_id=tenant,
resource_id=tenant + '/' + it.name,
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=None,
)
class ContainersSizePollster(_Base):
"""Get info about object sizes in a container using RGW Admin APIs."""
def get_samples(self, manager, cache, resources):
for tenant, bucket_info in self._iter_accounts(manager.keystone,
cache, resources):
for it in bucket_info['buckets']:
yield sample.Sample(
name='radosgw.containers.objects.size',
type=sample.TYPE_GAUGE,
volume=int(it.size * 1024),
unit='B',
user_id=None,
project_id=tenant,
resource_id=tenant + '/' + it.name,
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=None,
)
class ObjectsSizePollster(_Base):
"""Iterate over all accounts, using keystone."""
def get_samples(self, manager, cache, resources):
for tenant, bucket_info in self._iter_accounts(manager.keystone,
cache, resources):
yield sample.Sample(
name='radosgw.objects.size',
type=sample.TYPE_GAUGE,
volume=int(bucket_info['size'] * 1024),
unit='B',
user_id=None,
project_id=tenant,
resource_id=tenant,
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=None,
)
class ObjectsPollster(_Base):
"""Iterate over all accounts, using keystone."""
def get_samples(self, manager, cache, resources):
for tenant, bucket_info in self._iter_accounts(manager.keystone,
cache, resources):
yield sample.Sample(
name='radosgw.objects',
type=sample.TYPE_GAUGE,
volume=int(bucket_info['num_objects']),
unit='object',
user_id=None,
project_id=tenant,
resource_id=tenant,
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=None,
)
class ObjectsContainersPollster(_Base):
def get_samples(self, manager, cache, resources):
for tenant, bucket_info in self._iter_accounts(manager.keystone,
cache, resources):
yield sample.Sample(
name='radosgw.objects.containers',
type=sample.TYPE_GAUGE,
volume=int(bucket_info['num_buckets']),
unit='object',
user_id=None,
project_id=tenant,
resource_id=tenant,
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=None,
)
class UsagePollster(_Base):
METHOD = 'usage'
def get_samples(self, manager, cache, resources):
for tenant, usage in self._iter_accounts(manager.keystone,
cache, resources):
yield sample.Sample(
name='radosgw.api.request',
type=sample.TYPE_GAUGE,
volume=int(usage),
unit='request',
user_id=None,
project_id=tenant,
resource_id=tenant,
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=None,
)
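# A minimal configuration sketch (not part of this module): the values are
# placeholders, but the option names mirror the SERVICE_OPTS and
# CREDENTIAL_OPTS registered above, e.g. in ceilometer.conf:
#
#     [service_types]
#     radosgw = object-store
#
#     [rgw_admin_credentials]
#     access_key = <radosgw admin access key>
#     secret_key = <radosgw admin secret key>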
|
{
"content_hash": "65b4bf8199d54a6b7ccff61e91891f12",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 79,
"avg_line_length": 36.49261083743843,
"alnum_prop": 0.5437365010799136,
"repo_name": "idegtiarov/ceilometer",
"id": "e3ef290c7b3d54499b8ca49e24c47bfbe7d33242",
"size": "8001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ceilometer/objectstore/rgw.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2506039"
},
{
"name": "Shell",
"bytes": "33383"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import ckeditor_uploader.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('img', models.ImageField(upload_to='images/%Y/%m')),
('alt', models.CharField(help_text='Цей текст буде вставлений до атрибуту alt', max_length=30, verbose_name='Опис')),
],
options={
'verbose_name_plural': 'Зображення',
'verbose_name': 'Зображення',
},
),
migrations.CreateModel(
name='News',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(max_length=150, unique=True)),
('title', models.CharField(db_index=True, max_length=150, unique=True, verbose_name='Заголовок')),
('content', ckeditor_uploader.fields.RichTextUploadingField(verbose_name='Наповнення')),
('description', models.CharField(max_length=255, verbose_name='Опис')),
('created', models.DateField(auto_now_add=True, verbose_name='Дата створення')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('blog_news_img', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='news', to='news.Image')),
],
options={
'verbose_name_plural': 'Новини',
'verbose_name': 'Новина',
'ordering': ('-created',),
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=25)),
],
options={
'verbose_name_plural': 'Теги',
'verbose_name': 'Тег',
},
),
migrations.AddField(
model_name='news',
name='tags',
field=models.ManyToManyField(blank=True, to='news.Tag', verbose_name='Теги'),
),
]
|
{
"content_hash": "530f825d4563e46810179e8da3432150",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 136,
"avg_line_length": 41.390625,
"alnum_prop": 0.5587013967534918,
"repo_name": "aodarc/tennis_club",
"id": "362c0b45260848fb6ba8b5cee4d891ae00967f5f",
"size": "2837",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "news/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "478738"
},
{
"name": "HTML",
"bytes": "119163"
},
{
"name": "JavaScript",
"bytes": "343787"
},
{
"name": "Python",
"bytes": "33741"
}
],
"symlink_target": ""
}
|
from .base import SimSootExpr
class SimSootExpr_InstanceFieldRef(SimSootExpr):
def _execute(self):
field_ref = self._translate_value(self.expr)
self.expr = self.state.memory.load(field_ref, none_if_missing=True)
|
{
"content_hash": "5d196bdfe8a425373bcc3e49728965ac",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 75,
"avg_line_length": 33.42857142857143,
"alnum_prop": 0.717948717948718,
"repo_name": "iamahuman/angr",
"id": "b42d9c2f5faec2148a6f6c40dd553e2b255e42e0",
"size": "235",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "angr/engines/soot/expressions/instancefieldref.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "6375"
},
{
"name": "C++",
"bytes": "39420"
},
{
"name": "Dockerfile",
"bytes": "493"
},
{
"name": "Makefile",
"bytes": "620"
},
{
"name": "Python",
"bytes": "4842037"
}
],
"symlink_target": ""
}
|
"""Setuptools setup script for the package."""
from setuptools import setup
def _get_version():
# pylint: disable=missing-docstring
with open(".version") as version:
return version.read().rstrip("\n")
setup(
name="mock-open",
version=_get_version(),
description="A better mock for file I/O",
url="http://github.com/nivbend/mock-open",
author="Niv Ben-David",
author_email="nivbend@gmail.com",
license="MIT",
packages=["mock_open", ],
test_suite="mock_open.test.test_mocks",
install_requires=[
"mock",
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Testing",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
keywords=" ".join([
"testing",
"unittest",
"test",
"mock",
"mocking",
"patch",
"patching",
"stubs",
"fakes",
"doubles"
]),
)
|
{
"content_hash": "745af48593e4939bcb7a85cdba63e723",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 71,
"avg_line_length": 28.903846153846153,
"alnum_prop": 0.5635395874916833,
"repo_name": "janusnic/mock-open",
"id": "359a34b5a281afed7d47025f1f1f15243816a68f",
"size": "1503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23911"
}
],
"symlink_target": ""
}
|
"""Optimizer that implements cross-shard gradient reduction for TPU."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops.losses import losses
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.tpu import tpu_function
from tensorflow.python.tpu.ops import tpu_ops
from tensorflow.python.training import optimizer
from tensorflow.python.util.tf_export import tf_export
@tf_export(v1=["tpu.CrossShardOptimizer"])
class CrossShardOptimizer(optimizer.Optimizer):
"""An optimizer that averages gradients across TPU shards."""
def __init__(self,
opt,
reduction=losses.Reduction.MEAN,
name="CrossShardOptimizer",
group_assignment=None):
"""Construct a new cross-shard optimizer.
Args:
opt: An existing `Optimizer` to encapsulate.
reduction: The reduction to apply to the shard losses.
name: Optional name prefix for the operations created when applying
gradients. Defaults to "CrossShardOptimizer".
group_assignment: Optional 2d int32 lists with shape
        [num_groups, num_replicas_per_group] which describes how to apply the
        optimizer to subgroups.
Raises:
ValueError: If reduction is not a valid cross-shard reduction.
"""
if reduction not in (losses.Reduction.SUM, losses.Reduction.MEAN):
raise ValueError("Unsupported reduction: %s." % reduction)
if isinstance(opt, optimizer_v2.OptimizerV2):
raise TypeError(
"CrossShardOptimizer does not work with OptimizerV2. If you are "
"using TPUStrategy, OptimizerV2 will sum gradients across replicas."
"If you are using TPUEstimator, you may instead sum your gradients "
"with: grads = [tf.compat.v1.tpu.cross_replica_sum(g) for g in grads]"
". If you want to average your gradients, rescale your loss with: "
"loss /= global_batch_size")
super(CrossShardOptimizer, self).__init__(False, name)
self._opt = opt
self._reduction = reduction
self._group_assignment = group_assignment
def _verify_and_get_subgroup_size(self, group_assignment, num_shards):
"""Verify group_assignment and get the subgroup size".
Args:
group_assignment: list of group ids for applying the optimizer
to subgroups.
num_shards: The number of TPU shards.
Returns:
The size of one subgroup in group_assignment.
Raises:
ValueError: If group_assignment is invalid.
"""
if not group_assignment:
return None
if not (isinstance(group_assignment, list) and
all(isinstance(i, list) for i in group_assignment)):
raise ValueError("group_assignment must be a list of list. Got {}".format(
group_assignment))
replica_ids = set()
for g in group_assignment:
for i in g:
replica_ids.add(i)
if set(range(num_shards)) != replica_ids:
raise ValueError("group_assignment must be a permutation of range({0})."
" Got group_assignment={1}".format(
num_shards, group_assignment))
subgroup_size_list = [len(group) for group in group_assignment]
if all(subgroup_size_list[0] == size for size in subgroup_size_list):
return subgroup_size_list[0]
else:
raise ValueError("The size of each subgroup in group_assignment must "
"be equal. Got group_assignment={}".format(
self._group_assignment))
def compute_gradients(self, loss, var_list=None, **kwargs):
"""Compute gradients of "loss" for the variables in "var_list".
This simply wraps `compute_gradients()` from the real optimizer. The
gradients will be aggregated in `apply_gradients()` so that user can
modify the gradients like clipping with per replica global norm if needed.
The global norm with aggregated gradients can be bad as one replica's huge
gradients can hurt the gradients from other replicas.
When the CrossShardOptimizer is constructed with
`reduction == losses.Reduction.MEAN` (default), this function scales the
loss by `1.0 / num_shards` before computing the gradients. Assuming the
optimizer uses the default implementation of `compute_gradients()`, the
gradients of the scaled loss are scaled by `1.0 / num_shards` compared to
the gradients of the original loss. This scaling factor is important because
`apply_gradients()` sums gradients across shards, rather than averaging
them. However, the scaling factor must be taken into account when clipping
the norm of the gradients or performing other postprocessing.
Args:
loss: A Tensor containing the value to minimize.
var_list: Optional list or tuple of `tf.Variable` to update to minimize
`loss`. Defaults to the list of variables collected in the graph
under the key `GraphKey.TRAINABLE_VARIABLES`.
**kwargs: Keyword arguments for compute_gradients().
Returns:
A list of (gradient, variable) pairs.
Raises:
ValueError: If not within a tpu_shard_context or group_assignment is
invalid.
"""
num_shards = tpu_function.get_tpu_context().number_of_shards
if num_shards is None:
logging.warning(
"CrossShardOptimizer should be used within a tpu_shard_context, but "
"got unset number_of_shards. Assuming 1.")
num_shards = 1
subgroup_size = self._verify_and_get_subgroup_size(self._group_assignment,
num_shards)
if num_shards > 1 and self._reduction == losses.Reduction.MEAN:
if self._group_assignment:
scale = 1.0 / subgroup_size
else:
scale = 1.0 / num_shards
loss *= scale
return self._opt.compute_gradients(loss, var_list=var_list, **kwargs)
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
"""Apply gradients to variables.
Calls tpu_ops.cross_replica_sum() to sum gradient contributions across
replicas, and then applies the real optimizer.
Args:
grads_and_vars: List of (gradient, variable) pairs as returned by
compute_gradients().
global_step: Optional Variable to increment by one after the
variables have been updated.
name: Optional name for the returned operation. Default to the
name passed to the Optimizer constructor.
Returns:
An `Operation` that applies the gradients. If `global_step` was not None,
that operation also increments `global_step`.
Raises:
ValueError: If the grads_and_vars is malformed.
"""
summed_grads_and_vars = []
for (grad, var) in grads_and_vars:
if grad is None:
summed_grads_and_vars.append((grad, var))
else:
with ops.colocate_with(grad):
summed_grads_and_vars.append((tpu_ops.cross_replica_sum(
grad, self._group_assignment), var))
return self._opt.apply_gradients(summed_grads_and_vars, global_step, name)
def get_slot(self, *args, **kwargs):
"""Return a slot named "name" created for "var" by the Optimizer.
This simply wraps the get_slot() from the actual optimizer.
Args:
*args: Arguments for get_slot().
**kwargs: Keyword arguments for get_slot().
Returns:
The `Variable` for the slot if it was created, `None` otherwise.
"""
return self._opt.get_slot(*args, **kwargs)
def get_slot_names(self, *args, **kwargs):
"""Return a list of the names of slots created by the `Optimizer`.
This simply wraps the get_slot_names() from the actual optimizer.
Args:
*args: Arguments for get_slot().
**kwargs: Keyword arguments for get_slot().
Returns:
A list of strings.
"""
return self._opt.get_slot_names(*args, **kwargs)
def variables(self):
"""Forwarding the variables from the underlying optimizer."""
return self._opt.variables()
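# A minimal usage sketch (not part of this module): wrap an existing V1
# optimizer so per-shard gradients are reduced across TPU replicas before
# being applied. The learning rate and `loss` tensor are placeholders.
#
#     from tensorflow.python.training import gradient_descent
#     opt = gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
#     opt = CrossShardOptimizer(opt, reduction=losses.Reduction.MEAN)
#     train_op = opt.minimize(loss)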
|
{
"content_hash": "95cb5efc5854f90329caa1ac4ce98fed",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 80,
"avg_line_length": 39.038277511961724,
"alnum_prop": 0.673489398210565,
"repo_name": "xzturn/tensorflow",
"id": "e233bbb007c79b01d88374cdcfc3333c16d72c67",
"size": "8848",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tensorflow/python/tpu/tpu_optimizer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "31572"
},
{
"name": "Batchfile",
"bytes": "55269"
},
{
"name": "C",
"bytes": "905803"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "82703929"
},
{
"name": "CMake",
"bytes": "6967"
},
{
"name": "Dockerfile",
"bytes": "113964"
},
{
"name": "Go",
"bytes": "1871773"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "1048075"
},
{
"name": "Jupyter Notebook",
"bytes": "550861"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "2139315"
},
{
"name": "Makefile",
"bytes": "66796"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "319649"
},
{
"name": "PHP",
"bytes": "4236"
},
{
"name": "Pascal",
"bytes": "318"
},
{
"name": "Pawn",
"bytes": "20422"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "38461380"
},
{
"name": "RobotFramework",
"bytes": "1779"
},
{
"name": "Roff",
"bytes": "2705"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "SWIG",
"bytes": "6846"
},
{
"name": "Shell",
"bytes": "696334"
},
{
"name": "Smarty",
"bytes": "35725"
},
{
"name": "Starlark",
"bytes": "3678649"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
}
|
from scipy import misc
from skimage import color
import numpy as np
import os
import random
import pickle
import som as mySom
patch_sizes = [ [5,5] ] #to be generated, sizes must be odd
directoryData = 'dataset/Opencountry' #raw data
som_sizes = [[3,3]] #soms to use
patches_per_img = 10000
def createDataSet (rawDataDir, patch_size, som):
#Create the training set from a directory with color images
si, sj = patch_size[0], patch_size[1]
cantFeatures = si*sj+2
cantData = len(os.listdir(rawDataDir)) * patches_per_img
X = np.zeros((cantData, cantFeatures)) #data
y = np.zeros((cantData, 1), dtype=np.int) #labels
dataRow=0 #count data rows
for filename in os.listdir(rawDataDir):
if filename.endswith(".jpg"):
img = misc.imread(rawDataDir+'/'+filename) #load image from file
print (' Processing image: ' + filename + ' ' + str(img.shape))
imgLuv = color.rgb2luv(img) #transform the image to CIE LUV
codebook = som._normalizer.denormalize_by(som.data_raw, som.codebook.matrix)
#obtain the "patches" from each figure
for n in range(0, patches_per_img):
i = random.randrange(img.shape[0]-si)
j = random.randrange(img.shape[1]-sj)
#print (' Processing patch: ' + str(j) + ', ' + str (i))
subImg = imgLuv[i:i+si, j:j+sj, :]
#print(subImg.shape)
#misc.imsave('/tmp/parche'+str(x)+'_'+str(y)+'.png', color.luv2rgb(subImg))
pixelUV = subImg[si//2, sj//2, 1:] # obtain the center pixel, only the U and V components
pixelGroup = mySom.getBMU(som, pixelUV.reshape(1,-1), codebook) # get the group of the pixel (the Best Matching Unit of the SOM). For y NN
#print (pixelGroup)
pixelPos = np.array([[i,j]])
#print(pixelPos)
#print(subImg[:,:,0])
patchL = subImg[:,:,0] # get the L components of the patch. For X NN
patchLpos = np.concatenate((patchL.reshape(1, si*sj), pixelPos),1)
#print(patchLpos)
#print (' Updating X...')
X[dataRow] = patchLpos
#print (' Updating Y...')
y[dataRow] = pixelGroup.reshape(1,-1)
#print(y[dataRow])
#if X is None:
# X = patchLpos
#else:
# print (' Concatenating X...')
# X = np.concatenate((X, patchLpos))
#if y is None:
# y = pixelGroup.reshape(1,-1)
#else:
# print (' Concatenating y...')
# y = np.concatenate((y, pixelGroup.reshape(1,-1)))
dataRow = dataRow+1
return X, y
###################################################
#directorySom = 'dataset/Opencountry' #som training images
#mapsize = [3,3]
#som = mySom.defineAndTrainSOM (directorySom, mapsize)
#somFile = open ('pkls/trainedSOM.pkl','wb')
#pickle.dump (som, somFile)
#somFile.close()
for som_size in som_sizes:
print ('Using som: [' + str(som_size[0]) + 'x' + str (som_size[1]) + ']')
somFile = open ('pkls/trainedSOM' + str(som_size[0]) + 'x' + str (som_size[1])+'.pkl','rb')
som = pickle.load (somFile)
somFile.close()
for patch_size in patch_sizes:
print (' With patch size: [' + str(patch_size[0]) + ',' + str (patch_size[1])+']')
X, y = createDataSet (directoryData, patch_size, som)
fileData = open ('pkls/data_pos_patch' + str(patch_size[0]) + 'x' + str (patch_size[1])+'_som' + str(som_size[0]) + 'x' + str (som_size[1])+'.pkl','wb')
pickle.dump (X, fileData)
fileData.close()
fileLabels = open ('pkls/labels_pos_patch' + str(patch_size[0]) + 'x' + str (patch_size[1])+'_som' + str(som_size[0]) + 'x' + str (som_size[1])+'.pkl','wb')
pickle.dump (y, fileLabels)
fileLabels.close()
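# A minimal sketch of reading one of the generated datasets back (the
# 5x5-patch / 3x3-som file names below just follow the naming pattern used
# when the pickles are written above):
#
#     with open('pkls/data_pos_patch5x5_som3x3.pkl', 'rb') as fd:
#         X = pickle.load(fd)
#     with open('pkls/labels_pos_patch5x5_som3x3.pkl', 'rb') as fl:
#         y = pickle.load(fl)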
|
{
"content_hash": "4e71db821ab898609ae47617f5566936",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 165,
"avg_line_length": 39.21818181818182,
"alnum_prop": 0.507185906351414,
"repo_name": "mrichart/NNcoloring",
"id": "8d1d2d4e2c8df3998a19c4b0340242263b4caba9",
"size": "4314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/preprocess_position.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "47943"
}
],
"symlink_target": ""
}
|
import unittest
from conans import tools
from conans.test.utils.tools import TestServer, TestClient
from conans.paths import CONANFILE
from conans.util.files import save
from conans.model.ref import ConanFileReference
import os
conan_content = """
from conans import ConanFile
class OpenSSLConan(ConanFile):
name = "openssl"
version = "2.0.1"
files = '*'
"""
class AuthorizeTest(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.servers = {}
self.conan_reference = ConanFileReference.loads("openssl/2.0.1@lasote/testing")
# Create a default remote. R/W is not authorized for conan_reference, just for pepe and owner
self.test_server = TestServer([(str(self.conan_reference), "pepe")], # read permissions
[(str(self.conan_reference), "pepe")], # write permissions
users={"lasote": "mypass",
"pepe": "pepepass"}) # exported users and passwords
self.servers["default"] = self.test_server
def retries_test(self):
"""Bad login 2 times"""
self.conan = TestClient(servers=self.servers, users={"default": [("baduser", "badpass"),
("baduser", "badpass2"),
("pepe", "pepepass")]})
save(os.path.join(self.conan.current_folder, CONANFILE), conan_content)
self.conan.run("export . lasote/testing")
errors = self.conan.run("upload %s" % str(self.conan_reference))
# Check that return was ok
self.assertFalse(errors)
# Check that upload was granted
self.assertTrue(os.path.exists(self.test_server.paths.export(self.conan_reference)))
# Check that login failed two times before ok
self.assertEquals(self.conan.user_io.login_index["default"], 3)
def auth_with_env_test(self):
def _upload_with_credentials(credentials):
cli = TestClient(servers=self.servers, users={})
save(os.path.join(cli.current_folder, CONANFILE), conan_content)
cli.run("export . lasote/testing")
with tools.environment_append(credentials):
cli.run("upload %s" % str(self.conan_reference))
return cli
# Try with remote name in credentials
client = _upload_with_credentials({"CONAN_PASSWORD_DEFAULT": "pepepass",
"CONAN_LOGIN_USERNAME_DEFAULT": "pepe"})
self.assertIn("Got username 'pepe' from environment", client.user_io.out)
self.assertIn("Got password '******' from environment", client.user_io.out)
# Try with generic password and login
client = _upload_with_credentials({"CONAN_PASSWORD": "pepepass",
"CONAN_LOGIN_USERNAME_DEFAULT": "pepe"})
self.assertIn("Got username 'pepe' from environment", client.user_io.out)
self.assertIn("Got password '******' from environment", client.user_io.out)
# Try with generic password and generic login
client = _upload_with_credentials({"CONAN_PASSWORD": "pepepass",
"CONAN_LOGIN_USERNAME": "pepe"})
self.assertIn("Got username 'pepe' from environment", client.user_io.out)
self.assertIn("Got password '******' from environment", client.user_io.out)
# Bad pass raise
with self.assertRaises(Exception):
client = _upload_with_credentials({"CONAN_PASSWORD": "bad",
"CONAN_LOGIN_USERNAME": "pepe"})
self.assertIn("Too many failed login attempts, bye!", client.user_io.out)
def max_retries_test(self):
"""Bad login 3 times"""
self.conan = TestClient(servers=self.servers, users={"default": [("baduser", "badpass"),
("baduser", "badpass2"),
("baduser3", "badpass3")]})
save(os.path.join(self.conan.current_folder, CONANFILE), conan_content)
self.conan.run("export . lasote/testing")
errors = self.conan.run("upload %s" % str(self.conan_reference), ignore_error=True)
# Check that return was not ok
self.assertTrue(errors)
# Check that upload was not granted
self.assertFalse(os.path.exists(self.test_server.paths.export(self.conan_reference)))
# Check that login failed all times
        self.assertEqual(self.conan.user_io.login_index["default"], 3)
|
{
"content_hash": "aacbbe12b7631266231ecb42f6113820",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 101,
"avg_line_length": 48.68041237113402,
"alnum_prop": 0.5789919525624735,
"repo_name": "tivek/conan",
"id": "bb21405f8af204b504ff0eb0d323c138b46f19c9",
"size": "4722",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "conans/test/remote/auth_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1100"
},
{
"name": "Groovy",
"bytes": "6080"
},
{
"name": "Python",
"bytes": "2456395"
},
{
"name": "Shell",
"bytes": "1864"
}
],
"symlink_target": ""
}
|
__all__ = ['']
#make core available on flat import
from .core import *
#verbose flags for the entire module
_verbose = False
def getVerbose(verbose=None):
"""resolve verbose flag, using module settings if verbose=None"""
global _verbose
if verbose is None:
return _verbose
_verbose = verbose
return verbose
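# Hypothetical usage sketch (not part of the original module): the module-wide
# flag acts as a default that callers can override per call.  The import alias is
# illustrative only.
#
#     import limix.deprecated as dlimix
#     dlimix.getVerbose(True)    # switch module-wide verbosity on
#     dlimix.getVerbose()        # -> True (falls back to the module setting)
#     dlimix.getVerbose(False)   # an explicit value always wins and is stored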
|
{
"content_hash": "7385942c9115092ba4f16fb3d60dad77",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 66,
"avg_line_length": 21.133333333333333,
"alnum_prop": 0.7255520504731862,
"repo_name": "PMBio/limix",
"id": "14799bced5fcbd549233f1defa8fcd7044314862",
"size": "956",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "limix/deprecated/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1842"
},
{
"name": "C",
"bytes": "1550482"
},
{
"name": "C++",
"bytes": "8073525"
},
{
"name": "CMake",
"bytes": "21097"
},
{
"name": "Fortran",
"bytes": "363470"
},
{
"name": "M4",
"bytes": "16520"
},
{
"name": "Makefile",
"bytes": "11605"
},
{
"name": "Matlab",
"bytes": "25435"
},
{
"name": "PowerShell",
"bytes": "3104"
},
{
"name": "Python",
"bytes": "1704175"
},
{
"name": "Roff",
"bytes": "66747"
},
{
"name": "Shell",
"bytes": "15645"
},
{
"name": "TeX",
"bytes": "26251"
}
],
"symlink_target": ""
}
|
'''
New Integration Test for creating volume under PS maintain mode.
@author: SyZhao
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.primarystorage_operations as ps_ops
import zstackwoodpecker.operations.host_operations as host_ops
import zstackwoodpecker.operations.vm_operations as vm_ops
import zstackwoodpecker.header.vm as vm_header
import zstackwoodpecker.operations.resource_operations as res_ops
import apibinding.inventory as inventory
import os
_config_ = {
'timeout' : 1000,
'noparallel' : True
}
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
ps_uuid = None
host_uuid = None
vr_uuid = None
def test():
global test_obj_dict
global ps_uuid
global host_uuid
global vr_uuid
allow_ps_list = [inventory.CEPH_PRIMARY_STORAGE_TYPE, "SharedBlock"]
test_lib.skip_test_when_ps_type_not_in_list(allow_ps_list)
test_util.test_dsc('Create test vm and check')
bs_cond = res_ops.gen_query_conditions("status", '=', "Connected")
bss = res_ops.query_resource_fields(res_ops.BACKUP_STORAGE, bs_cond, \
None)
if not bss:
test_util.test_skip("not find available backup storage. Skip test")
l3_1_name = os.environ.get('l3VlanNetworkName1')
vm = test_stub.create_vlan_vm(l3_name=l3_1_name)
#l3_1 = test_lib.lib_get_l3_by_name(l3_1_name)
#vr = test_lib.lib_find_vr_by_l3_uuid(l3_1.uuid)[0]
#vr_uuid = vr.uuid
host = test_lib.lib_get_vm_host(vm.get_vm())
host_uuid = host.uuid
test_obj_dict.add_vm(vm)
vm.check()
disk_offering = test_lib.lib_get_disk_offering_by_name(os.environ.get('rootDiskOfferingName'))
volume_creation_option = test_util.VolumeOption()
volume_creation_option.set_disk_offering_uuid(disk_offering.uuid)
volume_creation_option.set_system_tags(['ephemeral::shareable', 'capability::virtio-scsi'])
#ps = test_lib.lib_get_primary_storage_by_vm(vm.get_vm())
#ps_uuid = ps.uuid
#ps_ops.change_primary_storage_state(ps_uuid, 'maintain')
test_stub.maintain_all_pss()
if not test_lib.lib_wait_target_down(vm.get_vm().vmNics[0].ip, '22', 90):
        test_util.test_fail('VM is expected to stop when PS changes to maintain state')
vm.set_state(vm_header.STOPPED)
vm.check()
volume = test_stub.create_volume(volume_creation_option)
test_obj_dict.add_volume(volume)
volume.check()
#ps_ops.change_primary_storage_state(ps_uuid, 'enable')
test_stub.enable_all_pss()
host_ops.reconnect_host(host_uuid)
#vm_ops.reconnect_vr(vr_uuid)
#vrs = test_lib.lib_get_all_vrs()
#for vr in vrs:
# vm_ops.start_vm(vr.uuid)
vm.start()
vm.check()
volume.delete()
#volume.expunge()
volume.check()
vm.destroy()
test_util.test_pass('Delete volume under PS maintain mode Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
global ps_uuid
if ps_uuid != None:
ps_ops.change_primary_storage_state(ps_uuid, 'enable')
global host_uuid
if host_uuid != None:
host_ops.reconnect_host(host_uuid)
vrs = test_lib.lib_get_all_vrs()
for vr in vrs:
vm_ops.start_vm(vr.uuid)
#global vr_uuid
#if vr_uuid != None:
# vm_ops.reconnect_vr(vr_uuid)
global test_obj_dict
test_lib.lib_error_cleanup(test_obj_dict)
|
{
"content_hash": "60021ad45bfd5cebf27294bc34193398",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 98,
"avg_line_length": 34.009433962264154,
"alnum_prop": 0.6618585298196948,
"repo_name": "zstackio/zstack-woodpecker",
"id": "d195cc877d2bfc71054b54fe45dc45f4970a907d",
"size": "3605",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integrationtest/vm/virtualrouter/ps/test_maintain_ps_crt_share_vol.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
}
|
"""Exports a convolutional feature extractor for MNIST in SavedModel format.
The feature extractor is a convolutional neural network plus a hidden layer
that gets trained as part of an MNIST classifier and then written to a
SavedModel (without the classification layer). From there, use_mnist_cnn.py
picks it up for transfer learning.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import tensorflow.compat.v2 as tf
from tensorflow.examples.saved_model.integration_tests import mnist_util
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
FLAGS = flags.FLAGS
flags.DEFINE_string(
'export_dir', None,
'Directory of exported SavedModel.')
flags.DEFINE_integer(
'epochs', 10,
'Number of epochs to train.')
flags.DEFINE_bool(
'fast_test_mode', False,
'Shortcut training for running in unit tests.')
flags.DEFINE_bool(
'export_print_hparams', False,
'If true, the exported function will print its effective hparams.')
def make_feature_extractor(l2_strength, dropout_rate):
"""Returns a Keras Model to compute a feature vector from MNIST images."""
regularizer = lambda: tf.keras.regularizers.l2(l2_strength)
net = inp = tf.keras.Input(mnist_util.INPUT_SHAPE)
net = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', name='conv1',
kernel_regularizer=regularizer())(net)
net = tf.keras.layers.Conv2D(64, (3, 3), activation='relu', name='conv2',
kernel_regularizer=regularizer())(net)
net = tf.keras.layers.MaxPooling2D(pool_size=(2, 2), name='pool1')(net)
net = tf.keras.layers.Dropout(dropout_rate, name='dropout1')(net)
net = tf.keras.layers.Flatten(name='flatten')(net)
net = tf.keras.layers.Dense(10, activation='relu', name='dense1',
kernel_regularizer=regularizer())(net)
return tf.keras.Model(inputs=inp, outputs=net)
def set_feature_extractor_hparams(model, dropout_rate):
model.get_layer('dropout1').rate = dropout_rate
def make_classifier(feature_extractor, l2_strength, dropout_rate=0.5):
"""Returns a Keras Model to classify MNIST using feature_extractor."""
regularizer = lambda: tf.keras.regularizers.l2(l2_strength)
net = inp = tf.keras.Input(mnist_util.INPUT_SHAPE)
net = feature_extractor(net)
net = tf.keras.layers.Dropout(dropout_rate)(net)
net = tf.keras.layers.Dense(mnist_util.NUM_CLASSES, activation='softmax',
kernel_regularizer=regularizer())(net)
return tf.keras.Model(inputs=inp, outputs=net)
def wrap_keras_model_for_export(model, batch_input_shape,
set_hparams, default_hparams):
"""Wraps `model` for saving and loading as SavedModel."""
if default_hparams is None: default_hparams = {}
hparam_keys = list(default_hparams.keys())
hparam_defaults = tuple(default_hparams.values())
# The goal is to save a function with this argspec...
argspec = tf_inspect.FullArgSpec(
args=(['inputs', 'training'] + hparam_keys),
defaults=((False,) + hparam_defaults),
varargs=None, varkw=None,
kwonlyargs=[], kwonlydefaults=None,
annotations={})
# ...and this behavior:
def call_fn(inputs, training, *args):
if FLAGS.export_print_hparams:
args = [tf.keras.backend.print_tensor(args[i], 'training=%s and %s='
% (training, hparam_keys[i]))
for i in range(len(args))]
kwargs = dict(zip(hparam_keys, args))
if kwargs: set_hparams(model, **kwargs)
return model(inputs, training=training)
# We cannot spell out `args` in def statement for call_fn, but since
# tf.function uses tf_inspect, we can use tf_decorator to wrap it with
# the desired argspec.
def wrapped(*args, **kwargs): # TODO(arnoegw): Can we use call_fn itself?
return call_fn(*args, **kwargs)
traced_call_fn = tf.function(autograph=False)(
tf_decorator.make_decorator(call_fn, wrapped, decorator_argspec=argspec))
# Now we need to trigger traces for
# - training set to Python values True or False (hence two traces),
# - tensor inputs of the expected nesting, shape and dtype,
# - tensor-valued kwargs for hparams, with caller-side defaults.
# Tracing with partially determined shapes requires an input signature,
# so we initiate tracing from a helper function with only tensor inputs.
@tf.function(autograph=False)
def trigger_traces(inputs, **kwargs):
return tuple(traced_call_fn(inputs, training=training, **kwargs)
for training in (True, False))
inputs_spec = tf.TensorSpec(shape=batch_input_shape, dtype=tf.float32)
hparams_spec = {name: tf.TensorSpec.from_tensor(tf.constant(value))
for name, value in default_hparams.items()}
_ = trigger_traces.get_concrete_function(inputs_spec, **hparams_spec)
# Assemble the output object.
obj = tf.train.Checkpoint()
obj.__call__ = traced_call_fn
obj.trainable_variables = model.trainable_variables
obj.variables = model.trainable_variables + model.non_trainable_variables
obj.regularization_losses = [_get_traced_loss(model, i)
for i in range(len(model.losses))]
return obj
def _get_traced_loss(model, i):
"""Returns tf.function for model.losses[i] with a trace for zero args.
The intended usage is
[_get_traced_loss(model, i) for i in range(len(model.losses))]
This is better than
[tf.function(lambda: model.losses[i], input_signature=[]) for i ...]
because it avoids capturing a loop index in a lambda, and removes any
chance of deferring the trace.
Args:
model: a Keras Model.
    i: an integer from 0 up to, but not including, len(model.losses).
"""
f = tf.function(lambda: model.losses[i])
_ = f.get_concrete_function()
return f
def main(argv):
del argv
# Build a complete classifier model using a feature extractor.
default_hparams = dict(dropout_rate=0.25)
l2_strength = 0.01 # Not a hparam for inputs -> outputs.
feature_extractor = make_feature_extractor(l2_strength=l2_strength,
**default_hparams)
classifier = make_classifier(feature_extractor, l2_strength=l2_strength)
# Train the complete model.
(x_train, y_train), (x_test, y_test) = mnist_util.load_reshaped_data(
fake_tiny_data=FLAGS.fast_test_mode)
classifier.compile(loss=tf.keras.losses.categorical_crossentropy,
optimizer=tf.keras.optimizers.SGD(),
metrics=['accuracy'])
classifier.fit(x_train, y_train,
batch_size=128,
epochs=FLAGS.epochs,
verbose=1,
validation_data=(x_test, y_test))
# Save the feature extractor to a framework-agnostic SavedModel for reuse.
# Note that the feature_extractor object has not been compiled or fitted,
# so it does not contain an optimizer and related state.
exportable = wrap_keras_model_for_export(feature_extractor,
(None,) + mnist_util.INPUT_SHAPE,
set_feature_extractor_hparams,
default_hparams)
tf.saved_model.save(exportable, FLAGS.export_dir)
if __name__ == '__main__':
tf.enable_v2_behavior()
app.run(main)
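# Hypothetical reuse sketch (not part of this exporter; the original repository does
# the equivalent in use_mnist_cnn.py).  The hparam name `dropout_rate` mirrors
# `default_hparams` above; everything else is illustrative.
def _example_reuse_exported_model(export_dir):
  """Loads the exported feature extractor and applies it to a dummy batch."""
  loaded = tf.saved_model.load(export_dir)
  images = tf.zeros((1,) + mnist_util.INPUT_SHAPE)  # one all-zero fake image
  # The exported __call__ takes `training` plus the hparams wired in above.
  return loaded(images, training=False, dropout_rate=tf.constant(0.25))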
|
{
"content_hash": "bb2aa765c5fc3a64e5fe1e477601c7ce",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 79,
"avg_line_length": 42.52,
"alnum_prop": 0.6706087891412444,
"repo_name": "ageron/tensorflow",
"id": "7c1a356e661dd1917414cb3846db8ab994d8f843",
"size": "8130",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/examples/saved_model/integration_tests/export_mnist_cnn.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3560"
},
{
"name": "Batchfile",
"bytes": "14734"
},
{
"name": "C",
"bytes": "644380"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "59281238"
},
{
"name": "CMake",
"bytes": "207169"
},
{
"name": "Dockerfile",
"bytes": "75509"
},
{
"name": "Go",
"bytes": "1501606"
},
{
"name": "HTML",
"bytes": "4680118"
},
{
"name": "Java",
"bytes": "908340"
},
{
"name": "Jupyter Notebook",
"bytes": "2510253"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "94466"
},
{
"name": "Objective-C",
"bytes": "60069"
},
{
"name": "Objective-C++",
"bytes": "118322"
},
{
"name": "PHP",
"bytes": "15024"
},
{
"name": "Pascal",
"bytes": "617"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "46230508"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "838"
},
{
"name": "Shell",
"bytes": "481859"
},
{
"name": "Smarty",
"bytes": "27249"
},
{
"name": "Swift",
"bytes": "53109"
}
],
"symlink_target": ""
}
|
import os
from twisted.internet import stdio as twistedStdio
from twisted.protocols.basic import LineReceiver
from src.shared.logconfig import newLogger
log = newLogger(__name__)
def setupStdio(backend):
twistedStdio.StandardIO(StdioHandler(backend))
class StdioHandler(LineReceiver):
# The default delimiter for a LineReceiver is '\r\n', which doesn't work
# with Python's stdin (at least on *nix, probably on Windows as well) which
# uses either '\n' or os.linesep instead.
# TODO: I'm actually not sure which separator Python uses on Windows, so
# this might not work on Windows. We should test it and -- if necessary --
# change this to '\n'.
delimiter = os.linesep
def __init__(self, backend):
# For some reason calling LineReceiver.__init__ doesn't work??
self.backend = backend
self.backend.stdioReady(self)
def cleanup(self):
pass
def lineReceived(self, line):
self.backend.stdioMessage(line)
def backendMessage(self, message):
# Do we want to use this?
self.sendLine(message)
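# Hypothetical sketch (not part of the original module): the minimal interface a
# `backend` object needs for StdioHandler to drive it.  EchoBackend is illustrative
# only; it simply echoes every stdin line back to stdout.
class EchoBackend(object):
    def stdioReady(self, handler):
        # Called once the handler is connected to stdin/stdout.
        self.handler = handler

    def stdioMessage(self, line):
        # Called for every line read from stdin.
        self.handler.backendMessage(line)

# Usage would be setupStdio(EchoBackend()) once a twisted reactor is running.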
|
{
"content_hash": "15021490096b664b8f5cf0836ed2b906",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 79,
"avg_line_length": 28.256410256410255,
"alnum_prop": 0.6860254083484574,
"repo_name": "CheeseLord/warts",
"id": "cf5c127691acfa4ed795a9a775ec618182412717",
"size": "1102",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/client/stdio.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "759"
},
{
"name": "Python",
"bytes": "182874"
},
{
"name": "Shell",
"bytes": "3109"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from chaco.abstract_overlay import AbstractOverlay
from traits.trait_types import List
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.spectrometer.graph.marker_line import MarkerLine
class MarkerLineOverlay(AbstractOverlay):
lines = List
_cached_lines = List
def add_marker_line(self, x, bgcolor='black'):
l = MarkerLine(data_x=self.component.index_mapper.map_data(x),
x=x,
bgcolor=bgcolor)
self.lines.append(l)
self._layout_needed = True
self.do_layout()
return l
def overlay(self, other_component, gc, view_bounds=None, mode="normal"):
with gc:
gc.clip_to_rect(other_component.x, other_component.y,
other_component.width,
other_component.height)
gc.set_stroke_color((1, 0, 0, 0.75))
gc.set_line_dash((12, 6))
gc.translate_ctm(0, other_component.y)
for l in self._cached_lines:
if l.visible:
l.draw(gc, other_component.height)
def _do_layout(self):
if self._layout_needed:
mapper = self.component.index_mapper
self._cached_lines = self.lines[:]
for ci in self._cached_lines:
if ci.visible:
ci.x = mapper.map_screen(ci.data_x)
ci.height = self.component.height
ci.visible = bool(ci.x > 0)
self._layout_needed = False
# ============= EOF =============================================
|
{
"content_hash": "eb426969b7be83ca4562d0148f7cfd23",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 76,
"avg_line_length": 37.5,
"alnum_prop": 0.5176811594202898,
"repo_name": "UManPychron/pychron",
"id": "8088c629a2d3270226d9fbcd83cc59b84d1dfc5c",
"size": "2525",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pychron/spectrometer/graph/marker_line_overlay.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "279"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "40346"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10234954"
},
{
"name": "Shell",
"bytes": "10753"
}
],
"symlink_target": ""
}
|
"""
Constants: Machine Settings Info
---------------------------------
Specific constants relating to the retrieval of machine settings information to be used in
``PyLHC``, to help with consistency.
"""
from pylhc.constants.general import TFS_SUFFIX
# TFS-File Conventions #########################################################
# Filename
info_name = f"machine_settings{TFS_SUFFIX}"
knobdef_suffix = f"_definition{TFS_SUFFIX}"
trimhistory_suffix = f"_trims{TFS_SUFFIX}"
# Columns
column_knob = "KNOB"
column_time = "TIME"
column_timestamp = "TIMESTAMP"
column_value = "VALUE"
# Headers
head_accel = "Accelerator"
head_time = "Time"
head_start_time = "StartTime"
head_end_time = "EndTime"
head_beamprocess = "Beamprocess"
head_fill = "Fill"
head_beamprocess_start = "BeamprocessStart"
head_context_category = "ContextCategory"
head_beamprcess_description = "Description"
head_optics = "Optics"
head_optics_start = "OpticsStart"
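# Hypothetical usage sketch (not part of the original module): how the suffix
# constants are meant to compose into per-knob filenames.  The knob name and the
# assumption that TFS_SUFFIX is ".tfs" are illustrative only.
#
#     knob = "ATLAS_IP1-XING"                      # made-up knob name
#     definition_file = knob + knobdef_suffix      # e.g. "ATLAS_IP1-XING_definition.tfs"
#     trims_file = knob + trimhistory_suffix       # e.g. "ATLAS_IP1-XING_trims.tfs"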
|
{
"content_hash": "5a291d1c5ebf093e8ce4ce502e8b0367",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 90,
"avg_line_length": 27.5,
"alnum_prop": 0.679144385026738,
"repo_name": "pylhc/PyLHC",
"id": "acd637894553e8d62bca3a0baa42c7148c7a7c6b",
"size": "935",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pylhc/constants/machine_settings_info.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "195846"
}
],
"symlink_target": ""
}
|
"""Converts GitHub flavored markdown changelogs to release notes.
"""
import argparse
import re
import subprocess
import string
import six
NO_HEADING = 'PRODUCT HAS NO HEADING'
PRODUCTS = {
'FirebaseABTesting/CHANGELOG.md': '{{ab_testing}}',
'FirebaseAppCheck/CHANGELOG.md': 'App Check',
'FirebaseAppDistribution/CHANGELOG.md': 'App Distribution',
'FirebaseAuth/CHANGELOG.md': '{{auth}}',
'FirebaseCore/CHANGELOG.md': NO_HEADING,
'Crashlytics/CHANGELOG.md': '{{crashlytics}}',
'FirebaseDatabase/CHANGELOG.md': '{{database}}',
'FirebaseDynamicLinks/CHANGELOG.md': '{{ddls}}',
'FirebaseInAppMessaging/CHANGELOG.md': '{{inapp_messaging}}',
'FirebaseInstallations/CHANGELOG.md': 'Installations',
'FirebaseMessaging/CHANGELOG.md': '{{messaging}}',
'FirebaseStorage/CHANGELOG.md': '{{storage}}',
'Firestore/CHANGELOG.md': '{{firestore}}',
'FirebaseFunctions/CHANGELOG.md': '{{cloud_functions}}',
'FirebaseRemoteConfig/CHANGELOG.md': '{{remote_config}}',
'FirebasePerformance/CHANGELOG.md': '{{perfmon}}',
}
def main():
local_repo = find_local_repo()
parser = argparse.ArgumentParser(description='Create release notes.')
parser.add_argument('--repo', '-r', default=local_repo,
help='Specify which GitHub repo is local.')
parser.add_argument('--only', metavar='VERSION',
help='Convert only a specific version')
parser.add_argument('--all', action='store_true',
help='Emits entries for all versions')
parser.add_argument('changelog',
help='The CHANGELOG.md file to parse')
args = parser.parse_args()
if args.all:
text = read_file(args.changelog)
else:
text = read_changelog_section(args.changelog, args.only)
product = None
if not args.all:
product = PRODUCTS.get(args.changelog)
renderer = Renderer(args.repo, product)
translator = Translator(renderer)
result = translator.translate(text)
print(result)
def find_local_repo():
url = six.ensure_text(
subprocess.check_output(['git', 'config', '--get', 'remote.origin.url']))
# ssh or https style URL
m = re.match(r'^(?:git@github\.com:|https://github\.com/)(.*)\.git$', url)
if m:
return m.group(1)
raise LookupError('Can\'t figure local repo from remote URL %s' % url)
CHANGE_TYPE_MAPPING = {
'added': 'feature'
}
class Renderer(object):
def __init__(self, local_repo, product):
self.local_repo = local_repo
self.product = product
def heading(self, heading):
if self.product:
if self.product == NO_HEADING:
return ''
else:
return '### %s\n' % self.product
return heading
def bullet(self, spacing):
"""Renders a bullet in a list.
All bulleted lists in devsite are '*' style.
"""
return '%s* ' % spacing
def change_type(self, tag):
"""Renders a change type tag as the appropriate double-braced macro.
That is "[fixed]" is rendered as "{{fixed}}".
"""
tag = CHANGE_TYPE_MAPPING.get(tag, tag)
return '{{%s}}' % tag
def url(self, url):
m = re.match(r'^(?:https:)?(//github.com/(.*)/issues/(\d+))$', url)
if m:
link = m.group(1)
repo = m.group(2)
issue = m.group(3)
if repo == self.local_repo:
text = '#' + issue
else:
text = repo + '#' + issue
return '[%s](%s)' % (text, link)
return url
def local_issue_link(self, issues):
"""Renders a local issue link as a proper markdown URL.
Transforms (#1234, #1235) into
([#1234](//github.com/firebase/firebase-ios-sdk/issues/1234),
[#1235](//github.com/firebase/firebase-ios-sdk/issues/1235)).
"""
issue_link_list = []
issue_list = issues.split(", ")
translate = str.maketrans('', '', string.punctuation)
for issue in issue_list:
issue = issue.translate(translate)
link = '//github.com/%s/issues/%s' % (self.local_repo, issue)
issue_link_list.append('[#%s](%s)' % (issue, link))
return "(" + ", ".join(issue_link_list) + ")"
def text(self, text):
"""Passes through any other text."""
return text
class Translator(object):
def __init__(self, renderer):
self.renderer = renderer
def translate(self, text):
result = ''
while text:
for key in self.rules:
rule = getattr(self, key)
m = rule.match(text)
if not m:
continue
callback = getattr(self, 'parse_' + key)
callback_result = callback(m)
result += callback_result
text = text[len(m.group(0)):]
break
return result
heading = re.compile(
r'^#{1,6} .*'
)
def parse_heading(self, m):
return self.renderer.heading(m.group(0))
bullet = re.compile(
r'^(\s*)[*+-] '
)
def parse_bullet(self, m):
return self.renderer.bullet(m.group(1))
change_type = re.compile(
r'\[' # opening square bracket
r'(\w+)' # tag word (like "feature" or "changed")
r'\]' # closing square bracket
r'(?!\()' # not followed by opening paren (that would be a link)
)
def parse_change_type(self, m):
return self.renderer.change_type(m.group(1))
url = re.compile(r'^(https?://[^\s<]+[^<.,:;"\')\]\s])')
def parse_url(self, m):
return self.renderer.url(m.group(1))
local_issue_link = re.compile(
r'\(' # opening paren
r'(#(\d+)(, )?)+' # list of hash and issue number, comma-delimited
r'\)' # closing paren
)
def parse_local_issue_link(self, m):
return self.renderer.local_issue_link(m.group(0))
text = re.compile(
r'^[\s\S]+?(?=[(\[\n]|https?://|$)'
)
def parse_text(self, m):
return self.renderer.text(m.group(0))
rules = [
'heading', 'bullet', 'change_type', 'url', 'local_issue_link', 'text'
]
def read_file(filename):
"""Reads the contents of the file as a single string."""
with open(filename, 'r') as fd:
return fd.read()
def read_changelog_section(filename, single_version=None):
"""Reads a single section of the changelog from the given filename.
If single_version is None, reads the first section with a number in its
heading. Otherwise, reads the first section with single_version in its
heading.
Args:
- single_version: specifies a string to look for in headings.
Returns:
    A string containing the heading and the contents of the section under it.
"""
with open(filename, 'r') as fd:
# Discard all lines until we see a heading that either has the version the
# user asked for or any version.
if single_version:
initial_heading = re.compile(
r'^(#{1,6}) .*%s' % re.escape(single_version))
else:
      initial_heading = re.compile(r'^(#{1,6}) ([^\d]*)\d')
heading = re.compile(r'^(#{1,6}) ')
initial = True
heading_level = 6
result = []
for line in fd:
if initial:
match = initial_heading.match(line)
if match:
initial = False
heading_level = len(match.group(1))
result.append(line)
else:
match = heading.match(line)
# We only break if we find a new header at the same, or higher,
# level.
if match and len(match.group(1)) <= heading_level:
break
result.append(line)
# Prune extra newlines
while result and result[-1] == '\n':
result.pop()
return ''.join(result)
if __name__ == '__main__':
main()
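# Hypothetical sketch (not part of the original script): driving Renderer/Translator
# directly on an in-memory changelog snippet, bypassing argument parsing.  The repo
# slug matches this repository; the changelog text and product macro are made up.
def _example_translate_snippet():
    renderer = Renderer('firebase/firebase-ios-sdk', '{{firestore}}')
    translator = Translator(renderer)
    snippet = ('# 9.0.0\n'
               '- [fixed] Fixed a crash on startup (#1234).\n')
    return translator.translate(snippet)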
|
{
"content_hash": "6c5e3ffebd1ea5fdeff75248063632a4",
"timestamp": "",
"source": "github",
"line_count": 278,
"max_line_length": 79,
"avg_line_length": 26.949640287769785,
"alnum_prop": 0.6014415376401495,
"repo_name": "firebase/firebase-ios-sdk",
"id": "8056b17bcb975133c09441d4344dc52451b89ba0",
"size": "8092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/make_release_notes.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "365959"
},
{
"name": "C++",
"bytes": "8345652"
},
{
"name": "CMake",
"bytes": "91856"
},
{
"name": "JavaScript",
"bytes": "3675"
},
{
"name": "Objective-C",
"bytes": "10276029"
},
{
"name": "Objective-C++",
"bytes": "837306"
},
{
"name": "Python",
"bytes": "117723"
},
{
"name": "Ruby",
"bytes": "179250"
},
{
"name": "Shell",
"bytes": "127192"
},
{
"name": "Swift",
"bytes": "2052268"
},
{
"name": "sed",
"bytes": "2015"
}
],
"symlink_target": ""
}
|
from datetime import datetime
import graphene
from graphene_django.filter import DjangoFilterConnectionField
from app.timetables.models import ServingAutoUpdate, Timetable, Vendor
from .cruds.dish_crud import (DishNode, CreateDish, UpdateDish, DeleteDish,
DishFilter,)
from .cruds.event_crud import EventNode, EventFilter
from .cruds.meal_crud import (MealNode, CreateMeal, UpdateMeal, DeleteMeal,
MealFilter,)
from .cruds.review_crud import (ReviewNode, CreateReview, UpdateReview,
DeleteReview, ReviewFilter,)
from .cruds.serving_crud import ServingNode
from .cruds.timetable_crud import TimetableNode, TimetableFilter
from .cruds.user_crud import (UserNode, CreateUser, UpdateUser, DeleteUser,
UserFilter,)
from .cruds.vendor_crud import (VendorNode, CreateVendor, UpdateVendor,
DeleteVendor, VendorFilter,)
class Query(graphene.AbstractType):
user = graphene.relay.Node.Field(UserNode)
users = DjangoFilterConnectionField(UserNode, filterset_class=UserFilter)
dish = graphene.relay.Node.Field(DishNode)
dishes = DjangoFilterConnectionField(DishNode, filterset_class=DishFilter)
meal = graphene.relay.Node.Field(MealNode)
meals = DjangoFilterConnectionField(MealNode, filterset_class=MealFilter)
vendor = graphene.relay.Node.Field(VendorNode)
vendors = DjangoFilterConnectionField(VendorNode, filterset_class=VendorFilter)
timetable = graphene.relay.Node.Field(TimetableNode)
timetables = DjangoFilterConnectionField(TimetableNode, filterset_class=TimetableFilter)
event = graphene.relay.Node.Field(EventNode)
events = DjangoFilterConnectionField(EventNode, filterset_class=EventFilter)
review = graphene.relay.Node.Field(ReviewNode)
reviews = DjangoFilterConnectionField(ReviewNode, filterset_class=ReviewFilter)
servings = graphene.List(
ServingNode,
timetable=graphene.String(),
vendor=graphene.String(),
date=graphene.String()
)
def resolve_servings(self, args, context, info):
timetable = Timetable.objects.get(slug=args['timetable'])
date = datetime.strptime(args['date'], '%Y-%m-%d').date()
if 'vendor' in args:
vendor = Vendor.objects.get(slug=args['vendor'])
return ServingAutoUpdate.get_servings(
timetable, date, vendor=vendor
)
return ServingAutoUpdate.get_servings(timetable, date)
class Mutation(graphene.ObjectType):
create_user = CreateUser.Field()
update_user = UpdateUser.Field()
delete_user = DeleteUser.Field()
create_dish = CreateDish.Field()
update_dish = UpdateDish.Field()
delete_dish = DeleteDish.Field()
create_meal = CreateMeal.Field()
update_meal = UpdateMeal.Field()
delete_meal = DeleteMeal.Field()
create_vendor = CreateVendor.Field()
update_vendor = UpdateVendor.Field()
delete_vendor = DeleteVendor.Field()
create_review = CreateReview.Field()
update_review = UpdateReview.Field()
delete_review = DeleteReview.Field()
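# Hypothetical query sketch (not part of the original module): what a client request
# against the `servings` field above could look like.  The slugs, the date and the
# selected field are illustrative only.
_EXAMPLE_SERVINGS_QUERY = """
{
  servings(timetable: "main-timetable", vendor: "mama-taverna", date: "2017-06-01") {
    id
  }
}
"""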
|
{
"content_hash": "eade30076221cb2f0769aa4e99e473ab",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 92,
"avg_line_length": 38.21686746987952,
"alnum_prop": 0.7049180327868853,
"repo_name": "teamtaverna/core",
"id": "2bd8b5ef8b9299a2c41de384aff426a843053ca9",
"size": "3172",
"binary": false,
"copies": "1",
"ref": "refs/heads/staging",
"path": "app/api/schema.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "163026"
}
],
"symlink_target": ""
}
|
"""Tests for distutils.file_util."""
import unittest
import os
import shutil
from distutils.file_util import move_file, write_file, copy_file
from distutils import log
from distutils.tests import support
class FileUtilTestCase(support.TempdirManager, unittest.TestCase):
def _log(self, msg, *args):
if len(args) > 0:
self._logs.append(msg % args)
else:
self._logs.append(msg)
def setUp(self):
super(FileUtilTestCase, self).setUp()
self._logs = []
self.old_log = log.info
log.info = self._log
tmp_dir = self.mkdtemp()
self.source = os.path.join(tmp_dir, 'f1')
self.target = os.path.join(tmp_dir, 'f2')
self.target_dir = os.path.join(tmp_dir, 'd1')
def tearDown(self):
log.info = self.old_log
super(FileUtilTestCase, self).tearDown()
def test_move_file_verbosity(self):
f = open(self.source, 'w')
try:
f.write('some content')
finally:
f.close()
move_file(self.source, self.target, verbose=0)
wanted = []
self.assertEqual(self._logs, wanted)
# back to original state
move_file(self.target, self.source, verbose=0)
move_file(self.source, self.target, verbose=1)
wanted = ['moving %s -> %s' % (self.source, self.target)]
self.assertEqual(self._logs, wanted)
# back to original state
move_file(self.target, self.source, verbose=0)
self._logs = []
# now the target is a dir
os.mkdir(self.target_dir)
move_file(self.source, self.target_dir, verbose=1)
wanted = ['moving %s -> %s' % (self.source, self.target_dir)]
self.assertEqual(self._logs, wanted)
def test_write_file(self):
lines = ['a', 'b', 'c']
dir = self.mkdtemp()
foo = os.path.join(dir, 'foo')
write_file(foo, lines)
content = [line.strip() for line in open(foo).readlines()]
self.assertEqual(content, lines)
def test_copy_file(self):
src_dir = self.mkdtemp()
foo = os.path.join(src_dir, 'foo')
write_file(foo, 'content')
dst_dir = self.mkdtemp()
copy_file(foo, dst_dir)
self.assertTrue(os.path.exists(os.path.join(dst_dir, 'foo')))
def test_suite():
return unittest.makeSuite(FileUtilTestCase)
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
|
{
"content_hash": "5d576dbfa925e19f309620319b76ff89",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 69,
"avg_line_length": 30.6625,
"alnum_prop": 0.5886669384427232,
"repo_name": "xxd3vin/spp-sdk",
"id": "dbc62839817f6e8324cdad64e33bdcc20fddb133",
"size": "2453",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "opt/Python27/Lib/distutils/tests/test_file_util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "759663"
},
{
"name": "C#",
"bytes": "9892"
},
{
"name": "C++",
"bytes": "56155"
},
{
"name": "CSS",
"bytes": "6226"
},
{
"name": "F#",
"bytes": "3065"
},
{
"name": "FORTRAN",
"bytes": "7795"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "JavaScript",
"bytes": "163687"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Pascal",
"bytes": "8738"
},
{
"name": "Python",
"bytes": "22177886"
},
{
"name": "Shell",
"bytes": "15704"
},
{
"name": "Tcl",
"bytes": "2065501"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.contrib import admin
from django.utils.translation import ugettext as _
from koalixcrm.crm.reporting.resource_price import ResourcePrice
class Estimation(models.Model):
task = models.ForeignKey("Task",
verbose_name=_('Task'),
blank=False,
null=False)
resource = models.ForeignKey("Resource")
date_from = models.DateField(verbose_name=_("Estimation From"),
blank=False,
null=False)
date_until = models.DateField(verbose_name=_("Estimation To"),
blank=False,
null=False)
amount = models.DecimalField(verbose_name=_("Amount"),
max_digits=5,
decimal_places=2,
blank=True,
null=True)
status = models.ForeignKey("EstimationStatus",
verbose_name=_('Status of the estimation'),
blank=False,
null=False)
reporting_period = models.ForeignKey("ReportingPeriod",
verbose_name=_('Reporting Period based on which the estimation was done'),
blank=False,
null=False)
def calculated_costs(self):
default_resource_price = ResourcePrice.objects.filter(id=self.resource.id)
if len(default_resource_price) == 0:
costs = 0
else:
for resource_price in default_resource_price:
costs = self.amount*resource_price.price
break
return costs
def __str__(self):
return _("Estimation of Resource Consumption") + ": " + str(self.id)
class Meta:
app_label = "crm"
verbose_name = _('Estimation of Resource Consumption')
verbose_name_plural = _('Estimation of Resource Consumptions')
class EstimationInlineAdminView(admin.TabularInline):
model = Estimation
fieldsets = (
(_('Work'), {
'fields': ('task',
'resource',
'amount',
'date_from',
'date_until',
'status',
'reporting_period')
}),
)
extra = 1
|
{
"content_hash": "2af792f6d931949d4e6e9b1ba38f5972",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 115,
"avg_line_length": 38.815384615384616,
"alnum_prop": 0.4803804994054697,
"repo_name": "dario61081/koalixcrm",
"id": "68daa1bd89eaf49036adae1756a708831e586b1b",
"size": "2548",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "koalixcrm/crm/reporting/estimation.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "527"
},
{
"name": "Python",
"bytes": "275768"
},
{
"name": "XSLT",
"bytes": "287211"
}
],
"symlink_target": ""
}
|
"""Fetch some events from github and put them on a page."""
from __future__ import print_function, division
import sys
import os
import json
import shutil
from datetime import datetime
import requests
from jinja2 import Template
from flask import Flask
app = Flask(__name__)
USERS_FILE = "users.txt"
TEMPLATE_DIR = "templates" # HTML Jinja template
DATA_DIR = "data" # used for cacheing
GH_USER_EVENTS_URL = "https://api.github.com/users/{}/events/public"
def info(msg):
"""Print a line in blue (assumes we have a color terminal!)"""
print("\033[1m\033[34m", "INFO:", msg, "\033[0m")
def timeago(time):
"""Given an ISO-8601 formatted timestamp (UTC), return time ago as a
timedelta object."""
tnow = datetime.utcnow()
t = datetime.strptime(time, "%Y-%m-%dT%H:%M:%SZ")
return tnow - t
def read_users(fname):
"""Read a file and split into lines, stripping comments starting with
a hash (#) and blank lines."""
users = []
with open(fname, "r") as f:
for line in f.readlines():
pos = line.find("#")
if pos != -1:
line = line[0:pos]
line = line.strip(" \n")
if len(line) > 0:
users.append(line)
return users
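# Hypothetical users.txt sketch (not part of the original repo) showing the format
# read_users() expects: one GitHub login per line, with '#' comments and blank
# lines ignored.
#
#     # people to follow
#     kbarbary
#     octocat      # inline comments are stripped as well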
def write_poll_info(fname, etag, poll_interval):
"""Write polling metadata to file `fname`."""
with open(fname, "w") as f:
f.write(etag)
f.write("\n")
f.write(datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"))
f.write("\n")
f.write(str(poll_interval))
def read_poll_info(fname):
"""Return etag, poll_time (ISO 8601 formatted), poll_interval"""
with open(fname, "r") as f:
lines = [line.strip() for line in f]
etag, poll_time, poll_interval = lines
return etag, poll_time, int(poll_interval)
def fetch_user_events(user):
"""Fetch public github events for the given user."""
dirname = os.path.join(DATA_DIR, "users", user)
poll_fname = os.path.join(dirname, "poll-info")
url = GH_USER_EVENTS_URL.format(user)
# ensure user directory exists
if not os.path.exists(dirname):
os.makedirs(dirname)
# Get a list of events already locally cached and special data we saved
# about the last time we polled github.
fnames = os.listdir(dirname)
if "poll-info" in fnames:
etag, poll_time, poll_interval = read_poll_info(poll_fname)
fnames.remove("poll-info")
else:
etag, poll_time, poll_interval = None, None, None
# Check if we already polled recently, respecting the poll interval.
if poll_time is not None and poll_interval is not None:
td = timeago(poll_time)
t_sec = td.days * 86400 + td.seconds
if t_sec < poll_interval:
info("{}: polled {}s ago. Next poll allowed in {}s."
.format(user, t_sec, poll_interval - t_sec))
return
headers = {"If-None-Match": etag} if (etag is not None) else None
r = requests.get(url, headers=headers)
if r.status_code == 304:
msg = user + ": up-to-date"
# If we get this status code, it means that etag wasn't None
# and that means that poll_interval was also not None.
# update poll-info with the current time.
write_poll_info(poll_fname, etag, poll_interval)
elif r.status_code == 200:
events = r.json()
# write each new event to a separate file
new = 0
for event in events:
id = event["id"]
if id not in fnames:
fname = os.path.join(dirname, id)
with open(fname, "w") as f:
json.dump(event, f)
new += 1
msg = "{}: {} new event".format(user, new)
if new > 1:
msg += "s"
# write the polling metadata
write_poll_info(poll_fname, r.headers["etag"],
r.headers["x-poll-interval"])
else:
raise Exception("request to {} failed with status {}"
.format(url, r.status_code))
# append rate limit info to message
limit = r.headers['x-ratelimit-limit']
remaining = r.headers['x-ratelimit-remaining']
info("{:30s} [{:>4s}/{:>4s}]".format(msg, remaining, limit))
def read_user_events(user):
"""Read user events from json data already in the cache"""
dirname = os.path.join(DATA_DIR, "users", user)
fnames = os.listdir(dirname)
if "poll-info" in fnames:
fnames.remove("poll-info")
events = []
for fname in fnames:
with open(os.path.join(dirname, fname)) as f:
events.append(json.load(f))
return events
def is_merge_event(event):
if event["type"] == "PushEvent":
if len(event["payload"]["commits"]) == 0:
return False
lastmsg = event["payload"]["commits"][-1]["message"]
if lastmsg.lower().startswith("merge pull request"):
return True
return False
def filter_merges_in_user_events(events):
return list(filter(lambda x: (not is_merge_event(x)), events))
def combine_push_events(events):
"""Aggregate a set of push events into a single AggPushEvent."""
if len(events) == 1:
return events[0]
# get all commit messages
commits = []
for e in events:
commits.extend(e["payload"]["commits"])
# get total size
distinct_size = sum([e["payload"]["distinct_size"] for e in events])
# assumes that list is already sorted by time
d = {"type": "AggPushEvent",
"actor": events[0]["actor"],
"repo": events[0]["repo"],
"payload": {"commits": commits,
"distinct_size": distinct_size},
"created_at": events[0]["created_at"], # most recent
"begin": events[0]["created_at"], # most recent
"end": events[-1]["created_at"]} # least recent
return d
def aggregate_pushes_in_user_events(events):
"""Aggregate nearby PushEvents in a single user's events.
"""
# sort events by time
events.sort(key=lambda x: x["created_at"], reverse=True)
# split into repos
names = set([e["repo"]["name"] for e in events])
events_by_repo = {n: [] for n in names}
for e in events:
events_by_repo[e["repo"]["name"]].append(e)
new_events = []
for name in events_by_repo:
aggevents = None
t1 = None
for event in events_by_repo[name]:
if event["type"] != "PushEvent":
new_events.append(event)
continue
if aggevents is None:
aggevents = [event]
t1 = datetime.strptime(event["created_at"],
"%Y-%m-%dT%H:%M:%SZ")
else:
t2 = datetime.strptime(event["created_at"],
"%Y-%m-%dT%H:%M:%SZ")
dt = t1 - t2
if dt.days < 1:
aggevents.append(event)
else:
new_events.append(combine_push_events(aggevents))
aggevents = [event]
t1 = t2
# clean up remaining events
if aggevents is not None:
new_events.append(combine_push_events(aggevents))
return new_events
def fmt_timedelta(td):
if td.days > 1:
return "{} days ago".format(td.days)
elif td.days == 1:
return "1 day ago"
elif td.seconds > 7200:
return "{} hours ago".format(td.seconds // 3600)
elif td.seconds > 3600:
return "1 hour ago"
elif td.seconds > 120:
return "{} minutes ago".format(td.seconds // 60)
elif td.seconds > 60:
return "1 minute ago"
else:
return "just now"
def timeago_event(event):
if event["type"] == "AggPushEvent":
s1 = fmt_timedelta(timeago(event["begin"]))
s2 = fmt_timedelta(timeago(event["end"]))
if s1 == s2:
return s1
else:
return '{} – {}'.format(s1, s2)
else:
return fmt_timedelta(timeago(event["created_at"]))
# -----------------------------------------------------------------------------
# Parsing events
#
# Different types of events have different payloads and thus will be parsed
# differently. The global PARSERS variable maps event types (e.g., "PushEvent")
# to parsing functions. Each parsing function should return either:
# (1) a dictionary with "icon" and "body" keys
# (2) `None` if the event is not of interest
def ghlink(s):
"""Return an HTML <a> tag with a link to github"""
return '<a href="https://github.com/{s}">{s}</a>'.format(s=s)
def simplebody(event, action):
return '{} {} {}'.format(ghlink(event["actor"]["login"]), action,
ghlink(event["repo"]["name"]))
def parse_watch(event):
return {"icon": "star",
"body": simplebody(event, "starred")}
def parse_fork(event):
return {"icon": "repo-forked",
"body": simplebody(event, "forked")}
def parse_public(event):
return {"icon": "heart",
"body": simplebody(event, "open-sourced")}
def parse_pullrequest(event):
"""Only return new and merged pull requests"""
action = event["payload"]["action"]
login = event["actor"]["login"]
number = event["payload"]["number"]
pr_url = event["payload"]["pull_request"]["html_url"]
pr_title = event["payload"]["pull_request"]["title"]
repo_name = event["repo"]["name"]
# correct closed to merged.
if action == "closed" and event["payload"]["pull_request"]["merged"]:
action = "merged"
if action == "opened" or action == "merged":
body = ('{} {} pull request <a href="{}" title="{}">#{}</a> on {}'
.format(ghlink(login), action, pr_url, pr_title, number,
ghlink(repo_name)))
return {"icon": "git-pull-request",
"body": body}
else:
return None
def parse_create(event):
"""Parse new repositories, new tags, but not branches"""
ref_type = event["payload"]["ref_type"]
login = event["actor"]["login"]
repo_name = event["repo"]["name"]
ref = event["payload"]["ref"]
if ref_type == "repository":
icon = "repo"
body = "{} created {}".format(ghlink(login), ghlink(repo_name))
elif ref_type == "tag":
icon = "tag"
body = "{} tagged {} on {}".format(ghlink(login), ref,
ghlink(repo_name))
else:
return None
return {"icon": icon, "body": body}
def parse_release(event):
login = event["actor"]["login"]
repo_name = event["repo"]["name"]
tag_name = event["payload"]["release"]["tag_name"]
body = "{} released {} of {}".format(ghlink(login), tag_name,
ghlink(repo_name))
return {"icon": "package", "body": body}
def parse_push(event):
login = event["actor"]["login"]
repo_name = event["repo"]["name"]
commits = event["payload"]["commits"]
ncommits = event["payload"]["distinct_size"]
msg = "\n".join([c["message"].split("\n")[0] for c in commits])
body = '{} pushed <a title="{}">{} commits</a> to {}'.format(
ghlink(login), msg, ncommits, ghlink(repo_name))
return {"icon": "git-commit", "body": body}
PARSERS = {"WatchEvent": parse_watch, # stars a repo
"PullRequestEvent": parse_pullrequest, # anything to do with a PR
"CreateEvent": parse_create, # creates a repo, branch or tag
"ForkEvent": parse_fork, # fork a repo
"PublicEvent": parse_public, # open-source a repo
"ReleaseEvent": parse_release, # draft a release
"PushEvent": parse_push, # repo branch is pushed to
"AggPushEvent": parse_push} # custom "event" type we create
def parse(event):
"""Parse an event into a dictionary or None.
If the event is one we are interested in, return a dictionary with
"icon", "body", "time" and "timeago" keys.
If the event is one we are not interested in, return None.
"""
t = event["type"]
if t not in PARSERS:
return None
d = PARSERS[t](event)
if d is None:
return None
# append timestamp & time string
d["time"] = event["created_at"]
d["timeago"] = timeago_event(event)
return d
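# Hypothetical extension sketch (not part of the original script): a parser for an
# event type the dashboard currently ignores, written against the same contract as
# the parsers above (return a dict with "icon" and "body", or None).  To enable it,
# "IssuesEvent": parse_issues would have to be added to PARSERS.
def parse_issues(event):
    """Only surface newly opened issues."""
    if event["payload"]["action"] != "opened":
        return None
    issue = event["payload"]["issue"]
    body = '{} opened issue <a href="{}">#{}</a> on {}'.format(
        ghlink(event["actor"]["login"]), issue["html_url"], issue["number"],
        ghlink(event["repo"]["name"]))
    return {"icon": "issue-opened", "body": body}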
# -----------------------------------------------------------------------------
# Main bits
def build_html(events):
"""Render contents of index.html page."""
    # sort events by time
events.sort(key=lambda x: x["created_at"], reverse=True)
# parse all events
summaries = []
for event in events:
s = parse(event)
if s is not None:
summaries.append(s)
# load template
with open(os.path.join(TEMPLATE_DIR, "index.html")) as f:
template_html = f.read()
template = Template(template_html)
return template.render(events=summaries)
@app.route("/")
def index():
users = read_users(USERS_FILE)
for user in users:
fetch_user_events(user)
allevents = []
for user in users:
events = read_user_events(user)
events = filter_merges_in_user_events(events)
events = aggregate_pushes_in_user_events(events)
allevents.extend(events)
return build_html(allevents)
|
{
"content_hash": "4e0229caecb62223d3817753ce6823aa",
"timestamp": "",
"source": "github",
"line_count": 456,
"max_line_length": 79,
"avg_line_length": 29.82236842105263,
"alnum_prop": 0.5656298257224796,
"repo_name": "kbarbary/ghdash",
"id": "666683a65ecb1799ed225334428f80fb46945ff0",
"size": "13621",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ghdash.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11980"
},
{
"name": "HTML",
"bytes": "814"
},
{
"name": "Python",
"bytes": "13621"
}
],
"symlink_target": ""
}
|
"""
Support the sensor of a BloomSky weather station.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/sensor.bloomsky/
"""
import logging
from homeassistant.const import TEMP_FAHRENHEIT
from homeassistant.helpers.entity import Entity
from homeassistant.loader import get_component
DEPENDENCIES = ["bloomsky"]
# These are the available sensors
SENSOR_TYPES = ["Temperature",
"Humidity",
"Pressure",
"Luminance",
"UVIndex",
"Voltage"]
# Sensor units - these do not currently align with the API documentation
SENSOR_UNITS = {"Temperature": TEMP_FAHRENHEIT,
"Humidity": "%",
"Pressure": "inHg",
"Luminance": "cd/m²",
"Voltage": "mV"}
# Which sensors to format numerically
FORMAT_NUMBERS = ["Temperature", "Pressure", "Voltage"]
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the available BloomSky weather sensors."""
logger = logging.getLogger(__name__)
bloomsky = get_component('bloomsky')
sensors = config.get('monitored_conditions', SENSOR_TYPES)
for device in bloomsky.BLOOMSKY.devices.values():
for variable in sensors:
if variable in SENSOR_TYPES:
add_devices([BloomSkySensor(bloomsky.BLOOMSKY,
device,
variable)])
else:
logger.error("Cannot find definition for device: %s", variable)
class BloomSkySensor(Entity):
"""Representation of a single sensor in a BloomSky device."""
def __init__(self, bs, device, sensor_name):
"""Initialize a bloomsky sensor."""
self._bloomsky = bs
self._device_id = device["DeviceID"]
self._sensor_name = sensor_name
self._name = "{} {}".format(device["DeviceName"], sensor_name)
self._unique_id = "bloomsky_sensor {}".format(self._name)
self.update()
@property
def name(self):
"""The name of the BloomSky device and this sensor."""
return self._name
@property
def unique_id(self):
"""Return the unique ID for this sensor."""
return self._unique_id
@property
def state(self):
"""The current state, eg. value, of this sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the sensor units."""
return SENSOR_UNITS.get(self._sensor_name, None)
def update(self):
"""Request an update from the BloomSky API."""
self._bloomsky.refresh_devices()
state = \
self._bloomsky.devices[self._device_id]["Data"][self._sensor_name]
if self._sensor_name in FORMAT_NUMBERS:
self._state = "{0:.2f}".format(state)
else:
self._state = state
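# Hypothetical configuration sketch (not part of this module): one way the platform
# above could be wired up in configuration.yaml; the chosen conditions are
# illustrative only.
#
#     sensor:
#       - platform: bloomsky
#         monitored_conditions:
#           - Temperature
#           - Humidity
#           - Luminance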
|
{
"content_hash": "22532199a995a7ff5cd5cd831a5824a3",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 79,
"avg_line_length": 32.086021505376344,
"alnum_prop": 0.599195710455764,
"repo_name": "emilhetty/home-assistant",
"id": "a9d2c0c6631c7f9c2cc8fbafd9e54b88d1fa2dc1",
"size": "2985",
"binary": false,
"copies": "6",
"ref": "refs/heads/patch-3",
"path": "homeassistant/components/sensor/bloomsky.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1307989"
},
{
"name": "JavaScript",
"bytes": "10846"
},
{
"name": "Python",
"bytes": "2562500"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "6430"
}
],
"symlink_target": ""
}
|
"""Constants for the Harmony component."""
DOMAIN = "harmony"
SERVICE_SYNC = "sync"
SERVICE_CHANGE_CHANNEL = "change_channel"
PLATFORMS = ["remote"]
UNIQUE_ID = "unique_id"
ACTIVITY_POWER_OFF = "PowerOff"
HARMONY_OPTIONS_UPDATE = "harmony_options_update"
ATTR_ACTIVITY_LIST = "activity_list"
ATTR_DEVICES_LIST = "devices_list"
ATTR_LAST_ACTIVITY = "last_activity"
ATTR_CURRENT_ACTIVITY = "current_activity"
ATTR_ACTIVITY_STARTING = "activity_starting"
PREVIOUS_ACTIVE_ACTIVITY = "Previous Active Activity"
|
{
"content_hash": "822f02fa40b3a5d27ba639d44c07bac4",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 53,
"avg_line_length": 36.142857142857146,
"alnum_prop": 0.7569169960474308,
"repo_name": "sdague/home-assistant",
"id": "f6315b57b57a498337adeae8763ff0e2ba1340b9",
"size": "506",
"binary": false,
"copies": "7",
"ref": "refs/heads/dev",
"path": "homeassistant/components/harmony/const.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "27869189"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
}
|
import re
from datetime import datetime
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.event import listens_for
from cachetools import TTLCache, cached
db = SQLAlchemy()
class Message(db.Model):
__tablename__ = "messages"
timestamp = db.Column(db.Integer, primary_key=True)
timestamp_order = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Text, db.ForeignKey("users.user_id"), nullable=False)
channel_id = db.Column(db.Text, db.ForeignKey("channels.channel_id"), nullable=False)
user = db.relationship("User", backref=db.backref("messages", lazy=True))
channel = db.relationship("Channel", backref=db.backref("messages", lazy=True))
value = db.Column(db.Text)
channel_offset = db.Column(db.Integer)
def __init__(self, timestamp, timestamp_order, user_id, channel_id, value):
self.timestamp, self.timestamp_order, self.user_id, self.channel_id, self.value = timestamp, timestamp_order, user_id, channel_id, value
def __repr__(self): return "<Message at {}>".format(self.timestamp)
def datetime(self): return datetime.fromtimestamp(self.timestamp)
class Channel(db.Model):
__tablename__ = "channels"
channel_id = db.Column(db.Text, primary_key=True)
channel_name = db.Column(db.Text)
purpose = db.Column(db.Text)
def __init__(self, channel_id, channel_name, purpose):
self.channel_id, self.channel_name, self.purpose = channel_id, channel_name, purpose
def __repr__(self): return "<Channel {}>".format(self.channel_name)
class User(db.Model):
__tablename__ = "users"
user_id = db.Column(db.Text, primary_key=True)
user_name = db.Column(db.Text)
user_real_name = db.Column(db.Text)
is_bot = db.Column(db.Integer)
def __init__(self, user_id, user_name, user_real_name, is_bot):
self.user_id, self.user_name, self.user_real_name, self.is_bot = user_id, user_name, user_real_name, is_bot
def __repr__(self): return "<User {}>".format(self.user_name)
def sqlite_regexp(pattern, string):
"""According to the [SQLite3 docs](https://sqlite.org/lang_expr.html), `X REGEXP Y` is equivalent to `regexp(Y, X)`, so to implement custom regex matching, we define a custom SQL function in `initialize_db` called `regexp` that will get called by SQLite."""
return re.search(pattern, string) is not None
def initialize_db(flask_app):
db.init_app(flask_app)
with flask_app.app_context():
db.create_all()
@listens_for(db.engine, "begin")
def do_begin(conn):
conn.connection.create_function("regexp", 2, sqlite_regexp)
result_counts_cache = TTLCache(maxsize=4096, ttl=60 * 10) # cache for result counts where entries expire after 10 minutes
def get_messages(filter_from=None, filter_to=None, filter_channel_ids=None, filter_user_ids=None, filter_text=None, result_sort="time-ascending", result_offset=0, result_limit=1000):
assert filter_from is None or isinstance(filter_from, int)
assert filter_to is None or isinstance(filter_to, int)
assert filter_channel_ids is None or isinstance(filter_channel_ids, frozenset)
assert filter_user_ids is None or isinstance(filter_user_ids, frozenset)
assert filter_text is None or isinstance(filter_text, str)
result = Message.query
if filter_from is not None:
result = result.filter(Message.timestamp >= filter_from)
if filter_to is not None:
result = result.filter(Message.timestamp <= filter_to)
if filter_channel_ids is not None:
result = result.filter(Message.channel_id.in_(filter_channel_ids))
if filter_user_ids is not None:
result = result.filter(Message.user_id.in_(filter_user_ids))
if filter_text is not None:
result = result.filter(Message.value.op("regexp")(filter_text))
if result_sort == "time-ascending":
result = result.order_by(Message.timestamp.asc(), Message.timestamp_order.asc())
elif result_sort == "time-descending":
result = result.order_by(Message.timestamp.desc(), Message.timestamp_order.desc())
elif result_sort == "channel-ascending":
result = result.join(Message.channel).order_by(Channel.channel_name.asc())
elif result_sort == "channel-descending":
result = result.join(Message.channel).order_by(Channel.channel_name.desc())
elif result_sort == "user-ascending":
result = result.join(Message.user).order_by(User.user_name.asc())
elif result_sort == "user-descending":
result = result.join(Message.user).order_by(User.user_name.desc())
# retrieve the current page of messages
# we try to retrieve an extra message right after the last requested message, to determine whether there should be a next page or not
# this extra message is removed if there is a next page
messages = result.offset(result_offset).limit(result_limit + 1).all()
has_next_page = len(messages) == result_limit + 1
if has_next_page: messages.pop()
# count results if it hasn't been counted recently
filters_key = (filter_from, filter_to, filter_channel_ids, filter_user_ids, filter_text)
if filters_key in result_counts_cache:
count = result_counts_cache[filters_key]
else:
count = result.count()
result_counts_cache[filters_key] = count
return messages, count
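# Illustrative usage sketch (not part of the original module): fetching the second page
# of messages matching "deploy", newest first, inside an application context. The filter
# values and the `flask_app` argument are made up for the example; the cached count is
# reused for up to ten minutes for identical filters.
def _demo_get_messages(flask_app):
    with flask_app.app_context():
        messages, total = get_messages(filter_text=r"deploy",
                                       result_sort="time-descending",
                                       result_offset=1000, result_limit=1000)
        return messages, total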
|
{
"content_hash": "db89a431a5c462c4c7541f03a416a6b6",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 261,
"avg_line_length": 51.14423076923077,
"alnum_prop": 0.6939274299680391,
"repo_name": "Uberi/botty-bot-bot-bot",
"id": "d553fd75caeb658dadb483060fc7621fcf83331c",
"size": "5319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "serve-history/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1773"
},
{
"name": "HTML",
"bytes": "12008"
},
{
"name": "JavaScript",
"bytes": "74896"
},
{
"name": "Python",
"bytes": "188623"
},
{
"name": "Shell",
"bytes": "2510"
}
],
"symlink_target": ""
}
|
import os
import json
from collections import defaultdict
try:
from . import shared as G
from . import utils
except (ImportError, ValueError):
import shared as G
import utils
def rename_floobits_dir():
# TODO: one day this can be removed (once all our users have updated)
old_colab_dir = os.path.realpath(os.path.expanduser(os.path.join('~', '.floobits')))
if os.path.isdir(old_colab_dir) and not os.path.exists(G.BASE_DIR):
print('renaming %s to %s' % (old_colab_dir, G.BASE_DIR))
os.rename(old_colab_dir, G.BASE_DIR)
os.symlink(G.BASE_DIR, old_colab_dir)
def get_legacy_projects():
a = ['msgs.floobits.log', 'persistent.json']
owners = os.listdir(G.COLAB_DIR)
floorc_json = defaultdict(defaultdict)
for owner in owners:
if len(owner) > 0 and owner[0] == '.':
continue
if owner in a:
continue
workspaces_path = os.path.join(G.COLAB_DIR, owner)
try:
workspaces = os.listdir(workspaces_path)
except OSError:
continue
for workspace in workspaces:
workspace_path = os.path.join(workspaces_path, workspace)
workspace_path = os.path.realpath(workspace_path)
try:
fd = open(os.path.join(workspace_path, '.floo'), 'rb')
url = json.loads(fd.read())['url']
fd.close()
except Exception:
url = utils.to_workspace_url({
'port': 3448, 'secure': True, 'host': 'floobits.com', 'owner': owner, 'workspace': workspace
})
floorc_json[owner][workspace] = {
'path': workspace_path,
'url': url
}
return floorc_json
def migrate_symlinks():
data = {}
old_path = os.path.join(G.COLAB_DIR, 'persistent.json')
if not os.path.exists(old_path):
return
old_data = utils.get_persistent_data(old_path)
data['workspaces'] = get_legacy_projects()
data['recent_workspaces'] = old_data.get('recent_workspaces')
utils.update_persistent_data(data)
try:
os.unlink(old_path)
os.unlink(os.path.join(G.COLAB_DIR, 'msgs.floobits.log'))
except Exception:
pass
|
{
"content_hash": "0190f7d06bb6e080e22fd26db504a4f7",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 112,
"avg_line_length": 33.30882352941177,
"alnum_prop": 0.5849889624724062,
"repo_name": "Floobits/floobits-neovim-old",
"id": "33f1df76a5475fbc970b0f597bf6ef5f21174e10",
"size": "2265",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugin/floo/common/migrations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "211332"
},
{
"name": "VimL",
"bytes": "4532"
}
],
"symlink_target": ""
}
|
import os
import yaml
with open('/etc/rpc_deploy.OLD/rpc_environment.yml', 'r') as f:
rpc_environment = yaml.safe_load(f.read())
for root, _, files in os.walk('/etc/openstack_deploy/env.d'):
for item in files:
env_file = os.path.join(root, item)
with open(env_file, 'r') as f:
os_environment = yaml.safe_load(f.read())
if 'container_skel' not in os_environment:
continue
changed = False
for i in os_environment['container_skel']:
os_item = os_environment['container_skel'][i]
if i not in rpc_environment['container_skel']:
continue
rpc_item = rpc_environment['container_skel'][i]
if 'is_metal' in rpc_item:
rpc_metal = rpc_item['is_metal']
else:
rpc_metal = False
if 'is_metal' in os_item['properties']:
os_metal = os_item['properties']['is_metal']
else:
os_metal = False
if rpc_metal != os_metal:
changed = True
os_item['properties']['is_metal'] = rpc_metal
if changed:
with open(env_file, 'w') as fsw:
fsw.write(
yaml.safe_dump(
os_environment,
default_flow_style=False,
width=1000
)
)
|
{
"content_hash": "bbf91a677d464dfacfd3e484dcfc2a48",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 63,
"avg_line_length": 30.0625,
"alnum_prop": 0.4857934857934858,
"repo_name": "VaneCloud/openstack-ansible",
"id": "e1daf31b4f28a9eaf91d7648112d9b6021cdaebb",
"size": "2049",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/upgrade-utilities/scripts/juno-is-metal-preserve.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "666"
},
{
"name": "Python",
"bytes": "294026"
},
{
"name": "Shell",
"bytes": "157533"
}
],
"symlink_target": ""
}
|
import yaml
def create_bgp_peer(host, scope, ip, asNum, metadata=None):
assert scope in ('node', 'global')
testdata = {
'apiVersion': 'projectcalico.org/v3',
'kind': 'BGPPeer',
'metadata': {
'name': host.name,
},
'spec': {
'peerIP': ip,
'asNumber': asNum,
}
}
# Add optional params
# If node is not specified, scope is global.
if scope == "node":
testdata['spec']['node'] = host.get_hostname()
if metadata is not None:
testdata['metadata'] = metadata
host.writejson("testfile.json", testdata)
host.calicoctl("create -f testfile.json")
|
{
"content_hash": "7b178c40776264bb5f9ba9699c775e0c",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 59,
"avg_line_length": 28.041666666666668,
"alnum_prop": 0.5482912332838039,
"repo_name": "gunjan5/calico",
"id": "16c03492facfe90a75e3beb33697cf2f661816cd",
"size": "1279",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "calico_node/tests/st/bgp/peer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6541"
},
{
"name": "Go",
"bytes": "83604"
},
{
"name": "HTML",
"bytes": "43636"
},
{
"name": "JavaScript",
"bytes": "1272"
},
{
"name": "Makefile",
"bytes": "37788"
},
{
"name": "Python",
"bytes": "302766"
},
{
"name": "Ruby",
"bytes": "206537"
},
{
"name": "Shell",
"bytes": "28229"
}
],
"symlink_target": ""
}
|
import json
import threading
from oslo_log import log as oslo_logging
import pbr.version
import requests
import six
_UPDATE_CHECK_URL = 'https://www.cloudbase.it/checkupdates.php?p={0}&v={1}'
_PRODUCT_NAME = 'Cloudbase-Init'
LOG = oslo_logging.getLogger(__name__)
def _read_url(url):
# Disable certificate verification on Python 2 as
# requests' CA list is incomplete. Works fine on Python3.
req = requests.get(url, verify=six.PY3,
headers={'User-Agent': _PRODUCT_NAME})
req.raise_for_status()
if req.text:
return json.loads(req.text)
def _check_latest_version(callback):
product_version = get_version()
url = _UPDATE_CHECK_URL.format(_PRODUCT_NAME, product_version)
try:
content = _read_url(url)
if not content:
return
version = content.get('new_version')
if version:
callback(version)
except Exception as exc:
LOG.debug('Failed checking for new versions: %s', exc)
return
def check_latest_version(done_callback):
"""Try to obtain the latest version of the product."""
thread = threading.Thread(target=_check_latest_version,
args=(done_callback, ))
thread.daemon = True
thread.start()
def get_version():
"""Obtain the full project version, including alpha / beta / rc tags"""
version = pbr.version.VersionInfo('cloudbase-init')
return version.release_string()
def get_canonical_version():
"""Obtain the short, released project version."""
version = pbr.version.VersionInfo('cloudbase-init')
return version.canonical_version_string()
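# Illustrative usage sketch (not part of the original module): the update check runs in
# a daemon thread and only invokes the callback when the endpoint reports a newer version.
def _demo_check_latest_version():
    def _on_new_version(version):
        LOG.info('A newer %s version is available: %s', _PRODUCT_NAME, version)
    check_latest_version(_on_new_version)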
|
{
"content_hash": "039590d3fe6ca680cf2d57a28e9cfe8e",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 75,
"avg_line_length": 28.10169491525424,
"alnum_prop": 0.6531966224366706,
"repo_name": "ader1990/cloudbase-init",
"id": "025ceb85e04bd751da6f020bb589e7b007bcc829",
"size": "2274",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cloudbaseinit/version.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1245243"
}
],
"symlink_target": ""
}
|
import socket
import sys
from pydhcplib.dhcp_packet import DhcpPacket
from random import Random
from pydhcplib.type_ipv4 import ipv4
import time
hl = {}
for i in range(0,120):
hl[i] = 0
#print hl
r = Random()
r.seed()
#time.sleep(1)
def genNum():
i = []
for z in xrange(6):
i.append(r.randint(0,255))
return ':'.join(map(lambda x: "%02x"%x,i))
def genHouse():
global hl
x = r.randint(0,119)
if hl[x] < 9:
hl[x] += 1
return str(x)
else:
for i in range(0,119):
if hl[i] < 9:
hl[i] += 1
return str(i)
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Bind the socket to the port
server_address = ('', 9999)
print >>sys.stderr, 'starting up on %s port %s' % server_address
sock.bind(server_address)
# Listen for incoming connections
#sock.listen(1)
while True:
# Wait for a connection
print >>sys.stderr, '\nwaiting for a connection'
data, address = sock.recvfrom(1024)
print >>sys.stderr, 'received %s bytes from %s' % (len(data), address)
#print >>sys.stderr, data
#time.sleep(1)
if data:
p = DhcpPacket()
p.DecodePacket(data)
if p.GetOption('dhcp_message_type') == [1]:
house = genHouse()
houseIP = house + "." + house + "." + house + "." + house
p.SetOption('ciaddr', ipv4(houseIP).list())
# time.sleep(1)
#connect and forward to server
serverSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
serverADD = ('34.234.154.205', 10000)
print >>sys.stderr, 'connecting to %s port %s' % serverADD
serverSock.connect(serverADD)
print >>sys.stderr, 'sending...'
serverSock.sendall(p.EncodePacket())
print 'sent'
rec = 0
exp = len(data)
d = ''
#while rec < exp:
dataRec = serverSock.recv(1024)
# d += dataRec
# rec += len(dataRec)
# time.sleep(1)
print 'back to client'
sent = sock.sendto(dataRec, address)
print >>sys.stderr, 'sent %s bytes back to %s' % (sent, address)
|
{
"content_hash": "4431509b082bb48af6008368018022f9",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 74,
"avg_line_length": 22.85542168674699,
"alnum_prop": 0.6536636794939378,
"repo_name": "schneor13/nodejs-dhcp-server",
"id": "1a9dbc245ca10af61a37db2d6fb6a4021b0c2de7",
"size": "1897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Consumer-DHCP/nat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10454"
},
{
"name": "Shell",
"bytes": "114"
}
],
"symlink_target": ""
}
|
import csv
import pandas as pd
import sys
def main():
output_file = sys.argv[1]
input_file = sys.argv[2]
target_name = sys.argv[3]
model_name = sys.argv[4]
data = pd.read_csv(input_file)
train = data[data["is_test"] == False]
test = data[data["is_test"] == True]
classes_list = list(set(train[target_name]))
classes_map = dict(zip(classes_list, range(len(classes_list))))
x_train = train[[c for c in train.keys() if c != target_name and c != "is_test"]]
y_train = [classes_map[target] for target in train[target_name]]
    x_test = test[[c for c in test.keys() if c != target_name and c != "is_test"]]
if model_name=="Random Forest":
from sklearn.ensemble import RandomForestClassifier
model = RandomForestClassifier(n_estimators=100)
elif model_name=="Decision Tree":
from sklearn.tree import DecisionTreeClassifier
model = DecisionTreeClassifier()
model.fit(x_train, y_train)
y_hat = model.predict(x_test)
y_hat = [classes_list[y] for y in y_hat]
f = open(output_file, "w")
f.write("result\n")
f.write("\n".join([str(x) for x in y_hat]))
f.write("\n")
f.close()
if __name__=="__main__":
main()
|
{
"content_hash": "f3f7048ad7fba3248ee66745b0b5c13c",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 85,
"avg_line_length": 31.53846153846154,
"alnum_prop": 0.616260162601626,
"repo_name": "JuliaPackageMirrors/MachineLearning.jl",
"id": "c6d5c9d6fe473cdad64a6188f71244aedd727332",
"size": "1230",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "accuracy_benchmark/classification.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Julia",
"bytes": "115057"
},
{
"name": "Python",
"bytes": "2257"
},
{
"name": "R",
"bytes": "1782"
}
],
"symlink_target": ""
}
|
from brain import Brain, ToyBrain
import tensorflow as tf
from bot import Bot
import numpy
import os
class TFBrain(Brain):
DUALCOPYFREQ = 1000
SESS = None
WRITER = None
MERGER = None
SESS_HOLDERS = 0
# https://stats.stackexchange.com/questions/200006/q-learning-with-neural-network-as-function-approximation/200146
# https://stats.stackexchange.com/questions/126994/questions-about-q-learning-using-neural-networks
def __init__(self, name, ninputs, nactions, hshapes=list((25,10)), gamma=0.9, directory="save", rewardbuffer=None):
# Make a single session if not inherited
super().__init__(name, ninputs, nactions,
directory=directory, rewardbuffer=rewardbuffer)
self.inherited_sess = False
self.name = name
self.directory = directory
if TFBrain.SESS_HOLDERS == 0:
tf.reset_default_graph()
TFBrain.SESS_HOLDERS += 1
# Put all vars into name scope
with tf.name_scope(name):
mainshape = [ninputs]+hshapes+[nactions]
self.global_step = tf.Variable(0, name='global_step', trainable=False)
# Make some models with input/output variables
self.state_in, self.Qout, self.qnetvars = self.makeqnetwork(mainshape)
self.next_state, self.dualQout, self.dualnetvars = self.makeqnetwork(mainshape)
# Copy each var to dual
self.copy_to_dual = [tf.assign(dualnetvar, qnetvar) for qnetvar, dualnetvar in
zip(self.qnetvars, self.dualnetvars)]
# Q probs
self.qprobs = tf.nn.softmax(self.Qout, 1)
# Take random sample from each and put into array
self.prob_chosen_actions = tf.reshape(tf.multinomial(tf.log(self.qprobs), 1), [-1])
# Then combine them together to get our final Q-values.
self.prob_chosen_Q = tf.reduce_sum(tf.multiply(self.Qout,
tf.one_hot(self.prob_chosen_actions, nactions,
dtype=tf.float32)),
axis=1)
# If we want just the highest Q value do the following
self.chosen_actions = tf.argmax(self.Qout, 1)
# Then combine them together to get our final Q-values.
self.chosen_Q = tf.reduce_sum(tf.multiply(self.Qout,
tf.one_hot(self.chosen_actions, nactions, dtype=tf.float32)),
axis=1)
# Below we obtain the loss by taking the sum of squares difference
# between the target and prediction Q values.
self.rewards = tf.placeholder(shape=[None], dtype=tf.float32)
self.actions = tf.placeholder(shape=[None], dtype=tf.int32)
self.actions_onehot = tf.one_hot(self.actions, nactions, dtype=tf.float32)
# Q of chosen actions
self.Q = tf.reduce_sum(tf.multiply(self.Qout, self.actions_onehot), axis=1)
# Q(s,a) = R + y * max_a'( Q(s',a') )
self.targetQ = self.rewards + gamma * tf.reduce_max(self.dualQout, 1)
self.td_error = tf.square(self.targetQ - self.Q)
self.loss = tf.reduce_mean(self.td_error)
self.trainer = tf.train.AdamOptimizer(learning_rate=0.0002)
self.updateModel = self.trainer.minimize(self.loss, global_step=self.global_step)
self.saver = tf.train.Saver()
with tf.name_scope('summary'):
self.losssum = tf.summary.scalar('loss', self.loss)
self.rewardsumvar = tf.placeholder(shape=(), name="episode_reward", dtype=tf.float32)
self.rewardsum = tf.summary.scalar('reward', self.rewardsumvar)
def makeqnetwork(self, shape, valueshape=None, advantageshape=None):
"""
        Construct the Q-network graph
        :param shape: list of layer sizes, [ninputs] + hidden layer sizes + [nactions]
        :return: input placeholder, output layer, list of variables
"""
# Build brain model
state_in = tf.placeholder(shape=[None, shape[0]], dtype=tf.float32)
layer = state_in
variables = []
print(shape)
for i in range(0, len(shape)-2):
W = tf.Variable(tf.random_normal([shape[i],shape[i+1]]))
b = tf.Variable(tf.random_normal([shape[i+1]]))
layer = tf.nn.relu(tf.add(tf.matmul(layer,W),b))
variables.append(W)
variables.append(b)
# Make output layer without relu
W = tf.Variable(tf.random_normal([shape[-2],shape[-1]]))
b = tf.Variable(tf.random_normal([shape[-1]]))
layer = tf.add(tf.matmul(layer, W), b)
variables.append(W)
variables.append(b)
return state_in, layer, variables
def get_checkpoint(self):
if not os.path.isdir(self.directory):
os.mkdir(self.directory)
return os.path.join(self.directory, self.name)
def has_checkpoint(self):
return tf.train.checkpoint_exists(self.get_checkpoint())
def loadcheckpoint(self):
if self.has_checkpoint():
print("Loading checkpoint... ",end="")
self.saver.restore(TFBrain.SESS, self.get_checkpoint())
print("Done!")
else:
print("No checkpoint found")
def think(self, inputs, memory):
if len(inputs) == 0:
return {}
ids = list(inputs.keys())
# Here we can choose either prob_chosen_actions or chosen_actions
acts = TFBrain.SESS.run(self.prob_chosen_actions,
feed_dict={self.state_in: [inputs[entityid] for entityid in ids]})
return {entityid: act for entityid, act in zip(ids, acts)}, {}
    def debug(self, debuginput, debugmemory):
actprobs = TFBrain.SESS.run(self.qprobs, feed_dict={self.state_in: [debuginput]})
return actprobs[0]
def train(self, iters, batch, totreward=None):
"""
        Train the brain for a bit based on rewards previously provided
        :param iters: number of training iterations
        :param batch: batch size
        :param totreward: total episode reward to log, if provided
        :return:
"""
if totreward is not None:
rewardsum, global_step = TFBrain.SESS.run([self.rewardsum, self.global_step],
feed_dict={self.rewardsumvar: totreward})
TFBrain.WRITER.add_summary(rewardsum, global_step)
print("Buffer size: {}".format(len(self.buffer)))
print("Gen niters: {}".format(int(TFBrain.DUALCOPYFREQ)))
print("\tncopies: {}".format(int(iters/TFBrain.DUALCOPYFREQ)))
for i in range(int(iters/TFBrain.DUALCOPYFREQ)):
training_gen = self.buffer.get_batch_gen(batchsize=batch, niters=int(TFBrain.DUALCOPYFREQ))
self.trainbatch(training_gen)
def trainbatch(self, gen):
# Make dual graph identical to primary
TFBrain.SESS.run(self.copy_to_dual)
# Train primary
for inputs, actions, rewards, newinputs in gen:
feed_dict = {self.state_in: inputs,
self.rewards: rewards,
self.actions: actions,
self.next_state: newinputs}
_, summary, global_step = TFBrain.SESS.run([self.updateModel, self.losssum, self.global_step],
feed_dict=feed_dict)
TFBrain.WRITER.add_summary(summary, global_step)
def startup(self):
super().startup()
if TFBrain.SESS is None:
TFBrain.SESS = tf.Session()
# Logs
TFBrain.WRITER = tf.summary.FileWriter("output", TFBrain.SESS.graph)
init = tf.global_variables_initializer()
TFBrain.SESS.run(init)
self.loadcheckpoint()
def save(self):
"""
Saves variables to directory
"""
print("Saving {}... ".format(self.name), end='')
if not os.path.exists(self.directory):
os.makedirs(self.directory)
self.saver.save(TFBrain.SESS, self.get_checkpoint())
print("Done!")
def cleanup(self):
super().cleanup()
self.save()
TFBrain.SESS_HOLDERS -= 1
if TFBrain.SESS_HOLDERS == 0:
TFBrain.SESS.close()
TFBrain.WRITER.close()
def print_diag(self, sample_in):
pass
def formatarray(array):
return " ".join("{:5.5f}".format(f) for f in array)
|
{
"content_hash": "e6e41e80128715261ad187656e94ed13",
"timestamp": "",
"source": "github",
"line_count": 216,
"max_line_length": 119,
"avg_line_length": 39.80555555555556,
"alnum_prop": 0.5758315887415678,
"repo_name": "Renmusxd/RSwarm",
"id": "0303e318677bc53f7adf66cf196225f7d75b4804",
"size": "8598",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tfbrain.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "87485"
}
],
"symlink_target": ""
}
|
import inspect
import json
import multiprocessing
import os
import pprint
import re
import traceback
import warnings
from datetime import datetime
from time import sleep
import znc
# noinspection PyPep8Naming
class zlog_sql(znc.Module):
description = 'Logs all channels to a MySQL/SQLite database.'
module_types = [znc.CModInfo.GlobalModule]
wiki_page = 'ZLog_SQL'
has_args = True
args_help_text = ('Connection string in format: mysql://user:pass@host/database_name'
' or postgres://user:pass@host/database_name'
' or sqlite://path/to/db.sqlite')
log_queue = multiprocessing.SimpleQueue()
internal_log = None
hook_debugging = False
def OnLoad(self, args, message):
"""
This module hook is called when a module is loaded.
:type args: const CString &
        :type message: CString &
:rtype: bool
:param args: The arguments for the modules.
:param message: A message that may be displayed to the user after loading the module.
:return: True if the module loaded successfully, else False.
"""
self.internal_log = InternalLog(self.GetSavePath())
self.debug_hook()
try:
db = self.parse_args(args)
multiprocessing.Process(target=DatabaseThread.worker_safe,
args=(db, self.log_queue, self.internal_log)).start()
return True
except Exception as e:
message.s = str(e)
with self.internal_log.error() as target:
target.write('Could not initialize module caused by: {} {}\n'.format(type(e), str(e)))
target.write('Stack trace: ' + traceback.format_exc())
target.write('\n')
return False
def __del__(self):
# Terminate worker process.
self.log_queue.put(None)
def GetServer(self):
pServer = self.GetNetwork().GetCurrentServer()
if pServer is None:
return '(no server)'
sSSL = '+' if pServer.IsSSL() else ''
        return pServer.GetName() + ' ' + sSSL + str(pServer.GetPort())
# GENERAL IRC EVENTS
# ==================
def OnIRCConnected(self):
"""
This module hook is called after a successful login to IRC.
:rtype: None
"""
self.debug_hook()
self.put_log('Connected to IRC (' + self.GetServer() + ')')
def OnIRCDisconnected(self):
"""
This module hook is called when a user gets disconnected from IRC.
:rtype: None
"""
self.debug_hook()
self.put_log('Disconnected from IRC (' + self.GetServer() + ')')
def OnBroadcast(self, message):
"""
This module hook is called when a message is broadcasted to all users.
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
self.put_log('Broadcast: ' + str(message))
return znc.CONTINUE
def OnRawMode(self, opNick, channel, modes, args):
"""
Called on any channel mode change.
This is called before the more detailed mode hooks like e.g. OnOp() and OnMode().
:type opNick: const CNick &
:type channel: CChan &
:type modes: const CString &
:type args: const CString &
:rtype: None
"""
self.debug_hook()
sNick = opNick.GetNick() if opNick is not None else 'Server'
self.put_log('*** ' + sNick + ' sets mode: ' + modes + ' ' + args, channel.GetName())
def OnKick(self, opNick, kickedNick, channel, message):
"""
Called when a nick is kicked from a channel.
:type opNick: const CNick &
:type kickedNick: const CString &
:type channel: CChan &
:type message: const CString &
:rtype: None
"""
self.debug_hook()
self.put_log('*** ' + kickedNick + ' was kicked by ' + opNick.GetNick() + ' (' + message + ')',
channel.GetName())
def OnQuit(self, nick, message, channels):
"""
Called when a nick quit from IRC.
:type nick: const CNick &
:type message: const CString &
:type channels: std::vector<CChan*>
:rtype: None
"""
self.debug_hook()
for channel in channels:
self.put_log(
'*** Quits: ' + nick.GetNick() + ' (' + nick.GetIdent() + '@' + nick.GetHost() + ') (' + message + ')',
channel.GetName())
def OnJoin(self, nick, channel):
"""
Called when a nick joins a channel.
:type nick: const CNick &
:type channel: CChan &
:rtype: None
"""
self.debug_hook()
self.put_log('*** Joins: ' + nick.GetNick() + ' (' + nick.GetIdent() + '@' + nick.GetHost() + ')',
channel.GetName())
def OnPart(self, nick, channel, message):
"""
Called when a nick parts a channel.
:type nick: const CNick &
:type channel: CChan &
:type message: const CString &
:rtype: None
"""
self.debug_hook()
self.put_log(
'*** Parts: ' + nick.GetNick() + ' (' + nick.GetIdent() + '@' + nick.GetHost() + ') (' + message + ')',
channel.GetName())
def OnNick(self, oldNick, newNick, channels):
"""
Called when a nickname change occurs.
:type oldNick: const CNick &
:type newNick: const CString &
:type channels: std::vector<CChan*>
:rtype: None
"""
self.debug_hook()
for channel in channels:
self.put_log('*** ' + oldNick.GetNick() + ' is now known as ' + newNick, channel.GetName())
def OnTopic(self, nick, channel, topic):
"""
Called when we receive a channel topic change from IRC.
:type nick: CNick &
:type channel: CChan &
:type topic: CString &
:rtype: EModRet
"""
self.debug_hook()
self.put_log('*** ' + nick.GetNick() + ' changes topic to "' + str(topic) + '"', channel.GetName())
return znc.CONTINUE
# NOTICES
# =======
def OnUserNotice(self, target, message):
"""
This module hook is called when a user sends a NOTICE message.
:type target: CString &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
network = self.GetNetwork()
if network:
self.put_log('-' + network.GetCurNick() + '- ' + str(message), str(target))
return znc.CONTINUE
def OnPrivNotice(self, nick, message):
"""
Called when we receive a private NOTICE message from IRC.
:type nick: CNick &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
self.put_log('-' + nick.GetNick() + '- ' + str(message), nick.GetNick())
return znc.CONTINUE
def OnChanNotice(self, nick, channel, message):
"""
Called when we receive a channel NOTICE message from IRC.
:type nick: CNick &
:type channel: CChan &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
self.put_log('-' + nick.GetNick() + '- ' + str(message), channel.GetName())
return znc.CONTINUE
# ACTIONS
# =======
def OnUserAction(self, target, message):
"""
Called when a client sends a CTCP ACTION request ("/me").
:type target: CString &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
pNetwork = self.GetNetwork()
if pNetwork:
self.put_log('* ' + pNetwork.GetCurNick() + ' ' + str(message), str(target))
return znc.CONTINUE
def OnPrivAction(self, nick, message):
"""
Called when we receive a private CTCP ACTION ("/me" in query) from IRC.
:type nick: CNick &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
self.put_log('* ' + nick.GetNick() + ' ' + str(message), nick.GetNick())
return znc.CONTINUE
def OnChanAction(self, nick, channel, message):
"""
Called when we receive a channel CTCP ACTION ("/me" in a channel) from IRC.
:type nick: CNick &
:type channel: CChan &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
self.put_log('* ' + nick.GetNick() + ' ' + str(message), channel.GetName())
return znc.CONTINUE
# MESSAGES
# ========
def OnUserMsg(self, target, message):
"""
This module hook is called when a user sends a PRIVMSG message.
:type target: CString &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
network = self.GetNetwork()
if network:
self.put_log('<' + network.GetCurNick() + '> ' + str(message), str(target))
return znc.CONTINUE
def OnPrivMsg(self, nick, message):
"""
Called when we receive a private PRIVMSG message from IRC.
:type nick: CNick &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
self.put_log('<' + nick.GetNick() + '> ' + str(message), nick.GetNick())
return znc.CONTINUE
def OnChanMsg(self, nick, channel, message):
"""
Called when we receive a channel PRIVMSG message from IRC.
:type nick: CNick &
:type channel: CChan &
:type message: CString &
:rtype: EModRet
"""
self.debug_hook()
self.put_log('<' + nick.GetNick() + '> ' + str(message), channel.GetName())
return znc.CONTINUE
# LOGGING
# =======
def put_log(self, line, window="Status"):
"""
Adds the log line to database write queue.
"""
self.log_queue.put({
'created_at': datetime.utcnow().isoformat(),
'user': self.GetUser().GetUserName() if self.GetUser() is not None else None,
            'network': self.GetNetwork().GetName() if self.GetNetwork() is not None else None,
'window': window,
'message': line.encode('utf8', 'replace').decode('utf8')})
# DEBUGGING HOOKS
# ===============
def debug_hook(self):
"""
Dumps parent calling method name and its arguments to debug logfile.
"""
if self.hook_debugging is not True:
return
frameinfo = inspect.stack()[1]
argvals = frameinfo.frame.f_locals
with self.internal_log.debug() as target:
target.write('Called method: ' + frameinfo.function + '()\n')
for argname in argvals:
if argname == 'self':
continue
target.write(' ' + argname + ' -> ' + pprint.pformat(argvals[argname]) + '\n')
target.write('\n')
# ARGUMENT PARSING
# ================
def parse_args(self, args):
if args.strip() == '':
raise Exception('Missing argument. Provide connection string as an argument.')
        match = re.search(r'^\s*sqlite(?:://(.+))?\s*$', args)
if match:
if match.group(1) is None:
return SQLiteDatabase({'database': os.path.join(self.GetSavePath(), 'logs.sqlite')})
else:
return SQLiteDatabase({'database': match.group(1)})
        match = re.search(r'^\s*mysql://(.+?):(.+?)@(.+?)/(.+)\s*$', args)
if match:
return MySQLDatabase({'host': match.group(3),
'user': match.group(1),
'passwd': match.group(2),
'db': match.group(4)})
        match = re.search(r'^\s*postgres://(.+?):(.+?)@(.+?)/(.+)\s*$', args)
if match:
return PostgresDatabase({'host': match.group(3),
'user': match.group(1),
'password': match.group(2),
'database': match.group(4)})
raise Exception('Unrecognized connection string. Check the documentation.')
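# Illustrative sketch (not part of the original module): the same regular expression
# parse_args() uses for MySQL connection strings, applied to a made-up DSN.
def _demo_parse_mysql_dsn():
    match = re.search(r'^\s*mysql://(.+?):(.+?)@(.+?)/(.+)\s*$',
                      'mysql://znc:secret@localhost/znc_logs')
    return {'host': match.group(3), 'user': match.group(1),
            'passwd': match.group(2), 'db': match.group(4)}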
class DatabaseThread:
@staticmethod
def worker_safe(db, log_queue: multiprocessing.SimpleQueue, internal_log) -> None:
try:
DatabaseThread.worker(db, log_queue, internal_log)
except Exception as e:
with internal_log.error() as target:
target.write('Unrecoverable exception in worker thread: {0} {1}\n'.format(type(e), str(e)))
target.write('Stack trace: ' + traceback.format_exc())
target.write('\n')
raise
@staticmethod
def worker(db, log_queue: multiprocessing.SimpleQueue, internal_log) -> None:
db.connect()
while True:
item = log_queue.get()
if item is None:
break
try:
db.ensure_connected()
db.insert_into('logs', item)
except Exception as e:
sleep_for = 10
with internal_log.error() as target:
target.write('Could not save to database caused by: {0} {1}\n'.format(type(e), str(e)))
if 'open' in dir(db.conn):
target.write('Database handle state: {}\n'.format(db.conn.open))
target.write('Stack trace: ' + traceback.format_exc())
target.write('Current log: ')
json.dump(item, target)
target.write('\n\n')
target.write('Retry in {} s\n'.format(sleep_for))
sleep(sleep_for)
with internal_log.error() as target:
                    target.write('Retrying now.\n')
log_queue.put(item)
class InternalLog:
def __init__(self, save_path: str):
self.save_path = save_path
def debug(self):
return self.open('debug')
def error(self):
return self.open('error')
def open(self, level: str):
target = open(os.path.join(self.save_path, level + '.log'), 'a')
line = 'Log opened at: {} UTC\n'.format(datetime.utcnow())
target.write(line)
target.write('=' * len(line) + '\n\n')
return target
class Database:
def __init__(self, dsn: dict):
self.dsn = dsn
self.conn = None
class PostgresDatabase(Database):
def connect(self) -> None:
import psycopg2
self.conn = psycopg2.connect(**self.dsn)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.conn.cursor().execute('''
CREATE TABLE IF NOT EXISTS logs (
"id" BIGSERIAL NOT NULL,
"created_at" TIMESTAMP WITH TIME ZONE NOT NULL,
"user" VARCHAR(128) DEFAULT NULL,
"network" VARCHAR(128) DEFAULT NULL,
"window" VARCHAR(255) NOT NULL,
"message" TEXT,
PRIMARY KEY (id)
);
''')
self.conn.commit()
def ensure_connected(self):
if self.conn.status == 0:
self.connect()
def insert_into(self, table, row):
cols = ', '.join('"{}"'.format(col) for col in row.keys())
vals = ', '.join('%({})s'.format(col) for col in row.keys())
sql = 'INSERT INTO {} ({}) VALUES ({})'.format(table, cols, vals)
self.conn.cursor().execute(sql, row)
self.conn.commit()
class MySQLDatabase(Database):
def connect(self) -> None:
import pymysql
self.conn = pymysql.connect(use_unicode=True, charset='utf8mb4', **self.dsn)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.conn.cursor().execute('''
CREATE TABLE IF NOT EXISTS `logs` (
`id` INT(11) NOT NULL AUTO_INCREMENT,
`created_at` DATETIME NOT NULL,
`user` VARCHAR(128) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`network` VARCHAR(128) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`window` VARCHAR(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`message` TEXT COLLATE utf8mb4_unicode_ci,
PRIMARY KEY (`id`),
KEY `created_at` (`created_at`),
KEY `user` (`user`),
KEY `network` (`network`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=COMPRESSED;
''')
self.conn.commit()
def ensure_connected(self):
if self.conn.open is False:
self.connect()
def insert_into(self, table, row):
cols = ', '.join('`{}`'.format(col) for col in row.keys())
vals = ', '.join('%({})s'.format(col) for col in row.keys())
sql = 'INSERT INTO `{}` ({}) VALUES ({})'.format(table, cols, vals)
self.conn.cursor().execute(sql, row)
self.conn.commit()
class SQLiteDatabase(Database):
def connect(self) -> None:
import sqlite3
self.conn = sqlite3.connect(**self.dsn)
self.conn.cursor().execute('''
CREATE TABLE IF NOT EXISTS [logs](
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[created_at] DATETIME NOT NULL,
[user] VARCHAR,
[network] VARCHAR,
[window] VARCHAR,
[message] TEXT);
''')
self.conn.commit()
def ensure_connected(self):
pass
def insert_into(self, table: str, row: dict) -> None:
cols = ', '.join('[{}]'.format(col) for col in row.keys())
vals = ', '.join(':{}'.format(col) for col in row.keys())
sql = 'INSERT INTO [{}] ({}) VALUES ({})'.format(table, cols, vals)
self.conn.cursor().execute(sql, row)
self.conn.commit()
|
{
"content_hash": "8e9eeefae39c8bb2cefb578f0f7067e7",
"timestamp": "",
"source": "github",
"line_count": 529,
"max_line_length": 119,
"avg_line_length": 33.332703213610586,
"alnum_prop": 0.5438099018885045,
"repo_name": "stil/zlog-sql",
"id": "0a102123fab49ffb17e90494a24f32fe313a6915",
"size": "17633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zlog_sql.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17633"
}
],
"symlink_target": ""
}
|
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
import warnings
from google.api_core import gapic_v1, grpc_helpers
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.servicedirectory_v1.types import lookup_service
from .base import DEFAULT_CLIENT_INFO, LookupServiceTransport
class LookupServiceGrpcTransport(LookupServiceTransport):
"""gRPC backend transport for LookupService.
Service Directory API for looking up service data at runtime.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "servicedirectory.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: Optional[grpc.Channel] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
api_audience=api_audience,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "servicedirectory.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service."""
return self._grpc_channel
@property
def resolve_service(
self,
) -> Callable[
[lookup_service.ResolveServiceRequest], lookup_service.ResolveServiceResponse
]:
r"""Return a callable for the resolve service method over gRPC.
Returns a [service][google.cloud.servicedirectory.v1.Service]
and its associated endpoints. Resolving a service is not
considered an active developer method.
Returns:
Callable[[~.ResolveServiceRequest],
~.ResolveServiceResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "resolve_service" not in self._stubs:
self._stubs["resolve_service"] = self.grpc_channel.unary_unary(
"/google.cloud.servicedirectory.v1.LookupService/ResolveService",
request_serializer=lookup_service.ResolveServiceRequest.serialize,
response_deserializer=lookup_service.ResolveServiceResponse.deserialize,
)
return self._stubs["resolve_service"]
def close(self):
self.grpc_channel.close()
@property
def kind(self) -> str:
return "grpc"
__all__ = ("LookupServiceGrpcTransport",)
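# Illustrative sketch (not part of the generated module): the ``resolve_service``
# property returns a callable stub that can be invoked directly with a request message.
# The resource name below is hypothetical and a real call needs valid credentials.
def _example_resolve(transport: LookupServiceGrpcTransport):
    request = lookup_service.ResolveServiceRequest(
        name="projects/my-project/locations/us-east1/namespaces/my-ns/services/my-svc"
    )
    return transport.resolve_service(request)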
|
{
"content_hash": "120f27a71b27f9a42e7769c87d193e7c",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 88,
"avg_line_length": 44.681102362204726,
"alnum_prop": 0.6105383734249714,
"repo_name": "googleapis/python-service-directory",
"id": "965a4db1bc1decb8f9a3fa791e4efe3d76dc65cb",
"size": "11949",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/cloud/servicedirectory_v1/services/lookup_service/transports/grpc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "1427148"
},
{
"name": "Shell",
"bytes": "30693"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('contentcuration', '0001_squashed_0094_auto_20180910_2342'),
]
operations = [
migrations.AddField(
model_name='channel',
name='thumbnail_encoding_json',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
]
|
{
"content_hash": "b865729fc1f6460bf39e79d1b376782a",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 79,
"avg_line_length": 25.31578947368421,
"alnum_prop": 0.6548856548856549,
"repo_name": "fle-internal/content-curation",
"id": "919b892d9f98aa3bed1bde685f826457302db963",
"size": "555",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "contentcuration/contentcuration/migrations/0002_auto_20181220_1734.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "133945"
},
{
"name": "HTML",
"bytes": "441946"
},
{
"name": "JavaScript",
"bytes": "531756"
},
{
"name": "Makefile",
"bytes": "2252"
},
{
"name": "Python",
"bytes": "467434"
},
{
"name": "Shell",
"bytes": "1156"
},
{
"name": "Vue",
"bytes": "21574"
}
],
"symlink_target": ""
}
|
from graphql.core.language.location import SourceLocation
from graphql.core.validation.rules import LoneAnonymousOperation
from utils import expect_passes_rule, expect_fails_rule
def anon_not_alone(line, column):
return {
'message': LoneAnonymousOperation.anonymous_operation_not_alone_message(),
'locations': [SourceLocation(line, column)]
}
def test_no_operations():
expect_passes_rule(LoneAnonymousOperation, '''
fragment fragA on Type {
field
}
''')
def test_one_anon_operation():
expect_passes_rule(LoneAnonymousOperation, '''
{
field
}
''')
def test_multiple_named_operation():
expect_passes_rule(LoneAnonymousOperation, '''
query Foo {
field
}
query Bar {
field
}
''')
def test_anon_operation_with_fragment():
expect_passes_rule(LoneAnonymousOperation, '''
{
...Foo
}
fragment Foo on Type {
field
}
''')
def test_multiple_anon_operations():
expect_fails_rule(LoneAnonymousOperation, '''
{
fieldA
}
{
fieldB
}
''', [
anon_not_alone(2, 7),
anon_not_alone(5, 7),
])
def test_anon_operation_with_another_operation():
expect_fails_rule(LoneAnonymousOperation, '''
{
fieldA
}
mutation Foo {
fieldB
}
''', [
anon_not_alone(2, 7)
])
|
{
"content_hash": "467b2f8d99a1857a16d54e2fd79e2047",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 82,
"avg_line_length": 19.092105263157894,
"alnum_prop": 0.5761543762922122,
"repo_name": "gabriel-laet/graphql-py",
"id": "a9b74885da26b7129b2108433789adf26caad5a7",
"size": "1451",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/core_validation/test_lone_anonymous_operation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "468582"
}
],
"symlink_target": ""
}
|
"""
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import time
import traceback
import signal
import sys
from django.core.wsgi import get_wsgi_application
sys.path.insert(0, '/usr/local/cog/cog_install')
os.environ["COG_CONFIG_DIR"] = "/usr/local/cog/cog_config"
os.environ['HTTPS'] = "on" # instructs Django to prepend 'https' to fully generated links
os.environ["SSL_CERT_DIR"] = "/etc/grid-security/certificates"
os.environ["SSL_CERT_FILE"] = "/etc/certs/esgf-ca-bundle.crt"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# print debugging information
print 'Using Python version: %s' % sys.version
print 'Using Python path: %s' % sys.path
print 'PYTHONPATH=%s' % os.environ.get('PYTHONPATH', None)
print 'LD_LIBRARY_PATH=%s' % os.environ.get('LD_LIBRARY_PATH', None)
print 'SSL_CERT_DIR=%s' % os.environ.get('SSL_CERT_DIR', None)
print 'SSL_CERT_FILE=%s' % os.environ.get('SSL_CERT_FILE', None)
try:
application = get_wsgi_application()
print 'WSGI without exception'
except Exception:
print 'handling WSGI exception'
# Error loading applications
if 'mod_wsgi' in sys.modules:
traceback.print_exc()
os.kill(os.getpid(), signal.SIGINT)
time.sleep(2.5)
|
{
"content_hash": "a67a1fe1fd12378ec7723bac9c243b88",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 89,
"avg_line_length": 38.361702127659576,
"alnum_prop": 0.7398779811425402,
"repo_name": "EarthSystemCoG/COG",
"id": "27733709cefe5dac3096b5ba8ebf3f4b04b43713",
"size": "1803",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "apache/wsgi.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "270"
},
{
"name": "CSS",
"bytes": "893678"
},
{
"name": "Classic ASP",
"bytes": "48011"
},
{
"name": "HTML",
"bytes": "96546078"
},
{
"name": "Java",
"bytes": "483882"
},
{
"name": "JavaScript",
"bytes": "13287152"
},
{
"name": "MATLAB",
"bytes": "30087"
},
{
"name": "PHP",
"bytes": "80287"
},
{
"name": "Python",
"bytes": "852780"
},
{
"name": "Rich Text Format",
"bytes": "6112"
},
{
"name": "Shell",
"bytes": "10602"
}
],
"symlink_target": ""
}
|
import abc
import itertools
from neutron_lib import constants as const
from neutron_lib.db import constants as db_const
from neutron_lib.utils import net
from oslo_serialization import jsonutils
from neutron.common import constants
from neutron.objects import common_types
from neutron.tests import base as test_base
from neutron.tests import tools
class TestField(object):
def test_coerce_good_values(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual(out_val, self.field.coerce('obj', 'attr', in_val))
def test_coerce_bad_values(self):
for in_val in self.coerce_bad_values:
self.assertRaises((TypeError, ValueError),
self.field.coerce, 'obj', 'attr', in_val)
def test_to_primitive(self):
for in_val, prim_val in self.to_primitive_values:
self.assertEqual(prim_val, self.field.to_primitive('obj', 'attr',
in_val))
def test_to_primitive_json_serializable(self):
for in_val, _ in self.to_primitive_values:
prim = self.field.to_primitive('obj', 'attr', in_val)
jsencoded = jsonutils.dumps(prim)
self.assertEqual(prim, jsonutils.loads(jsencoded))
def test_from_primitive(self):
class ObjectLikeThing(object):
_context = 'context'
for prim_val, out_val in self.from_primitive_values:
from_prim = self.field.from_primitive(ObjectLikeThing, 'attr',
prim_val)
self.assertEqual(out_val, from_prim)
            # ensure it's coercible for sanity
self.field.coerce('obj', 'attr', from_prim)
@abc.abstractmethod
def test_stringify(self):
'''This test should validate stringify() format for new field types.'''
class IPV6ModeEnumFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(IPV6ModeEnumFieldTest, self).setUp()
self.field = common_types.IPV6ModeEnumField()
self.coerce_good_values = [(mode, mode)
for mode in const.IPV6_MODES]
self.coerce_bad_values = ['6', 4, 'type', 'slaacc']
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual("'%s'" % in_val, self.field.stringify(in_val))
class DscpMarkFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(DscpMarkFieldTest, self).setUp()
self.field = common_types.DscpMarkField()
self.coerce_good_values = [(val, val)
for val in const.VALID_DSCP_MARKS]
self.coerce_bad_values = ['6', 'str', [], {}, object()]
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual("%s" % in_val, self.field.stringify(in_val))
class IPNetworkPrefixLenFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(IPNetworkPrefixLenFieldTest, self).setUp()
self.field = common_types.IPNetworkPrefixLenField()
self.coerce_good_values = [(x, x) for x in (0, 32, 128, 42)]
self.coerce_bad_values = ['len', '1', 129, -1]
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual("%s" % in_val, self.field.stringify(in_val))
class MACAddressFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(MACAddressFieldTest, self).setUp()
self.field = common_types.MACAddressField()
mac1 = tools.get_random_EUI()
mac2 = tools.get_random_EUI()
self.coerce_good_values = [(mac1, mac1), (mac2, mac2)]
self.coerce_bad_values = [
'XXXX', 'ypp', 'g3:vvv',
            # the field type is strict and does not allow passing strings, even
# if they represent a valid MAC address
net.get_random_mac('fe:16:3e:00:00:00'.split(':')),
]
self.to_primitive_values = ((a1, str(a2))
for a1, a2 in self.coerce_good_values)
self.from_primitive_values = ((a2, a1)
for a1, a2 in self.to_primitive_values)
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual('%s' % in_val, self.field.stringify(in_val))
class IPNetworkFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(IPNetworkFieldTest, self).setUp()
self.field = common_types.IPNetworkField()
addrs = [
tools.get_random_ip_network(version=ip_version)
for ip_version in constants.IP_ALLOWED_VERSIONS
]
self.coerce_good_values = [(addr, addr) for addr in addrs]
self.coerce_bad_values = [
'ypp', 'g3:vvv',
            # the field type is strict and does not allow passing strings, even
# if they represent a valid IP network
'10.0.0.0/24',
]
self.to_primitive_values = ((a1, str(a2))
for a1, a2 in self.coerce_good_values)
self.from_primitive_values = ((a2, a1)
for a1, a2 in self.to_primitive_values)
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual('%s' % in_val, self.field.stringify(in_val))
class IPVersionEnumFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(IPVersionEnumFieldTest, self).setUp()
self.field = common_types.IPVersionEnumField()
self.coerce_good_values = [(val, val)
for val in constants.IP_ALLOWED_VERSIONS]
self.coerce_bad_values = [5, 0, -1, 'str']
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual("%s" % in_val, self.field.stringify(in_val))
class FlowDirectionEnumFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(FlowDirectionEnumFieldTest, self).setUp()
self.field = common_types.FlowDirectionEnumField()
self.coerce_good_values = [(val, val)
for val in const.VALID_DIRECTIONS]
self.coerce_bad_values = ['test', '8', 10, []]
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual("'%s'" % in_val, self.field.stringify(in_val))
class DomainNameFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(DomainNameFieldTest, self).setUp()
self.field = common_types.DomainNameField()
self.coerce_good_values = [
(val, val)
for val in ('www.google.com', 'hostname', '1abc.com')
]
self.coerce_bad_values = ['x' * (db_const.FQDN_FIELD_SIZE + 1), 10, []]
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual("'%s'" % in_val, self.field.stringify(in_val))
class EtherTypeEnumFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(EtherTypeEnumFieldTest, self).setUp()
self.field = common_types.EtherTypeEnumField()
self.coerce_good_values = [(val, val)
for val in constants.VALID_ETHERTYPES]
self.coerce_bad_values = ['IpV4', 8, 'str', 'ipv6']
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual("'%s'" % in_val, self.field.stringify(in_val))
class IpProtocolEnumFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(IpProtocolEnumFieldTest, self).setUp()
self.field = common_types.IpProtocolEnumField()
self.coerce_good_values = [
(val, val)
for val in itertools.chain(
const.IP_PROTOCOL_MAP.keys(),
[str(v) for v in range(256)]
)
]
self.coerce_bad_values = ['test', 'Udp', 256]
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual("'%s'" % in_val, self.field.stringify(in_val))
class UUIDFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(UUIDFieldTest, self).setUp()
self.field = common_types.UUIDField()
self.coerce_good_values = [
('f1d9cb3f-c263-45d3-907c-d12a9ef1629e',
'f1d9cb3f-c263-45d3-907c-d12a9ef1629e'),
('7188f6637cbd4097a3b1d1bb7897c7c0',
'7188f6637cbd4097a3b1d1bb7897c7c0')]
self.coerce_bad_values = [
'f1d9cb3f-c263-45d3-907c-d12a9ef16zzz',
'7188f6637cbd4097a3b1d1bb7897']
self.to_primitive_values = self.coerce_good_values
self.from_primitive_values = self.coerce_good_values
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual('%s' % in_val, self.field.stringify(in_val))
class DictOfMiscValuesFieldTest(test_base.BaseTestCase, TestField):
def setUp(self):
super(DictOfMiscValuesFieldTest, self).setUp()
self.field = common_types.DictOfMiscValues
test_dict_1 = {'a': True,
'b': 1.23,
'c': ['1', 1.23, True],
'd': {'aa': 'zz'},
'e': '10.0.0.1'}
test_dict_str = jsonutils.dumps(test_dict_1)
self.coerce_good_values = [
(test_dict_1, test_dict_1),
(test_dict_str, test_dict_1)
]
self.coerce_bad_values = [str(test_dict_1), '{"a":}']
self.to_primitive_values = [
(test_dict_1, test_dict_str)
]
self.from_primitive_values = [
(test_dict_str, test_dict_1)
]
def test_stringify(self):
for in_val, out_val in self.coerce_good_values:
self.assertEqual(jsonutils.dumps(in_val),
self.field.stringify(in_val))
|
{
"content_hash": "f6a15093d18309c8b5cb7166490583e9",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 79,
"avg_line_length": 40.7389705882353,
"alnum_prop": 0.6017507445176428,
"repo_name": "noironetworks/neutron",
"id": "e3587b6ed89d25bd086e61eeb6c57d788533f294",
"size": "11692",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/objects/test_common_types.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "11420614"
},
{
"name": "Shell",
"bytes": "38791"
}
],
"symlink_target": ""
}
|
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
db = SQLAlchemy()
bcrypt = Bcrypt()
|
{
"content_hash": "e651a41bfcf7b71342b8998a62fd6e95",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 39,
"avg_line_length": 21.8,
"alnum_prop": 0.7889908256880734,
"repo_name": "soasme/flask-perm",
"id": "d30bee83d7b7c1f8c063ebf1b7207cd0189186e8",
"size": "134",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flask_perm/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1448"
},
{
"name": "JavaScript",
"bytes": "5557"
},
{
"name": "Makefile",
"bytes": "378"
},
{
"name": "Python",
"bytes": "60761"
}
],
"symlink_target": ""
}
|
from django.contrib import admin, messages
from django.contrib.sites.requests import RequestSite
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from registration.models import RegistrationProfile
class RawMixin(object):
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name in self.raw_id_fields:
kwargs.pop("request", None)
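            # NOTE: VerboseForeignKeyRawIdWidget, VerboseManyToManyRawIdWidget and
            # ``site`` are assumed to be provided elsewhere (e.g. imported from a
            # local widgets module and ``admin.site``); they are not defined or
            # imported in this file.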
type = db_field.rel.__class__.__name__
if type == "ManyToOneRel":
kwargs['widget'] = VerboseForeignKeyRawIdWidget(db_field.rel, site)
elif type == "ManyToManyRel":
kwargs['widget'] = VerboseManyToManyRawIdWidget(db_field.rel, site)
return db_field.formfield(**kwargs)
return super(RawMixin, self).formfield_for_dbfield(db_field, **kwargs)
class RegistrationAdmin(RawMixin, admin.ModelAdmin):
actions = ['activate_users', 'resend_activation_email']
list_display = ('user', 'expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name', 'user__last_name')
def activate_users(self, request, queryset):
for profile in queryset:
RegistrationProfile.objects.activate_user(profile.activation_key)
activate_users.short_description = _("Activate users")
def resend_activation_email(self, request, queryset):
if Site._meta.installed:
site = Site.objects.get_current()
else:
site = RequestSite(request)
for profile in queryset:
profile.send_activation_email(site)
resend_activation_email.short_description = _("Re-send activation emails")
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
{
"content_hash": "177d0b7ae36f89248ccf39a8809f68a3",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 76,
"avg_line_length": 38.80952380952381,
"alnum_prop": 0.7220858895705522,
"repo_name": "chriscauley/django-registration",
"id": "119854a99fcce10c838ffed80997dff625abb231",
"size": "1630",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "registration/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "5677"
},
{
"name": "Python",
"bytes": "19165"
}
],
"symlink_target": ""
}
|
"""
Resource class and its manager for LBaaS backend members in Networking V2 API
"""
from osclient2 import base
from osclient2 import mapper
from osclient2 import utils
ATTRIBUTE_MAPPING = [
('id', 'id', mapper.Noop),
('address', 'address', mapper.Noop),
('protocol_port', 'protocol_port', mapper.Noop),
('weight', 'weight', mapper.Noop),
('subnet', 'subnet_id', mapper.Resource('neutron.subnet')),
('project', 'tenant_id', mapper.Resource('project')),
('is_enabled', 'admin_state_up', mapper.Noop),
]
class Resource(base.Resource):
"""Resource class for LBaaS backend members in Networking V2 API"""
def update(self, weight=None, is_enabled=None):
"""
        Update an LBaaS member for a pool
@keyword weight: Weight
@type weight: int
@keyword is_enabled: Whether the member is enabled
@type is_enabled: bool
@rtype: None
"""
super(Resource, self).update(weight=weight,
is_enabled=is_enabled)
class Manager(base.SubManager):
"""Manager class for LBaaS backend members in Networking V2 API"""
resource_class = Resource
service_type = 'network'
_attr_mapping = ATTRIBUTE_MAPPING
_json_resource_key = 'member'
_json_resources_key = 'members'
_url_resource_path = '/v2.0/lbaas/pools/%s/members'
    def create(self, address=None, port=None, weight=None, project=None,
               is_enabled=None):
"""
        Register an LBaaS member for a pool
@keyword address: IP address (required)
@type address: str
@keyword port: Protocol port (required)
@type port: int
@keyword weight: Member weight
@type weight: int
@keyword project: Project
@type project: osclient2.project.Resource
@keyword is_enabled: Whether the member is enabled
@type is_enabled: bool
@return: Registered member
@rtype: osclient2.neutron.v2.lbaas.member.Resource
"""
return super(Manager, self).create(address=address,
port=port,
weight=weight,
project=project,
is_enabled=is_enabled)
|
{
"content_hash": "5aa5eaff8af80aabbd177535d6bf60d4",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 77,
"avg_line_length": 33.21739130434783,
"alnum_prop": 0.5885689354275742,
"repo_name": "yosshy/osclient2",
"id": "427ae8be502ff9f3fb3ade4643f700911a8f1203",
"size": "2959",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "osclient2/neutron/v2/lbaas/member.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "397008"
},
{
"name": "Shell",
"bytes": "398"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class UserProfile(models.Model):
user = models.OneToOneField(User)
display_name = models.CharField(max_length=128)
can_direct_edit = models.BooleanField(default=False)
def __unicode__(self):
return self.user.username
|
{
"content_hash": "ef4cef36a3205ba6b632b02beef6257b",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 56,
"avg_line_length": 31.181818181818183,
"alnum_prop": 0.7346938775510204,
"repo_name": "AndrewRook/game_designer",
"id": "ccad7a6020ae6c55624e30176804fd4ae4299ccc",
"size": "343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "user_control/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35336"
}
],
"symlink_target": ""
}
|
"""Discrete distribution used for nonparametric functions."""
import math
from distributions.distribution import Distribution
class NumericDistribution(Distribution):
"""Class representing a discretization of a distribution, which is
    required after certain mathematical operations (e.g. convolution).
    The distribution consists of a list of PDF values and an offset. It is
    automatically normalized by the sum of those PDF values to avoid numeric
    error."""
def __init__(self, values, offset=0):
"""@p values is a list of PDF values P[i] (automatically normalized)
representing the probability of an outcome between offset+i and
offset+i+1."""
self._values = values
self._offset = int(offset)
self._scale = 1 / sum(values)
assert self._scale > 0, (
"NumericDistribution(%s) had zero scale" % values)
assert self._scale < float("inf"), (
"NumericDistribution(%s) had inf scale" % values)
BEFORE, AFTER = [-3, -2]
def _bucket(self, x):
if x < self._offset:
return self.BEFORE
try:
bucket = math.floor(x) - self._offset
except OverflowError as _:
return self.AFTER
if bucket >= len(self._values):
return self.AFTER
return bucket
def pdf(self, x):
bucket = self._bucket(x)
if bucket in (self.BEFORE, self.AFTER):
return 0
return self._values[bucket] * self._scale
def cdf(self, x):
bucket = self._bucket(x)
if bucket == self.BEFORE:
return 0
elif bucket == self.AFTER:
return 1
point_in_bucket = x - bucket - self._offset
return (sum(self._values[:bucket]) +
(self._values[bucket] * point_in_bucket)) * self._scale
def point_on_curve(self):
return self._offset + (len(self._values) / 2)
# Leave the default quantile() implementation in place because it is
# probably fast enough.
def contains_point_masses(self):
return False
def __repr__(self):
return "NumericDistribution(offset=%d, scale=%f, %s)" % (
self._offset, self._scale, self._values)
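# A minimal usage sketch (illustrative only; the values below are hypothetical
# and not part of the original module): a uniform distribution over [10, 14)
# discretized into four unit-wide buckets.
if __name__ == "__main__":
    _demo = NumericDistribution([1, 1, 1, 1], offset=10)
    assert abs(_demo.pdf(11.5) - 0.25) < 1e-9  # each bucket carries 1/4 of the mass
    assert abs(_demo.cdf(12.0) - 0.5) < 1e-9   # half the mass lies below 12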
|
{
"content_hash": "12890d9ba6491110d04f9a5b093119ed",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 76,
"avg_line_length": 33.343283582089555,
"alnum_prop": 0.6029543419874664,
"repo_name": "ggould256/libpmp",
"id": "49de2818c354ea151b9328641b26c393407dde36",
"size": "2810",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "distributions/numeric.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1923"
},
{
"name": "Python",
"bytes": "63087"
}
],
"symlink_target": ""
}
|
"""
WSGI config for group_meeting project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "group_meeting.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
{
"content_hash": "b8a5c012a325dd8442a0fca5bde3ba50",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 78,
"avg_line_length": 28.642857142857142,
"alnum_prop": 0.7755610972568578,
"repo_name": "scavedo159/group_meeting",
"id": "8d5e199d6c4a87c4def06e0ef9810f2672e9a495",
"size": "401",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "group_meeting/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2114"
},
{
"name": "JavaScript",
"bytes": "92478"
},
{
"name": "Python",
"bytes": "42664"
}
],
"symlink_target": ""
}
|
import functools as ft
import types
import typing
import warnings
from typing import Any, Callable, Dict
import jax
import jax.interpreters.ad as ad
import jax.tree_util as jtu
from jaxtyping import Array, PyTree
from .custom_types import BoolAxisSpec, sentinel
from .doc_utils import doc_strip_annotations
from .filters import (
combine,
filter,
is_array,
is_inexact_array,
is_inexact_array_like,
partition,
)
from .make_jaxpr import filter_make_jaxpr
from .module import Module, module_update_wrapper, Static, static_field
class _ValueAndGradWrapper(Module):
_fun: Callable
_arg: PyTree[BoolAxisSpec]
_gradkwargs: Dict[str, Any]
# Try to avoid clashes with existing argument names.
# TODO: use "/" once we're on Python 3.8.
def __call__(__self, __x, *args, **kwargs):
@ft.partial(jax.value_and_grad, argnums=0, **__self._gradkwargs)
def fun_value_and_grad(_diff_x, _nondiff_x, *_args, **_kwargs):
_x = combine(_diff_x, _nondiff_x)
return __self._fun(_x, *_args, **_kwargs)
diff_x, nondiff_x = partition(__x, __self._arg)
return fun_value_and_grad(diff_x, nondiff_x, *args, **kwargs)
def __get__(self, instance, owner):
if instance is None:
return self
return jtu.Partial(self, instance)
class _GradWrapper(Module):
_fun_value_and_grad: _ValueAndGradWrapper
_has_aux: bool
def __call__(__self, *args, **kwargs):
value, grad = __self._fun_value_and_grad(*args, **kwargs)
if __self._has_aux:
_, aux = value
return grad, aux
else:
return grad
def __get__(self, instance, owner):
if instance is None:
return self
return jtu.Partial(self, instance)
@doc_strip_annotations
def filter_value_and_grad(
fun: Callable = sentinel,
*,
arg: PyTree[BoolAxisSpec] = is_inexact_array,
**gradkwargs,
) -> Callable:
"""As [`equinox.filter_grad`][], except that it is `jax.value_and_grad` that is
wrapped.
"""
if fun is sentinel:
return ft.partial(filter_value_and_grad, arg=arg, **gradkwargs)
filter_spec = gradkwargs.pop("filter_spec", None)
if filter_spec is not None:
warnings.warn("For brevity the `filter_spec` argument has been renamed `arg`")
arg = filter_spec
argnums = gradkwargs.pop("argnums", None)
if argnums is not None:
raise ValueError(
"`argnums` should not be passed. If you need to differentiate "
"multiple objects then collect them into a tuple and pass that "
"as the first argument."
)
return module_update_wrapper(_ValueAndGradWrapper(fun, arg, gradkwargs), fun)
@doc_strip_annotations
def filter_grad(
fun: Callable = sentinel,
*,
arg: PyTree[BoolAxisSpec] = is_inexact_array,
**gradkwargs,
):
"""As `jax.grad`, but accepts arbitrary PyTrees as inputs. (Not just JAXable types.)
!!! info
By default, all inexact (floating-point) JAX arrays are differentiated. Any
nondifferentiable leaves will have `None` as the gradient.
**Arguments:**
    - `fun` is a pure function to differentiate.
- `arg` is a PyTree whose structure should be a prefix of the structure of
the **first** argument to `fun`. It behaves as the `filter_spec` argument to
[`equinox.filter`][]. Truthy values will be differentiated; falsey values will
not.
- `**gradkwargs` are any other keyword arguments to `jax.grad`.
**Returns:**
A function computing the derivative of `fun` with respect to its first input. Any
nondifferentiable leaves will have `None` as the gradient. See
[`equinox.apply_updates`][] for a convenience function that will only attempt to
apply non-`None` updates.
!!! tip
If you need to differentiate multiple objects, then put them together into a
tuple and pass that through the first argument:
```python
# We want to differentiate `func` with respect to both `x` and `y`.
def func(x, y):
...
@equinox.filter_grad
def grad_func(x__y):
x, y = x__y
return func(x, y)
```
"""
if fun is sentinel:
return ft.partial(filter_grad, arg=arg, **gradkwargs)
has_aux = gradkwargs.get("has_aux", False)
fun_value_and_grad = filter_value_and_grad(fun, arg=arg, **gradkwargs)
return module_update_wrapper(_GradWrapper(fun_value_and_grad, has_aux), fun)
def _is_none(x):
return x is None
def _is_jvp_tracer(x):
return isinstance(x, ad.JVPTracer)
def filter_jvp(fn, primals, tangents):
"""Like `jax.jvp`, but accepts arbitrary PyTrees. (Not just JAXable types.)
**Arguments:**
- `fn`: Function to be differentiated. Its arguments can be Python objects, and
its return type can be any Python object.
- `primals`: The primal values at which `fn` should be evaluated. Should be a
sequence of arguments, and its length should be equal to the number of
        positional parameters of `fn`.
- `tangents`: The tangent vector for which the Jacobian-vector product should be
calculated. Should be a PyTree with the same structure as `primals`. The leaves
of `tangents` must be either floating-point JAX arrays, or Python floats, or
`None`s. The tangent must be `None` for any primal which is not itself a
floating-point JAX array or Python float.
**Returns:**
A pair `(primals_out, tangents_out)` is returned,
where `primals_out = fn(*primals)` and `tangents_out` is the Jacobian-vector
product of `fn` evaluated at `primals` with `tangents`.
The `tangents_out` has the same structure as `primals_out`, but has `None` for
any leaves that aren't differentiable.
!!! Tip
Unlike `jax.jvp`, this function does not support a `has_aux` argument. It isn't
needed, as unlike `jax.jvp` the output of this function can be of arbitrary type.
"""
if jtu.tree_structure(primals, is_leaf=_is_none) != jtu.tree_structure(
tangents, is_leaf=_is_none
):
raise ValueError("primals and tangents must have the same pytree structure")
filter_spec = jtu.tree_map(_is_none, tangents, is_leaf=_is_none)
static_primals, dynamic_primals = partition(primals, filter_spec)
flat_dynamic_primals, treedef = jtu.tree_flatten(dynamic_primals)
flat_tangents = jtu.tree_leaves(tangents) # all non-None tangents are dynamic
def _fn(*_flat_dynamic):
_dynamic = jtu.tree_unflatten(treedef, _flat_dynamic)
_in = combine(_dynamic, static_primals)
_out = fn(*_in)
_dynamic_out, _static_out = partition(_out, _is_jvp_tracer)
return _dynamic_out, Static(_static_out)
primal_out, tangent_out = jax.jvp(_fn, flat_dynamic_primals, flat_tangents)
dynamic_primal_out, static_primal_out = primal_out
primal_out = combine(dynamic_primal_out, static_primal_out.value)
tangent_out, _ = tangent_out
return primal_out, tangent_out
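# A minimal usage sketch for `filter_jvp` (hypothetical, not from the library docs):
#
#     import jax.numpy as jnp
#     f = lambda x, n: x ** n          # `n` is a static Python int, so its tangent is None
#     primals_out, tangents_out = filter_jvp(
#         f, (jnp.array(3.0), 2), (jnp.array(1.0), None))
#     # primals_out == 9.0 and tangents_out == 6.0, i.e. d/dx x**2 evaluated at x=3.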
def filter_vjp(fun, *primals, has_aux=False):
"""Filtered version of `jax.vjp`.
**Arguments:**
- `fun`: The function to be differentiated. Will be called as `fun(*primals)`. Can
return an arbitrary PyTree.
- `primals`: The arguments at which `fun` will be evaluated and differentiated.
Can be arbitrary PyTrees.
- `has_aux`: Indicates whether `fun` returns a pair, with the first element the
        output to be differentiated, and the second element auxiliary data.
        Defaults to `False`.
**Returns:**
If `has_aux is False` then returns a `(primals_out, vjpfun)` pair, where
`primals_out = fun(*primals)` and `vjpfun` is a function from a cotangent vector
with the same shape as `primals_out` to a tuple of cotangent vectors with the same
shape as `primals`, representing the vector-Jacobian product of `fun` evaluated at
`primals`.
If `has_aux is True` then returns a tuple `(primals_out, vjpfun, aux)`, where `aux`
is the auxiliary data returned from `fun`.
The cotangent passed to `vjpfun` should have arrays corresponding to all
floating-point arrays in `primals_out`, and `None` for all other PyTree leaves. The
cotangents returned from `vjpfun` will likewise have arrays for all `primals` that
are floating-point arrays, and `None` for all other PyTree leaves.
"""
diff, nondiff = partition(primals, is_inexact_array)
def diff_fun(*_diff):
_primals = combine(_diff, nondiff)
_out = fun(*_primals)
if has_aux:
_out, _aux = _out
else:
_aux = None
_diff_out, _nondiff_out = partition(_out, is_inexact_array)
return _diff_out, (_nondiff_out, _aux)
diff_out, vjp_fn, (nondiff_out, aux) = jax.vjp(diff_fun, *diff, has_aux=True)
out = combine(diff_out, nondiff_out)
if has_aux:
return out, vjp_fn, aux
else:
return out, vjp_fn
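# A minimal usage sketch for `filter_vjp` (hypothetical, not from the library docs):
#
#     import jax.numpy as jnp
#     f = lambda x, n: x ** n
#     out, vjp_fn = filter_vjp(f, jnp.array(3.0), 2)
#     grad_x, grad_n = vjp_fn(jnp.array(1.0))
#     # out == 9.0, grad_x == 6.0; grad_n is None because the int is nondifferentiable.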
class _ClosureConvert(Module):
jaxpr: jax.core.Jaxpr = static_field()
consts: PyTree[Array] # Captured in the PyTree structure of _ClosureConvert
out_dynamic_struct: PyTree[jax.ShapeDtypeStruct] = static_field()
out_static: PyTree[Any] = static_field()
def __call__(self, *args, **kwargs):
dynamic = filter((args, kwargs), is_array)
dynamic_flat = jtu.tree_leaves(dynamic)
out_dynamic_flat = jax.core.eval_jaxpr(self.jaxpr, self.consts, *dynamic_flat)
out_dynamic_struct_flat, out_dynamic_treedef = jtu.tree_flatten(
self.out_dynamic_struct
)
assert len(out_dynamic_flat) == len(out_dynamic_struct_flat)
for o1, o2 in zip(out_dynamic_flat, out_dynamic_struct_flat):
assert o1.shape == o2.shape
assert o1.dtype == o2.dtype
out = jtu.tree_unflatten(out_dynamic_treedef, out_dynamic_flat)
out = combine(out, self.out_static)
return out
def filter_closure_convert(fn, *args, **kwargs):
"""As `jax.closure_convert`, but works on functions accepting and returning
arbitrary PyTree objects. In addition, all JAX arrays are hoisted into constants
(not just floating point arrays).
This is useful for explicitly capturing any closed-over JAX tracers
before crossing an API boundary, such as `jax.grad`, `jax.custom_vjp`, or the
rule of a custom primitive.
**Arguments:**
- `fn`: The function to call. Will be called as `fun(*args, **kwargs)`.
- `args`, `kwargs`: Example arguments at which to call the function. The function is
    not actually evaluated on these arguments; all JAX arrays are substituted for
tracers. Note that Python builtins (`bool`, `int`, `float`, `complex`) are
not substituted for tracers and are passed through as-is.
**Returns:**
A new function, which can be called in the same way, using `*args` and `**kwargs`.
Will contain all closed-over tracers of `fn` as part of its PyTree structure.
!!! Example
```python
@jax.grad
def f(x, y):
z = x + y
g = lambda a: z + a # closes over z
g2 = filter_closure_convert(g, 1)
assert [id(b) for b in g2.consts] == [id(z)]
return z
f(1., 1.)
```
"""
if fn.__closure__ is None:
# In this case, it's not possible to have any closed-over tracers.
return fn
closed_jaxpr, out_dynamic_struct, out_static = filter_make_jaxpr(fn)(
*args, **kwargs
)
jaxpr = closed_jaxpr.jaxpr
consts = closed_jaxpr.consts
return _ClosureConvert(jaxpr, consts, out_dynamic_struct, out_static)
class filter_custom_jvp:
"""Filtered version of `jax.custom_jvp`.
Works in the same way as `jax.custom_jvp`, except that you do not need to specify
`nondiff_argnums`. Instead, arguments are automatically split into differentiable
and nondifferentiable based on whether or not they are a floating-point JAX array.
The tangents of the nondifferentiable arguments will be passed as `None`.
The return types must still all be JAX types.
Example:
```python
@equinox.filter_custom_jvp
def call(fn, x):
return fn(x)
@call.defjvp
def call_jvp(primals, tangents):
fn, x = primals
_, tx = tangents
primal_out = call(fn, x)
tangent_out = tx**2
return primal_out, tangent_out
```
"""
def __init__(self, fn):
def fn_wrapper(static, dynamic):
return fn(*combine(dynamic, static))
self.fn = jax.custom_jvp(fn_wrapper, nondiff_argnums=(0,))
def defjvp(self, fn_jvp):
def fn_jvp_wrapper(static, dynamic, tangents):
(dynamic,) = dynamic
(tangents,) = tangents
primals = combine(dynamic, static)
return fn_jvp(primals, tangents)
self.fn.defjvp(fn_jvp_wrapper)
def defjvps(self, *a, **kw):
raise NotImplementedError("filter_custom_jvp().defjvps is not implemented")
def __call__(self, *args):
dynamic, static = partition(args, is_inexact_array_like)
return self.fn(static, dynamic)
class filter_custom_vjp:
"""As `jax.custom_vjp`, but with a nicer interface.
Usage is:
```python
@equinox.filter_custom_vjp
def fn(vjp_arg, *args, **kwargs):
# vjp_arg is some PyTree of arbitrary Python objects.
# args, kwargs contain arbitrary Python objects.
...
return obj # some PyTree of arbitrary Python objects.
def fn_fwd(vjp_arg, *args, **kwargs):
...
# Should return `obj` as before. `residuals` can be any collection of JAX
# arrays you want to keep around for the backward pass.
return obj, residuals
def fn_bwd(residuals, grad_obj, vjp_arg, *args, **kwargs):
# grad_obj will have `None` as the gradient for any leaves of `obj` that were
# not JAX arrays
...
# grad_vjp_arg should have `None` as the gradient for any leaves of `vjp_arg`
# that were not JAX arrays.
return grad_vjp_arg
fn.defvjp(fn_fwd, fn_bwd)
```
The key differences to `jax.custom_vjp` are that:
- Only the gradient of the first argument, `vjp_arg`, should be computed on the
backward pass. Everything else will automatically have zero gradient.
- You do not need to distinguish differentiable from nondifferentiable manually.
Instead you should return gradients for all inexact JAX arrays in the first
argument. (And just put `None` on every other leaf of the PyTree.)
- As a convenience, all of the inputs from the forward pass are additionally made
available to you on the backward pass.
!!! tip
If you need gradients with respect to multiple arguments, then just pack them
together as a tuple via the first argument `vjp_arg`. (See also
[`equinox.filter_grad`][] for a similar trick.)
"""
def __init__(self, fn):
self.fn = fn
self.fn_wrapped = None
def defvjp(self, fn_fwd, fn_bwd):
def fn_wrapped(
nonarray_vjp_arg,
nonarray_args_kwargs,
diff_array_vjp_arg,
nondiff_array_vjp_arg,
array_args_kwargs,
):
vjp_arg = combine(
nonarray_vjp_arg, diff_array_vjp_arg, nondiff_array_vjp_arg
)
args, kwargs = combine(nonarray_args_kwargs, array_args_kwargs)
out = self.fn(vjp_arg, *args, **kwargs)
array_out, nonarray_out = partition(out, is_array)
diff_array_out, nondiff_array_out = partition(array_out, is_inexact_array)
return diff_array_out, nondiff_array_out, Static(nonarray_out)
def fn_fwd_wrapped(
nonarray_vjp_arg,
nonarray_args_kwargs,
diff_array_vjp_arg,
nondiff_array_vjp_arg,
array_args_kwargs,
):
vjp_arg = combine(
nonarray_vjp_arg, diff_array_vjp_arg, nondiff_array_vjp_arg
)
args, kwargs = combine(nonarray_args_kwargs, array_args_kwargs)
out, residuals = fn_fwd(vjp_arg, *args, **kwargs)
array_out, nonarray_out = partition(out, is_array)
diff_array_out, nondiff_array_out = partition(array_out, is_inexact_array)
out = diff_array_out, nondiff_array_out, Static(nonarray_out)
return out, (
residuals,
diff_array_vjp_arg,
nondiff_array_vjp_arg,
array_args_kwargs,
)
def fn_bwd_wrapped(nonarray_vjp_arg, nonarray_args_kwargs, residuals, grad_out):
(
residuals,
diff_array_vjp_arg,
nondiff_array_vjp_arg,
array_args_kwargs,
) = residuals
vjp_arg = combine(
nonarray_vjp_arg, diff_array_vjp_arg, nondiff_array_vjp_arg
)
args, kwargs = combine(nonarray_args_kwargs, array_args_kwargs)
grad_diff_array_out, _, _ = grad_out
out = fn_bwd(residuals, grad_diff_array_out, vjp_arg, *args, **kwargs)
if jtu.tree_structure(out) != jtu.tree_structure(diff_array_vjp_arg):
raise RuntimeError(
"custom_vjp gradients must have the same structure as "
"`equinox.filter(vjp_arg, equinox.is_inexact_array)`, where "
"`vjp_arg` is the first argument used in the forward pass."
)
# None is the gradient through nondiff_array_vjp_arg and array_args_kwargs
return out, None, None
fn_wrapped = jax.custom_vjp(fn_wrapped, nondiff_argnums=(0, 1))
fn_wrapped.defvjp(fn_fwd_wrapped, fn_bwd_wrapped)
self.fn_wrapped = fn_wrapped
def __call__(__self, __vjp_arg, *args, **kwargs):
# Try and avoid name collisions with the arguments of the wrapped function.
# TODO: once we switch to Python 3.8, use (self, vjp_arg, /, *args, **kwargs).
self = __self
vjp_arg = __vjp_arg
del __self, __vjp_arg
if self.fn_wrapped is None:
raise RuntimeError(f"defvjp not yet called for {self.fn.__name__}")
array_vjp_arg, nonarray_vjp_arg = partition(vjp_arg, is_array)
diff_array_vjp_arg, nondiff_array_vjp_arg = partition(
array_vjp_arg, is_inexact_array
)
array_args_kwargs, nonarray_args_kwargs = partition((args, kwargs), is_array)
out = self.fn_wrapped(
nonarray_vjp_arg,
nonarray_args_kwargs,
diff_array_vjp_arg,
nondiff_array_vjp_arg,
array_args_kwargs,
)
diff_array_out, nondiff_array_out, nonarray_out = out
return combine(diff_array_out, nondiff_array_out, nonarray_out.value)
if getattr(typing, "GENERATING_DOCUMENTATION", False):
_filter_custom_jvp_doc = filter_custom_jvp.__doc__
_filter_custom_vjp_doc = filter_custom_vjp.__doc__
def defjvp(fn_jvp):
pass
def filter_custom_jvp(fn):
return types.SimpleNamespace(defjvp=defjvp)
def defvjp(fn_fwd, fn_bwd):
pass
def filter_custom_vjp(fn):
return types.SimpleNamespace(defvjp=defvjp)
filter_custom_jvp.__doc__ = _filter_custom_jvp_doc
filter_custom_vjp.__doc__ = _filter_custom_vjp_doc
|
{
"content_hash": "7da41c94630f1ffc9c4855464884b0e2",
"timestamp": "",
"source": "github",
"line_count": 537,
"max_line_length": 89,
"avg_line_length": 36.4096834264432,
"alnum_prop": 0.6248465630114566,
"repo_name": "patrick-kidger/equinox",
"id": "b34dca02e0fedf5521ab01cf2bda3cb3d607cb53",
"size": "19552",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "equinox/grad.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "402270"
}
],
"symlink_target": ""
}
|
from typing import List
#---------------------------- FIO HIST LOG PARSE CODE -----------------------------------------------------------------
# Copy-paste from fio/tools/hist/fiologparser_hist.py.
# Because that's impossible to understand or improve,
# you can only copy such a pearl.
def _plat_idx_to_val(idx: int, edge: float = 0.5, FIO_IO_U_PLAT_BITS: int = 6, FIO_IO_U_PLAT_VAL: int = 64) -> float:
""" Taken from fio's stat.c for calculating the latency value of a bin
from that bin's index.
idx : the value of the index into the histogram bins
edge : fractional value in the range [0,1]** indicating how far into
the bin we wish to compute the latency value of.
** edge = 0.0 and 1.0 computes the lower and upper latency bounds
respectively of the given bin index. """
# MSB <= (FIO_IO_U_PLAT_BITS-1), cannot be rounded off. Use
# all bits of the sample as index
if (idx < (FIO_IO_U_PLAT_VAL << 1)):
return idx
# Find the group and compute the minimum value of that group
error_bits = (idx >> FIO_IO_U_PLAT_BITS) - 1
base = 1 << (error_bits + FIO_IO_U_PLAT_BITS)
    # Find the bucket number within the group
k = idx % FIO_IO_U_PLAT_VAL
# Return the mean (if edge=0.5) of the range of the bucket
return base + ((k + edge) * (1 << error_bits))
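# Worked example (follows directly from the code above): with the defaults
# FIO_IO_U_PLAT_BITS=6 and FIO_IO_U_PLAT_VAL=64, index 200 lies past the linear
# region (200 >= 128), so error_bits = (200 >> 6) - 1 = 2, base = 1 << 8 = 256,
# k = 200 % 64 = 8, and the bin midpoint (edge=0.5) is 256 + 8.5 * 4 = 290.0.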
def plat_idx_to_val_coarse(idx: int, coarseness: int, edge: float = 0.5) -> float:
""" Converts the given *coarse* index into a non-coarse index as used by fio
in stat.h:plat_idx_to_val(), subsequently computing the appropriate
latency value for that bin.
"""
    # Multiply the index by the power-of-2 coarseness to get the non-coarse
    # bin index, with a max of 1536 bins (FIO_IO_U_PLAT_GROUP_NR = 24 in stat.h)
stride = 1 << coarseness
idx = idx * stride
lower = _plat_idx_to_val(idx, edge=0.0)
upper = _plat_idx_to_val(idx + stride, edge=1.0)
return lower + (upper - lower) * edge
def get_lat_vals(columns: int, coarseness: int = 0) -> List[float]:
# convert ns to ms
    if columns == 1216:
        coef = 1
    elif columns == 1856:
        coef = 1000
    else:
        raise ValueError("Unexpected number of histogram columns: %s" % columns)
    return [plat_idx_to_val_coarse(val, coarseness) / coef for val in range(columns)]
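# Illustrative sketch (assumes one of the two histogram-log formats handled above):
if __name__ == "__main__":
    lat_edges = get_lat_vals(1216)       # one latency value per histogram bin
    assert len(lat_edges) == 1216
    assert lat_edges[0] < lat_edges[-1]  # bin values grow monotonically with the index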
|
{
"content_hash": "7099b45ec7d21936dd9425d8839ca66f",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 120,
"avg_line_length": 37.833333333333336,
"alnum_prop": 0.6044052863436123,
"repo_name": "Mirantis/disk_perf_test_tool",
"id": "fc32d0dae8a8422494f6ca939b99b21efc35e0c3",
"size": "2270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wally/suits/io/fio_hist.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1916"
},
{
"name": "HTML",
"bytes": "25130"
},
{
"name": "JavaScript",
"bytes": "3474"
},
{
"name": "Makefile",
"bytes": "635"
},
{
"name": "Python",
"bytes": "370984"
},
{
"name": "Shell",
"bytes": "27277"
}
],
"symlink_target": ""
}
|
"""Creates a RevNet with the bottleneck residual function.
Implements the following equations described in the RevNet paper:
y1 = x1 + f(x2)
y2 = x2 + g(y1)
However, in practice, the authors use the following equations to downsample
tensors inside a RevNet block:
y1 = h(x1) + f(x2)
y2 = h(x2) + g(y1)
In this case, h is the downsampling function used to change the number of channels.
These modified equations are evident in the authors' code online:
https://github.com/renmengye/revnet-public
For reference, the original paper can be found here:
https://arxiv.org/pdf/1707.04585.pdf
"""
import functools
from tensor2tensor.layers import common_hparams
from tensor2tensor.utils import contrib
from tensor2tensor.utils import registry
from tensor2tensor.utils import t2t_model
import tensorflow.compat.v1 as tf
from tensorflow.compat.v1 import estimator as tf_estimator
def wrapped_partial(fn, *args, **kwargs):
partial = functools.partial(fn, *args, **kwargs)
wrapped = functools.update_wrapper(partial, fn)
return wrapped
conv_initializer = tf.initializers.variance_scaling(
scale=2.0, mode='fan_out')
CONFIG = {'2d': {'conv': wrapped_partial(
tf.layers.conv2d, kernel_initializer=conv_initializer),
'max_pool': tf.layers.max_pooling2d,
'avg_pool': tf.layers.average_pooling2d,
'split_axis': 3,
'reduction_dimensions': [1, 2]
},
'3d': {'conv': wrapped_partial(
tf.layers.conv3d, kernel_initializer=conv_initializer),
'max_pool': tf.layers.max_pooling3d,
'avg_pool': tf.layers.average_pooling2d,
'split_axis': 4,
'reduction_dimensions': [1, 2, 3]
}
}
def f(x, depth1, depth2, dim='2d', first_batch_norm=True, stride=1,
training=True, bottleneck=True, padding='SAME'):
"""Applies residual function for RevNet.
Args:
x: input tensor
depth1: Number of output channels for the first and second conv layers.
depth2: Number of output channels for the third conv layer.
dim: '2d' if 2-dimensional, '3d' if 3-dimensional.
first_batch_norm: Whether to keep the first batch norm layer or not.
Typically used in the first RevNet block.
stride: Stride for the first conv filter. Note that this particular
RevNet architecture only varies the stride for the first conv
filter. The stride for the second conv filter is always set to 1.
training: True for train phase, False for eval phase.
bottleneck: If true, apply bottleneck 1x1 down/up sampling.
padding: Padding for each conv layer.
Returns:
Output tensor after applying residual function for RevNet.
"""
conv = CONFIG[dim]['conv']
with tf.variable_scope('f', reuse=tf.AUTO_REUSE):
if first_batch_norm:
net = tf.layers.batch_normalization(x, training=training)
net = tf.nn.relu(net)
else:
net = x
if bottleneck:
net = conv(net, depth1, 1, strides=stride,
padding=padding, activation=None)
net = tf.layers.batch_normalization(net, training=training)
net = tf.nn.relu(net)
net = conv(net, depth1, 3, strides=1,
padding=padding, activation=None)
net = tf.layers.batch_normalization(net, training=training)
net = tf.nn.relu(net)
net = conv(net, depth2, 1, strides=1,
padding=padding, activation=None)
else:
net = conv(net, depth2, 3, strides=stride,
padding=padding, activation=None)
      net = tf.layers.batch_normalization(net, training=training)
      net = tf.nn.relu(net)
      net = conv(net, depth2, 3, strides=1,
                 padding=padding, activation=None)
return net
def downsample_bottleneck(x, output_channels, dim='2d', stride=1, scope='h'):
"""Downsamples 'x' by `stride` using a 1x1 convolution filter.
Args:
x: input tensor of size [N, H, W, C]
output_channels: Desired number of output channels.
dim: '2d' if 2-dimensional, '3d' if 3-dimensional.
stride: What stride to use. Usually 1 or 2.
scope: Optional variable scope.
Returns:
A downsampled tensor of size [N, H/2, W/2, output_channels] if stride
is 2, else returns a tensor of size [N, H, W, output_channels] if
stride is 1.
"""
conv = CONFIG[dim]['conv']
with tf.variable_scope(scope):
x = conv(x, output_channels, 1, strides=stride, padding='SAME',
activation=None)
return x
def downsample_residual(x, output_channels, dim='2d', stride=1, scope='h'):
"""Downsamples 'x' by `stride` using average pooling.
Args:
x: input tensor of size [N, H, W, C]
output_channels: Desired number of output channels.
dim: '2d' if 2-dimensional, '3d' if 3-dimensional.
stride: What stride to use. Usually 1 or 2.
scope: Optional variable scope.
Returns:
A downsampled tensor of size [N, H/2, W/2, output_channels] if stride
is 2, else returns a tensor of size [N, H, W, output_channels] if
stride is 1.
"""
with tf.variable_scope(scope):
if stride > 1:
avg_pool = CONFIG[dim]['avg_pool']
x = avg_pool(x,
pool_size=(stride, stride),
strides=(stride, stride),
padding='VALID')
input_channels = tf.shape(x)[3]
diff = output_channels - input_channels
x = tf.pad(
x, [[0, 0], [0, 0], [0, 0],
[diff // 2, diff // 2]])
return x
def init(images, num_channels, dim='2d', stride=2,
kernel_size=7, maxpool=True, training=True, scope='init'):
"""Standard ResNet initial block used as first RevNet block.
Args:
images: [N, H, W, 3] tensor of input images to the model.
num_channels: Output depth of convolutional layer in initial block.
dim: '2d' if 2-dimensional, '3d' if 3-dimensional.
stride: stride for the convolution and pool layer.
kernel_size: Size of the initial convolution filter
maxpool: If true, apply a maxpool after the convolution
training: True for train phase, False for eval phase.
scope: Optional scope for the init block.
Returns:
Two [N, H, W, C] output activations from input images.
"""
conv = CONFIG[dim]['conv']
pool = CONFIG[dim]['max_pool']
with tf.variable_scope(scope):
net = conv(images, num_channels, kernel_size, strides=stride,
padding='SAME', activation=None)
net = tf.layers.batch_normalization(net, training=training)
net = tf.nn.relu(net)
if maxpool:
net = pool(net, pool_size=3, strides=stride)
x1, x2 = tf.split(net, 2, axis=CONFIG[dim]['split_axis'])
return x1, x2
def unit(x1, x2, block_num, depth, num_layers, dim='2d',
bottleneck=True, first_batch_norm=True, stride=1, training=True):
"""Implements bottleneck RevNet unit from authors' RevNet architecture.
Args:
x1: [N, H, W, C] tensor of network activations.
x2: [N, H, W, C] tensor of network activations.
block_num: integer ID of block
depth: First depth in bottleneck residual unit.
num_layers: Number of layers in the RevNet block.
dim: '2d' if 2-dimensional, '3d' if 3-dimensional.
    bottleneck: Whether a bottleneck layer should be used.
first_batch_norm: Whether to keep the first batch norm layer or not.
Typically used in the first RevNet block.
stride: Stride for the residual function.
training: True for train phase, False for eval phase.
Returns:
Two [N, H, W, C] output activation tensors.
"""
scope_name = 'unit_%d' % block_num
if bottleneck:
depth1 = depth
depth2 = depth * 4
else:
depth1 = depth2 = depth
residual = wrapped_partial(f,
depth1=depth1, depth2=depth2, dim=dim,
training=training, bottleneck=bottleneck)
with tf.variable_scope(scope_name):
downsample = downsample_bottleneck if bottleneck else downsample_residual
# Manual implementation of downsampling
with tf.variable_scope('downsampling'):
with tf.variable_scope('x1'):
hx1 = downsample(x1, depth2, dim=dim, stride=stride)
fx2 = residual(x2, stride=stride, first_batch_norm=first_batch_norm)
x1 = hx1 + fx2
with tf.variable_scope('x2'):
hx2 = downsample(x2, depth2, dim=dim, stride=stride)
fx1 = residual(x1)
x2 = hx2 + fx1
# Full block using memory-efficient rev_block implementation.
with tf.variable_scope('full_block'):
x1, x2 = contrib.layers().rev_block(
x1, x2, residual, residual, num_layers=num_layers)
return x1, x2
def final_block(x1, x2, dim='2d', training=True, scope='final_block'):
"""Converts activations from last RevNet block to pre-logits.
Args:
x1: [NxHxWxC] tensor of network activations.
x2: [NxHxWxC] tensor of network activations.
dim: '2d' if 2-dimensional, '3d' if 3-dimensional.
training: True for train phase, False for eval phase.
scope: Optional variable scope for the final block.
Returns:
[N, hidden_dim] pre-logits tensor from activations x1 and x2.
"""
# Final batch norm and relu
with tf.variable_scope(scope):
y = tf.concat([x1, x2], axis=CONFIG[dim]['split_axis'])
y = tf.layers.batch_normalization(y, training=training)
y = tf.nn.relu(y)
# Global average pooling
net = tf.reduce_mean(y, CONFIG[dim]['reduction_dimensions'],
name='final_pool', keep_dims=True)
return net
def revnet(inputs, hparams, reuse=None):
"""Uses Tensor2Tensor memory optimized RevNet block to build a RevNet.
Args:
inputs: [NxHxWx3] tensor of input images to the model.
hparams: HParams object that contains the following parameters,
in addition to the parameters contained in the basic_params1() object in
the common_hparams module:
num_channels_first - A Python list where each element represents the
depth of the first and third convolutional layers in the bottleneck
residual unit for a given block.
num_channels_second - A Python list where each element represents the
depth of the second convolutional layer in the bottleneck residual
unit for a given block.
num_layers_per_block - A Python list containing the number of RevNet
layers for each block.
first_batch_norm - A Python list containing booleans representing the
presence of a batch norm layer at the beginning of a given block.
strides - A Python list containing integers representing the stride of
the residual function for each block.
num_channels_init_block - An integer representing the number of channels
for the convolutional layer in the initial block.
dimension - A string (either "2d" or "3d") that decides if the RevNet is
2-dimensional or 3-dimensional.
reuse: Whether to reuse the default variable scope.
Returns:
[batch_size, hidden_dim] pre-logits tensor from the bottleneck RevNet.
"""
training = hparams.mode == tf_estimator.ModeKeys.TRAIN
with tf.variable_scope('RevNet', reuse=reuse):
x1, x2 = init(inputs,
num_channels=hparams.num_channels_init_block,
dim=hparams.dim,
kernel_size=hparams.init_kernel_size,
maxpool=hparams.init_maxpool,
stride=hparams.init_stride,
training=training)
for block_num in range(len(hparams.num_layers_per_block)):
block = {'depth': hparams.num_channels[block_num],
'num_layers': hparams.num_layers_per_block[block_num],
'first_batch_norm': hparams.first_batch_norm[block_num],
'stride': hparams.strides[block_num],
'bottleneck': hparams.bottleneck}
x1, x2 = unit(x1, x2, block_num, dim=hparams.dim, training=training,
**block)
pre_logits = final_block(x1, x2, dim=hparams.dim, training=training)
return pre_logits
@registry.register_model
class Revnet(t2t_model.T2TModel):
def body(self, features):
return revnet(features['inputs'], self.hparams)
def revnet_base():
"""Default hparams for Revnet."""
hparams = common_hparams.basic_params1()
hparams.add_hparam('num_channels', [64, 128, 256, 416])
hparams.add_hparam('num_layers_per_block', [1, 1, 10, 1])
hparams.add_hparam('bottleneck', True)
hparams.add_hparam('first_batch_norm', [False, True, True, True])
hparams.add_hparam('init_stride', 2)
hparams.add_hparam('init_kernel_size', 7)
hparams.add_hparam('init_maxpool', True)
hparams.add_hparam('strides', [1, 2, 2, 2])
hparams.add_hparam('num_channels_init_block', 64)
hparams.add_hparam('dim', '2d')
# Variable init
hparams.initializer = 'normal_unit_scaling'
hparams.initializer_gain = 2.
# Optimization
hparams.optimizer = 'Momentum'
hparams.optimizer_momentum_momentum = 0.9
hparams.optimizer_momentum_nesterov = True
hparams.weight_decay = 1e-4
hparams.clip_grad_norm = 0.0
# (base_lr=0.1) * (batch_size=128*8 (on TPU, or 8 GPUs)=1024) / (256.)
hparams.learning_rate = 0.4
hparams.learning_rate_decay_scheme = 'cosine'
# For image_imagenet224, 120k training steps, which effectively makes this a
# cosine decay (i.e. no cycles).
hparams.learning_rate_cosine_cycle_steps = 120000
# Can run with a batch size of 128 with Problem ImageImagenet224
hparams.batch_size = 128
return hparams
@registry.register_hparams
def revnet_104():
return revnet_base()
def revnet_cifar_base():
"""Tiny hparams suitable for CIFAR/etc."""
hparams = revnet_base()
hparams.num_channels_init_block = 32
hparams.first_batch_norm = [False, True, True]
hparams.init_stride = 1
hparams.init_kernel_size = 3
hparams.init_maxpool = False
hparams.strides = [1, 2, 2]
hparams.batch_size = 128
hparams.weight_decay = 1e-4
hparams.learning_rate = 0.1
hparams.learning_rate_cosine_cycle_steps = 5000
return hparams
@registry.register_hparams
def revnet_38_cifar():
hparams = revnet_cifar_base()
hparams.bottleneck = False
hparams.num_channels = [16, 32, 56]
hparams.num_layers_per_block = [2, 2, 2]
hparams.initializer = 'normal_unit_scaling'
hparams.initializer_gain = 1.5
return hparams
@registry.register_hparams
def revnet_110_cifar():
"""Tiny hparams suitable for CIFAR/etc."""
hparams = revnet_cifar_base()
hparams.bottleneck = False
hparams.num_channels = [16, 32, 64]
hparams.num_layers_per_block = [8, 8, 8]
return hparams
@registry.register_hparams
def revnet_164_cifar():
"""Tiny hparams suitable for CIFAR/etc."""
hparams = revnet_cifar_base()
hparams.bottleneck = True
hparams.num_channels = [16, 32, 64]
hparams.num_layers_per_block = [8, 8, 8]
return hparams
@registry.register_ranged_hparams
def revnet_range(rhp):
"""Hyperparameters for tuning revnet."""
rhp.set_float('learning_rate', 0.05, 0.2, scale=rhp.LOG_SCALE)
rhp.set_float('weight_decay', 1e-5, 1e-3, scale=rhp.LOG_SCALE)
rhp.set_discrete('num_channels_init_block', [64, 128])
return rhp
|
{
"content_hash": "a3f9e6ece0b1b3bdc541102fa32a9ba0",
"timestamp": "",
"source": "github",
"line_count": 424,
"max_line_length": 80,
"avg_line_length": 35.695754716981135,
"alnum_prop": 0.663164849686158,
"repo_name": "tensorflow/tensor2tensor",
"id": "05b21b0f7cdf354c3298f1607ac1149c175930ef",
"size": "15741",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensor2tensor/models/revnet.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "32015"
},
{
"name": "HTML",
"bytes": "34684"
},
{
"name": "JavaScript",
"bytes": "78408"
},
{
"name": "Jupyter Notebook",
"bytes": "2859453"
},
{
"name": "Python",
"bytes": "5109255"
},
{
"name": "Shell",
"bytes": "11941"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from .models import Question
class QuestionAdmin(admin.ModelAdmin):
fields = ['pub_date', 'question_text']
admin.site.register(Question, QuestionAdmin)
|
{
"content_hash": "5c5ba391e5df3b95a4390181c13da7be",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 44,
"avg_line_length": 21.333333333333332,
"alnum_prop": 0.7708333333333334,
"repo_name": "zbassett/curling-robot",
"id": "3edfcb78bc8e761f3fcb491ea6be4bef3be86bdc",
"size": "192",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RaspberryPi/DjangoSite/mysite/polls/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "23791"
},
{
"name": "C++",
"bytes": "28526"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "20489"
},
{
"name": "JavaScript",
"bytes": "1581"
},
{
"name": "Makefile",
"bytes": "2494"
},
{
"name": "Python",
"bytes": "69600"
}
],
"symlink_target": ""
}
|
import os
from supriya.tools import osctools
from supriya.tools.requesttools.BufferAllocateRequest import BufferAllocateRequest
class BufferAllocateReadRequest(BufferAllocateRequest):
r'''A /b_allocRead request.
::
>>> from supriya.tools import requesttools
>>> request = requesttools.BufferAllocateReadRequest(
... buffer_id=23,
... file_path='pulse_44100sr_16bit_octo.wav',
... )
>>> print(request)
BufferAllocateReadRequest(
buffer_id=23,
file_path='pulse_44100sr_16bit_octo.wav'
)
::
>>> message = request.to_osc_message()
>>> message
OscMessage(29, 23, '...pulse_44100sr_16bit_octo.wav', 0, -1)
::
>>> message.address == requesttools.RequestId.BUFFER_ALLOCATE_READ
True
'''
### CLASS VARIABLES ###
__slots__ = (
'_file_path',
'_frame_count',
'_starting_frame',
)
### INITIALIZER ###
def __init__(
self,
buffer_id=None,
completion_message=None,
file_path=None,
frame_count=None,
starting_frame=None,
):
BufferAllocateRequest.__init__(
self,
buffer_id=buffer_id,
frame_count=frame_count,
completion_message=completion_message,
)
self._file_path = str(file_path)
if starting_frame is not None:
starting_frame = int(starting_frame)
assert 0 <= starting_frame
self._starting_frame = starting_frame
### PRIVATE METHODS ###
def _get_osc_message_contents(self):
request_id = int(self.request_id)
buffer_id = int(self.buffer_id)
frame_count = self.frame_count
if frame_count is None:
frame_count = -1
starting_frame = self.starting_frame
if starting_frame is None:
starting_frame = 0
file_path = os.path.abspath(os.path.expanduser(str(self.file_path)))
contents = [
request_id,
buffer_id,
file_path,
starting_frame,
frame_count,
]
return contents
### PUBLIC METHODS ###
def to_osc_message(self):
contents = self._get_osc_message_contents()
self._coerce_completion_message_output(contents)
message = osctools.OscMessage(*contents)
return message
### PUBLIC PROPERTIES ###
@property
def buffer_id(self):
return self._buffer_id
@property
def completion_message(self):
return self._completion_message
@property
def file_path(self):
return self._file_path
@property
def frame_count(self):
return self._frame_count
@property
def response_specification(self):
from supriya.tools import responsetools
return {
responsetools.DoneResponse: {
'action': ('/b_allocRead', self.buffer_id),
},
}
@property
def request_id(self):
from supriya.tools import requesttools
return requesttools.RequestId.BUFFER_ALLOCATE_READ
@property
def starting_frame(self):
return self._starting_frame
|
{
"content_hash": "cbb729ba4f01f621da874a0727ee8f49",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 82,
"avg_line_length": 25.5859375,
"alnum_prop": 0.5661068702290076,
"repo_name": "andrewyoung1991/supriya",
"id": "74749da400b2784cf1eae063dc871352ca91ca23",
"size": "3301",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "supriya/tools/requesttools/BufferAllocateReadRequest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6712"
},
{
"name": "CSS",
"bytes": "446"
},
{
"name": "HTML",
"bytes": "1083"
},
{
"name": "JavaScript",
"bytes": "6163"
},
{
"name": "Makefile",
"bytes": "6775"
},
{
"name": "Python",
"bytes": "2693776"
}
],
"symlink_target": ""
}
|
"""Introduce Export model
Revision ID: 5b681871c1ab
Revises: 18f53aae83ae
Create Date: 2020-07-28 11:26:26.392701
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "5b681871c1ab"
down_revision = "18f53aae83ae"
def upgrade():
op.create_table(
"export",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("updated_at", sa.DateTime(), nullable=True),
sa.Column("label", sa.Unicode(), nullable=True),
sa.Column("operation", sa.Unicode(), nullable=True),
sa.Column("creator_id", sa.Integer(), nullable=True),
sa.Column("collection_id", sa.Integer(), nullable=True),
sa.Column("expires_at", sa.DateTime(), nullable=True),
sa.Column("deleted", sa.Boolean(), nullable=True),
sa.Column("export_status", sa.Unicode(), nullable=True),
sa.Column("content_hash", sa.Unicode(length=65), nullable=True),
sa.Column("file_size", sa.BigInteger(), nullable=True),
sa.Column("file_name", sa.Unicode(), nullable=True),
sa.Column("mime_type", sa.Unicode(), nullable=True),
sa.Column("meta", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(["collection_id"], ["collection.id"],),
sa.ForeignKeyConstraint(["creator_id"], ["role.id"],),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_export_collection_id"), "export", ["collection_id"], unique=False
)
op.create_index(
op.f("ix_export_content_hash"), "export", ["content_hash"], unique=False
)
def downgrade():
op.drop_index(op.f("ix_export_content_hash"), table_name="export")
op.drop_index(op.f("ix_export_collection_id"), table_name="export")
op.drop_table("export")
|
{
"content_hash": "136568fb70471b170efcac9b2b840bdf",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 82,
"avg_line_length": 37.431372549019606,
"alnum_prop": 0.643268727082242,
"repo_name": "pudo/aleph",
"id": "9ba8ce11d87321b861214c9f5f4a4431f6734daf",
"size": "1909",
"binary": false,
"copies": "1",
"ref": "refs/heads/dependabot/pip/develop/jsonschema-4.1.2",
"path": "aleph/migrate/versions/5b681871c1ab_introduce_export_model.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15949"
},
{
"name": "HTML",
"bytes": "170476"
},
{
"name": "JavaScript",
"bytes": "111287"
},
{
"name": "Makefile",
"bytes": "1319"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "492593"
}
],
"symlink_target": ""
}
|
from pysb.core import *
from pysb.annotation import Annotation
__all__ = ['Observable', 'Initial', 'MatchOnce', 'Model', 'Monomer',
'Parameter', 'Compartment', 'Rule', 'Expression', 'ANY', 'WILD',
'Annotation']
try:
import reinteract # fails if reinteract not installed
reinteract.custom_result # fails if this code is run outside of the reinteract shell
except (ImportError, AttributeError) as e:
pass # silently skip applying the mixin below
else:
import pysb.reinteract_integration
pysb.reinteract_integration.apply_mixins()
|
{
"content_hash": "e09b8ad741bfebf106a3eb7342cab145",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 89,
"avg_line_length": 40.13333333333333,
"alnum_prop": 0.6694352159468439,
"repo_name": "spgarbet/pysb",
"id": "33b41ede61c518885f97969f9cfcc307a60556ea",
"size": "602",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pysb/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "DOT",
"bytes": "1238"
},
{
"name": "Puppet",
"bytes": "3461"
},
{
"name": "Python",
"bytes": "483478"
},
{
"name": "Ruby",
"bytes": "2124"
},
{
"name": "Shell",
"bytes": "4507"
}
],
"symlink_target": ""
}
|
"""
Integration tests which cover state convergence (aka smart recreate) performed
by `docker-compose up`.
"""
from __future__ import unicode_literals
import os
import shutil
import tempfile
from .testcases import DockerClientTestCase
from compose.config import config
from compose.const import LABEL_CONFIG_HASH
from compose.project import Project
from compose.service import ConvergenceStrategy
class ProjectTestCase(DockerClientTestCase):
def run_up(self, cfg, **kwargs):
kwargs.setdefault('timeout', 1)
kwargs.setdefault('detached', True)
project = self.make_project(cfg)
project.up(**kwargs)
return set(project.containers(stopped=True))
def make_project(self, cfg):
details = config.ConfigDetails(
'working_dir',
[config.ConfigFile(None, cfg)])
return Project.from_dicts(
name='composetest',
client=self.client,
service_dicts=config.load(details))
class BasicProjectTest(ProjectTestCase):
def setUp(self):
super(BasicProjectTest, self).setUp()
self.cfg = {
'db': {'image': 'busybox:latest'},
'web': {'image': 'busybox:latest'},
}
def test_no_change(self):
old_containers = self.run_up(self.cfg)
self.assertEqual(len(old_containers), 2)
new_containers = self.run_up(self.cfg)
self.assertEqual(len(new_containers), 2)
self.assertEqual(old_containers, new_containers)
def test_partial_change(self):
old_containers = self.run_up(self.cfg)
old_db = [c for c in old_containers if c.name_without_project == 'db_1'][0]
old_web = [c for c in old_containers if c.name_without_project == 'web_1'][0]
self.cfg['web']['command'] = '/bin/true'
new_containers = self.run_up(self.cfg)
self.assertEqual(len(new_containers), 2)
preserved = list(old_containers & new_containers)
self.assertEqual(preserved, [old_db])
removed = list(old_containers - new_containers)
self.assertEqual(removed, [old_web])
created = list(new_containers - old_containers)
self.assertEqual(len(created), 1)
self.assertEqual(created[0].name_without_project, 'web_1')
self.assertEqual(created[0].get('Config.Cmd'), ['/bin/true'])
def test_all_change(self):
old_containers = self.run_up(self.cfg)
self.assertEqual(len(old_containers), 2)
self.cfg['web']['command'] = '/bin/true'
self.cfg['db']['command'] = '/bin/true'
new_containers = self.run_up(self.cfg)
self.assertEqual(len(new_containers), 2)
unchanged = old_containers & new_containers
self.assertEqual(len(unchanged), 0)
new = new_containers - old_containers
self.assertEqual(len(new), 2)
class ProjectWithDependenciesTest(ProjectTestCase):
def setUp(self):
super(ProjectWithDependenciesTest, self).setUp()
self.cfg = {
'db': {
'image': 'busybox:latest',
'command': 'tail -f /dev/null',
},
'web': {
'image': 'busybox:latest',
'command': 'tail -f /dev/null',
'links': ['db'],
},
'nginx': {
'image': 'busybox:latest',
'command': 'tail -f /dev/null',
'links': ['web'],
},
}
def test_up(self):
containers = self.run_up(self.cfg)
self.assertEqual(
set(c.name_without_project for c in containers),
set(['db_1', 'web_1', 'nginx_1']),
)
def test_change_leaf(self):
old_containers = self.run_up(self.cfg)
self.cfg['nginx']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg)
self.assertEqual(
set(c.name_without_project for c in new_containers - old_containers),
set(['nginx_1']),
)
def test_change_middle(self):
old_containers = self.run_up(self.cfg)
self.cfg['web']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg)
self.assertEqual(
set(c.name_without_project for c in new_containers - old_containers),
set(['web_1', 'nginx_1']),
)
def test_change_root(self):
old_containers = self.run_up(self.cfg)
self.cfg['db']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg)
self.assertEqual(
set(c.name_without_project for c in new_containers - old_containers),
set(['db_1', 'web_1', 'nginx_1']),
)
def test_change_root_no_recreate(self):
old_containers = self.run_up(self.cfg)
self.cfg['db']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(
self.cfg,
strategy=ConvergenceStrategy.never)
self.assertEqual(new_containers - old_containers, set())
def test_service_removed_while_down(self):
next_cfg = {
'web': {
'image': 'busybox:latest',
'command': 'tail -f /dev/null',
},
'nginx': self.cfg['nginx'],
}
containers = self.run_up(self.cfg)
self.assertEqual(len(containers), 3)
project = self.make_project(self.cfg)
project.stop(timeout=1)
containers = self.run_up(next_cfg)
self.assertEqual(len(containers), 2)
def converge(service,
strategy=ConvergenceStrategy.changed,
do_build=True):
"""Create a converge plan from a strategy and execute the plan."""
plan = service.convergence_plan(strategy)
return service.execute_convergence_plan(plan, do_build=do_build, timeout=1)
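# As exercised by the tests above: ConvergenceStrategy.changed (the default
# here) recreates only containers whose configuration has changed, while
# ConvergenceStrategy.never (used in test_change_root_no_recreate) leaves
# existing containers in place.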
class ServiceStateTest(DockerClientTestCase):
"""Test cases for Service.convergence_plan."""
def test_trigger_create(self):
web = self.create_service('web')
self.assertEqual(('create', []), web.convergence_plan())
def test_trigger_noop(self):
web = self.create_service('web')
container = web.create_container()
web.start()
web = self.create_service('web')
self.assertEqual(('noop', [container]), web.convergence_plan())
def test_trigger_start(self):
options = dict(command=["top"])
web = self.create_service('web', **options)
web.scale(2)
containers = web.containers(stopped=True)
containers[0].stop()
containers[0].inspect()
self.assertEqual([c.is_running for c in containers], [False, True])
self.assertEqual(
('start', containers[0:1]),
web.convergence_plan(),
)
def test_trigger_recreate_with_config_change(self):
web = self.create_service('web', command=["top"])
container = web.create_container()
web = self.create_service('web', command=["top", "-d", "1"])
self.assertEqual(('recreate', [container]), web.convergence_plan())
def test_trigger_recreate_with_nonexistent_image_tag(self):
web = self.create_service('web', image="busybox:latest")
container = web.create_container()
web = self.create_service('web', image="nonexistent-image")
self.assertEqual(('recreate', [container]), web.convergence_plan())
def test_trigger_recreate_with_image_change(self):
repo = 'composetest_myimage'
tag = 'latest'
image = '{}:{}'.format(repo, tag)
image_id = self.client.images(name='busybox')[0]['Id']
self.client.tag(image_id, repository=repo, tag=tag)
try:
web = self.create_service('web', image=image)
container = web.create_container()
# update the image
c = self.client.create_container(image, ['touch', '/hello.txt'])
self.client.commit(c, repository=repo, tag=tag)
self.client.remove_container(c)
web = self.create_service('web', image=image)
self.assertEqual(('recreate', [container]), web.convergence_plan())
finally:
self.client.remove_image(image)
def test_trigger_recreate_with_build(self):
context = tempfile.mkdtemp()
base_image = "FROM busybox\nLABEL com.docker.compose.test_image=true\n"
try:
dockerfile = os.path.join(context, 'Dockerfile')
with open(dockerfile, 'w') as f:
f.write(base_image)
web = self.create_service('web', build=context)
container = web.create_container()
with open(dockerfile, 'w') as f:
f.write(base_image + 'CMD echo hello world\n')
web.build()
web = self.create_service('web', build=context)
self.assertEqual(('recreate', [container]), web.convergence_plan())
finally:
shutil.rmtree(context)
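# LABEL_CONFIG_HASH is the label under which Compose records a hash of the
# service configuration on each container; the tests below check when the
# label is applied (not for one-off containers or when options are
# overridden) and that it persists across recreations.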
class ConfigHashTest(DockerClientTestCase):
def test_no_config_hash_when_one_off(self):
web = self.create_service('web')
container = web.create_container(one_off=True)
self.assertNotIn(LABEL_CONFIG_HASH, container.labels)
def test_no_config_hash_when_overriding_options(self):
web = self.create_service('web')
container = web.create_container(environment={'FOO': '1'})
self.assertNotIn(LABEL_CONFIG_HASH, container.labels)
def test_config_hash_with_custom_labels(self):
web = self.create_service('web', labels={'foo': '1'})
container = converge(web)[0]
self.assertIn(LABEL_CONFIG_HASH, container.labels)
self.assertIn('foo', container.labels)
def test_config_hash_sticks_around(self):
web = self.create_service('web', command=["top"])
container = converge(web)[0]
self.assertIn(LABEL_CONFIG_HASH, container.labels)
web = self.create_service('web', command=["top", "-d", "1"])
container = converge(web)[0]
self.assertIn(LABEL_CONFIG_HASH, container.labels)
|
{
"content_hash": "257f5851495f0a116eaddc494a00cec6",
"timestamp": "",
"source": "github",
"line_count": 307,
"max_line_length": 85,
"avg_line_length": 32.83387622149837,
"alnum_prop": 0.5913690476190476,
"repo_name": "mnowster/compose",
"id": "3230aefc61a5863195edec8dddfeb42316e1839a",
"size": "10080",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/integration/state_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "2536"
},
{
"name": "Python",
"bytes": "378509"
},
{
"name": "Shell",
"bytes": "23641"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from django import template
from django.template import Template, Context
from django.shortcuts import render_to_response
# Parser creation factory for testing
import django.template.base as template_base
def make_parser(template_string):
return template_base.Parser(
template_base.Lexer(
template_string,
template_base.StringOrigin(template_string)).tokenize())
# Tests for repeatedblocks.py
from .templatetags.repeatedblocks import set_repeated_blocks, BlockNode
class RepeatedBlocksTagsTests(TestCase):
# generate test templates from these base
# strings to stay DRY and in case tag syntax
# changes later.
LOAD_STRING = "{% load repeatedblocks %}"
# define first repeated_block strings
RBLOCK_1_NAME = "rblock1"
DEFINE_RBLOCK_1 = (
"{{% repeated_block {0} %}}".format(RBLOCK_1_NAME) +
"string1"
"{% endblock %}")
REPEAT_RBLOCK_1 = "{{% repeat {0} %}}".format(RBLOCK_1_NAME)
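# Note: the doubled braces above are str.format escapes, so DEFINE_RBLOCK_1
# expands to "{% repeated_block rblock1 %}string1{% endblock %}" and
# REPEAT_RBLOCK_1 to "{% repeat rblock1 %}".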
# define second repeated_block strings
RBLOCK_2_NAME = "rblock2"
DEFINE_RBLOCK_2 = (
"{{% repeated_block {0} %}}".format(RBLOCK_2_NAME) +
"string2"
"{% endblock %}")
REPEAT_RBLOCK_2 = "{{% repeat {0} %}}".format(RBLOCK_2_NAME)
# too few and too many args
TOO_FEW_REPEATED_BLOCK = "{% repeated_block %}"
TOO_MANY_REPEATED_BLOCK = "{% repeated_block arg1 arg2 %}"
TOO_FEW_REPEAT = "{% repeat %}"
TOO_MANY_REPEAT = "{% repeat arg1 arg2 %}"
# test assumptions
def test_creates_normal_block(self):
""" It is assumed that the block node created
by the repeated block tag, and the block node
created by the usual block tag are identical.
"""
t = Template(
self.LOAD_STRING + self.DEFINE_RBLOCK_1)
# assert that there are only two nodes in the template
# first node is the load tag
# second node is the block node
self.assertEqual(len(t.nodelist), 2)
# make sure that the second node is a block node
self.assertIsInstance(t.nodelist[1], BlockNode)
# make sure that node is the block node we expect it to be
self.assertEqual(t.nodelist[1].name, self.RBLOCK_1_NAME)
# test functionality
## test set_repeated_blocks
def test_set_repeated_blocks_sets_variable(self):
""" Set repeated blocks must set the _repeated_blocks
variable on the parser.
"""
p = make_parser("A short template")
# check that the parser doesn't initially have the
# _repeated_blocks attribute
self.assertFalse(hasattr(p, "_repeated_blocks"))
# check that set_repeated_blocks actually sets the
# attribute.
set_repeated_blocks(p)
self.assertTrue(hasattr(p, "_repeated_blocks"))
def test_set_repeated_blocks_doesnt_overwrite(self):
""" Set repeated blocks must not overwrite the
_repeated_blocks variable on the parser, if it
already exists.
"""
p = make_parser("A short template")
# set the attribute to a specific dict
dic = {'foo': 'bar', }
p._repeated_blocks = dic
# call set_repeated_blocks and test that
# the dict hasn't been overwritten.
set_repeated_blocks(p)
self.assertEqual(p._repeated_blocks, dic)
def test_set_repeated_blocks_initialize_empty(self):
""" Set repeated blocks must initialize the
_repeated_blocks variable as an empty dict.
"""
p = make_parser("A short template")
set_repeated_blocks(p)
self.assertEqual(p._repeated_blocks, {})
## test repeated_block and repeat
def test_repeated_block_repeats_once(self):
""" The repeated blocks should be able to repeat
an arbitrary number of times; this tests that it
repeats once.
"""
t = Template(
self.LOAD_STRING + self.DEFINE_RBLOCK_1 +
self.REPEAT_RBLOCK_1)
c = Context({})
self.assertEqual(t.render(c), "string1string1")
def test_repeated_block_repeats_twice(self):
""" The repeated blocks should be able to repeat
an arbitrary number of times; this tests that it
repeats twice.
"""
t = Template(
self.LOAD_STRING + self.DEFINE_RBLOCK_1 +
self.REPEAT_RBLOCK_1 +
self.REPEAT_RBLOCK_1)
c = Context({})
self.assertEqual(t.render(c), "string1string1string1")
def test_repeated_block_repeats_thrice(self):
""" The repeated blocks should be able to repeat
an arbitrary number of times; this tests that it
repeats thrice.
"""
t = Template(
self.LOAD_STRING + self.DEFINE_RBLOCK_1 +
self.REPEAT_RBLOCK_1 +
self.REPEAT_RBLOCK_1 +
self.REPEAT_RBLOCK_1)
c = Context({})
self.assertEqual(t.render(c),
"string1string1string1string1")
def test_two_distinct_repeat_blocks(self):
""" Multiple repeated blocks should be able to
exist and work at the same time.
"""
t = Template(
self.LOAD_STRING + self.DEFINE_RBLOCK_1 +
self.REPEAT_RBLOCK_1 +
self.DEFINE_RBLOCK_2 +
self.REPEAT_RBLOCK_1 +
self.REPEAT_RBLOCK_2 +
self.REPEAT_RBLOCK_1)
c = Context({})
self.assertEqual(t.render(c),
"string1string1string2string1string2string1")
# test exceptions
def test_repeat_coming_before_repeated_block(self):
""" If the repeat tag comes before the repeated
block tag, it should throw an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^No repeated block .+ tag was found before the .+ tag$",
Template,
# call Template on the following string/template
self.LOAD_STRING + self.REPEAT_RBLOCK_1 +
self.DEFINE_RBLOCK_1,)
def test_repeat_having_no_block(self):
""" If repeat is called without a repeated block
definition existing, then repeat should throw an
exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^No repeated block .+ tag was found before the .+ tag$",
Template,
# call Template on the following string/template
self.LOAD_STRING + self.REPEAT_RBLOCK_2)
def test_repeat_having_no_block_of_same_name(self):
""" If repeat is called without a repeated block of
the corresponding name existing, then repeat should
throw an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^No repeated block {0} tag was found before the .+ tag$".format(
self.RBLOCK_2_NAME),
Template,
# call Template on the following string/template
self.LOAD_STRING + self.DEFINE_RBLOCK_1 +
self.REPEAT_RBLOCK_2)
def test_repeated_block_with_no_args(self):
""" repeated_block should throw an exception when
called without arguments.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag takes only one argument$",
Template,
self.LOAD_STRING + self.TOO_FEW_REPEATED_BLOCK)
def test_repeated_block_with_too_many_args(self):
""" repeated_block should throw an exception when
called with too many arguments.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag takes only one argument$",
Template,
self.LOAD_STRING + self.TOO_MANY_REPEATED_BLOCK)
def test_repeat_with_no_args(self):
""" repeat should throw an exception when
called without arguments.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag takes only one argument$",
Template,
self.LOAD_STRING + self.TOO_FEW_REPEAT)
def test_repeat_with_too_many_args(self):
""" repeat should throw an exception when
called with too many arguments.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag takes only one argument$",
Template,
self.LOAD_STRING + self.TOO_MANY_REPEAT)
# Tests for macros.py
from .templatetags.macros import _setup_macros_dict
class MacrosTests(TestCase):
# Template pieces
# (all templates are at the top so they can
# be easily changed/reused
LOAD_MACROS = "{% load macros %}"
# load macros test
TEST_LOADMACROS_TAG = (
"{% loadmacros 'macros/tests/testmacros.html' %}"
"{% use_macro test_macro 'foo' 'bar' %}")
# define a macro
MACRO1_DEFINITION = (
"{% macro macro1 first_arg second_arg first_kwarg=''"
" second_kwarg='default' %}"
"first arg: {{ first_arg }}; "
"second arg: {{ second_arg }}; "
"first_kwarg: {{ first_kwarg }}; "
"second_kwarg: {{ second_kwarg }};"
"{% endmacro %}")
# test default values
USE_MACRO1_WITH_DEFAULTS = (
"{% use_macro macro1 'foo' 'bar' %}")
MACRO1_BLOCK_WITH_DEFAULTS = (
"{% macro_block macro1 %}"
"{% macro_arg %}foo{% endmacro_arg %}"
"{% macro_arg %}bar{% endmacro_arg %}"
"{% endmacro_block %}")
MACRO1_WITH_DEFAULTS_RENDERED = (
"first arg: foo; second arg: bar; "
"first_kwarg: ; second_kwarg: default;")
# test using only one default, overriding the other
USE_MACRO1_WITH_ONE_DEFAULT = (
"{% use_macro macro1 'bar' 'foo' first_kwarg='value' %}")
MACRO1_BLOCK_WITH_ONE_DEFAULT = (
"{% macro_block macro1 %}"
"{% macro_arg %}bar{% endmacro_arg %}"
"{% macro_arg %}foo{% endmacro_arg %}"
"{% macro_kwarg first_kwarg %}value{% endmacro_kwarg %}"
"{% endmacro_block %}")
MACRO1_WITH_ONE_DEFAULT_RENDERED = (
"first arg: bar; second arg: foo; "
"first_kwarg: value; second_kwarg: default;")
# test overriding all defaults
USE_MACRO1_WITH_NO_DEFAULTS = (
"{% use_macro macro1 'one' 'two' "
"first_kwarg='value1' second_kwarg='value2' %}")
MACRO1_BLOCK_WITH_NO_DEFAULTS = (
"{% macro_block macro1 %}"
"{% macro_arg %}one{% endmacro_arg %}"
"{% macro_arg %}two{% endmacro_arg %}"
"{% macro_kwarg first_kwarg %}value1{% endmacro_kwarg %}"
"{% macro_kwarg second_kwarg %}value2{% endmacro_kwarg %}"
"{% endmacro_block %}")
MACRO1_WITH_NO_DEFAULTS_RENDERED = (
"first arg: one; second arg: two; "
"first_kwarg: value1; second_kwarg: value2;")
# test using macro with no args
USE_MACRO1_WITH_NO_ARGS = (
"{% use_macro macro1 %}")
MACRO1_BLOCK_WITH_NO_ARGS = (
"{% macro_block macro1 %}{% endmacro_block %}")
MACRO1_WITH_NO_ARGS_RENDERED = (
"first arg: ; second arg: ; "
"first_kwarg: ; second_kwarg: default;")
# test using macro_block with mixed syntax
MACRO1_BLOCK_WITH_MIXED_SYNTAX = (
"{% macro_block macro1 'a1' first_kwarg='kw1' %}"
"{% macro_arg %}a2{% endmacro_arg %}"
"{% macro_kwarg second_kwarg %}kw2{% endmacro_kwarg %}"
"{% endmacro_block %}")
MACRO1_WITH_MIXED_SYNTAX_RENDERED = (
"first arg: a1; second arg: a2; "
"first_kwarg: kw1; second_kwarg: kw2;")
# Define a second macro (test lexical scoping of args)
MACRO2_DEFINITION = (
"{% macro macro2 first_arg second_arg "
"first_kwarg='one' second_kwarg='two' %}"
"second macro contents:{{ first_arg }},"
"{{ second_arg }},{{ first_kwarg }},"
"{{ second_kwarg }};"
"{% endmacro %}")
USE_MACRO2 = (
"{% use_macro macro2 'first' 'second' "
"first_kwarg='new_one' %}")
MACRO2_BLOCK = (
"{% macro_block macro2 %}"
"{% macro_arg %}first{% endmacro_arg %}"
"{% macro_arg %}second{% endmacro_arg %}"
"{% macro_kwarg first_kwarg %}new_one{% endmacro_kwarg %}"
"{% endmacro_block %}")
MACRO2_RENDERED = (
"second macro contents:first,second,new_one,two;")
# test argument parsing with equals signs in them
USE_MACRO2_WITH_ARG_EQUALS_SIGN = (
"{% use_macro macro2 'a=b' %}")
MACRO2_WITH_ARG_EQUALS_SIGN_RENDERED = (
"second macro contents:a=b,,one,two;")
USE_MACRO2_WITH_KWARG_EQUALS_SIGN = (
'{% use_macro macro2 first_kwarg="a=b" %}')
MACRO2_WITH_KWARG_EQUALS_SIGN_RENDERED = (
'second macro contents:,,a=b,two;')
# test defining a macro with an equals sign in a default argument.
MACRO3_DEFINITION = (
"{% macro macro3 arg kwarg='a=b' %}"
"{{ arg }}{{ kwarg }};"
"{% endmacro %}")
USE_MACRO3 = (
"{% use_macro macro3 %}")
MACRO3_RENDERED = "a=b;"
# test using context variable with macros
USE_MACRO3_WITH_VARIABLE_ARG = (
"{% use_macro macro3 foo kwarg='' %}")
USE_MACRO3_WITH_VARIABLE_KWARG = (
"{% use_macro macro3 kwarg=foo %}")
MACRO3_BLOCK_WITH_VARIABLE_INSIDE = (
"{% macro_block macro3 %}"
"{% macro_kwarg kwarg %}"
"{{ foo }}"
"{% endmacro_kwarg %}"
"{% endmacro_block %}")
MACRO3_WITH_VARIABLE_RENDERED = "bar;"
FOO_VALUE = "bar"
# test using a context variable to define a macro default
MACRO4_DEFINITION = (
"{% macro macro4 kwarg=foo %}"
"{{ kwarg }};"
"{% endmacro %}")
USE_MACRO4_WITH_VALUE = (
"{% use_macro macro4 kwarg='value' %}")
USE_MACRO4_WITHOUT_VALUE = (
"{% use_macro macro4 %}")
MACRO4_WITH_VALUE_RENDERED = (
"value;")
MACRO4_WITHOUT_VALUE_RENDERED = (
"bar;")
# test defining a macro with no args or kwargs
MACRO5_DEFINITION = (
"{% macro macro5 %}"
"contents"
"{% endmacro %}")
USE_MACRO5 = "{% use_macro macro5 %}"
MACRO5_RENDERED = "contents"
# exceptions testing templates are kept in the definition
# of the exceptions tests.
# test functionality
## test _setup_macros_dict
def test_set__setup_macros_dict_sets_variable(self):
""" _setup_macros_dict must set the _macros
variable on the parser.
"""
p = make_parser("A short template")
# check that the parser doesn't initially have the
# _macros attribute
self.assertFalse(hasattr(p, "_macros"))
# check that _setup_macros_dict actually sets the
# attribute.
_setup_macros_dict(p)
self.assertTrue(hasattr(p, "_macros"))
def test_set__setup_macros_dict_doesnt_overwrite(self):
""" _setup_macros_dict must not overwrite the
_macros variable on the parser, if it already
exists.
"""
p = make_parser("A short template")
# set the attribute to a specific dict
dic = {'foo': 'bar', }
p._macros = dic
# call _setup_macros_dict and test that
# the dict hasn't been overwritten.
_setup_macros_dict(p)
self.assertEqual(p._macros, dic)
def test_set__setup_macros_dict_initialize_empty(self):
""" Set repeated blocks must initialize the
the _repeated_blocks variable as an empty dict.
"""
p = make_parser("A short template")
_setup_macros_dict(p)
self.assertEqual(p._macros, {})
## test load macros
#### contents of testmacros.html:
"""
{% load macros %}
{% macro test_macro arg1 arg2 kwarg1="default" %}
arg1: {{ arg1 }};
arg2: {{ arg2 }};
kwarg1: {{ kwarg1 }};
{% endmacro %}
"""
def test_load_macros_works(self):
""" Load macros should make the test macro
available to the template.
"""
p = make_parser(self.LOAD_MACROS + self.TEST_LOADMACROS_TAG)
# parse the template to run the template tags
nodelist = p.parse()
# check that the macro is added to the parser
self.assertTrue(hasattr(p, "_macros"))
self.assertIn("test_macro", p._macros)
# render nodelist
c = Context({})
rendered_template = nodelist.render(c)
# check that the macro renders in the new template
self.assertEqual(rendered_template,
"arg1: foo;arg2: bar;kwarg1: default;")
## test macro, use_macro, and macro_block
def test_macro_sets_in_parser(self):
""" check that the macro tag actually sets
the node in the parser.
"""
p = make_parser(self.LOAD_MACROS + self.MACRO1_DEFINITION)
# parse the template to run the template tags
nodelist = p.parse()
# check that the macro is added to the parser
self.assertTrue(hasattr(p, "_macros"))
self.assertIn("macro1", p._macros)
def test_use_macro_with_defaults(self):
""" make sure that the use_macro tag uses default
values for kwargs when values aren't supplied
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.USE_MACRO1_WITH_DEFAULTS)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_DEFAULTS_RENDERED)
def test_macro_block_with_defaults(self):
""" make sure that the macro_block tag uses default
values for kwargs when values aren't supplied
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO1_BLOCK_WITH_DEFAULTS)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_DEFAULTS_RENDERED)
def test_use_macro_with_one_default(self):
""" make sure that the use_macro tag uses one default
value for a kwarg when a value isn't supplied.
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.USE_MACRO1_WITH_ONE_DEFAULT)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_ONE_DEFAULT_RENDERED)
def test_macro_block_with_one_default(self):
""" make sure that the macro_block tag uses one default
value for a kwarg when a value isn't supplied.
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO1_BLOCK_WITH_ONE_DEFAULT)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_ONE_DEFAULT_RENDERED)
def test_use_macro_with_no_defaults(self):
""" make sure that the use_macro tag uses no default
values for kwargs when both values are supplied.
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.USE_MACRO1_WITH_NO_DEFAULTS)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_NO_DEFAULTS_RENDERED)
def test_macro_block_with_no_defaults(self):
""" make sure that the macro_block tag uses no default
values for kwargs when both values are supplied.
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO1_BLOCK_WITH_NO_DEFAULTS)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_NO_DEFAULTS_RENDERED)
def test_use_macro_with_no_args(self):
""" make sure that the use_macro tag fails variables
silently when no args are supplied.
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.USE_MACRO1_WITH_NO_ARGS)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_NO_ARGS_RENDERED)
def test_macro_block_with_no_args(self):
""" make sure that the macro_block tag fails variables
silently when no args are supplied.
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO1_BLOCK_WITH_NO_ARGS)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_NO_ARGS_RENDERED)
def test_macro_block_with_mixed_syntax(self):
""" make sure that macro_block accepts arguments to the tag itself
as well as child tags.
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO1_BLOCK_WITH_MIXED_SYNTAX)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_MIXED_SYNTAX_RENDERED)
def test_lexical_scoping(self):
""" make sure that args and kwargs in macros are lexically
to just that macro.
"""
c = Context({})
# first template test: use_macro with use_macro
# test to look for conflicts between scopes,
# defaults overriding non-defaults across scope,
# and vice versa.
t1 = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO2_DEFINITION + self.USE_MACRO1_WITH_ONE_DEFAULT +
self.USE_MACRO2)
self.assertEqual(t1.render(c),
self.MACRO1_WITH_ONE_DEFAULT_RENDERED + self.MACRO2_RENDERED)
# second template test
# test use_macro with macro_block
t2 = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO2_DEFINITION + self.USE_MACRO1_WITH_ONE_DEFAULT +
self.MACRO2_BLOCK)
self.assertEqual(t2.render(c),
self.MACRO1_WITH_ONE_DEFAULT_RENDERED + self.MACRO2_RENDERED)
# third template test
# test macro_block with use_macro
t3 = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO2_DEFINITION + self.MACRO1_BLOCK_WITH_ONE_DEFAULT +
self.USE_MACRO2)
self.assertEqual(t3.render(c),
self.MACRO1_WITH_ONE_DEFAULT_RENDERED + self.MACRO2_RENDERED)
# fourth template test
# test macro_block with macro_block
t4 = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.MACRO2_DEFINITION + self.MACRO1_BLOCK_WITH_ONE_DEFAULT +
self.MACRO2_BLOCK)
self.assertEqual(t4.render(c),
self.MACRO1_WITH_ONE_DEFAULT_RENDERED + self.MACRO2_RENDERED)
# test a different combination of macro usage
t5 = Template(self.LOAD_MACROS + self.MACRO2_DEFINITION +
self.MACRO1_DEFINITION + self.USE_MACRO1_WITH_NO_ARGS +
self.USE_MACRO2)
self.assertEqual(t5.render(c),
self.MACRO1_WITH_NO_ARGS_RENDERED + self.MACRO2_RENDERED)
def test_use_macro_with_macro_block(self):
""" test that use_macro and macro_block may
be used in the same template.
"""
t = Template(self.LOAD_MACROS + self.MACRO1_DEFINITION +
self.USE_MACRO1_WITH_DEFAULTS + ";" +
self.MACRO1_BLOCK_WITH_DEFAULTS)
c = Context({})
self.assertEqual(t.render(c), self.MACRO1_WITH_DEFAULTS_RENDERED +
";" + self.MACRO1_WITH_DEFAULTS_RENDERED)
def test_define_macro_with_equals_sign(self):
""" test that when a kwarg's default value has an equals
sign, the macro still parses and renders correctly.
"""
t = Template(self.LOAD_MACROS + self.MACRO3_DEFINITION +
self.USE_MACRO3)
c = Context({})
self.assertEqual(t.render(c),
self.MACRO3_RENDERED)
def test_arg_with_equals_sign(self):
""" test that when an arg has an equals sign surrounded
by quotes, the arg still parses correctly.
"""
t = Template(self.LOAD_MACROS + self.MACRO2_DEFINITION +
self.USE_MACRO2_WITH_ARG_EQUALS_SIGN)
c = Context({})
self.assertEqual(t.render(c),
self.MACRO2_WITH_ARG_EQUALS_SIGN_RENDERED)
def test_kwarg_with_equals_sign(self):
""" test that when a kwarg is set to a value with an equals
sign in it, the kwarg still parses correctly.
"""
t = Template(self.LOAD_MACROS + self.MACRO2_DEFINITION +
self.USE_MACRO2_WITH_KWARG_EQUALS_SIGN)
c = Context({})
self.assertEqual(t.render(c),
self.MACRO2_WITH_KWARG_EQUALS_SIGN_RENDERED)
def test_using_context_variable_in_use_macro_arg(self):
""" Use macro is meant to be able to accept context variables
in its args.
"""
t = Template(self.LOAD_MACROS + self.MACRO3_DEFINITION +
self.USE_MACRO3_WITH_VARIABLE_ARG)
c = Context({'foo': self.FOO_VALUE})
self.assertEqual(t.render(c), self.MACRO3_WITH_VARIABLE_RENDERED)
def test_using_context_variable_in_use_macro_kwarg(self):
""" Use macro is meant to be able to accept context variables
in its kwargs.
"""
t = Template(self.LOAD_MACROS + self.MACRO3_DEFINITION +
self.USE_MACRO3_WITH_VARIABLE_KWARG)
c = Context({'foo': self.FOO_VALUE})
self.assertEqual(t.render(c), self.MACRO3_WITH_VARIABLE_RENDERED)
def test_using_context_variable_in_macro_block(self):
""" Macro block is meant to be able to accept context variables
inside its sub-blocks.
"""
t = Template(self.LOAD_MACROS + self.MACRO3_DEFINITION +
self.MACRO3_BLOCK_WITH_VARIABLE_INSIDE)
c = Context({'foo': self.FOO_VALUE})
self.assertEqual(t.render(c), self.MACRO3_WITH_VARIABLE_RENDERED)
def test_using_context_variable_in_defining_macro(self):
""" People should be able to use context variables in defining
default values for macro kwargs.
"""
t = Template(self.LOAD_MACROS + self.MACRO4_DEFINITION +
self.USE_MACRO4_WITH_VALUE + self.USE_MACRO4_WITHOUT_VALUE)
c = Context({'foo': self.FOO_VALUE})
self.assertEqual(t.render(c), self.MACRO4_WITH_VALUE_RENDERED +
self.MACRO4_WITHOUT_VALUE_RENDERED)
def test_default_template_variables_set_at_definition(self):
""" when a macro tag uses a template variable to
set a default value for a kwarg, the default value
should be what the context variable was at the definition
of the macro, and so should not change later if the variable
does.
"""
t = Template(self.LOAD_MACROS + self.MACRO4_DEFINITION +
"{% with 'new value' as foo %}" +
self.USE_MACRO4_WITHOUT_VALUE +
"{% endwith %}")
c = Context({'foo': self.FOO_VALUE})
# default value should still be 'bar' or self.FOO_VALUE as
# the variable was at the macro's definition.
self.assertEqual(t.render(c), self.MACRO4_WITHOUT_VALUE_RENDERED)
def test_defining_macro_with_no_args(self):
""" Macros should be useable with no arguments, and just a macro
name.
"""
t = Template(self.LOAD_MACROS + self.MACRO5_DEFINITION +
self.USE_MACRO5)
c = Context({})
self.assertEqual(t.render(c), self.MACRO5_RENDERED)
# test exceptions
def test_macro_with_no_end_tag(self):
""" when the macro tag doesn't have an end tag,
it should raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Unclosed tag .+\. Looking for one of: .+$",
Template,
self.LOAD_MACROS + "{% macro macro_name %}some text")
def test_macro_with_no_macro_name_exception(self):
""" A macro tag without a macro name should raise
a too few arguments exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag requires at least one argument \(macro name\)$",
Template,
self.LOAD_MACROS + "{% macro %}{% endmacro %}")
def test_macro_raises_malformed_argument_exception_for_arg(self):
""" A macro tag should raise an exception if an arg
is malformed.
"""
# quotes around the arg definition
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed arguments to the .+ tag.$",
Template,
self.LOAD_MACROS + "{% macro macro_name 'arg' %}"
"{% endmacro %}")
# end quote on the arg definition
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed arguments to the .+ tag.$",
Template,
self.LOAD_MACROS + "{% macro macro_name arg' %}"
"{% endmacro %}")
def test_macro_raises_malformed_argument_exception_for_kwarg(self):
""" A macro tag should raise an exception if a kwarg
is malformed.
"""
# default value not entirely in quotes
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed arguments to the .+ tag.$",
Template,
self.LOAD_MACROS + "{% macro macro_name kw=a'arg' %}"
"{% endmacro %}")
# keyword in quotes
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed arguments to the .+ tag.$",
Template,
self.LOAD_MACROS + "{% macro macro_name 'kw'=aarg %}"
"{% endmacro %}")
def test_macro_raises_variable_missing_exception(self):
""" if a macro tag is called with a default set to
a variable that is not in the context, it should
raise a VariableDoesNotExist error.
"""
# the error is only thrown when the template is
# actually rendered, because that is when variables
# are resolved against the context.
t = Template(self.LOAD_MACROS +
"{% macro some_macro kwarg=foo %}text{% endmacro %}")
self.assertRaises(
template.VariableDoesNotExist,
t.render,
Context({}))
def test_macro_raises_malformed_argument_exception_for_filter(self):
""" If the user attempts to use a filter on an argument,
the macro tag should raise a malformed arguments exception.
"""
# use filter on template variable
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed arguments to the .+ tag.$",
Template,
self.LOAD_MACROS + "{% macro macro_name kwarg=arg|join:\",\" %}"
"{% endmacro %}")
# use filter on hard-coded string
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed arguments to the .+ tag.$",
Template,
self.LOAD_MACROS + "{% macro macro_name kwarg='arg'|join:\",\" %}"
"{% endmacro %}")
def test_load_macros_raises_no_arguments_exception(self):
""" If the loadmacros tag is called without a filename,
it should raise a template syntax error.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag requires exactly one argument \(filename\)$",
Template,
self.LOAD_MACROS + "{% loadmacros %}")
def test_load_macros_raises_for_too_many_arguments(self):
""" If the loadmacros tag is called with two or more
arguments, it should raise an error.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag requires exactly one argument \(filename\)$",
Template,
self.LOAD_MACROS +
"{% loadmacros 'macros/tests/testmacros.html' "
"'macros/tests/testmacros.html' %}")
def test_load_macros_malformed_arguments_exception(self):
""" if the loadmacros tag's filename argument is not
wrapped in quotes, then the tag should raise a
template syntax error.
"""
# malformed argument: no quotes
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed argument to the .+ template tag. "
"Argument must be in quotes.$",
Template,
self.LOAD_MACROS +
"{% loadmacros macros/tests/testmacros.html %}")
# malformed argument: mismatched quotes ("')
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed argument to the .+ template tag. "
"Argument must be in quotes.$",
Template,
self.LOAD_MACROS +
"{% loadmacros \"macros/tests/testmacros.html' %}")
# malformed argument: mismatched quotes ('")
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed argument to the .+ template tag. "
"Argument must be in quotes.$",
Template,
self.LOAD_MACROS +
"{% loadmacros 'macros/tests/testmacros.html\" %}")
# malformed argument: only one quote
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed argument to the .+ template tag. "
"Argument must be in quotes.$",
Template,
self.LOAD_MACROS +
"{% loadmacros 'macros/tests/testmacros.html %}")
def test_use_macro_with_no_macro_name(self):
""" if use_macro is called without any arguments, it
raises an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag requires at least one argument \(macro name\)$",
Template,
self.LOAD_MACROS + "{% use_macro %}")
def test_use_macro_without_macro_definition(self):
""" if use_macro is called without a macro definition
or with the macro definition after use_macro, then it
should raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Macro .+ is not defined previously to the .+ tag$",
Template,
self.LOAD_MACROS + "{% use_macro macro_name %}")
def test_use_macro_before_macro_definition(self):
""" if use_macro comes before the definition of the macro
it uses, then it should throw an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Macro .+ is not defined previously to the .+ tag$",
Template,
self.LOAD_MACROS + self.USE_MACRO2 +
self.MACRO2_DEFINITION)
def test_use_macro_with_malformed_arguments(self):
""" if use_macro is passed malformed arguments, it should
raise an exception.
"""
# malformed arg
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed arguments to the .+ tag.$",
Template,
self.LOAD_MACROS + self.MACRO3_DEFINITION +
"{% use_macro macro3 'foo'o %}")
# malformed kwarg
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Malformed arguments to the .+ tag.$",
Template,
self.LOAD_MACROS + self.MACRO3_DEFINITION +
"{% use_macro macro3 kwar'g'='a=b' %}")
def test_macro_block_with_no_macro_name(self):
""" if macro_block is called without a macro_name, it should
raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"macro_block tag requires at least one argument \(macro name\)",
Template,
self.LOAD_MACROS + self.MACRO3_DEFINITION +
"{% macro_block %}{% endmacro_block %}")
def test_macro_block_before_macro_definition(self):
""" if macro_block is called before a macro's definition,
it should raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Macro .+ is not defined previously to the .+ tag$",
Template,
self.LOAD_MACROS + "{% macro_block macro3 %}{% endmacro_block %}"
+ self.MACRO3_DEFINITION)
def test_macro_block_with_no_macro_definition(self):
""" if macro_block is called on a macro that hasn't
been defined, then it should raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^Macro .+ is not defined previously to the .+ tag$",
Template,
self.LOAD_MACROS + "{% macro_block macro3 %}{% endmacro_block %}")
def test_macro_block_with_repeated_keyword(self):
""" if the macro_block is passed the same keyword
argument twice, it should raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ template tag was supplied "
r"the same keyword argument multiple times.$",
Template,
self.LOAD_MACROS + self.MACRO3_DEFINITION +
"{% macro_block macro3 %}"
"{% macro_kwarg kwarg %}"
"contents"
"{% endmacro_kwarg %}"
"{% macro_kwarg kwarg %}"
"values"
"{% endmacro_kwarg %}"
"{% endmacro_block %}")
def test_macro_block_with_undefined_keyword(self):
""" if macro_block is called with a keyword
argument not defined in its macro, it should raise
an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ template tag was supplied with a "
r"keyword argument not defined by the .+ macro.$",
Template,
self.LOAD_MACROS + self.MACRO3_DEFINITION +
"{% macro_block macro3 %}"
"{% macro_kwarg foo %}"
"contents"
"{% endmacro_kwarg %}"
"{% endmacro_block %}")
## removed these tests because I've removed the validation that prevents white
## space/text/template tags in the macro blocks.
##
## def test_macro_block_with_text(self):
## """ if macro_block is called with text inside it
## not wrapped in an arg or kwarg tag, it should raise
## an exception.
## """
## self.assertRaisesRegexp(
## template.TemplateSyntaxError,
## r"^.+ template tag received an argument that "
## r"is neither a arg or a kwarg tag. Make sure there's "
## r"text or template tags directly descending from the .+ tag.$",
## Template,
## self.LOAD_MACROS + self.MACRO3_DEFINITION +
## "{% macro_block macro3 %}"
## "some text outside a tag"
## "{% macro_kwarg kwarg %}"
## "contents"
## "{% endmacro_kwarg %}"
## "{% endmacro_block %}")
##
## def test_macro_block_with_bad_node(self):
## """ if macro_block is called with a template tag as
## a direct descendent of it that is not an arg or
## kwarg tag, it should raise an exception.
## """
## self.assertRaisesRegexp(
## template.TemplateSyntaxError,
## r"^.+ template tag received an argument that "
## r"is neither a arg or a kwarg tag. Make sure there's "
## r"text or template tags directly descending from the .+ tag.$",
## Template,
## self.LOAD_MACROS + self.MACRO3_DEFINITION +
## "{% macro_block macro3 %}"
## "{% if True %}{% endif %}"
## "{% macro_kwarg kwarg %}"
## "contents"
## "{% endmacro_kwarg %}"
## "{% endmacro_block %}")
def test_macro_block_with_too_many_args(self):
""" if macro_block is called with more args than
defined in its macro, it should raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ template tag was supplied too many "
r"arg block tags.$",
Template,
self.LOAD_MACROS + self.MACRO3_DEFINITION +
"{% macro_block macro3 %}"
"{% macro_arg %}"
"contents one"
"{% endmacro_arg %}"
"{% macro_arg %}"
"contents two"
"{% endmacro_arg %}"
"{% endmacro_block %}")
def test_macro_kwarg_with_too_few_arguments(self):
""" if macro_kwarg tag is called with too few arguments,
it should raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag requires exactly one argument, a keyword$",
Template,
self.LOAD_MACROS + self.MACRO3_DEFINITION +
"{% macro_block macro3 %}"
"{% macro_kwarg %}"
"contents"
"{% endmacro_kwarg %}"
"{% endmacro_block %}")
def test_macro_kwarg_with_too_many_arguments(self):
""" if macro_kwarg tag is called with too many arguments,
it should raise an exception.
"""
self.assertRaisesRegexp(
template.TemplateSyntaxError,
r"^.+ tag requires exactly one argument, a keyword$",
Template,
self.LOAD_MACROS + self.MACRO3_DEFINITION +
"{% macro_block macro3 %}"
"{% macro_kwarg kwarg kwarg %}"
"contents"
"{% endmacro_kwarg %}"
"{% endmacro_block %}")
|
{
"content_hash": "3811e2fe8ae3a6b856fb891ec9a873cf",
"timestamp": "",
"source": "github",
"line_count": 1052,
"max_line_length": 78,
"avg_line_length": 39.79277566539924,
"alnum_prop": 0.5780421384549234,
"repo_name": "nalourie/django-macros",
"id": "8e811e0a7187bc5eada6621d692b9cd192ee07f5",
"size": "41862",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "test_project_python27/test_project_python27/macros/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "540"
},
{
"name": "Python",
"bytes": "242463"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mybbs', '0003_auto_20150731_1504'),
]
operations = [
migrations.AlterField(
model_name='bbs',
name='cai_id',
field=models.TextField(default='x'),
preserve_default=True,
),
migrations.AlterField(
model_name='bbs',
name='zan_id',
field=models.TextField(default='x'),
preserve_default=True,
),
]
|
{
"content_hash": "54d0651d1f1f4c658359acd7cd172100",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 48,
"avg_line_length": 23.56,
"alnum_prop": 0.5449915110356537,
"repo_name": "oliver1996/BLEACH_BBS",
"id": "5fcc1533fa389345cbd4ae5ed71bb866ec446e3c",
"size": "613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mybbs/migrations/0004_auto_20150731_1542.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [],
"symlink_target": ""
}
|
from setuptools import find_packages, setup
import os
import re
# Recommendations from https://packaging.python.org/
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
def read(*parts):
with open(os.path.join(here, *parts), "r") as fp:
return fp.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
setup(
name="e3nn_jax",
version=find_version("e3nn_jax", "__init__.py"),
description="Equivariant convolutional neural networks "
"for the group E(3) of 3 dimensional rotations, translations, and mirrors.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://e3nn.org",
packages=find_packages(exclude=["examples"]),
install_requires=[
"jax",
"dm-haiku",
"optax",
"sympy",
"numpy",
],
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.7",
license="MIT",
license_files=["LICENSE"],
)
|
{
"content_hash": "ac3ca76295cbd624952ddbb355d216ec",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 88,
"avg_line_length": 29.6,
"alnum_prop": 0.6142506142506142,
"repo_name": "e3nn/e3nn-jax",
"id": "11ad0a9a43903a87bbf266523e81dc4d18726622",
"size": "1628",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "781"
},
{
"name": "MATLAB",
"bytes": "18740"
},
{
"name": "Python",
"bytes": "367391"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('campaigns', '0002_auto_20150318_1926'),
]
operations = [
migrations.AlterField(
model_name='campaignpixel',
name='pixel_type',
field=models.IntegerField(default=0, choices=[(0, b'Listing'), (1, b'Detail')]),
),
]
|
{
"content_hash": "d73d9f4516bc073ba89cb84d35dccf5b",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 92,
"avg_line_length": 24.055555555555557,
"alnum_prop": 0.5981524249422633,
"repo_name": "theonion/django-bulbs",
"id": "e2ec33c1770ef8f9225b964e9f61e387c4528eec",
"size": "457",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bulbs/campaigns/migrations/0003_auto_20150528_1434.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "36651"
},
{
"name": "HTML",
"bytes": "73968"
},
{
"name": "JavaScript",
"bytes": "57288"
},
{
"name": "Python",
"bytes": "1055540"
},
{
"name": "Ruby",
"bytes": "397"
},
{
"name": "Shell",
"bytes": "1629"
}
],
"symlink_target": ""
}
|
import datetime
import calendar
import pandas as pd
import numpy as np
import zipline.finance.risk as risk
from zipline.utils import factory
from zipline.finance.trading import SimulationParameters
from zipline.testing.fixtures import WithTradingEnvironment, ZiplineTestCase
from zipline.finance.risk.period import RiskMetricsPeriod
RETURNS_BASE = 0.01
RETURNS = [RETURNS_BASE] * 251
BENCHMARK_BASE = 0.005
BENCHMARK = [BENCHMARK_BASE] * 251
DECIMAL_PLACES = 8
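# With a constant daily return r held for n trading days, the period return
# compounds to (1 + r) ** n - 1; the assertions below compare each period's
# reported return against this closed form (e.g. 20 days at RETURNS_BASE =
# 0.01 compound to roughly 0.2202).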
class TestRisk(WithTradingEnvironment, ZiplineTestCase):
def init_instance_fixtures(self):
super(TestRisk, self).init_instance_fixtures()
self.start_session = pd.Timestamp("2006-01-01", tz='UTC')
self.end_session = self.trading_calendar.minute_to_session_label(
pd.Timestamp("2006-12-31", tz='UTC'),
direction="previous"
)
self.sim_params = SimulationParameters(
start_session=self.start_session,
end_session=self.end_session,
trading_calendar=self.trading_calendar,
)
self.algo_returns = factory.create_returns_from_list(
RETURNS,
self.sim_params
)
self.benchmark_returns = factory.create_returns_from_list(
BENCHMARK,
self.sim_params
)
self.metrics = risk.RiskReport(
self.algo_returns,
self.sim_params,
benchmark_returns=self.benchmark_returns,
trading_calendar=self.trading_calendar,
treasury_curves=self.env.treasury_curves,
)
def test_factory(self):
returns = [0.1] * 100
r_objects = factory.create_returns_from_list(returns, self.sim_params)
self.assertTrue(r_objects.index[-1] <=
pd.Timestamp('2006-12-31', tz='UTC'))
def test_drawdown(self):
np.testing.assert_equal(
all(x.max_drawdown == 0 for x in self.metrics.month_periods),
True)
np.testing.assert_equal(
all(x.max_drawdown == 0 for x in self.metrics.three_month_periods),
True)
np.testing.assert_equal(
all(x.max_drawdown == 0 for x in self.metrics.six_month_periods),
True)
np.testing.assert_equal(
all(x.max_drawdown == 0 for x in self.metrics.year_periods),
True)
def test_benchmark_returns_06(self):
np.testing.assert_almost_equal(
[x.benchmark_period_returns
for x in self.metrics.month_periods],
[(1 + BENCHMARK_BASE) ** len(x.benchmark_returns) - 1
for x in self.metrics.month_periods],
DECIMAL_PLACES)
np.testing.assert_almost_equal(
[x.benchmark_period_returns
for x in self.metrics.three_month_periods],
[(1 + BENCHMARK_BASE) ** len(x.benchmark_returns) - 1
for x in self.metrics.three_month_periods],
DECIMAL_PLACES)
np.testing.assert_almost_equal(
[x.benchmark_period_returns
for x in self.metrics.six_month_periods],
[(1 + BENCHMARK_BASE) ** len(x.benchmark_returns) - 1
for x in self.metrics.six_month_periods],
DECIMAL_PLACES)
np.testing.assert_almost_equal(
[x.benchmark_period_returns
for x in self.metrics.year_periods],
[(1 + BENCHMARK_BASE) ** len(x.benchmark_returns) - 1
for x in self.metrics.year_periods],
DECIMAL_PLACES)
def test_trading_days(self):
self.assertEqual([x.num_trading_days
for x in self.metrics.year_periods],
[251])
self.assertEqual([x.num_trading_days
for x in self.metrics.month_periods],
[20, 19, 23, 19, 22, 22, 20, 23, 20, 22, 21, 20])
def test_benchmark_volatility(self):
# Volatility is calculated by an empyrical function, so testing
# of period volatility is limited to determining whether the value is
# numerical. This tests for its existence and format.
np.testing.assert_equal(
all(isinstance(x.benchmark_volatility, float)
for x in self.metrics.month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.benchmark_volatility, float)
for x in self.metrics.three_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.benchmark_volatility, float)
for x in self.metrics.six_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.benchmark_volatility, float)
for x in self.metrics.year_periods),
True)
def test_algorithm_returns(self):
np.testing.assert_almost_equal(
[x.algorithm_period_returns
for x in self.metrics.month_periods],
[(1 + RETURNS_BASE) ** len(x.algorithm_returns) - 1
for x in self.metrics.month_periods],
DECIMAL_PLACES)
np.testing.assert_almost_equal(
[x.algorithm_period_returns
for x in self.metrics.three_month_periods],
[(1 + RETURNS_BASE) ** len(x.algorithm_returns) - 1
for x in self.metrics.three_month_periods],
DECIMAL_PLACES)
np.testing.assert_almost_equal(
[x.algorithm_period_returns
for x in self.metrics.six_month_periods],
[(1 + RETURNS_BASE) ** len(x.algorithm_returns) - 1
for x in self.metrics.six_month_periods],
DECIMAL_PLACES)
np.testing.assert_almost_equal(
[x.algorithm_period_returns
for x in self.metrics.year_periods],
[(1 + RETURNS_BASE) ** len(x.algorithm_returns) - 1
for x in self.metrics.year_periods],
DECIMAL_PLACES)
def test_algorithm_volatility(self):
# Volatility is calculated by an empyrical function, so testing
# of period volatility is limited to determining whether the value is
# numerical. This tests for its existence and format.
np.testing.assert_equal(
all(isinstance(x.algorithm_volatility, float)
for x in self.metrics.month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.algorithm_volatility, float)
for x in self.metrics.three_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.algorithm_volatility, float)
for x in self.metrics.six_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.algorithm_volatility, float)
for x in self.metrics.year_periods),
True)
def test_algorithm_sharpe(self):
# The sharpe ratio is calculated by an empyrical function, so testing
# of period sharpe ratios is limited to determining whether the value is
# numerical. This tests for its existence and format.
np.testing.assert_equal(
all(isinstance(x.sharpe, float)
for x in self.metrics.month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.sharpe, float)
for x in self.metrics.three_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.sharpe, float)
for x in self.metrics.six_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.sharpe, float)
for x in self.metrics.year_periods),
True)
def test_algorithm_downside_risk(self):
# Downside risk is calculated by an empyrical function, so testing
# of period downside risk is limited to determining whether the value is
# numerical. This tests for its existence and format.
np.testing.assert_equal(
all(isinstance(x.downside_risk, float)
for x in self.metrics.month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.downside_risk, float)
for x in self.metrics.three_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.downside_risk, float)
for x in self.metrics.six_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.downside_risk, float)
for x in self.metrics.year_periods),
True)
def test_algorithm_sortino(self):
# The sortino ratio is calculated by an empyrical function, so testing
# of period sortino ratios is limited to determining whether the value is
# numerical. This tests for its existence and format.
np.testing.assert_equal(
all(isinstance(x.sortino, float)
for x in self.metrics.month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.sortino, float)
for x in self.metrics.three_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.sortino, float)
for x in self.metrics.six_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.sortino, float)
for x in self.metrics.year_periods),
True)
def test_algorithm_beta(self):
# Beta is calculated by an empyrical function, so testing
# of period beta is limited to determining whether the value is
# numerical. This tests for its existence and format.
np.testing.assert_equal(
all(isinstance(x.beta, float)
for x in self.metrics.month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.beta, float)
for x in self.metrics.three_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.beta, float)
for x in self.metrics.six_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.beta, float)
for x in self.metrics.year_periods),
True)
def test_algorithm_alpha(self):
# Alpha is calculated by an empyrical function, so testing
# of period alpha is limited to determining whether the value is
# numerical. This tests for its existence and format.
np.testing.assert_equal(
all(isinstance(x.alpha, float)
for x in self.metrics.month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.alpha, float)
for x in self.metrics.three_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.alpha, float)
for x in self.metrics.six_month_periods),
True)
np.testing.assert_equal(
all(isinstance(x.alpha, float)
for x in self.metrics.year_periods),
True)
def test_treasury_returns(self):
returns = factory.create_returns_from_range(self.sim_params)
metrics = risk.RiskReport(returns, self.sim_params,
trading_calendar=self.trading_calendar,
treasury_curves=self.env.treasury_curves,
benchmark_returns=self.env.benchmark_returns)
self.assertEqual([round(x.treasury_period_return, 4)
for x in metrics.month_periods],
[0.0037,
0.0034,
0.0039,
0.0038,
0.0040,
0.0037,
0.0043,
0.0043,
0.0038,
0.0044,
0.0043,
0.004])
self.assertEqual([round(x.treasury_period_return, 4)
for x in metrics.three_month_periods],
[0.0114,
0.0116,
0.0122,
0.0125,
0.0129,
0.0127,
0.0123,
0.0128,
0.0125,
0.0127])
self.assertEqual([round(x.treasury_period_return, 4)
for x in metrics.six_month_periods],
[0.0260,
0.0257,
0.0258,
0.0252,
0.0259,
0.0256,
0.0257])
self.assertEqual([round(x.treasury_period_return, 4)
for x in metrics.year_periods],
[0.0500])
def test_benchmarkrange(self):
start_session = self.trading_calendar.minute_to_session_label(
pd.Timestamp("2008-01-01", tz='UTC')
)
end_session = self.trading_calendar.minute_to_session_label(
pd.Timestamp("2010-01-01", tz='UTC'), direction="previous"
)
sim_params = SimulationParameters(
start_session=start_session,
end_session=end_session,
trading_calendar=self.trading_calendar,
)
returns = factory.create_returns_from_range(sim_params)
metrics = risk.RiskReport(returns, self.sim_params,
trading_calendar=self.trading_calendar,
treasury_curves=self.env.treasury_curves,
benchmark_returns=self.env.benchmark_returns)
self.check_metrics(metrics, 24, start_session)
def test_partial_month(self):
start_session = self.trading_calendar.minute_to_session_label(
pd.Timestamp("1993-02-01", tz='UTC')
)
# 1992 and 1996 were leap years
total_days = 365 * 5 + 2
end_session = start_session + datetime.timedelta(days=total_days)
sim_params90s = SimulationParameters(
start_session=start_session,
end_session=end_session,
trading_calendar=self.trading_calendar,
)
returns = factory.create_returns_from_range(sim_params90s)
returns = returns[:-10] # truncate the returns series to end mid-month
metrics = risk.RiskReport(returns, sim_params90s,
trading_calendar=self.trading_calendar,
treasury_curves=self.env.treasury_curves,
benchmark_returns=self.env.benchmark_returns)
total_months = 60
self.check_metrics(metrics, total_months, start_session)
def check_metrics(self, metrics, total_months, start_date):
"""
confirm that the right number of risk metrics was calculated for each
window length.
"""
self.assert_range_length(
metrics.month_periods,
total_months,
1,
start_date
)
self.assert_range_length(
metrics.three_month_periods,
total_months,
3,
start_date
)
self.assert_range_length(
metrics.six_month_periods,
total_months,
6,
start_date
)
self.assert_range_length(
metrics.year_periods,
total_months,
12,
start_date
)
def assert_last_day(self, period_end):
# 30 days has september, april, june and november
if period_end.month in [9, 4, 6, 11]:
self.assertEqual(period_end.day, 30)
# all the rest have 31, except for february
elif(period_end.month != 2):
self.assertEqual(period_end.day, 31)
else:
if calendar.isleap(period_end.year):
self.assertEqual(period_end.day, 29)
else:
self.assertEqual(period_end.day, 28)
def assert_month(self, start_month, actual_end_month):
if start_month == 1:
expected_end_month = 12
else:
expected_end_month = start_month - 1
self.assertEqual(expected_end_month, actual_end_month)
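# A rolling window of period_length months over total_months of data yields
# total_months - (period_length - 1) windows (e.g. 12 months of data give
# 10 three-month windows); assert_range_length checks this count below.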
def assert_range_length(self, col, total_months,
period_length, start_date):
if (period_length > total_months):
self.assertEqual(len(col), 0)
else:
self.assertEqual(
len(col),
total_months - (period_length - 1),
"mismatch for total months - \
expected:{total_months}/actual:{actual}, \
period:{period_length}, start:{start_date}, \
calculated end:{end}".format(total_months=total_months,
period_length=period_length,
start_date=start_date,
end=col[-1]._end_session,
actual=len(col))
)
self.assert_month(start_date.month, col[-1]._end_session.month)
self.assert_last_day(col[-1]._end_session)
def test_algorithm_leverages(self):
# Max leverage for an algorithm with 'None' as leverage is 0.
np.testing.assert_equal(
[x.max_leverage for x in self.metrics.month_periods],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
np.testing.assert_equal(
[x.max_leverage for x in self.metrics.three_month_periods],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
np.testing.assert_equal(
[x.max_leverage for x in self.metrics.six_month_periods],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
np.testing.assert_equal(
[x.max_leverage for x in self.metrics.year_periods],
[0.0])
def test_returns_beyond_treasury(self):
# The last treasury value is used when return dates go beyond
# treasury curve data
treasury_curves = self.env.treasury_curves
treasury = treasury_curves[treasury_curves.index < self.start_session]
test_period = RiskMetricsPeriod(
start_session=self.start_session,
end_session=self.end_session,
returns=self.algo_returns,
benchmark_returns=self.benchmark_returns,
trading_calendar=self.trading_calendar,
treasury_curves=treasury,
algorithm_leverages=[.01, .02, .03]
)
assert test_period.treasury_curves.equals(treasury[-1:])
# This return period has a list instead of None for algorithm_leverages
# Confirm that max_leverage is set to the max of those values
assert test_period.max_leverage == .03
def test_index_mismatch_exception(self):
# An exception is raised when returns and benchmark returns
# have indexes that do not match
bench_params = SimulationParameters(
start_session=pd.Timestamp("2006-02-01", tz='UTC'),
end_session=pd.Timestamp("2006-02-28", tz='UTC'),
trading_calendar=self.trading_calendar,
)
benchmark = factory.create_returns_from_list(
[BENCHMARK_BASE]*19,
bench_params
)
with np.testing.assert_raises(Exception):
RiskMetricsPeriod(
start_session=self.start_session,
end_session=self.end_session,
returns=self.algo_returns,
benchmark_returns=benchmark,
trading_calendar=self.trading_calendar,
treasury_curves=self.env.treasury_curves,
)
def test_sharpe_value_when_null(self):
# Sharpe is displayed as '0.0' instead of np.nan
null_returns = factory.create_returns_from_list(
[0.0]*251,
self.sim_params
)
test_period = RiskMetricsPeriod(
start_session=self.start_session,
end_session=self.end_session,
returns=null_returns,
benchmark_returns=self.benchmark_returns,
trading_calendar=self.trading_calendar,
treasury_curves=self.env.treasury_curves,
)
assert test_period.sharpe == 0.0
def test_representation(self):
test_period = RiskMetricsPeriod(
start_session=self.start_session,
end_session=self.end_session,
returns=self.algo_returns,
benchmark_returns=self.benchmark_returns,
trading_calendar=self.trading_calendar,
treasury_curves=self.env.treasury_curves,
)
metrics = [
"algorithm_period_returns",
"benchmark_period_returns",
"excess_return",
"num_trading_days",
"benchmark_volatility",
"algorithm_volatility",
"sharpe",
"sortino",
"beta",
"alpha",
"max_drawdown",
"max_leverage",
"algorithm_returns",
"benchmark_returns",
]
representation = repr(test_period)
assert all(metric in representation for metric in metrics)
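# Worked example (added for illustration; not part of the original test file):
# assert_range_length expects len(col) == total_months - (period_length - 1)
# whenever the window fits inside the run. For the 60-month case above
# (total_months = 60) that gives:
#   1-month windows  -> 60 periods
#   3-month windows  -> 58 periods
#   6-month windows  -> 55 periods
#   12-month windows -> 49 periods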
|
{
"content_hash": "a4de8bbb10a6071faa86ece705775784",
"timestamp": "",
"source": "github",
"line_count": 547,
"max_line_length": 79,
"avg_line_length": 39.44058500914077,
"alnum_prop": 0.5518216371558358,
"repo_name": "bartosh/zipline",
"id": "4ed87f476d21850bceacc69d8ad695a3d96c88f6",
"size": "22157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/risk/test_risk_period.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7014"
},
{
"name": "Emacs Lisp",
"bytes": "138"
},
{
"name": "Jupyter Notebook",
"bytes": "162383"
},
{
"name": "PowerShell",
"bytes": "3269"
},
{
"name": "Python",
"bytes": "3611405"
},
{
"name": "Shell",
"bytes": "7420"
}
],
"symlink_target": ""
}
|
import collections
import os
import re
import numpy
from . import spectra_reader
ENVI_TO_NUMPY_DTYPE = {'1': numpy.uint8,
'2': numpy.int16,
'3': numpy.int32,
'4': numpy.float32,
'5': numpy.float64,
'6': numpy.complex64,
'9': numpy.complex128,
'12': numpy.uint16,
'13': numpy.uint32,
'14': numpy.int64,
'15': numpy.uint64}
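# Illustrative note (added): an ENVI header line such as "data type = 4" maps
# through this dictionary to numpy.float32, so get_spectra() below can call
# numpy.fromfile(filename, dtype=ENVI_TO_NUMPY_DTYPE['4']) to read the raw
# samples as 32-bit floats.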
class ENVIFormat(spectra_reader.SpectraReader):
"""
Reader for ENVI spectral library.
"""
def read_hdr_file(self, rawfilename):
"""
Read information from ENVI header file to a dictionary.
"""
# Get the filename without path or extension
filename = os.path.basename(rawfilename)
filesplit = os.path.splitext(filename)
filebase = filesplit[0]
dirname = os.path.dirname(rawfilename)
# See if we can find the header file to use
if os.path.isfile(os.path.join(dirname, filebase + '.hdr')):
hdrfilename = os.path.join(dirname, filebase + '.hdr')
elif os.path.isfile(os.path.join(dirname, filename + '.hdr')):
hdrfilename = os.path.join(dirname, filename + '.hdr')
else:
raise IOError('Could not find coresponding header file')
hdrfile = open(hdrfilename, 'r')
output = collections.OrderedDict()
inblock = False
# Read line, split it on equals, strip whitespace from resulting strings
# and add key/value pair to output
for currentline in hdrfile:
# ENVI headers accept blocks bracketed by curly braces - check for these
if not inblock:
# Split line on first equals sign
if re.search('=', currentline) is not None:
linesplit = re.split('=', currentline, 1)
# Convert the key to lower case
key = linesplit[0].strip().lower()
value = linesplit[1].strip()
# If value starts with an open brace, it's the start of a block
# - strip the brace off and read the rest of the block
if re.match('{', value) is not None:
inblock = True
value = re.sub('^{', '', value, 1)
# If value ends with a close brace it's the end
# of the block as well - strip the brace off
if re.search('}$', value):
inblock = False
value = re.sub('}$', '', value, 1)
value = value.strip()
output[key] = value
else:
# If we're in a block, just read the line, strip whitespace
# (and any closing brace ending the block) and add the whole thing
value = currentline.strip()
if re.search('}$', value):
inblock = False
value = re.sub('}$', '', value, 1)
value = value.strip()
output[key] = output[key] + value
hdrfile.close()
return output
def get_spectra(self, filename, spectra_number=1):
"""
Extracts spectra from ENVI file. To get a list of all spectra within
a file use 'print_spectra_names'.
Requires:
* filename
* spectra_number - multiple spectra are often present in the same file; use this to select the required spectrum.
Returns:
* Spectra object with wavelengths, values and associated units
"""
in_header = self.read_hdr_file(filename)
# Get samples, lines, data type and byte order
lines = int(in_header['lines'])
samples = int(in_header['samples'])
data_type = in_header['data type']
byte_order = int(in_header['byte order'])
# Get wavelengths as NumPy array.
wavelengths = in_header['wavelength'].split(',')
wavelengths = [float(w) for w in wavelengths]
wavelengths = numpy.array(wavelengths)
# Read to numpy array
data = numpy.fromfile(filename,
dtype=ENVI_TO_NUMPY_DTYPE[data_type])
if byte_order == 0:
data = data.reshape((lines, samples))
else:
data = data.byteswap()
data = data.reshape((lines, samples))
reflectance = data[spectra_number-1,:]
self.spectra.file_name = filename
self.spectra.wavelengths = wavelengths
self.spectra.values = reflectance
if in_header['wavelength units'].lower() == 'micrometers':
self.spectra.wavelength_units = 'um'
else:
self.spectra.wavelength_units = 'nm'
self.spectra.value_units = 'reflectance'
try:
scale_factor = float(in_header['reflectance scale factor'])
self.spectra.value_scaling = scale_factor
except KeyError:
self.spectra.value_scaling = 1
return self.spectra
def print_spectra_names(self, filename):
"""
Prints the names of spectra within a spectral library and the
corresponding number, which can be used in 'get_spectra'
"""
in_header = self.read_hdr_file(filename)
spectra_names = in_header['spectra names']
for i, name in enumerate(spectra_names.split(',')):
print("{:0>3}: {}".format(i+1, name.strip()))
|
{
"content_hash": "720a9ca4ced9be6e362b4239dcdfbf6d",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 112,
"avg_line_length": 36.1025641025641,
"alnum_prop": 0.5330255681818182,
"repo_name": "pmlrsg/PySpectra",
"id": "c24277ba7f21b666a52458566c04a5d325fa03a6",
"size": "5891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PySpectra/envi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "140698"
},
{
"name": "Standard ML",
"bytes": "35124"
}
],
"symlink_target": ""
}
|
""" Cisco_IOS_XR_tty_server_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR tty\-server package operational data.
This module contains definitions
for the following management objects\:
tty\: TTY Line Configuration
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class LineStateEnum(Enum):
"""
LineStateEnum
Line state
.. data:: none = 0
Line not connected
.. data:: registered = 1
Line registered
.. data:: in_use = 2
Line active and in use
"""
none = 0
registered = 1
in_use = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['LineStateEnum']
class SessionOperationEnum(Enum):
"""
SessionOperationEnum
Session operation
.. data:: none = 0
No sessions on the line
.. data:: setup = 1
Session getting set up
.. data:: shell = 2
Session active with a shell
.. data:: transitioning = 3
Session in transitioning phase
.. data:: packet = 4
Session ready to receive packets
"""
none = 0
setup = 1
shell = 2
transitioning = 3
packet = 4
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['SessionOperationEnum']
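# Usage sketch (added for illustration; not part of the generated bindings).
# It assumes the classic ydk-py CRUD workflow; the device address and
# credentials are placeholders. Tty is defined further down in this module and
# is only looked up if the function is actually called.
def _example_read_tty_oper(address='192.0.2.1', username='admin', password='admin'):
    from ydk.providers import NetconfServiceProvider
    from ydk.services import CRUDService
    provider = NetconfServiceProvider(address=address, port=830,
                                      username=username, password=password,
                                      protocol='ssh')
    crud = CRUDService()
    # Read the whole Cisco-IOS-XR-tty-server-oper tree and walk the VTY lines.
    tty = crud.read(provider, Tty())
    for vty in tty.vty_lines.vty_line:
        print(vty.line_number, vty.state.general.general_state)
    provider.close()
    return tty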
class Tty(object):
"""
TTY Line Configuration
.. attribute:: auxiliary_nodes
List of Nodes attached with an auxiliary line
**type**\: :py:class:`AuxiliaryNodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes>`
.. attribute:: console_nodes
List of Nodes for console
**type**\: :py:class:`ConsoleNodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes>`
.. attribute:: vty_lines
List of VTY lines
**type**\: :py:class:`VtyLines <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.auxiliary_nodes = Tty.AuxiliaryNodes()
self.auxiliary_nodes.parent = self
self.console_nodes = Tty.ConsoleNodes()
self.console_nodes.parent = self
self.vty_lines = Tty.VtyLines()
self.vty_lines.parent = self
class ConsoleNodes(object):
"""
List of Nodes for console
.. attribute:: console_node
Console line configuration on a node
**type**\: list of :py:class:`ConsoleNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.console_node = YList()
self.console_node.parent = self
self.console_node.name = 'console_node'
class ConsoleNode(object):
"""
Console line configuration on a node
.. attribute:: id <key>
Node ID
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: console_line
Console line
**type**\: :py:class:`ConsoleLine <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.id = None
self.console_line = Tty.ConsoleNodes.ConsoleNode.ConsoleLine()
self.console_line.parent = self
class ConsoleLine(object):
"""
Console line
.. attribute:: configuration
Configuration information of the line
**type**\: :py:class:`Configuration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration>`
.. attribute:: console_statistics
Statistics of the console line
**type**\: :py:class:`ConsoleStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics>`
.. attribute:: state
Line state information
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.configuration = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration()
self.configuration.parent = self
self.console_statistics = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics()
self.console_statistics.parent = self
self.state = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State()
self.state.parent = self
class ConsoleStatistics(object):
"""
Statistics of the console line
.. attribute:: aaa
AAA related statistics
**type**\: :py:class:`Aaa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Aaa>`
.. attribute:: exec_
Exec related statistics
**type**\: :py:class:`Exec_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Exec_>`
.. attribute:: general_statistics
General statistics of line
**type**\: :py:class:`GeneralStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.GeneralStatistics>`
.. attribute:: rs232
RS232 statistics of console line
**type**\: :py:class:`Rs232 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Rs232>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.aaa = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Aaa()
self.aaa.parent = self
self.exec_ = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Exec_()
self.exec_.parent = self
self.general_statistics = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.GeneralStatistics()
self.general_statistics.parent = self
self.rs232 = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Rs232()
self.rs232.parent = self
class Rs232(object):
"""
RS232 statistics of console line
.. attribute:: baud_rate
Inbound/Outbound baud rate in bps
**type**\: int
**range:** 0..4294967295
**units**\: bit/s
.. attribute:: data_bits
Number of databits
**type**\: int
**range:** 0..4294967295
**units**\: bit
.. attribute:: exec_disabled
Exec disabled on TTY
**type**\: bool
.. attribute:: framing_error_count
Framing error count
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_flow_control_status
Hardware flow control status
**type**\: int
**range:** 0..4294967295
.. attribute:: overrun_error_count
Overrun error count
**type**\: int
**range:** 0..4294967295
.. attribute:: parity_error_count
Parity error count
**type**\: int
**range:** 0..4294967295
.. attribute:: parity_status
Parity status
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_bits
Number of stopbits
**type**\: int
**range:** 0..4294967295
**units**\: bit
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.baud_rate = None
self.data_bits = None
self.exec_disabled = None
self.framing_error_count = None
self.hardware_flow_control_status = None
self.overrun_error_count = None
self.parity_error_count = None
self.parity_status = None
self.stop_bits = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:rs232'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.baud_rate is not None:
return True
if self.data_bits is not None:
return True
if self.exec_disabled is not None:
return True
if self.framing_error_count is not None:
return True
if self.hardware_flow_control_status is not None:
return True
if self.overrun_error_count is not None:
return True
if self.parity_error_count is not None:
return True
if self.parity_status is not None:
return True
if self.stop_bits is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Rs232']['meta_info']
class GeneralStatistics(object):
"""
General statistics of line
.. attribute:: absolute_timeout
Absolute timeout period
**type**\: int
**range:** 0..4294967295
.. attribute:: async_interface
Usable as async interface
**type**\: bool
.. attribute:: domain_lookup_enabled
DNS resolution enabled
**type**\: bool
.. attribute:: flow_control_start_character
Software flow control start char
**type**\: int
**range:** \-128..127
.. attribute:: flow_control_stop_character
Software flow control stop char
**type**\: int
**range:** \-128..127
.. attribute:: idle_time
TTY idle time
**type**\: int
**range:** 0..4294967295
.. attribute:: motd_banner_enabled
MOTD banner enabled
**type**\: bool
.. attribute:: private_flag
TTY private flag
**type**\: bool
.. attribute:: terminal_length
Terminal length
**type**\: int
**range:** 0..4294967295
.. attribute:: terminal_type
Terminal type
**type**\: str
.. attribute:: terminal_width
Line width
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.absolute_timeout = None
self.async_interface = None
self.domain_lookup_enabled = None
self.flow_control_start_character = None
self.flow_control_stop_character = None
self.idle_time = None
self.motd_banner_enabled = None
self.private_flag = None
self.terminal_length = None
self.terminal_type = None
self.terminal_width = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.absolute_timeout is not None:
return True
if self.async_interface is not None:
return True
if self.domain_lookup_enabled is not None:
return True
if self.flow_control_start_character is not None:
return True
if self.flow_control_stop_character is not None:
return True
if self.idle_time is not None:
return True
if self.motd_banner_enabled is not None:
return True
if self.private_flag is not None:
return True
if self.terminal_length is not None:
return True
if self.terminal_type is not None:
return True
if self.terminal_width is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.GeneralStatistics']['meta_info']
class Exec_(object):
"""
Exec related statistics
.. attribute:: time_stamp_enabled
Specifies whether timestamp is enabled or not
**type**\: bool
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.time_stamp_enabled = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:exec'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.time_stamp_enabled is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Exec_']['meta_info']
class Aaa(object):
"""
AAA related statistics
.. attribute:: user_name
The authenticated username
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.user_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:aaa'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.user_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics.Aaa']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:console-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.aaa is not None and self.aaa._has_data():
return True
if self.exec_ is not None and self.exec_._has_data():
return True
if self.general_statistics is not None and self.general_statistics._has_data():
return True
if self.rs232 is not None and self.rs232._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.ConsoleStatistics']['meta_info']
class State(object):
"""
Line state information
.. attribute:: general
General information
**type**\: :py:class:`General <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.General>`
.. attribute:: template
Information related to template applied to the line
**type**\: :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.Template>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.General()
self.general.parent = self
self.template = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.Template()
self.template.parent = self
class Template(object):
"""
Information related to template applied to the
line
.. attribute:: name
Name of the template
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:template'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.Template']['meta_info']
class General(object):
"""
General information
.. attribute:: general_state
State of the line
**type**\: :py:class:`LineStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.LineStateEnum>`
.. attribute:: operation
application running on the tty line
**type**\: :py:class:`SessionOperationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.SessionOperationEnum>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general_state = None
self.operation = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.general_state is not None:
return True
if self.operation is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State.General']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.general is not None and self.general._has_data():
return True
if self.template is not None and self.template._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.State']['meta_info']
class Configuration(object):
"""
Configuration information of the line
.. attribute:: connection_configuration
Connection configuration information
**type**\: :py:class:`ConnectionConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.connection_configuration = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration()
self.connection_configuration.parent = self
class ConnectionConfiguration(object):
"""
Connection configuration information
.. attribute:: acl_in
ACL for inbound traffic
**type**\: str
.. attribute:: acl_out
ACL for outbound traffic
**type**\: str
.. attribute:: transport_input
Protocols to use when connecting to the terminal server
**type**\: :py:class:`TransportInput <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration.TransportInput>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.acl_in = None
self.acl_out = None
self.transport_input = Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration.TransportInput()
self.transport_input.parent = self
class TransportInput(object):
"""
Protocols to use when connecting to the
terminal server
.. attribute:: none
Not used
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: protocol1
Transport protocol1
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: protocol2
Transport protocol2
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: select
Choose transport protocols
**type**\: :py:class:`TtyTransportProtocolSelectEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolSelectEnum>`
**default value**\: all
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.none = None
self.protocol1 = None
self.protocol2 = None
self.select = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:transport-input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.none is not None:
return True
if self.protocol1 is not None:
return True
if self.protocol2 is not None:
return True
if self.select is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration.TransportInput']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:connection-configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acl_in is not None:
return True
if self.acl_out is not None:
return True
if self.transport_input is not None and self.transport_input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration.ConnectionConfiguration']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.connection_configuration is not None and self.connection_configuration._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine.Configuration']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:console-line'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.configuration is not None and self.configuration._has_data():
return True
if self.console_statistics is not None and self.console_statistics._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode.ConsoleLine']['meta_info']
@property
def _common_path(self):
if self.id is None:
raise YPYModelError('Key property id is None')
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:console-nodes/Cisco-IOS-XR-tty-server-oper:console-node[Cisco-IOS-XR-tty-server-oper:id = ' + str(self.id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.id is not None:
return True
if self.console_line is not None and self.console_line._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes.ConsoleNode']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:console-nodes'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.console_node is not None:
for child_ref in self.console_node:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.ConsoleNodes']['meta_info']
class VtyLines(object):
"""
List of VTY lines
.. attribute:: vty_line
VTY Line
**type**\: list of :py:class:`VtyLine <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.vty_line = YList()
self.vty_line.parent = self
self.vty_line.name = 'vty_line'
class VtyLine(object):
"""
VTY Line
.. attribute:: line_number <key>
VTY Line number
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: configuration
Configuration information of the line
**type**\: :py:class:`Configuration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Configuration>`
.. attribute:: sessions
Outgoing sessions
**type**\: :py:class:`Sessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Sessions>`
.. attribute:: state
Line state information
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.State>`
.. attribute:: vty_statistics
Statistics of the VTY line
**type**\: :py:class:`VtyStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.line_number = None
self.configuration = Tty.VtyLines.VtyLine.Configuration()
self.configuration.parent = self
self.sessions = Tty.VtyLines.VtyLine.Sessions()
self.sessions.parent = self
self.state = Tty.VtyLines.VtyLine.State()
self.state.parent = self
self.vty_statistics = Tty.VtyLines.VtyLine.VtyStatistics()
self.vty_statistics.parent = self
class VtyStatistics(object):
"""
Statistics of the VTY line
.. attribute:: aaa
AAA related statistics
**type**\: :py:class:`Aaa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics.Aaa>`
.. attribute:: connection
Connection related statistics
**type**\: :py:class:`Connection <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics.Connection>`
.. attribute:: exec_
Exec related statistics
**type**\: :py:class:`Exec_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics.Exec_>`
.. attribute:: general_statistics
General statistics of line
**type**\: :py:class:`GeneralStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.VtyStatistics.GeneralStatistics>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.aaa = Tty.VtyLines.VtyLine.VtyStatistics.Aaa()
self.aaa.parent = self
self.connection = Tty.VtyLines.VtyLine.VtyStatistics.Connection()
self.connection.parent = self
self.exec_ = Tty.VtyLines.VtyLine.VtyStatistics.Exec_()
self.exec_.parent = self
self.general_statistics = Tty.VtyLines.VtyLine.VtyStatistics.GeneralStatistics()
self.general_statistics.parent = self
class Connection(object):
"""
Connection related statistics
.. attribute:: host_address_family
Incoming host address family
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_host_address
Incoming host address (max)
**type**\: str
**length:** 0..46
.. attribute:: service
Input transport
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.host_address_family = None
self.incoming_host_address = None
self.service = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:connection'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.host_address_family is not None:
return True
if self.incoming_host_address is not None:
return True
if self.service is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics.Connection']['meta_info']
class GeneralStatistics(object):
"""
General statistics of line
.. attribute:: absolute_timeout
Absolute timeout period
**type**\: int
**range:** 0..4294967295
.. attribute:: async_interface
Usable as async interface
**type**\: bool
.. attribute:: domain_lookup_enabled
DNS resolution enabled
**type**\: bool
.. attribute:: flow_control_start_character
Software flow control start char
**type**\: int
**range:** \-128..127
.. attribute:: flow_control_stop_character
Software flow control stop char
**type**\: int
**range:** \-128..127
.. attribute:: idle_time
TTY idle time
**type**\: int
**range:** 0..4294967295
.. attribute:: motd_banner_enabled
MOTD banner enabled
**type**\: bool
.. attribute:: private_flag
TTY private flag
**type**\: bool
.. attribute:: terminal_length
Terminal length
**type**\: int
**range:** 0..4294967295
.. attribute:: terminal_type
Terminal type
**type**\: str
.. attribute:: terminal_width
Line width
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.absolute_timeout = None
self.async_interface = None
self.domain_lookup_enabled = None
self.flow_control_start_character = None
self.flow_control_stop_character = None
self.idle_time = None
self.motd_banner_enabled = None
self.private_flag = None
self.terminal_length = None
self.terminal_type = None
self.terminal_width = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.absolute_timeout is not None:
return True
if self.async_interface is not None:
return True
if self.domain_lookup_enabled is not None:
return True
if self.flow_control_start_character is not None:
return True
if self.flow_control_stop_character is not None:
return True
if self.idle_time is not None:
return True
if self.motd_banner_enabled is not None:
return True
if self.private_flag is not None:
return True
if self.terminal_length is not None:
return True
if self.terminal_type is not None:
return True
if self.terminal_width is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics.GeneralStatistics']['meta_info']
class Exec_(object):
"""
Exec related statistics
.. attribute:: time_stamp_enabled
Specifies whether timestamp is enabled or not
**type**\: bool
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.time_stamp_enabled = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:exec'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.time_stamp_enabled is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics.Exec_']['meta_info']
class Aaa(object):
"""
AAA related statistics
.. attribute:: user_name
The authenticated username
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.user_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:aaa'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.user_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics.Aaa']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:vty-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.aaa is not None and self.aaa._has_data():
return True
if self.connection is not None and self.connection._has_data():
return True
if self.exec_ is not None and self.exec_._has_data():
return True
if self.general_statistics is not None and self.general_statistics._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.VtyStatistics']['meta_info']
class State(object):
"""
Line state information
.. attribute:: general
General information
**type**\: :py:class:`General <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.State.General>`
.. attribute:: template
Information related to template applied to the line
**type**\: :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.State.Template>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general = Tty.VtyLines.VtyLine.State.General()
self.general.parent = self
self.template = Tty.VtyLines.VtyLine.State.Template()
self.template.parent = self
class Template(object):
"""
Information related to template applied to the
line
.. attribute:: name
Name of the template
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:template'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.State.Template']['meta_info']
class General(object):
"""
General information
.. attribute:: general_state
State of the line
**type**\: :py:class:`LineStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.LineStateEnum>`
.. attribute:: operation
application running on the tty line
**type**\: :py:class:`SessionOperationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.SessionOperationEnum>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general_state = None
self.operation = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.general_state is not None:
return True
if self.operation is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.State.General']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.general is not None and self.general._has_data():
return True
if self.template is not None and self.template._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.State']['meta_info']
class Configuration(object):
"""
Configuration information of the line
.. attribute:: connection_configuration
Connection configuration information
**type**\: :py:class:`ConnectionConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.connection_configuration = Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration()
self.connection_configuration.parent = self
class ConnectionConfiguration(object):
"""
Connection configuration information
.. attribute:: acl_in
ACL for inbound traffic
**type**\: str
.. attribute:: acl_out
ACL for outbound traffic
**type**\: str
.. attribute:: transport_input
Protocols to use when connecting to the terminal server
**type**\: :py:class:`TransportInput <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration.TransportInput>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.acl_in = None
self.acl_out = None
self.transport_input = Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration.TransportInput()
self.transport_input.parent = self
class TransportInput(object):
"""
Protocols to use when connecting to the
terminal server
.. attribute:: none
Not used
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: protocol1
Transport protocol1
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: protocol2
Transport protocol2
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: select
Choose transport protocols
**type**\: :py:class:`TtyTransportProtocolSelectEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolSelectEnum>`
**default value**\: all
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.none = None
self.protocol1 = None
self.protocol2 = None
self.select = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:transport-input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.none is not None:
return True
if self.protocol1 is not None:
return True
if self.protocol2 is not None:
return True
if self.select is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration.TransportInput']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:connection-configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acl_in is not None:
return True
if self.acl_out is not None:
return True
if self.transport_input is not None and self.transport_input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Configuration.ConnectionConfiguration']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.connection_configuration is not None and self.connection_configuration._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Configuration']['meta_info']
class Sessions(object):
"""
Outgoing sessions
.. attribute:: outgoing_connection
List of outgoing sessions
**type**\: list of :py:class:`OutgoingConnection <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Sessions.OutgoingConnection>`
"""
_prefix = 'tty-management-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.outgoing_connection = YList()
self.outgoing_connection.parent = self
self.outgoing_connection.name = 'outgoing_connection'
class OutgoingConnection(object):
"""
List of outgoing sessions
.. attribute:: connection_id
Connection ID [1\-20]
**type**\: int
**range:** 0..255
.. attribute:: host_address
Host address
**type**\: :py:class:`HostAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.VtyLines.VtyLine.Sessions.OutgoingConnection.HostAddress>`
.. attribute:: host_name
Host name
**type**\: str
.. attribute:: idle_time
Elapsed time since session was suspended (in seconds)
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: is_last_active_session
True indicates last active session
**type**\: bool
.. attribute:: transport_protocol
Session transport protocol
**type**\: :py:class:`TransportServiceEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_oper.TransportServiceEnum>`
"""
_prefix = 'tty-management-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.connection_id = None
self.host_address = Tty.VtyLines.VtyLine.Sessions.OutgoingConnection.HostAddress()
self.host_address.parent = self
self.host_name = None
self.idle_time = None
self.is_last_active_session = None
self.transport_protocol = None
class HostAddress(object):
"""
Host address
.. attribute:: af_name
AFName
**type**\: :py:class:`HostAfIdBaseIdentity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_oper.HostAfIdBaseIdentity>`
.. attribute:: ipv4_address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'tty-management-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.af_name = None
self.ipv4_address = None
self.ipv6_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-management-oper:host-address'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.af_name is not None:
return True
if self.ipv4_address is not None:
return True
if self.ipv6_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Sessions.OutgoingConnection.HostAddress']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-management-oper:outgoing-connection'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.connection_id is not None:
return True
if self.host_address is not None and self.host_address._has_data():
return True
if self.host_name is not None:
return True
if self.idle_time is not None:
return True
if self.is_last_active_session is not None:
return True
if self.transport_protocol is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Sessions.OutgoingConnection']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-management-oper:sessions'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.outgoing_connection is not None:
for child_ref in self.outgoing_connection:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine.Sessions']['meta_info']
@property
def _common_path(self):
if self.line_number is None:
raise YPYModelError('Key property line_number is None')
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:vty-lines/Cisco-IOS-XR-tty-server-oper:vty-line[Cisco-IOS-XR-tty-server-oper:line-number = ' + str(self.line_number) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.line_number is not None:
return True
if self.configuration is not None and self.configuration._has_data():
return True
if self.sessions is not None and self.sessions._has_data():
return True
if self.state is not None and self.state._has_data():
return True
if self.vty_statistics is not None and self.vty_statistics._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines.VtyLine']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:vty-lines'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.vty_line is not None:
for child_ref in self.vty_line:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.VtyLines']['meta_info']
class AuxiliaryNodes(object):
"""
List of Nodes attached with an auxiliary line
.. attribute:: auxiliary_node
Line configuration on a node
**type**\: list of :py:class:`AuxiliaryNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.auxiliary_node = YList()
self.auxiliary_node.parent = self
self.auxiliary_node.name = 'auxiliary_node'
class AuxiliaryNode(object):
"""
Line configuration on a node
.. attribute:: id <key>
Node ID
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: auxiliary_line
Auxiliary line
**type**\: :py:class:`AuxiliaryLine <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.id = None
self.auxiliary_line = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine()
self.auxiliary_line.parent = self
class AuxiliaryLine(object):
"""
Auxiliary line
.. attribute:: auxiliary_statistics
Statistics of the auxiliary line
**type**\: :py:class:`AuxiliaryStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics>`
.. attribute:: configuration
Configuration information of the line
**type**\: :py:class:`Configuration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration>`
.. attribute:: state
Line state information
**type**\: :py:class:`State <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.auxiliary_statistics = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics()
self.auxiliary_statistics.parent = self
self.configuration = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration()
self.configuration.parent = self
self.state = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State()
self.state.parent = self
class AuxiliaryStatistics(object):
"""
Statistics of the auxiliary line
.. attribute:: aaa
AAA related statistics
**type**\: :py:class:`Aaa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Aaa>`
.. attribute:: exec_
Exec related statistics
**type**\: :py:class:`Exec_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Exec_>`
.. attribute:: general_statistics
General statistics of line
**type**\: :py:class:`GeneralStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.GeneralStatistics>`
.. attribute:: rs232
RS232 statistics of console line
**type**\: :py:class:`Rs232 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Rs232>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.aaa = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Aaa()
self.aaa.parent = self
self.exec_ = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Exec_()
self.exec_.parent = self
self.general_statistics = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.GeneralStatistics()
self.general_statistics.parent = self
self.rs232 = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Rs232()
self.rs232.parent = self
class Rs232(object):
"""
RS232 statistics of console line
.. attribute:: baud_rate
Inbound/Outbound baud rate in bps
**type**\: int
**range:** 0..4294967295
**units**\: bit/s
.. attribute:: data_bits
Number of databits
**type**\: int
**range:** 0..4294967295
**units**\: bit
.. attribute:: exec_disabled
Exec disabled on TTY
**type**\: bool
.. attribute:: framing_error_count
Framing error count
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_flow_control_status
Hardware flow control status
**type**\: int
**range:** 0..4294967295
.. attribute:: overrun_error_count
Overrun error count
**type**\: int
**range:** 0..4294967295
.. attribute:: parity_error_count
Parity error count
**type**\: int
**range:** 0..4294967295
.. attribute:: parity_status
Parity status
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_bits
Number of stopbits
**type**\: int
**range:** 0..4294967295
**units**\: bit
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.baud_rate = None
self.data_bits = None
self.exec_disabled = None
self.framing_error_count = None
self.hardware_flow_control_status = None
self.overrun_error_count = None
self.parity_error_count = None
self.parity_status = None
self.stop_bits = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:rs232'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.baud_rate is not None:
return True
if self.data_bits is not None:
return True
if self.exec_disabled is not None:
return True
if self.framing_error_count is not None:
return True
if self.hardware_flow_control_status is not None:
return True
if self.overrun_error_count is not None:
return True
if self.parity_error_count is not None:
return True
if self.parity_status is not None:
return True
if self.stop_bits is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Rs232']['meta_info']
class GeneralStatistics(object):
"""
General statistics of line
.. attribute:: absolute_timeout
Absolute timeout period
**type**\: int
**range:** 0..4294967295
.. attribute:: async_interface
Usable as async interface
**type**\: bool
.. attribute:: domain_lookup_enabled
DNS resolution enabled
**type**\: bool
.. attribute:: flow_control_start_character
Software flow control start char
**type**\: int
**range:** \-128..127
.. attribute:: flow_control_stop_character
Software flow control stop char
**type**\: int
**range:** \-128..127
.. attribute:: idle_time
TTY idle time
**type**\: int
**range:** 0..4294967295
.. attribute:: motd_banner_enabled
MOTD banner enabled
**type**\: bool
.. attribute:: private_flag
TTY private flag
**type**\: bool
.. attribute:: terminal_length
Terminal length
**type**\: int
**range:** 0..4294967295
.. attribute:: terminal_type
Terminal type
**type**\: str
.. attribute:: terminal_width
Line width
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.absolute_timeout = None
self.async_interface = None
self.domain_lookup_enabled = None
self.flow_control_start_character = None
self.flow_control_stop_character = None
self.idle_time = None
self.motd_banner_enabled = None
self.private_flag = None
self.terminal_length = None
self.terminal_type = None
self.terminal_width = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.absolute_timeout is not None:
return True
if self.async_interface is not None:
return True
if self.domain_lookup_enabled is not None:
return True
if self.flow_control_start_character is not None:
return True
if self.flow_control_stop_character is not None:
return True
if self.idle_time is not None:
return True
if self.motd_banner_enabled is not None:
return True
if self.private_flag is not None:
return True
if self.terminal_length is not None:
return True
if self.terminal_type is not None:
return True
if self.terminal_width is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.GeneralStatistics']['meta_info']
class Exec_(object):
"""
Exec related statistics
.. attribute:: time_stamp_enabled
Specifies whether timestamp is enabled or not
**type**\: bool
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.time_stamp_enabled = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:exec'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.time_stamp_enabled is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Exec_']['meta_info']
class Aaa(object):
"""
AAA related statistics
.. attribute:: user_name
The authenticated username
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.user_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:aaa'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.user_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics.Aaa']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:auxiliary-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.aaa is not None and self.aaa._has_data():
return True
if self.exec_ is not None and self.exec_._has_data():
return True
if self.general_statistics is not None and self.general_statistics._has_data():
return True
if self.rs232 is not None and self.rs232._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.AuxiliaryStatistics']['meta_info']
class State(object):
"""
Line state information
.. attribute:: general
General information
**type**\: :py:class:`General <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.General>`
.. attribute:: template
Information related to template applied to the line
**type**\: :py:class:`Template <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.Template>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.General()
self.general.parent = self
self.template = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.Template()
self.template.parent = self
class Template(object):
"""
Information related to template applied to the
line
.. attribute:: name
Name of the template
**type**\: str
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:template'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.Template']['meta_info']
class General(object):
"""
General information
.. attribute:: general_state
State of the line
**type**\: :py:class:`LineStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.LineStateEnum>`
.. attribute:: operation
Application running on the tty line
**type**\: :py:class:`SessionOperationEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.SessionOperationEnum>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.general_state = None
self.operation = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:general'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.general_state is not None:
return True
if self.operation is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State.General']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.general is not None and self.general._has_data():
return True
if self.template is not None and self.template._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.State']['meta_info']
class Configuration(object):
"""
Configuration information of the line
.. attribute:: connection_configuration
Connection configuration information
**type**\: :py:class:`ConnectionConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.connection_configuration = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration()
self.connection_configuration.parent = self
class ConnectionConfiguration(object):
"""
Connection configuration information
.. attribute:: acl_in
ACL for inbound traffic
**type**\: str
.. attribute:: acl_out
ACL for outbound traffic
**type**\: str
.. attribute:: transport_input
Protocols to use when connecting to the terminal server
**type**\: :py:class:`TransportInput <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_server_oper.Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration.TransportInput>`
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.acl_in = None
self.acl_out = None
self.transport_input = Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration.TransportInput()
self.transport_input.parent = self
class TransportInput(object):
"""
Protocols to use when connecting to the
terminal server
.. attribute:: none
Not used
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: protocol1
Transport protocol1
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: protocol2
Transport protocol2
**type**\: :py:class:`TtyTransportProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolEnum>`
.. attribute:: select
Choose transport protocols
**type**\: :py:class:`TtyTransportProtocolSelectEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tty_management_datatypes.TtyTransportProtocolSelectEnum>`
**default value**\: all
"""
_prefix = 'tty-server-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.none = None
self.protocol1 = None
self.protocol2 = None
self.select = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:transport-input'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.none is not None:
return True
if self.protocol1 is not None:
return True
if self.protocol2 is not None:
return True
if self.select is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration.TransportInput']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:connection-configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acl_in is not None:
return True
if self.acl_out is not None:
return True
if self.transport_input is not None and self.transport_input._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration.ConnectionConfiguration']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:configuration'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.connection_configuration is not None and self.connection_configuration._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine.Configuration']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tty-server-oper:auxiliary-line'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.auxiliary_statistics is not None and self.auxiliary_statistics._has_data():
return True
if self.configuration is not None and self.configuration._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode.AuxiliaryLine']['meta_info']
@property
def _common_path(self):
if self.id is None:
raise YPYModelError('Key property id is None')
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:auxiliary-nodes/Cisco-IOS-XR-tty-server-oper:auxiliary-node[Cisco-IOS-XR-tty-server-oper:id = ' + str(self.id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.id is not None:
return True
if self.auxiliary_line is not None and self.auxiliary_line._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes.AuxiliaryNode']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tty-server-oper:tty/Cisco-IOS-XR-tty-server-oper:auxiliary-nodes'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.auxiliary_node is not None:
for child_ref in self.auxiliary_node:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty.AuxiliaryNodes']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tty-server-oper:tty'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.auxiliary_nodes is not None and self.auxiliary_nodes._has_data():
return True
if self.console_nodes is not None and self.console_nodes._has_data():
return True
if self.vty_lines is not None and self.vty_lines._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tty_server_oper as meta
return meta._meta_table['Tty']['meta_info']
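# Illustrative usage sketch (not part of the generated model): reading the TTY
# operational data above with ydk-py's CRUD service. The device address and
# credentials below are placeholders.
#
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#   from ydk.models.cisco_ios_xr import Cisco_IOS_XR_tty_server_oper as xr_tty_oper
#
#   provider = NetconfServiceProvider(address='10.0.0.1', port=830,
#                                     username='admin', password='admin',
#                                     protocol='ssh')
#   crud = CRUDService()
#   tty = crud.read(provider, xr_tty_oper.Tty())   # top-level read-only container
#   for aux in tty.auxiliary_nodes.auxiliary_node:
#       print(aux.id, aux.auxiliary_line.state.general.general_state)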
|
{
"content_hash": "fcecb5cd90c47d93b72cab0ee4b5524e",
"timestamp": "",
"source": "github",
"line_count": 3080,
"max_line_length": 269,
"avg_line_length": 39.61071428571429,
"alnum_prop": 0.420078523946525,
"repo_name": "111pontes/ydk-py",
"id": "057cc32c53d3634176c7d8f555c3a87a7d9bb4a8",
"size": "122001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_tty_server_oper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "7226"
},
{
"name": "Python",
"bytes": "446117948"
}
],
"symlink_target": ""
}
|
from abc import ABC, abstractmethod
import logging
# from time import sleep
log = logging.getLogger("Port")
class Port(ABC):
@abstractmethod
def send_and_receive(self, *args, **kwargs) -> dict:
raise NotImplementedError
def connect(self) -> None:
log.debug("connect not implemented")
return
def disconnect(self) -> None:
log.debug("disconnect not implemented")
return
def process_command(self, command, protocol):
# Band-aid solution, need to reduce what is sent
log.debug(f"Command {command}")
full_command = protocol.get_full_command(command)
raw_response = self.send_and_receive(
command=command,
full_command=full_command,
protocol=protocol,
command_defn=protocol.get_command_defn(command),
)
log.debug(f"Send and Receive Response {raw_response}")
# Handle errors
# Maybe there should be a decode for ERRORs and WARNINGS...
# Some inverters return the command if the command is unknown:
if raw_response == full_command:
return {
"ERROR": [
f"Inverter returned the command string for {command} - the inverter didnt recognise this command",
"",
]
}
# dict is returned on exception
if isinstance(raw_response, dict):
return raw_response
# Decode response
decoded_response = protocol.decode(raw_response, command)
log.info(f"Decoded response {decoded_response}")
return decoded_response
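# Hypothetical usage sketch (not part of this module): a loopback port plus a toy
# protocol object, showing how process_command drives send_and_receive and decode.
# Note that send_and_receive must return the raw response; a dict is treated as an
# error/exception result and short-circuits decoding.
#
#   class EchoPort(Port):
#       def send_and_receive(self, *args, **kwargs):
#           return b"(230.0 49.9"          # pretend raw inverter reply
#
#   class ToyProtocol:
#       def get_full_command(self, command):
#           return command + "\r"
#       def get_command_defn(self, command):
#           return None
#       def decode(self, raw_response, command):
#           return {"command": command, "raw": raw_response}
#
#   print(EchoPort().process_command("QPIGS", ToyProtocol()))
#   # -> {'command': 'QPIGS', 'raw': b'(230.0 49.9'}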
|
{
"content_hash": "1c8f0ad68a34ca71b50f9f9178cfac40",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 118,
"avg_line_length": 30.92452830188679,
"alnum_prop": 0.6034167175106773,
"repo_name": "jblance/mpp-solar",
"id": "1eeea21fe77903cdfcd952aa726e2fed52a9ecda",
"size": "1639",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mppsolar/ports/port.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "274"
},
{
"name": "Python",
"bytes": "485644"
}
],
"symlink_target": ""
}
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from product_order.models import Product_Order
from product_order.order_system import Order_System
class Product_Order_Test(TestCase):
"""
Below is the doctest for Order_System. It has been moved here because we haven't found a way
to run it automatically through `python manage.py test` when it is kept in that module. (TODO)
>>> o_id = Order_System.create_order()
>>> o = get_object_or_404(Product_Order, id=o_id)
>>> int(o.id) == int(o_id)
True
>>> o.shipping_fee
>>>
>>> o.get_role_actions()
{u'P': [u'no payment', u'payment successful'], u'S': [u'change shipping fee'], u'B': [u'cancel order', u'make payment']}
>>> o.state
u'WP'
>>> o.buyer_can_extend_shipping_ack
False
>>> o.auto_confirm_reception_ended
False
>>> o.get_role_actions()
{u'P': [u'no payment', u'payment successful'], u'S': [u'change shipping fee'], u'B': [u'cancel order', u'make payment']}
>>> o.goto_next('B', 'make payement')
(None, None)
>>> o.state
u'WP'
>>> o.get_role_actions()
{u'P': [u'no payment', u'payment successful'], u'S': [u'change shipping fee'], u'B': [u'cancel order', u'make payment']}
>>> o.goto_next('P', 'payment successful')
(u'FP', u'order_paid')
>>> o.state
u'FP'
>>> o.get_role_actions()
{u'S': [u'ship the order', u'change shipping addr']}
>>> o.goto_next('s', 'ship the order') # role doesn't distinguish between upper and lower case
(u'FS', u'order_shipped')
>>> o.state
u'FS'
>>> o.get_role_actions()
{u'P': [u'auto ack reception of order'], u'S': [], u'B': [u'confirm reception of order']}
>>> o.goto_next('B', 'confirm')
(None, None)
>>> o.state
u'FS'
>>> o.goto_next('B', 'confirm reception of order')
(u'ST', u'transaction_successful')
>>> o.state
u'ST'
>>> o.get_role_actions()
{u'P': [u'no refund request']}
>>>
>>> o.goto_next('P', 'no refund request')
(u'WA', u'')
>>> o.state
u'WA'
>>>
>>> o.get_role_actions()
{u'P': [u'finished accounting']}
>>> o.goto_next('U', 'finished accounting') # an unknown role has no effect
(None, None)
>>> o.state
u'WA'
>>>
>>> o.goto_next('P', 'finished accounting')
(u'FA', u'')
>>> o.state
u'FA'
>>> o.get_role_actions()
{}
>>> #test interacting with each specific object directly
>>> #test interacting through Order_System
"""
fixtures = ['product_order_init.json']
def setUp(self):
pass
def test_state_machine(self):
initials = Order_System.get_initial()
self.assertDictEqual(initials, {'role_actions': {u'B': [u'create order']}, 'current_state': '', 'order': None})
def test_work_flow1(self):
"""create product order -> cancel this order
"""
o_id = Order_System.create_order()
o = get_object_or_404(Product_Order, id=o_id)
status = o.state
self.assertEqual(status, u'WP')
status = o.get_state_display()
self.assertEqual(status, u'Waiting for Payment')
o.goto_next(u'B', u'cancel order')
status = o.state
self.assertEqual(status, u'CT')
actions = o.get_role_actions()
self.assertDictEqual(actions, {})
status = o.get_state_display()
self.assertEqual(status, u'Closed Transatcion')
def test_work_flow2(self):
"""create product order -> pay the order -> provider ships the product -> auto confirm
"""
o_id = Order_System.create_order()
o = get_object_or_404(Product_Order, id=o_id)
status = o.state
self.assertEqual(status, u'WP')
status = o.get_state_display()
self.assertEqual(status, u'Waiting for Payment')
o.goto_next(u'p', u'payment successful')
status = o.state
self.assertEqual(status, u'FP')
o.goto_next(u'S',u'ship the order')
status = o.state
self.assertEqual(status, u'FS')
o.goto_next(u'P',u'auto ack reception of order')
status = o.state
self.assertEqual(status, u'ST')
o.goto_next(u'P',u'no refund request')
status = o.state
self.assertEqual(status, u'WA')
o.goto_next(u'P',u'finished accounting')
status = o.state
self.assertEqual(status, u'FA')
def test_work_flow3(self):
"""create product order -> pay the order -> provider ships the product -> buyer confirms
"""
o_id = Order_System.create_order()
o = get_object_or_404(Product_Order, id=o_id)
status = o.state
self.assertEqual(status, u'WP')
status = o.get_state_display()
self.assertEqual(status, u'Waiting for Payment')
o.goto_next(u'p', u'payment successful')
status = o.state
self.assertEqual(status, u'FP')
o.goto_next(u'S',u'ship the order')
status = o.state
self.assertEqual(status, u'FS')
o.goto_next(u'B',u'confirm reception of order')
status = o.state
self.assertEqual(status, u'ST')
o.goto_next(u'P',u'no refund request')
status = o.state
self.assertEqual(status, u'WA')
o.goto_next(u'P',u'finished accounting')
status = o.state
self.assertEqual(status, u'FA')
def test_work_flow4(self):
"""refund before shipping the product: create product order -> pay -> request refund -> cancel refund -> provider ships the product -> buyer confirms
"""
pass
def test_work_flow4b(self):
"""refund before shipping the product: create product order -> pay -> request refund -> provider agrees to refund
"""
pass
def test_work_flow5(self):
"""refund before shipping the product: create product order -> pay -> request refund -> provider ships the product (no refund) -buyer confirms
"""
pass
def test_work_flow6(self):
"""refund before shipping products(more than two product items): create product order -> pay -> refund one item -> provider agrees to refund
-> ship the product -> buyer confirms
"""
pass
def test_work_flow7(self):
"""only refund payment: create product order -> pay -> ship the product -> request refund -> cancel refund -> provider ships the product -> buyer confirms
"""
pass
def test_work_flow8(self):
"""only refund payment: create product order -> pay -> ship product -> request refund -> agree to refund
"""
pass
def test_work_flow9(self):
"""only refund payment (more than one product items) : create product order -> pay -> ship product -> request refund -> agree to refund
-> confirm (the other product)
"""
pass
def test_work_flow10(self):
"""only refund charge : create product order - pay - send product - refund - disagree - cancel refund
"""
pass
class PO_With_Scheduled_Tasks_Test(TestCase):
def setUp(self):
pass
def test_work_flow1(self):
"""
"""
pass
class State_Machine_Test(TestCase):
pass
|
{
"content_hash": "1e7ce489ff9982e3f185b447867d27ac",
"timestamp": "",
"source": "github",
"line_count": 266,
"max_line_length": 162,
"avg_line_length": 29.533834586466167,
"alnum_prop": 0.5631364562118126,
"repo_name": "yejia/order_system",
"id": "502ee8a1542598198dfb853a0b69a3f2332e853d",
"size": "7856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "product_order/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "47213"
},
{
"name": "Python",
"bytes": "124860"
},
{
"name": "Shell",
"bytes": "78"
}
],
"symlink_target": ""
}
|
from django.contrib import auth
from dddp import THREAD_LOCAL
from dddp.api import API, Publication
from dddp.logging import LOGS_NAME
class ClientVersions(Publication):
"""Publication for `meteor_autoupdate_clientVersions`."""
name = 'meteor_autoupdate_clientVersions'
queries = []
class Logs(Publication):
name = LOGS_NAME
users = auth.get_user_model()
def get_queries(self):
user_pk = getattr(THREAD_LOCAL, 'user_id', False)
if user_pk:
if self.users.objects.filter(
pk=user_pk,
is_active=True,
is_superuser=True,
).exists():
return []
raise ValueError('User not permitted.')
API.register([ClientVersions, Logs])
|
{
"content_hash": "29f9b91db3d80e610e16f4ce34afb956",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 61,
"avg_line_length": 23.8125,
"alnum_prop": 0.6181102362204725,
"repo_name": "django-ddp/django-ddp",
"id": "6583844fd8bd620effdcd185e90cf50eca31b3b9",
"size": "762",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "dddp/ddp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1714"
},
{
"name": "HTML",
"bytes": "278"
},
{
"name": "JavaScript",
"bytes": "618"
},
{
"name": "Makefile",
"bytes": "1584"
},
{
"name": "Python",
"bytes": "168293"
},
{
"name": "Shell",
"bytes": "605"
}
],
"symlink_target": ""
}
|
'''
Test IPsec
@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.ipsec_operations as ipsec_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import os
test_stub = test_lib.lib_get_test_stub()
test_obj_dict1 = test_state.TestStateDict()
test_obj_dict2 = test_state.TestStateDict()
ipsec1 = None
ipsec2 = None
# ipsec3/ipsec4 are also referenced in error_cleanup(), so initialise them here too
ipsec3 = None
ipsec4 = None
mevoco1_ip = None
mevoco2_ip = None
def test():
global mevoco1_ip
global mevoco2_ip
global ipsec1
global ipsec2
global ipsec3
global ipsec4
mevoco1_ip = os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP']
mevoco2_ip = os.environ['secondZStackMnIp']
test_util.test_dsc('Create test vm in mevoco1')
vm1 = test_stub.create_vlan_vm(os.environ.get('l3VlanNetworkName1'))
test_obj_dict1.add_vm(vm1)
vm1.check()
pri_l3_uuid1 = vm1.vm.vmNics[0].l3NetworkUuid
vr1 = test_lib.lib_find_vr_by_l3_uuid(pri_l3_uuid1)[0]
l3_uuid1 = test_lib.lib_find_vr_pub_nic(vr1).l3NetworkUuid
vip1 = test_stub.create_vip('ipsec1_vip', l3_uuid1)
cond = res_ops.gen_query_conditions('uuid', '=', pri_l3_uuid1)
first_zstack_cidrs = res_ops.query_resource(res_ops.L3_NETWORK, cond)[0].ipRanges[0].networkCidr
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip
test_util.test_dsc('Create test vm in mevoco2')
vm2 = test_stub.create_vlan_vm(os.environ.get('l3VlanDNATNetworkName'))
test_obj_dict2.add_vm(vm2)
vm2.check()
pri_l3_uuid2 = vm2.vm.vmNics[0].l3NetworkUuid
vr2 = test_lib.lib_find_vr_by_l3_uuid(pri_l3_uuid2)[0]
l3_uuid2 = test_lib.lib_find_vr_pub_nic(vr2).l3NetworkUuid
vip2 = test_stub.create_vip('ipsec2_vip', l3_uuid2)
cond = res_ops.gen_query_conditions('uuid', '=', pri_l3_uuid2)
second_zstack_cidrs = res_ops.query_resource(res_ops.L3_NETWORK, cond)[0].ipRanges[0].networkCidr
test_util.test_dsc('Create test vm in mevoco2')
vm3 = test_stub.create_vlan_vm(os.environ.get('l3VlanNetworkName4'))
test_obj_dict2.add_vm(vm3)
vm3.check()
pri_l3_uuid3 = vm3.vm.vmNics[0].l3NetworkUuid
vr3 = test_lib.lib_find_vr_by_l3_uuid(pri_l3_uuid3)[0]
l3_uuid3 = test_lib.lib_find_vr_pub_nic(vr3).l3NetworkUuid
vip3 = test_stub.create_vip('ipsec3_vip', l3_uuid3)
cond = res_ops.gen_query_conditions('uuid', '=', pri_l3_uuid3)
third_zstack_cidrs = res_ops.query_resource(res_ops.L3_NETWORK, cond)[0].ipRanges[0].networkCidr
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip
test_util.test_dsc('Create ipsec in mevoco1')
ipsec1 = ipsec_ops.create_ipsec_connection('ipsec1', pri_l3_uuid1, vip2.get_vip().ip, '123456', vip1.get_vip().uuid, [second_zstack_cidrs])
ipsec3 = ipsec_ops.create_ipsec_connection('ipsec3', pri_l3_uuid1, vip3.get_vip().ip, '123456', vip1.get_vip().uuid, [third_zstack_cidrs])
vip1_uuid = vip1.get_vip().uuid
vip1_db = test_lib.lib_get_vip_by_uuid(vip1_uuid)
assert "IPsec" in vip1_db.useFor
assert vip1_db.useFor.count("IPsec") == 1
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip
test_util.test_dsc('Create ipsec in mevoco2')
ipsec2 = ipsec_ops.create_ipsec_connection('ipsec2', pri_l3_uuid2, vip1.get_vip().ip, '123456', vip2.get_vip().uuid, [first_zstack_cidrs])
ipsec4 = ipsec_ops.create_ipsec_connection('ipsec4', pri_l3_uuid3, vip1.get_vip().ip, '123456', vip3.get_vip().uuid, [first_zstack_cidrs])
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip
if not test_lib.lib_check_ping(vm1.vm, vm2.vm.vmNics[0].ip):
test_util.test_fail('vm in mevoco1[MN:%s] could not connect to vm in mevoco2[MN:%s]' % (mevoco1_ip, mevoco2_ip))
if not test_lib.lib_check_ping(vm1.vm, vm3.vm.vmNics[0].ip):
test_util.test_fail('vm in mevoco1[MN:%s] could not connect to vm in mevoco2[MN:%s]' % (mevoco1_ip, mevoco2_ip))
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip
if not test_lib.lib_check_ping(vm2.vm, vm1.vm.vmNics[0].ip):
test_util.test_fail('vm in mevoco2[MN:%s] could not connect to vm in mevoco1[MN:%s]' % (mevoco2_ip, mevoco1_ip))
if not test_lib.lib_check_ping(vm3.vm, vm1.vm.vmNics[0].ip):
test_util.test_fail('vm in mevoco2[MN:%s] could not connect to vm in mevoco1[MN:%s]' % (mevoco2_ip, mevoco1_ip))
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip
ipsec_ops.delete_ipsec_connection(ipsec1.uuid)
ipsec_ops.delete_ipsec_connection(ipsec3.uuid)
if test_lib.lib_check_ping(vm1.vm, vm2.vm.vmNics[0].ip, no_exception=True):
test_util.test_fail('vm in mevoco1[MN:%s] could still connect to vm in mevoco2[MN:%s] after Ipsec is deleted' % (mevoco1_ip, mevoco2_ip))
if test_lib.lib_check_ping(vm1.vm, vm3.vm.vmNics[0].ip, no_exception=True):
test_util.test_fail('vm in mevoco1[MN:%s] could still connect to vm in mevoco2[MN:%s] after Ipsec is deleted' % (mevoco1_ip, mevoco2_ip))
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip
if test_lib.lib_check_ping(vm2.vm, vm1.vm.vmNics[0].ip, no_exception=True):
test_util.test_fail('vm in mevoco2[MN:%s] could still connect to vm in mevoco1[MN:%s] after Ipsec is deleted' % (mevoco2_ip, mevoco1_ip))
if test_lib.lib_check_ping(vm3.vm, vm1.vm.vmNics[0].ip, no_exception=True):
test_util.test_fail('vm in mevoco2[MN:%s] could still connect to vm in mevoco1[MN:%s] after Ipsec is deleted' % (mevoco2_ip, mevoco1_ip))
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip
test_lib.lib_error_cleanup(test_obj_dict1)
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip
ipsec_ops.delete_ipsec_connection(ipsec2.uuid)
ipsec_ops.delete_ipsec_connection(ipsec4.uuid)
test_lib.lib_error_cleanup(test_obj_dict2)
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip
test_util.test_pass('Create Ipsec Success')
# Will be called only if an exception happens in test().
def error_cleanup():
global mevoco1_ip
global mevoco2_ip
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip
global test_obj_dict1
test_lib.lib_error_cleanup(test_obj_dict1)
global ipsec1
if ipsec1 != None:
ipsec_ops.delete_ipsec_connection(ipsec1.uuid)
global ipsec3
if ipsec3 != None:
ipsec_ops.delete_ipsec_connection(ipsec3.uuid)
os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip
global test_obj_dict2
test_lib.lib_error_cleanup(test_obj_dict2)
global ipsec2
if ipsec2 != None:
ipsec_ops.delete_ipsec_connection(ipsec2.uuid)
global ipsec4
if ipsec4 != None:
ipsec_ops.delete_ipsec_connection(ipsec4.uuid)
|
{
"content_hash": "d4d61e4c4a1267a7fda2288b8b17047e",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 145,
"avg_line_length": 48.2887323943662,
"alnum_prop": 0.6775557824121335,
"repo_name": "zstackio/zstack-woodpecker",
"id": "2a351e108b15608b90d1278fdbcbb36f1d9302f1",
"size": "6857",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integrationtest/vm/virtualrouter/vip/test_multi_ipsec_with_vip.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
}
|
from sqlalchemy.dialects.oracle import base, cx_oracle, zxjdbc
base.dialect = cx_oracle.dialect
from sqlalchemy.dialects.oracle.base import \
VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, NUMBER,\
BLOB, BFILE, CLOB, NCLOB, TIMESTAMP, RAW,\
FLOAT, DOUBLE_PRECISION, LONG, dialect, INTERVAL
__all__ = (
'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'DATETIME', 'NUMBER',
'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW',
'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL'
)
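# Illustrative sketch (not part of this module): the re-exported types above are
# intended for use directly in table definitions, e.g.
#
#   from sqlalchemy import Column, MetaData, Table
#   from sqlalchemy.dialects.oracle import NUMBER, VARCHAR
#   t = Table('invoice', MetaData(),
#             Column('amount', NUMBER(10, 2)),
#             Column('memo', VARCHAR(200)))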
|
{
"content_hash": "1bafba8d0f0e8f1c3b8558eab1598176",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 62,
"avg_line_length": 32.53333333333333,
"alnum_prop": 0.6782786885245902,
"repo_name": "obeattie/sqlalchemy",
"id": "eb47e80cb290f5eb6bc59665608a3c8c7193fd4d",
"size": "488",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/sqlalchemy/dialects/oracle/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "29642"
},
{
"name": "JavaScript",
"bytes": "190"
},
{
"name": "Python",
"bytes": "4600520"
}
],
"symlink_target": ""
}
|
"""
Purpose
Shows how to use the AWS SDK for Python (Boto3) with Amazon Pinpoint to
send SMS messages using a message template.
"""
# snippet-start:[pinpoint.python.pinpoint_send_templated_sms_message.complete]
import logging
import boto3
from botocore.exceptions import ClientError
logger = logging.getLogger(__name__)
def send_templated_sms_message(
pinpoint_client,
project_id,
destination_number,
message_type,
origination_number,
template_name,
template_version):
"""
Sends an SMS message to a specific phone number using a pre-defined template.
:param pinpoint_client: A Boto3 Pinpoint client.
:param project_id: An Amazon Pinpoint project (application) ID.
:param destination_number: The phone number to send the message to.
:param message_type: The type of SMS message (promotional or transactional).
:param origination_number: The phone number that the message is sent from.
:param template_name: The name of the SMS template to use when sending the message.
:param template_version: The version number of the message template.
:return: The ID of the message.
"""
try:
response = pinpoint_client.send_messages(
ApplicationId=project_id,
MessageRequest={
'Addresses': {
destination_number: {
'ChannelType': 'SMS'
}
},
'MessageConfiguration': {
'SMSMessage': {
'MessageType': message_type,
'OriginationNumber': origination_number
}
},
'TemplateConfiguration': {
'SMSTemplate': {
'Name': template_name,
'Version': template_version
}
}
}
)
except ClientError:
logger.exception("Couldn't send message.")
raise
else:
return response['MessageResponse']['Result'][destination_number]['MessageId']
def main():
region = "us-east-1"
origination_number = "+18555550001"
destination_number = "+14255550142"
project_id = "7353f53e6885409fa32d07cedexample"
message_type = "TRANSACTIONAL"
template_name = "My_SMS_Template"
template_version = "1"
message_id = send_templated_sms_message(
boto3.client('pinpoint', region_name=region), project_id,
destination_number, message_type, origination_number, template_name,
template_version)
print(f"Message sent! Message ID: {message_id}.")
if __name__ == '__main__':
main()
# snippet-end:[pinpoint.python.pinpoint_send_templated_sms_message.complete]
|
{
"content_hash": "6b5bde906af242701a628dbaa759415a",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 87,
"avg_line_length": 32.76470588235294,
"alnum_prop": 0.6021543985637343,
"repo_name": "awsdocs/aws-doc-sdk-examples",
"id": "500e9c4864b7c88651bcb08b6ae8a9e1681c2ce5",
"size": "2893",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "python/example_code/pinpoint/pinpoint_send_templated_sms_message.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "476653"
},
{
"name": "Batchfile",
"bytes": "900"
},
{
"name": "C",
"bytes": "3852"
},
{
"name": "C#",
"bytes": "2051923"
},
{
"name": "C++",
"bytes": "943634"
},
{
"name": "CMake",
"bytes": "82068"
},
{
"name": "CSS",
"bytes": "33378"
},
{
"name": "Dockerfile",
"bytes": "2243"
},
{
"name": "Go",
"bytes": "1764292"
},
{
"name": "HTML",
"bytes": "319090"
},
{
"name": "Java",
"bytes": "4966853"
},
{
"name": "JavaScript",
"bytes": "1655476"
},
{
"name": "Jupyter Notebook",
"bytes": "9749"
},
{
"name": "Kotlin",
"bytes": "1099902"
},
{
"name": "Makefile",
"bytes": "4922"
},
{
"name": "PHP",
"bytes": "1220594"
},
{
"name": "Python",
"bytes": "2507509"
},
{
"name": "Ruby",
"bytes": "500331"
},
{
"name": "Rust",
"bytes": "558811"
},
{
"name": "Shell",
"bytes": "63776"
},
{
"name": "Swift",
"bytes": "267325"
},
{
"name": "TypeScript",
"bytes": "119632"
}
],
"symlink_target": ""
}
|
import sys
sys.path.insert(0, "src")
from setuptools import setup, find_packages
from reversion import __version__
# Load in babel support, if available.
try:
from babel.messages import frontend as babel
cmdclass = {"compile_catalog": babel.compile_catalog,
"extract_messages": babel.extract_messages,
"init_catalog": babel.init_catalog,
"update_catalog": babel.update_catalog,}
except ImportError:
cmdclass = {}
setup(
name = "django-reversion",
version = '.'.join(str(x) for x in __version__),
license = "BSD",
description = "An extension to the Django web framework that provides comprehensive version control facilities",
author = "Dave Hall",
author_email = "dave@etianen.com",
url = "http://github.com/etianen/django-reversion",
zip_safe = False,
packages = find_packages("src"),
package_dir = {
"": "src",
},
package_data = {
"reversion": ["locale/*/LC_MESSAGES/django.*", "templates/reversion/*.html"]},
cmdclass = cmdclass,
install_requires = [
"django>=1.7",
],
extras_require = {
"diff": [
"diff_match_patch",
],
"test": [
"coverage",
],
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Framework :: Django",
]
)
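# Installation note (illustrative, not part of the original setup script): the
# optional dependency groups declared in extras_require above map to pip extras,
# e.g.
#   pip install django-reversion[diff]   # pulls in diff_match_patch
#   pip install django-reversion[test]   # pulls in coverage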
|
{
"content_hash": "ea4ba621e3f0340e027cf1d66d0474c2",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 116,
"avg_line_length": 31.775862068965516,
"alnum_prop": 0.5816603364080304,
"repo_name": "MikeAmy/django-reversion",
"id": "e285854fcbd0a5d1267340357341948ca4baec00",
"size": "1843",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "6215"
},
{
"name": "Python",
"bytes": "181458"
}
],
"symlink_target": ""
}
|
import os
import math
import hashlib
import unicodedata
import string
from typing import Sequence, Dict
from types import MappingProxyType
from .util import resource_path, bfh, bh2u, randrange
from .crypto import hmac_oneshot
from . import version
from .logging import Logger
# http://www.asahi-net.or.jp/~ax2s-kmtn/ref/unicode/e_asia.html
CJK_INTERVALS = [
(0x4E00, 0x9FFF, 'CJK Unified Ideographs'),
(0x3400, 0x4DBF, 'CJK Unified Ideographs Extension A'),
(0x20000, 0x2A6DF, 'CJK Unified Ideographs Extension B'),
(0x2A700, 0x2B73F, 'CJK Unified Ideographs Extension C'),
(0x2B740, 0x2B81F, 'CJK Unified Ideographs Extension D'),
(0xF900, 0xFAFF, 'CJK Compatibility Ideographs'),
(0x2F800, 0x2FA1D, 'CJK Compatibility Ideographs Supplement'),
(0x3190, 0x319F, 'Kanbun'),
(0x2E80, 0x2EFF, 'CJK Radicals Supplement'),
(0x2F00, 0x2FDF, 'CJK Radicals'),
(0x31C0, 0x31EF, 'CJK Strokes'),
(0x2FF0, 0x2FFF, 'Ideographic Description Characters'),
(0xE0100, 0xE01EF, 'Variation Selectors Supplement'),
(0x3100, 0x312F, 'Bopomofo'),
(0x31A0, 0x31BF, 'Bopomofo Extended'),
(0xFF00, 0xFFEF, 'Halfwidth and Fullwidth Forms'),
(0x3040, 0x309F, 'Hiragana'),
(0x30A0, 0x30FF, 'Katakana'),
(0x31F0, 0x31FF, 'Katakana Phonetic Extensions'),
(0x1B000, 0x1B0FF, 'Kana Supplement'),
(0xAC00, 0xD7AF, 'Hangul Syllables'),
(0x1100, 0x11FF, 'Hangul Jamo'),
(0xA960, 0xA97F, 'Hangul Jamo Extended A'),
(0xD7B0, 0xD7FF, 'Hangul Jamo Extended B'),
(0x3130, 0x318F, 'Hangul Compatibility Jamo'),
(0xA4D0, 0xA4FF, 'Lisu'),
(0x16F00, 0x16F9F, 'Miao'),
(0xA000, 0xA48F, 'Yi Syllables'),
(0xA490, 0xA4CF, 'Yi Radicals'),
]
def is_CJK(c):
n = ord(c)
for imin,imax,name in CJK_INTERVALS:
if n>=imin and n<=imax: return True
return False
def normalize_text(seed: str) -> str:
# normalize
seed = unicodedata.normalize('NFKD', seed)
# lower
seed = seed.lower()
# remove accents
seed = u''.join([c for c in seed if not unicodedata.combining(c)])
# normalize whitespaces
seed = u' '.join(seed.split())
# remove whitespaces between CJK
seed = u''.join([seed[i] for i in range(len(seed)) if not (seed[i] in string.whitespace and is_CJK(seed[i-1]) and is_CJK(seed[i+1]))])
return seed
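# Illustrative examples (not in the original module) of what normalize_text does:
# it NFKD-normalizes, lower-cases, strips combining accents, collapses runs of
# whitespace, and removes whitespace between CJK characters.
#
#   normalize_text('  Héllo   wörld ')   -> 'hello world'
#   normalize_text('こんにちは 世界')       -> 'こんにちは世界'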
_WORDLIST_CACHE = {} # type: Dict[str, Wordlist]
class Wordlist(tuple):
def __init__(self, words: Sequence[str]):
super().__init__()
index_from_word = {w: i for i, w in enumerate(words)}
self._index_from_word = MappingProxyType(index_from_word) # no mutation
def index(self, word, start=None, stop=None) -> int:
try:
return self._index_from_word[word]
except KeyError as e:
raise ValueError from e
def __contains__(self, word) -> bool:
try:
self.index(word)
except ValueError:
return False
else:
return True
@classmethod
def from_file(cls, filename) -> 'Wordlist':
path = resource_path('wordlist', filename)
if path not in _WORDLIST_CACHE:
with open(path, 'r', encoding='utf-8') as f:
s = f.read().strip()
s = unicodedata.normalize('NFKD', s)
lines = s.split('\n')
words = []
for line in lines:
line = line.split('#')[0]
line = line.strip(' \r')
assert ' ' not in line
if line:
words.append(line)
_WORDLIST_CACHE[path] = Wordlist(words)
return _WORDLIST_CACHE[path]
filenames = {
'en':'english.txt',
'es':'spanish.txt',
'ja':'japanese.txt',
'pt':'portuguese.txt',
'zh':'chinese_simplified.txt'
}
class Mnemonic(Logger):
# Seed derivation does not follow BIP39
# Mnemonic phrase uses a hash based checksum, instead of a wordlist-dependent checksum
def __init__(self, lang=None):
Logger.__init__(self)
lang = lang or 'en'
self.logger.info(f'language {lang}')
filename = filenames.get(lang[0:2], 'english.txt')
self.wordlist = Wordlist.from_file(filename)
self.logger.info(f"wordlist has {len(self.wordlist)} words")
@classmethod
def mnemonic_to_seed(self, mnemonic, passphrase) -> bytes:
PBKDF2_ROUNDS = 2048
mnemonic = normalize_text(mnemonic)
passphrase = passphrase or ''
passphrase = normalize_text(passphrase)
return hashlib.pbkdf2_hmac('sha512', mnemonic.encode('utf-8'), b'electrum' + passphrase.encode('utf-8'), iterations = PBKDF2_ROUNDS)
def mnemonic_encode(self, i):
n = len(self.wordlist)
words = []
while i:
x = i%n
i = i//n
words.append(self.wordlist[x])
return ' '.join(words)
def get_suggestions(self, prefix):
for w in self.wordlist:
if w.startswith(prefix):
yield w
def mnemonic_decode(self, seed):
n = len(self.wordlist)
words = seed.split()
i = 0
while words:
w = words.pop()
k = self.wordlist.index(w)
i = i*n + k
return i
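# Round-trip sketch (illustrative, not in the original module): mnemonic_encode
# writes the integer i in base len(self.wordlist), least-significant word first,
# and mnemonic_decode reverses it, so for any i > 0:
#
#   m = Mnemonic('en')
#   n = len(m.wordlist)              # 2048 for the bundled English list
#   i = 5 + 7*n + 11*n*n
#   words = m.mnemonic_encode(i)     # three words, least significant first
#   assert m.mnemonic_decode(words) == i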
def make_seed(self, *, seed_type=None, num_bits=None) -> str:
from .keystore import bip39_is_checksum_valid
if seed_type is None:
seed_type = 'segwit'
if num_bits is None:
num_bits = 132
prefix = version.seed_prefix(seed_type)
# increase num_bits in order to obtain a uniform distribution for the last word
bpw = math.log(len(self.wordlist), 2)
num_bits = int(math.ceil(num_bits/bpw) * bpw)
self.logger.info(f"make_seed. prefix: '{prefix}', entropy: {num_bits} bits")
entropy = 1
while entropy < pow(2, num_bits - bpw):
# try again if seed would not contain enough words
entropy = randrange(pow(2, num_bits))
nonce = 0
while True:
nonce += 1
i = entropy + nonce
seed = self.mnemonic_encode(i)
if i != self.mnemonic_decode(seed):
raise Exception('Cannot extract same entropy from mnemonic!')
if is_old_seed(seed):
continue
# Make sure the mnemonic we generate is not also a valid bip39 seed
# by accident. Note that this test has not always been done historically,
# so it cannot be relied upon.
if bip39_is_checksum_valid(seed, wordlist=self.wordlist) == (True, True):
continue
if is_new_seed(seed, prefix):
break
self.logger.info(f'{len(seed.split())} words')
return seed
def is_new_seed(x: str, prefix=version.SEED_PREFIX) -> bool:
x = normalize_text(x)
s = bh2u(hmac_oneshot(b"Seed version", x.encode('utf8'), hashlib.sha512))
return s.startswith(prefix)
def is_old_seed(seed: str) -> bool:
from . import old_mnemonic
seed = normalize_text(seed)
words = seed.split()
try:
# checks here are deliberately left weak for legacy reasons, see #3149
old_mnemonic.mn_decode(words)
uses_electrum_words = True
except Exception:
uses_electrum_words = False
try:
seed = bfh(seed)
is_hex = (len(seed) == 16 or len(seed) == 32)
except Exception:
is_hex = False
return is_hex or (uses_electrum_words and (len(words) == 12 or len(words) == 24))
def seed_type(x: str) -> str:
num_words = len(x.split())
if is_old_seed(x):
return 'old'
elif is_new_seed(x, version.SEED_PREFIX):
return 'standard'
elif is_new_seed(x, version.SEED_PREFIX_SW):
return 'segwit'
elif is_new_seed(x, version.SEED_PREFIX_2FA) and (num_words == 12 or num_words >= 20):
# Note: in Electrum 2.7, there was a breaking change in key derivation
# for this seed type. Unfortunately the seed version/prefix was reused,
# and now we can only distinguish them based on number of words. :(
return '2fa'
elif is_new_seed(x, version.SEED_PREFIX_2FA_SW):
return '2fa_segwit'
return ''
def is_seed(x: str) -> bool:
return bool(seed_type(x))
def is_any_2fa_seed_type(seed_type: str) -> bool:
return seed_type in ['2fa', '2fa_segwit']
|
{
"content_hash": "902788302814485b652868e80ba0c335",
"timestamp": "",
"source": "github",
"line_count": 248,
"max_line_length": 140,
"avg_line_length": 33.88306451612903,
"alnum_prop": 0.5966916577412829,
"repo_name": "spesmilo/electrum",
"id": "5db4a4a999a657a23ef971badb9b3edf22dbc46e",
"size": "9565",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "electrum/mnemonic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "13136"
},
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "Java",
"bytes": "2929"
},
{
"name": "Makefile",
"bytes": "2185"
},
{
"name": "NSIS",
"bytes": "7681"
},
{
"name": "Python",
"bytes": "5400804"
},
{
"name": "QML",
"bytes": "355804"
},
{
"name": "Ruby",
"bytes": "16748"
},
{
"name": "Shell",
"bytes": "105118"
},
{
"name": "kvlang",
"bytes": "67438"
}
],
"symlink_target": ""
}
|
import collections
import sys
Conn = collections.namedtuple('Conn', ['node', 'weight'])
def info(*tt):
print(*tt, file=sys.stderr)
class Node:
finf = float('inf')
def __init__(self):
self.connected = []
self.origin = None
self.dist = self.finf
info("Created node")
def wipe_dists(self):
info("Wiping own distance")
if self.dist != self.finf:
self.dist = self.finf
# connected holds Conn namedtuples; recurse into the neighbouring nodes
for conn in self.connected:
conn.node.wipe_dists()
def connect_to(self, node, path_weight):
info("Connecting up")
self.connected.append(Conn(node, path_weight))
node.connected.append(Conn(self, path_weight))
def set_origin(self):
info("Setting origin")
self.wipe_dists()
self.build_dist(0, self)
def build_dist(self, dd, obj):
info("Building distance")
self.origin = obj
self.dist = dd
needs_build = []
for conn in self.connected:
if conn.node.dist > self.dist + conn.weight:
conn.node.build_dist(self.dist + conn.weight, obj)
def shortest_path_to(self, nodes):
self.set_origin()
return [node.dist for node in nodes]
def __repr__(self):
# avoid recursing into neighbours: connections are bidirectional, so a
# recursive repr would never terminate
return 'Node(dist={}, n_conn={})'.format(
repr(self.dist), len(self.connected))
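# Hypothetical usage sketch (not part of the original module): build a small
# weighted triangle and query distances from one corner. build_dist relaxes
# neighbours recursively, so the returned values are shortest-path weights.
#
#   a, b, c = Node(), Node(), Node()
#   a.connect_to(b, 1)
#   b.connect_to(c, 2)
#   a.connect_to(c, 5)
#   print(a.shortest_path_to([a, b, c]))   # -> [0, 1, 3]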
|
{
"content_hash": "388c00e97f95c10644823b6908cb8c1f",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 66,
"avg_line_length": 27.2,
"alnum_prop": 0.5639705882352941,
"repo_name": "Samuel-Phillips/python-graph-theory",
"id": "d2c00d663830122d4725445f6a733bfc12844d41",
"size": "1379",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "graphs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "LiveScript",
"bytes": "1131"
},
{
"name": "Makefile",
"bytes": "91"
},
{
"name": "Python",
"bytes": "5363"
}
],
"symlink_target": ""
}
|
'''
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import io
from os.path import abspath
from warnings import warn
# External imports
# Bokeh imports
from ..settings import settings
from ..util.string import decode_utf8
from .state import curstate
from .util import default_filename
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
DEFAULT_TITLE = "Bokeh Plot"
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def save(obj, filename=None, resources=None, title=None, template=None, state=None, **kwargs):
''' Save an HTML file with the data for the current document.
Will fall back to the default output state (or an explicitly provided
:class:`State` object) for ``filename``, ``resources``, or ``title`` if they
are not provided. If the filename is not given and not provided via output state,
it is derived from the script name (e.g. ``/foo/myplot.py`` will create
``/foo/myplot.html``)
Args:
obj (LayoutDOM object) : a Layout (Row/Column), Plot or Widget object to display
filename (str, optional) : filename to save document under (default: None)
If None, use the default state configuration.
resources (Resources, optional) : A Resources config to use (default: None)
If None, use the default state configuration, if there is one;
otherwise use ``resources.INLINE``.
title (str, optional) : a title for the HTML document (default: None)
If None, use the default state title value, if there is one.
Otherwise, use "Bokeh Plot"
state (State, optional) :
A :class:`State` object. If None, then the current default
implicit state is used. (default: None).
Returns:
str: the filename where the HTML file is saved.
'''
if state is None:
state = curstate()
filename, resources, title = _get_save_args(state, filename, resources, title)
_save_helper(obj, filename, resources, title, template)
return abspath(filename)
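# Illustrative sketch (not part of this module): saving a simple figure to an
# explicit file with CDN resources, using the public bokeh.plotting entry points.
#
#   from bokeh.plotting import figure
#   from bokeh.resources import CDN
#   p = figure(title="demo")
#   p.line([1, 2, 3], [2, 5, 3])
#   save(p, filename="demo.html", resources=CDN, title="Demo")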
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
def _get_save_args(state, filename, resources, title):
'''
'''
filename, is_default_filename = _get_save_filename(state, filename)
resources = _get_save_resources(state, resources, is_default_filename)
title = _get_save_title(state, title, is_default_filename)
return filename, resources, title
def _get_save_filename(state, filename):
if filename is not None:
return filename, False
if state.file and not settings.ignore_filename():
return state.file['filename'], False
return default_filename("html"), True
def _get_save_resources(state, resources, suppress_warning):
if resources is not None:
return resources
if state.file:
return state.file['resources']
if not suppress_warning:
warn("save() called but no resources were supplied and output_file(...) was never called, defaulting to resources.CDN")
from ..resources import CDN
return CDN
def _get_save_title(state, title, suppress_warning):
if title is not None:
return title
if state.file:
return state.file['title']
if not suppress_warning:
warn("save() called but no title was supplied and output_file(...) was never called, using default title 'Bokeh Plot'")
return DEFAULT_TITLE
def _save_helper(obj, filename, resources, title, template):
'''
'''
from ..embed import file_html
html = file_html(obj, resources, title=title, template=template)
with io.open(filename, mode="w", encoding="utf-8") as f:
f.write(decode_utf8(html))
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
{
"content_hash": "4890f86d06c2f4c2aadaf25ca94d99b8",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 127,
"avg_line_length": 33.241379310344826,
"alnum_prop": 0.5180497925311204,
"repo_name": "jakirkham/bokeh",
"id": "7b868f28d0347832e97ca506847ab16029bebcf1",
"size": "5167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bokeh/io/saving.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1442"
},
{
"name": "CSS",
"bytes": "102287"
},
{
"name": "CoffeeScript",
"bytes": "413132"
},
{
"name": "Dockerfile",
"bytes": "4099"
},
{
"name": "HTML",
"bytes": "47532"
},
{
"name": "JavaScript",
"bytes": "25172"
},
{
"name": "Makefile",
"bytes": "1150"
},
{
"name": "PowerShell",
"bytes": "691"
},
{
"name": "Python",
"bytes": "3332368"
},
{
"name": "Shell",
"bytes": "9209"
},
{
"name": "TypeScript",
"bytes": "1634848"
}
],
"symlink_target": ""
}
|
import os
import tempfile
from mysos.common import pkgutil, zookeeper
from .http import MysosServer
from .scheduler import MysosScheduler
from .state import LocalStateProvider, Scheduler, StateProvider
from .zk_state import ZooKeeperStateProvider
from kazoo.client import KazooClient
import mesos.interface
from mesos.interface.mesos_pb2 import Credential, FrameworkInfo
import mesos.native
from twitter.common import app, log
from twitter.common.exceptions import ExceptionalThread
from twitter.common.http import HttpServer
from twitter.common.log.options import LogOptions
from twitter.common.quantity import Time
from twitter.common.quantity.parse_simple import InvalidTime, parse_time
import yaml
FRAMEWORK_NAME = 'mysos'
MYSOS_MODULE = 'mysos.scheduler'
ASSET_RELPATH = 'assets'
LogOptions.disable_disk_logging()
LogOptions.set_stderr_log_level('google:INFO')
def proxy_main():
app.add_option(
'--port',
dest='api_port',
type='int',
default=None,
help='Port for the HTTP API server')
app.add_option(
'--mesos_master',
dest='mesos_master',
default=None,
help='Mesos master address. It can be a ZooKeeper URL through which the master can be '
'detected')
app.add_option(
'--framework_user',
dest='framework_user',
      help='The Unix user that the Mysos executor runs as')
app.add_option(
'--framework_role',
dest='framework_role',
default='*',
help="The role that Mysos framework runs as. If set, Mysos only uses Mesos pool resources "
"with that role. The default value '*' is what Mesos considers as the default role.\n"
"NOTE: Mesos master needs to be configured to allow the specified role. See its --roles "
"flag")
app.add_option(
'--executor_uri',
dest='executor_uri',
default=None,
help='URI for the Mysos executor package')
app.add_option(
'--executor_cmd',
dest='executor_cmd',
default=None,
help='Command to execute the executor package')
app.add_option(
'--executor_environ',
dest='executor_environ',
default=None,
help="Environment variables for the executors (and the tasks) as a list of dicts keyed by "
"{name, value} in JSON. Note that these variables don't affect Mesos slave components "
"such as the fetcher")
app.add_option(
'--zk_url',
dest='zk_url',
default=None,
help='ZooKeeper URL for various Mysos operations, in the form of '
'"zk://username:password@servers/path". The sub-directory <zk_url>/discover is used for '
'communicating MySQL cluster information between Mysos scheduler and executors')
# TODO(jyx): This could also be made a per-cluster configuration.
app.add_option(
'--election_timeout',
dest='election_timeout',
default='60s',
help='The amount of time the scheduler waits for all slaves to respond during a MySQL master '
'election, e.g., 60s. After the timeout the master is elected from only the slaves that '
'have responded')
app.add_option(
'--admin_keypath',
dest='admin_keypath',
default=None,
help='The path to the key file with MySQL admin credentials on Mesos slaves')
app.add_option(
'--work_dir',
dest='work_dir',
default=os.path.join(tempfile.gettempdir(), 'mysos'),
help="Directory path to place Mysos work directories, e.g., web assets, state files if "
"--state_storage=local. Default to a system temp directory.")
app.add_option(
'--state_storage',
dest='state_storage',
default='zk',
help="Mechanism to persist scheduler state. Available options are 'zk' and 'local'. If 'zk' "
"is chosen, the scheduler state is stored under <zk_url>/state; see --zk_url. Otherwise "
"'local' is chosen and the state is persisted under <work_dir>/state; see --work_dir")
app.add_option(
'--scheduler_keypath',
dest='scheduler_keypath',
help="Path to the key file that the scheduler uses to store secrets such as MySQL "
"cluster passwords. This key must be exactly 32 bytes long")
app.add_option(
'--framework_failover_timeout',
dest='framework_failover_timeout',
default='14d',
help='Time after which Mysos framework is considered deleted. This implies losing all tasks. '
'SHOULD BE VERY HIGH')
# TODO(jyx): Flags like this are generally optional but specific executor implementations may
# require them. Consider adding validators that can be plugged in so configuration errors can be
# caught in the scheduler.
app.add_option(
'--installer_args',
dest='installer_args',
default=None,
help='Arguments for MySQL installer directly passed along to and parsed by the installer. '
'e.g., a serialized JSON string'
)
app.add_option(
'--backup_store_args',
dest='backup_store_args',
default=None,
help="Arguments for the store for MySQL backups. Its use and format are defined by the "
"backup store implementation. e.g., It can be a serialized JSON string"
)
app.add_option(
'--framework_authentication_file',
dest='framework_authentication_file',
default=None,
help="Path to the key file for authenticating the framework against Mesos master. Framework "
"will fail to register with Mesos if authentication is required by Mesos and this "
"option is not provided"
)
def main(args, options):
log.info("Options in use: %s", options)
if not options.api_port:
app.error('Must specify --port')
if not options.mesos_master:
app.error('Must specify --mesos_master')
if not options.framework_user:
app.error('Must specify --framework_user')
if not options.executor_uri:
app.error('Must specify --executor_uri')
if not options.executor_cmd:
app.error('Must specify --executor_cmd')
if not options.zk_url:
app.error('Must specify --zk_url')
if not options.admin_keypath:
app.error('Must specify --admin_keypath')
if not options.scheduler_keypath:
app.error('Must specify --scheduler_keypath')
try:
election_timeout = parse_time(options.election_timeout)
framework_failover_timeout = parse_time(options.framework_failover_timeout)
except InvalidTime as e:
app.error(e.message)
try:
_, zk_servers, zk_root = zookeeper.parse(options.zk_url)
except Exception as e:
app.error("Invalid --zk_url: %s" % e.message)
web_assets_dir = os.path.join(options.work_dir, "web")
pkgutil.unpack_assets(web_assets_dir, MYSOS_MODULE, ASSET_RELPATH)
log.info("Extracted web assets into %s" % options.work_dir)
fw_principal = None
fw_secret = None
if options.framework_authentication_file:
try:
with open(options.framework_authentication_file, "r") as f:
        cred = yaml.safe_load(f)
fw_principal = cred["principal"]
fw_secret = cred["secret"]
log.info("Loaded credential (principal=%s) for framework authentication" % fw_principal)
except IOError as e:
app.error("Unable to read the framework authentication key file: %s" % e)
except (KeyError, yaml.YAMLError) as e:
app.error("Invalid framework authentication key file format %s" % e)
scheduler_key = None
try:
with open(options.scheduler_keypath, 'rb') as f:
scheduler_key = f.read().strip()
if not scheduler_key:
raise ValueError("The key file is empty")
except Exception as e:
app.error("Cannot read --scheduler_keypath: %s" % e)
log.info("Starting Mysos scheduler")
kazoo = KazooClient(zk_servers)
kazoo.start()
if options.state_storage == 'zk':
log.info("Using ZooKeeper (path: %s) for state storage" % zk_root)
state_provider = ZooKeeperStateProvider(kazoo, zk_root)
else:
log.info("Using local disk for state storage")
state_provider = LocalStateProvider(options.work_dir)
try:
state = state_provider.load_scheduler_state()
except StateProvider.Error as e:
app.error(e.message)
if state:
log.info("Successfully restored scheduler state")
framework_info = state.framework_info
if framework_info.HasField('id'):
log.info("Recovered scheduler's FrameworkID is %s" % framework_info.id.value)
else:
log.info("No scheduler state to restore")
framework_info = FrameworkInfo(
user=options.framework_user,
name=FRAMEWORK_NAME,
checkpoint=True,
failover_timeout=framework_failover_timeout.as_(Time.SECONDS),
role=options.framework_role)
if fw_principal:
framework_info.principal = fw_principal
state = Scheduler(framework_info)
state_provider.dump_scheduler_state(state)
scheduler = MysosScheduler(
state,
state_provider,
options.framework_user,
options.executor_uri,
options.executor_cmd,
kazoo,
options.zk_url,
election_timeout,
options.admin_keypath,
scheduler_key,
installer_args=options.installer_args,
backup_store_args=options.backup_store_args,
executor_environ=options.executor_environ,
framework_role=options.framework_role)
if fw_principal and fw_secret:
cred = Credential(principal=fw_principal, secret=fw_secret)
scheduler_driver = mesos.native.MesosSchedulerDriver(
scheduler,
framework_info,
options.mesos_master,
cred)
else:
scheduler_driver = mesos.native.MesosSchedulerDriver(
scheduler,
framework_info,
options.mesos_master)
scheduler_driver.start()
server = HttpServer()
server.mount_routes(MysosServer(scheduler, web_assets_dir))
et = ExceptionalThread(
target=server.run, args=('0.0.0.0', options.api_port, 'cherrypy'))
et.daemon = True
et.start()
try:
# Wait for the scheduler to stop.
# The use of 'stopped' event instead of scheduler_driver.join() is necessary to stop the
# process with SIGINT.
while not scheduler.stopped.wait(timeout=0.5):
pass
except KeyboardInterrupt:
log.info('Interrupted, exiting.')
else:
log.info('Scheduler exited.')
app.shutdown(1) # Mysos scheduler is supposed to be long-running thus the use of exit status 1.
app.main()
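# Illustrative invocation sketch (not part of the original source). The flag
# names are the ones validated in main() above; the entry-point name and all
# values are placeholders:
#
#   mysos_scheduler --port=55001 \
#     --mesos_master=zk://zk1:2181,zk2:2181/mesos \
#     --framework_user=mysos \
#     --executor_uri=https://example.com/mysos_executor.pex \
#     --executor_cmd=./mysos_executor.pex \
#     --zk_url=zk://zk1:2181,zk2:2181/mysos \
#     --admin_keypath=/etc/mysos/admin_key.yml \
#     --scheduler_keypath=/etc/mysos/scheduler_key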
|
{
"content_hash": "f86689751748e66cb567f61bf61ddf66",
"timestamp": "",
"source": "github",
"line_count": 314,
"max_line_length": 100,
"avg_line_length": 33.789808917197455,
"alnum_prop": 0.6602262016965127,
"repo_name": "programwithebay/ApacheMysos",
"id": "7ff6a251496ad536385e0d1e92acd74795877f6e",
"size": "10610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mysos/scheduler/mysos_scheduler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1662"
},
{
"name": "Python",
"bytes": "196786"
},
{
"name": "Shell",
"bytes": "10158"
}
],
"symlink_target": ""
}
|
""" Defines the HighlightTool class.
"""
# Major library imports
from numpy import ones
# Enthought library imports
from traits.api import Enum, Float, Str
from enable.api import BaseTool
# Chaco imports
from chaco.api import BasePlotContainer
class HighlightTool(BaseTool):
""" A tool that enables the user to select a plot to be highlighted on the
graph by clicking on it.
"""
# The name of the data source metadata which controls selections.
metadata_name = Str('selections')
# The mouse button that initiates the selection.
drag_button = Enum("left", "right")
# Threshold distance for hit-testing.
threshold = Float(20.0)
#---------------------------------------------------------------------
# Inherited BaseTool traits
#---------------------------------------------------------------------
# This tool is not drawn. Overrides BaseTool.
draw_mode = "none"
# This tool is not visible. Overrides BaseTool.
visible = False
def normal_left_down(self, event):
""" Handles the left mouse button being pressed.
If the left mouse button initiates the selection, this method does so.
"""
if self.drag_button == "left":
self._highlight(event)
return
def normal_right_down(self, event):
""" Handles the right mouse button being pressed.
If the right mouse button initiates the selection, this method does so.
"""
if self.drag_button == "right":
self._highlight(event)
return
def _highlight(self, event):
if isinstance(self.component, BasePlotContainer):
event.offset_xy(self.component.x, self.component.y)
closest_plot = self._find_curve(self.component.components, event)
if closest_plot:
index = closest_plot.index
index.metadata[self.metadata_name] = ones(len(index.get_data()), dtype=bool)
closest_plot.request_redraw()
else:
# If we are attached to a plot container, then we can deselect
# all of the plots in the container
for p in self.component.components:
if self.metadata_name in p.index.metadata:
del p.index.metadata[self.metadata_name]
p.request_redraw()
event.pop()
elif hasattr(self.component, "hittest"):
hit_point = self.component.hittest((event.x, event.y), self.threshold)
index = self.component.index
if hit_point is not None:
index.metadata[self.metadata_name] = ones(len(index.get_data()), dtype=bool)
self.component.request_redraw()
elif self.metadata_name in index.metadata:
del index.metadata[self.metadata_name]
self.component.request_redraw()
event.handled = True
return
def _find_curve(self, plots, event):
        # TODO: use distance to pick the closest plot, rather than returning
        # the first plot that falls within the threshold.
        for p in plots:
            if hasattr(p, "hittest"):
                cpoint = p.hittest((event.x, event.y), self.threshold)
if cpoint:
return p
return None
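# Illustrative usage sketch (not part of the original module); ``plot`` is
# assumed to be an existing chaco plot component:
#
#     plot.tools.append(HighlightTool(plot, drag_button="left", threshold=25.0))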
#EOF
|
{
"content_hash": "91e407613e8d6932223db5d960d66d93",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 92,
"avg_line_length": 34.375,
"alnum_prop": 0.5781818181818181,
"repo_name": "ContinuumIO/chaco",
"id": "40eee5a159edd4a2f1c3c20cff8e12835c493a40",
"size": "3300",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "chaco/tools/highlight_tool.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "57003"
},
{
"name": "C++",
"bytes": "9881"
},
{
"name": "JavaScript",
"bytes": "449791"
},
{
"name": "Python",
"bytes": "2301293"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('database', '0048_settingscontract'),
]
operations = [
migrations.RenameField(
model_name='settingsapp',
old_name='auto_hide_notes',
new_name='auto_hide',
),
]
|
{
"content_hash": "41f8b2cb092960cbae31272c749b822c",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 46,
"avg_line_length": 20.61111111111111,
"alnum_prop": 0.5876010781671159,
"repo_name": "ACLARKNET/aclarknet-database",
"id": "ddb612d3d11a32d4fc552687cadf95752a70f8b3",
"size": "444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aclarknet/database/migrations/0049_auto_20170903_1807.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3681"
},
{
"name": "HTML",
"bytes": "167324"
},
{
"name": "JavaScript",
"bytes": "751469"
},
{
"name": "Makefile",
"bytes": "12395"
},
{
"name": "Python",
"bytes": "166944"
}
],
"symlink_target": ""
}
|
import logging
from werkzeug import (
BaseResponse, Request
)
from werkzeug.datastructures import Headers
from kay.utils.test import Client
from kay.app import get_application
from kay.conf import LazySettings
from kay.ext.testutils.gae_test_base import GAETestBase
from kay.utils import url_for
from kay.tests.restapp.models import RestModel
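# The test cases below exercise kay's generic REST handlers as anonymous,
# logged-in, and admin users: metadata at /rest/metadata and
# /rest/metadata/<Model>, CRUD on /rest/<Model> and /rest/<Model>/<key>, and
# single-property reads at /rest/<Model>/<key>/<prop>, in both JSON and XML.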
class RestJSONTestCase(GAETestBase):
KIND_NAME_UNSWAPPED = False
USE_PRODUCTION_STUBS = True
CLEANUP_USED_KIND = True
def setUp(self):
s = LazySettings(settings_module='kay.tests.rest_settings')
app = get_application(settings=s)
self.client = Client(app, BaseResponse)
self.client.test_logout()
def tearDown(self):
self.client.test_logout()
def test_rest_json(self):
headers = Headers({"Accept": "application/json"})
response = self.client.get('/rest/metadata', headers=headers)
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com")
response = self.client.get('/rest/metadata', headers=headers)
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com", is_admin="1")
response = self.client.get('/rest/metadata', headers=headers)
self.assertEqual(response.status_code, 200)
self.client.test_logout()
response = self.client.get('/rest/metadata/RestModel', headers=headers)
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com")
response = self.client.get('/rest/metadata/RestModel', headers=headers)
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com", is_admin="1")
response = self.client.get('/rest/metadata/RestModel', headers=headers)
self.assertEqual(response.status_code, 200)
self.client.test_logout()
response = self.client.post(
'/rest/RestModel',
data='{"RestModel": {"i_prop": 12, "s_prop": "string"}}',
content_type="application/json; charset=utf-8")
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com")
response = self.client.post(
'/rest/RestModel',
data='{"RestModel": {"i_prop": 12, "s_prop": "string"}}',
content_type="application/json; charset=utf-8")
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com", is_admin="1")
response = self.client.post(
'/rest/RestModel',
data='{"RestModel": {"i_prop": 12, "s_prop": "string"}}',
content_type="application/json; charset=utf-8")
self.assertEqual(response.status_code, 200)
key = response.data
elm = RestModel.get(key)
self.assertEqual(elm.s_prop, "string")
self.assertEqual(elm.i_prop, 12)
self.client.test_logout()
response = self.client.post(
'/rest/RestModel/%s' % key,
data='{"RestModel": {"i_prop": 14}}',
content_type="application/json; charset=utf-8")
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com")
response = self.client.post(
'/rest/RestModel/%s' % key,
data='{"RestModel": {"i_prop": 14}}',
content_type="application/json; charset=utf-8")
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com", is_admin="1")
response = self.client.post(
'/rest/RestModel/%s' % key,
data='{"RestModel": {"i_prop": 14}}',
content_type="application/json; charset=utf-8")
self.assertEqual(response.status_code, 200)
key2 = response.data
self.assertEqual(key, key2)
elm = RestModel.get(key)
self.assertEqual(elm.s_prop, "string")
self.assertEqual(elm.i_prop, 14)
response = self.client.post(
'/rest/RestModel',
data='[{"RestModel": {"i_prop": 1, "s_prop": "foobar1"}},{"RestModel": {"i_prop": 2, "s_prop": "foobar2"}}]',
content_type="application/json; charset=utf-8")
self.assertEqual(response.status_code, 200)
key3, key4 = response.data.split(',')
elm3 = RestModel.get(key3)
elm4 = RestModel.get(key4)
self.assertEqual(elm3.s_prop, "foobar1")
self.assertEqual(elm3.i_prop, 1)
self.assertEqual(elm4.s_prop, "foobar2")
self.assertEqual(elm4.i_prop, 2)
response = self.client.get('/rest/RestModel', headers=headers)
self.assertEqual(response.status_code, 200)
response = self.client.get('/rest/RestModel/%s' % key, headers=headers)
self.assertEqual(response.status_code, 200)
response = self.client.get('/rest/RestModel/%s/s_prop' % key,
headers=headers)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, "string")
response = self.client.get('/rest/RestModel/%s/i_prop' % key,
headers=headers)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, "14")
self.client.test_logout()
response = self.client.delete('/rest/RestModel/%s' % key,
headers=headers)
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com")
response = self.client.delete('/rest/RestModel/%s' % key,
headers=headers)
self.assertEqual(response.status_code, 403)
self.client.test_login(email="test@example.com", is_admin="1")
response = self.client.delete('/rest/RestModel/%s' % key,
headers=headers)
self.assertEqual(response.status_code, 200)
response = self.client.get('/rest/RestModel/%s' % key,
headers=headers)
self.assertEqual(response.status_code, 404)
class RestTestCase(GAETestBase):
KIND_NAME_UNSWAPPED = False
USE_PRODUCTION_STUBS = True
CLEANUP_USED_KIND = True
def setUp(self):
s = LazySettings(settings_module='kay.tests.rest_settings')
app = get_application(settings=s)
self.client = Client(app, BaseResponse)
self.client.test_logout()
def tearDown(self):
self.client.test_logout()
def test_rest_operations(self):
self.client.test_login(email="test@example.com", is_admin="1")
response = self.client.get('/rest/metadata')
self.assertEqual(response.status_code, 200)
response = self.client.get('/rest/metadata/RestModel')
self.assertEqual(response.status_code, 200)
response = self.client.post('/rest/RestModel', data='<?xml version="1.0" encoding="utf-8"?><RestModel><i_prop>12</i_prop><s_prop>string</s_prop></RestModel>')
self.assertEqual(response.status_code, 200)
key = response.data
elm = RestModel.get(key)
self.assertEqual(elm.s_prop, "string")
self.assertEqual(elm.i_prop, 12)
response = self.client.post(
'/rest/RestModel/%s' % key,
data='<?xml version="1.0" encoding="utf-8"?><RestModel><i_prop>14</i_prop></RestModel>')
self.assertEqual(response.status_code, 200)
key2 = response.data
self.assertEqual(key, key2)
elm = RestModel.get(key)
self.assertEqual(elm.s_prop, "string")
self.assertEqual(elm.i_prop, 14)
response = self.client.get('/rest/RestModel')
self.assertEqual(response.status_code, 200)
response = self.client.get('/rest/RestModel/%s' % key)
self.assertEqual(response.status_code, 200)
response = self.client.get('/rest/RestModel/%s/s_prop' % key)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, "string")
response = self.client.get('/rest/RestModel/%s/i_prop' % key)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, "14")
response = self.client.delete('/rest/RestModel/%s' % key)
self.assertEqual(response.status_code, 200)
response = self.client.get('/rest/RestModel/%s' % key)
self.assertEqual(response.status_code, 404)
|
{
"content_hash": "afe321d41131dddad9e58541110f9554",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 162,
"avg_line_length": 35.89497716894977,
"alnum_prop": 0.666073018699911,
"repo_name": "yosukesuzuki/let-me-notify",
"id": "2b10369499ff65a9c8927551fc525e115844ab2f",
"size": "7862",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "project/kay/tests/rest_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1704"
},
{
"name": "HTML",
"bytes": "34400"
},
{
"name": "Python",
"bytes": "764480"
}
],
"symlink_target": ""
}
|
import copy
from nova.api.openstack.compute.schemas import block_device_mapping_v1
from nova.api.openstack.compute.schemas import server_tags
from nova.api.validation import parameter_types
from nova.objects import fields
block_device_mapping_new_item = {
# defined in nova/block_device.py:from_api()
# NOTE: Client can specify the Id with the combination of
# source_type and uuid, or a single attribute like volume_id/
# image_id/snapshot_id.
'source_type': {
'type': 'string',
'enum': ['volume', 'image', 'snapshot', 'blank'],
},
'uuid': {
'type': 'string', 'minLength': 1, 'maxLength': 255,
'pattern': '^[a-zA-Z0-9._-]*$',
},
'image_id': parameter_types.image_id,
# Defined as varchar(255) in column "destination_type" in table
# "block_device_mapping"
'destination_type': {
'type': 'string',
'enum': fields.BlockDeviceDestinationType.ALL,
},
# Defined as varchar(255) in column "guest_format" in table
# "block_device_mapping"
'guest_format': {
'type': 'string', 'maxLength': 255,
},
# Defined as varchar(255) in column "device_type" in table
# "block_device_mapping"
'device_type': {
'type': 'string', 'maxLength': 255,
},
# Defined as varchar(255) in column "disk_bus" in table
# "block_device_mapping"
'disk_bus': {
'type': 'string', 'maxLength': 255,
},
# Defined as integer in nova/block_device.py:from_api()
'boot_index': {
'type': ['integer', 'string'],
'pattern': '^-?[0-9]+$',
},
}
block_device_mapping = copy.deepcopy(
block_device_mapping_v1.legacy_block_device_mapping)
block_device_mapping['properties'].update(block_device_mapping_new_item)
server_create = {
'block_device_mapping_v2': {
'type': 'array',
'items': [block_device_mapping]
}
}
block_device_mapping_v232_new_item = {
'tag': server_tags.tag
}
block_device_mapping_v232 = copy.deepcopy(block_device_mapping)
block_device_mapping_v232['properties'].update(
block_device_mapping_v232_new_item)
server_create_v232 = {
'block_device_mapping_v2': {
'type': 'array',
'items': [block_device_mapping_v232]
}
}
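# Illustrative sketch (not part of the original module): a request fragment
# that the v2.32 schema above is meant to validate; all field values are
# placeholders.
#
#     {
#         "block_device_mapping_v2": [{
#             "source_type": "image",
#             "uuid": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
#             "destination_type": "volume",
#             "boot_index": 0,
#             "tag": "root-disk"
#         }]
#     }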
|
{
"content_hash": "ecdb2626ea02d41a1690641504bf97d8",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 76,
"avg_line_length": 30.266666666666666,
"alnum_prop": 0.6162995594713656,
"repo_name": "sebrandon1/nova",
"id": "81097f936f603a83fd6ac288aeb385f71c51d304",
"size": "2901",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "nova/api/openstack/compute/schemas/block_device_mapping.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "3325"
},
{
"name": "Python",
"bytes": "18265203"
},
{
"name": "Shell",
"bytes": "37074"
},
{
"name": "Smarty",
"bytes": "299657"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import pytest
from sentry.ownership.grammar import Rule, Matcher, Owner, parse_rules, dump_schema, load_schema
fixture_data = """
# cool stuff comment
*.js #frontend m@ROBENOLT.com
# good comment
url:http://google.com/* #backend
path:src/sentry/* david@sentry.io
tags.foo:bar tagperson@sentry.io
tags.foo:"bar baz" tagperson@sentry.io
"""
def test_parse_rules():
assert parse_rules(fixture_data) == [
Rule(Matcher("path", "*.js"), [Owner("team", "frontend"), Owner("user", "m@robenolt.com")]),
Rule(Matcher("url", "http://google.com/*"), [Owner("team", "backend")]),
Rule(Matcher("path", "src/sentry/*"), [Owner("user", "david@sentry.io")]),
Rule(Matcher("tags.foo", "bar"), [Owner("user", "tagperson@sentry.io")]),
Rule(Matcher("tags.foo", "bar baz"), [Owner("user", "tagperson@sentry.io")]),
]
def test_dump_schema():
assert dump_schema([Rule(Matcher("path", "*.js"), [Owner("team", "frontend")])]) == {
"$version": 1,
"rules": [
{
"matcher": {"type": "path", "pattern": "*.js"},
"owners": [{"type": "team", "identifier": "frontend"}],
}
],
}
def test_load_schema():
assert load_schema(
{
"$version": 1,
"rules": [
{
"matcher": {"type": "path", "pattern": "*.js"},
"owners": [{"type": "team", "identifier": "frontend"}],
}
],
}
) == [Rule(Matcher("path", "*.js"), [Owner("team", "frontend")])]
def test_matcher_test_url():
data = {"request": {"url": "http://example.com/foo.js"}}
assert Matcher("url", "*.js").test(data)
assert Matcher("url", "http://*.com/foo.js").test(data)
assert not Matcher("url", "*.py").test(data)
assert not Matcher("url", "*.jsx").test(data)
assert not Matcher("path", "*.js").test(data)
assert not Matcher("url", "*.js").test({})
def test_matcher_test_none():
data = {"request": {"url": None}}
assert not Matcher("url", "").test(data)
def test_matcher_test_exception():
data = {
"exception": {
"values": [
{
"stacktrace": {
"frames": [
{"filename": "foo/file.py"},
{"abs_path": "/usr/local/src/other/app.py"},
]
}
}
]
}
}
assert Matcher("path", "*.py").test(data)
assert Matcher("path", "foo/*.py").test(data)
assert Matcher("path", "/usr/local/src/*/app.py").test(data)
assert not Matcher("path", "*.js").test(data)
assert not Matcher("path", "*.jsx").test(data)
assert not Matcher("url", "*.py").test(data)
assert not Matcher("path", "*.py").test({})
def test_matcher_test_stacktrace():
data = {
"stacktrace": {
"frames": [{"filename": "foo/file.py"}, {"abs_path": "/usr/local/src/other/app.py"}]
}
}
assert Matcher("path", "*.py").test(data)
assert Matcher("path", "foo/*.py").test(data)
assert Matcher("path", "/usr/local/src/*/app.py").test(data)
assert not Matcher("path", "*.js").test(data)
assert not Matcher("path", "*.jsx").test(data)
assert not Matcher("url", "*.py").test(data)
assert not Matcher("path", "*.py").test({})
def test_matcher_test_tags():
data = {
"tags": [["foo", "foo_value"], ["bar", "barval"]],
}
assert Matcher("tags.foo", "foo_value").test(data)
assert Matcher("tags.bar", "barval").test(data)
assert not Matcher("tags.barz", "barval").test(data)
@pytest.mark.parametrize("data", [{}, {"tags": None}, {"tags": [None]}])
def test_matcher_test_tags_without_tag_data(data):
assert not Matcher("tags.foo", "foo_value").test(data)
assert not Matcher("tags.bar", "barval").test(data)
|
{
"content_hash": "7afe20e1f65fc77536f7f9b21fbb175b",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 100,
"avg_line_length": 31.543307086614174,
"alnum_prop": 0.5199700449326011,
"repo_name": "beeftornado/sentry",
"id": "4d1467483d460cc0963fde8dea6ce46dfcde3856",
"size": "4006",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sentry/ownership/test_grammar.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157195"
},
{
"name": "HTML",
"bytes": "197026"
},
{
"name": "JavaScript",
"bytes": "380379"
},
{
"name": "Makefile",
"bytes": "2832"
},
{
"name": "Python",
"bytes": "6473603"
}
],
"symlink_target": ""
}
|
"""Tests the output methods."""
import json
from monitorstack.common import formatters
SAMPLE_RESULT = {
'exit_code': 0,
'message': 'uptime is ok',
'measurement_name': 'system_uptime',
'meta': {
'platform': 'example_platform',
},
'variables': {
'uptime': '29587.75'
}
}
SAMPLE_RESULT_ERROR = {
'exit_code': 1,
'message': 'uptime failed',
'measurement_name': 'system_uptime',
'meta': {},
'variables': {}
}
SAMPLE_RESULT_MEASUREMENT_TYPE = {
'exit_code': 0,
'message': 'uptime is ok',
'measurement_name': 'system_uptime',
'measurement_type': 'testType',
'meta': {
'platform': 'example_platform',
},
'variables': {
'uptime': '29587.75'
}
}
SAMPLE_RESULT_MEASUREMENT_UNITS = {
'exit_code': 0,
'message': 'uptime is ok',
'measurement_name': 'system_uptime',
'measurement_units': 'testUnits',
'meta': {
'platform': 'example_platform',
},
'variables': {
'uptime': '29587.75'
}
}
SAMPLE_RESULT_NO_META = {
'exit_code': 0,
'message': 'uptime is ok',
'measurement_name': 'system_uptime',
'variables': {
'uptime': '29587.75'
}
}
SAMPLE_RESULT_NO_META_WITH_FLOAT = {
'exit_code': 0,
'message': 'uptime is ok',
'measurement_name': 'system_uptime',
'variables': {
'uptime': float(29587.75)
}
}
class TestFormatters(object):
"""Tests for the base cli module."""
def test_current_time(self):
"""Test current_time()."""
result = formatters._current_time()
assert isinstance(result, int)
assert result > 0
def test__get_value_types_int32(self):
"""Test _get_value_types() with int."""
value, m_type = formatters._get_value_types(1)
assert value == 1
assert m_type == 'int32'
def test__get_value_types_int32_str(self):
"""Test _get_value_types() with int."""
value, m_type = formatters._get_value_types('1')
assert value == 1
assert m_type == 'int32'
def test__get_value_types_int64(self):
"""Test _get_value_types() with int."""
value, m_type = formatters._get_value_types(9999999999)
assert value == 9999999999
assert m_type == 'int64'
def test__get_value_types_int64_str(self):
"""Test _get_value_types() with int."""
value, m_type = formatters._get_value_types('9999999999')
assert value == 9999999999
assert m_type == 'int64'
def test__get_value_types_float(self):
"""Test _get_value_types() with float."""
value, m_type = formatters._get_value_types(1.1)
assert value == 1.1
assert m_type == 'float'
def test__get_value_types_float_str(self):
"""Test _get_value_types() with float."""
value, m_type = formatters._get_value_types('1.1')
assert value == 1.1
assert m_type == 'float'
def test__get_value_types_set_m_type(self):
"""Test _get_value_types() with float."""
value, m_type = formatters._get_value_types('1.1', 'double')
assert value == 1.1
assert m_type == 'double'
def test__get_value_types_string(self):
"""Test _get_value_types() with str."""
value, m_type = formatters._get_value_types('TestString')
assert value == 'TestString'
assert m_type == 'string'
def test_write_json(self, capsys):
"""Test write_json() module."""
formatters.write_json(SAMPLE_RESULT)
out, err = capsys.readouterr()
result_json = json.loads(out)
assert isinstance(result_json, dict)
assert result_json['measurement_name'] == \
SAMPLE_RESULT['measurement_name']
def test_write_line(self, capsys):
"""Test write_line() module."""
formatters.write_line(SAMPLE_RESULT)
out, err = capsys.readouterr()
assert out == "uptime {}\n".format(
SAMPLE_RESULT['variables']['uptime']
)
def test_write_telegraf(self, capsys):
"""Test write_telegraf() module."""
formatters.write_telegraf(SAMPLE_RESULT)
out, err = capsys.readouterr()
assert out.startswith(SAMPLE_RESULT['measurement_name'])
def test_write_telegraf_without_meta(self, capsys):
"""Test write_telegrat() module without meta in result."""
formatters.write_telegraf(SAMPLE_RESULT_NO_META)
out, err = capsys.readouterr()
assert out.startswith(SAMPLE_RESULT['measurement_name'])
def test_write_telegraf_line_format_with_float(self):
"""Test _telegraf_line_format() with float in meta."""
sets = {
'platform': 'example_platform',
'othervar': float(3)
}
result = formatters._telegraf_line_format(sets=sets, quote=True)
assert isinstance(result, str)
assert 'othervar=3' in result
assert 'platform="example_platform"' in result
def test_write_rax_maas(self, capsys):
"""Test write_telegraf() module."""
formatters.write_rax_maas(SAMPLE_RESULT)
out, err = capsys.readouterr()
assert SAMPLE_RESULT['message'] in out
assert 'metric uptime float 29587.75' in out
def test_write_rax_maas_with_types(self, capsys):
"""Test write_telegraf() module."""
formatters.write_rax_maas(SAMPLE_RESULT_MEASUREMENT_TYPE)
out, err = capsys.readouterr()
assert SAMPLE_RESULT['message'] in out
assert 'metric uptime testType 29587.75' in out
def test_write_rax_maas_with_units(self, capsys):
"""Test write_telegraf() module."""
formatters.write_rax_maas(SAMPLE_RESULT_MEASUREMENT_UNITS)
out, err = capsys.readouterr()
out_split = out.splitlines()
assert [i for i in out_split if SAMPLE_RESULT['message'] in i]
assert 'metric uptime float 29587.75 testUnits' in out_split
def test_write_rax_maas_with_error(self, capsys):
"""Test write_telegraf() module."""
formatters.write_rax_maas(SAMPLE_RESULT_ERROR)
out, err = capsys.readouterr()
out_split = out.splitlines()
assert [i for i in out_split if 'status error' in i]
|
{
"content_hash": "c73d2528e4a951f58e703416eae1d237",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 72,
"avg_line_length": 32.00512820512821,
"alnum_prop": 0.591892324947925,
"repo_name": "major/monitorstack",
"id": "3cdf78570d5477b64745059311b9b5a59e8963e0",
"size": "6835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_formatters.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "98887"
}
],
"symlink_target": ""
}
|
"""
Test lldb Python API object's default constructor and make sure it is invalid
after initial construction.
There are also some cases of boundary condition testings sprinkled throughout
the tests where None is passed to SB API which expects (const char *) in the
C++ API counterpart. Passing None should not crash lldb!
There are three exceptions to the above general rules, though; API objects
SBCommandReturnObject, SBStream, and SBSymbolContextList are all valid objects
after default construction.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class APIDefaultConstructorTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBAddress(self):
obj = lldb.SBAddress()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_address
sb_address.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBBlock(self):
obj = lldb.SBBlock()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_block
sb_block.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBBreakpoint(self):
obj = lldb.SBBreakpoint()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_breakpoint
sb_breakpoint.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBBreakpointLocation(self):
obj = lldb.SBBreakpointLocation()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_breakpointlocation
sb_breakpointlocation.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBBreakpointName(self):
obj = lldb.SBBreakpointName()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_breakpointname
sb_breakpointname.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBBroadcaster(self):
obj = lldb.SBBroadcaster()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_broadcaster
sb_broadcaster.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBCommandReturnObject(self):
"""SBCommandReturnObject object is valid after default construction."""
obj = lldb.SBCommandReturnObject()
if self.TraceOn():
print(obj)
self.assertTrue(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBCommunication(self):
obj = lldb.SBCommunication()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_communication
sb_communication.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBCompileUnit(self):
obj = lldb.SBCompileUnit()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_compileunit
sb_compileunit.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBDebugger(self):
obj = lldb.SBDebugger()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_debugger
sb_debugger.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
# darwin: This test passes with swig 3.0.2, fails w/3.0.5 other tests fail
# with 2.0.12 http://llvm.org/pr23488
def test_SBError(self):
obj = lldb.SBError()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_error
sb_error.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBEvent(self):
obj = lldb.SBEvent()
# This is just to test that typemap, as defined in lldb.swig, works.
obj2 = lldb.SBEvent(0, "abc")
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_event
sb_event.fuzz_obj(obj)
@add_test_categories(['pyapi'])
def test_SBFileSpec(self):
obj = lldb.SBFileSpec()
# This is just to test that FileSpec(None) does not crash.
obj2 = lldb.SBFileSpec(None, True)
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_filespec
sb_filespec.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBFrame(self):
obj = lldb.SBFrame()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_frame
sb_frame.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBFunction(self):
obj = lldb.SBFunction()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_function
sb_function.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBInstruction(self):
obj = lldb.SBInstruction()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_instruction
sb_instruction.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBInstructionList(self):
obj = lldb.SBInstructionList()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_instructionlist
sb_instructionlist.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBLineEntry(self):
obj = lldb.SBLineEntry()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_lineentry
sb_lineentry.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBListener(self):
obj = lldb.SBListener()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_listener
sb_listener.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
# Py3 asserts due to a bug in SWIG. Trying to upstream a patch to fix
# this in 3.0.8
@skipIf(py_version=['>=', (3, 0)], swig_version=['<', (3, 0, 8)])
def test_SBModule(self):
obj = lldb.SBModule()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_module
sb_module.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBProcess(self):
obj = lldb.SBProcess()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_process
sb_process.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBProcessInfo(self):
obj = lldb.SBProcessInfo()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_process_info
sb_process_info.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBSection(self):
obj = lldb.SBSection()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_section
sb_section.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBStream(self):
"""SBStream object is valid after default construction."""
obj = lldb.SBStream()
if self.TraceOn():
print(obj)
self.assertTrue(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBStringList(self):
obj = lldb.SBStringList()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_stringlist
sb_stringlist.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBSymbol(self):
obj = lldb.SBSymbol()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_symbol
sb_symbol.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBSymbolContext(self):
obj = lldb.SBSymbolContext()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_symbolcontext
sb_symbolcontext.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBSymbolContextList(self):
"""SBSymbolContextList object is valid after default construction."""
obj = lldb.SBSymbolContextList()
if self.TraceOn():
print(obj)
self.assertTrue(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBTarget(self):
obj = lldb.SBTarget()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_target
sb_target.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBThread(self):
obj = lldb.SBThread()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_thread
sb_thread.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBType(self):
try:
obj = lldb.SBType()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# If we reach here, the test fails.
self.fail("lldb.SBType() should fail, not succeed!")
except:
# Exception is expected.
return
# Unreachable code because lldb.SBType() should fail.
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_type
sb_type.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBTypeList(self):
"""SBTypeList object is valid after default construction."""
obj = lldb.SBTypeList()
if self.TraceOn():
print(obj)
self.assertTrue(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBValue(self):
obj = lldb.SBValue()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_value
sb_value.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBValueList(self):
obj = lldb.SBValueList()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_valuelist
sb_valuelist.fuzz_obj(obj)
@add_test_categories(['pyapi'])
@no_debug_info_test
def test_SBWatchpoint(self):
obj = lldb.SBWatchpoint()
if self.TraceOn():
print(obj)
self.assertFalse(obj)
# Do fuzz testing on the invalid obj, it should not crash lldb.
import sb_watchpoint
sb_watchpoint.fuzz_obj(obj)
|
{
"content_hash": "6fb26377ed1e8050ed19e2966fd486e0",
"timestamp": "",
"source": "github",
"line_count": 418,
"max_line_length": 79,
"avg_line_length": 31.629186602870814,
"alnum_prop": 0.6032826563800016,
"repo_name": "apple/swift-lldb",
"id": "e48b90e2ee940db76c45496e5d32dcfc635e89e3",
"size": "13221",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "packages/Python/lldbsuite/test/python_api/default-constructor/TestDefaultConstructorForAPIObjects.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "130449"
},
{
"name": "C",
"bytes": "198536"
},
{
"name": "C++",
"bytes": "27687071"
},
{
"name": "CMake",
"bytes": "172176"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "106804"
},
{
"name": "Objective-C",
"bytes": "106821"
},
{
"name": "Objective-C++",
"bytes": "25658"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "4680483"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Swift",
"bytes": "260786"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
}
|
"""
Volume driver library for NetApp C-mode block storage systems.
"""
from oslo_log import log as logging
from oslo_utils import units
import six
from cinder import exception
from cinder.i18n import _
from cinder.objects import fields
from cinder import utils
from cinder.volume.drivers.netapp.dataontap import block_base
from cinder.volume.drivers.netapp.dataontap.performance import perf_cmode
from cinder.volume.drivers.netapp.dataontap.utils import capabilities
from cinder.volume.drivers.netapp.dataontap.utils import data_motion
from cinder.volume.drivers.netapp.dataontap.utils import loopingcalls
from cinder.volume.drivers.netapp.dataontap.utils import utils as dot_utils
from cinder.volume.drivers.netapp import options as na_opts
from cinder.volume.drivers.netapp import utils as na_utils
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
@six.add_metaclass(utils.TraceWrapperMetaclass)
class NetAppBlockStorageCmodeLibrary(block_base.NetAppBlockStorageLibrary,
data_motion.DataMotionMixin):
"""NetApp block storage library for Data ONTAP (Cluster-mode)."""
REQUIRED_CMODE_FLAGS = ['netapp_vserver']
def __init__(self, driver_name, driver_protocol, **kwargs):
super(NetAppBlockStorageCmodeLibrary, self).__init__(driver_name,
driver_protocol,
**kwargs)
self.configuration.append_config_values(na_opts.netapp_cluster_opts)
self.driver_mode = 'cluster'
self.failed_over_backend_name = kwargs.get('active_backend_id')
self.failed_over = self.failed_over_backend_name is not None
self.replication_enabled = (
True if self.get_replication_backend_names(
self.configuration) else False)
def do_setup(self, context):
super(NetAppBlockStorageCmodeLibrary, self).do_setup(context)
na_utils.check_flags(self.REQUIRED_CMODE_FLAGS, self.configuration)
# cDOT API client
self.zapi_client = dot_utils.get_client_for_backend(
self.failed_over_backend_name or self.backend_name)
self.vserver = self.zapi_client.vserver
self.using_cluster_credentials = \
self.zapi_client.check_for_cluster_credentials()
# Performance monitoring library
self.perf_library = perf_cmode.PerformanceCmodeLibrary(
self.zapi_client)
# Storage service catalog
self.ssc_library = capabilities.CapabilitiesLibrary(
self.driver_protocol, self.vserver, self.zapi_client,
self.configuration)
def _update_zapi_client(self, backend_name):
"""Set cDOT API client for the specified config backend stanza name."""
self.zapi_client = dot_utils.get_client_for_backend(backend_name)
self.vserver = self.zapi_client.vserver
self.ssc_library._update_for_failover(self.zapi_client,
self._get_flexvol_to_pool_map())
ssc = self.ssc_library.get_ssc()
self.perf_library._update_for_failover(self.zapi_client, ssc)
# Clear LUN table cache
self.lun_table = {}
def check_for_setup_error(self):
"""Check that the driver is working and can communicate."""
self.ssc_library.check_api_permissions()
if not self._get_flexvol_to_pool_map():
msg = _('No pools are available for provisioning volumes. '
'Ensure that the configuration option '
'netapp_pool_name_search_pattern is set correctly.')
raise exception.NetAppDriverException(msg)
self._add_looping_tasks()
super(NetAppBlockStorageCmodeLibrary, self).check_for_setup_error()
def _add_looping_tasks(self):
"""Add tasks that need to be executed at a fixed interval."""
# Note(cknight): Run the update once in the current thread to prevent a
# race with the first invocation of _update_volume_stats.
self._update_ssc()
# Add the task that updates the slow-changing storage service catalog
self.loopingcalls.add_task(self._update_ssc,
loopingcalls.ONE_HOUR,
loopingcalls.ONE_HOUR)
self.loopingcalls.add_task(
self._handle_housekeeping_tasks,
loopingcalls.TEN_MINUTES,
0)
super(NetAppBlockStorageCmodeLibrary, self)._add_looping_tasks()
def _handle_housekeeping_tasks(self):
"""Handle various cleanup activities."""
# Harvest soft-deleted QoS policy groups
self.zapi_client.remove_unused_qos_policy_groups()
active_backend = self.failed_over_backend_name or self.backend_name
LOG.debug("Current service state: Replication enabled: %("
"replication)s. Failed-Over: %(failed)s. Active Backend "
"ID: %(active)s",
{
'replication': self.replication_enabled,
'failed': self.failed_over,
'active': active_backend,
})
# Create pool mirrors if whole-backend replication configured
if self.replication_enabled and not self.failed_over:
self.ensure_snapmirrors(
self.configuration, self.backend_name,
self.ssc_library.get_ssc_flexvol_names())
def _handle_ems_logging(self):
"""Log autosupport messages."""
base_ems_message = dot_utils.build_ems_log_message_0(
self.driver_name, self.app_version, self.driver_mode)
self.zapi_client.send_ems_log_message(base_ems_message)
pool_ems_message = dot_utils.build_ems_log_message_1(
self.driver_name, self.app_version, self.vserver,
self.ssc_library.get_ssc_flexvol_names(), [])
self.zapi_client.send_ems_log_message(pool_ems_message)
def _create_lun(self, volume_name, lun_name, size,
metadata, qos_policy_group_name=None):
"""Creates a LUN, handling Data ONTAP differences as needed."""
self.zapi_client.create_lun(
volume_name, lun_name, size, metadata, qos_policy_group_name)
def _create_lun_handle(self, metadata, vserver=None):
"""Returns LUN handle based on filer type."""
vserver = vserver or self.vserver
return '%s:%s' % (self.vserver, metadata['Path'])
def _find_mapped_lun_igroup(self, path, initiator_list):
"""Find an igroup for a LUN mapped to the given initiator(s)."""
initiator_igroups = self.zapi_client.get_igroup_by_initiators(
initiator_list)
lun_maps = self.zapi_client.get_lun_map(path)
if initiator_igroups and lun_maps:
for igroup in initiator_igroups:
igroup_name = igroup['initiator-group-name']
if igroup_name.startswith(na_utils.OPENSTACK_PREFIX):
for lun_map in lun_maps:
if lun_map['initiator-group'] == igroup_name:
return igroup_name, lun_map['lun-id']
return None, None
def _clone_lun(self, name, new_name, space_reserved=None,
qos_policy_group_name=None, src_block=0, dest_block=0,
block_count=0, source_snapshot=None, is_snapshot=False):
"""Clone LUN with the given handle to the new name."""
if not space_reserved:
space_reserved = self.lun_space_reservation
metadata = self._get_lun_attr(name, 'metadata')
volume = metadata['Volume']
self.zapi_client.clone_lun(volume, name, new_name, space_reserved,
qos_policy_group_name=qos_policy_group_name,
src_block=src_block, dest_block=dest_block,
block_count=block_count,
source_snapshot=source_snapshot,
is_snapshot=is_snapshot)
LOG.debug("Cloned LUN with new name %s", new_name)
lun = self.zapi_client.get_lun_by_args(vserver=self.vserver,
path='/vol/%s/%s'
% (volume, new_name))
if len(lun) == 0:
msg = _("No cloned LUN named %s found on the filer")
raise exception.VolumeBackendAPIException(data=msg % new_name)
clone_meta = self._create_lun_meta(lun[0])
self._add_lun_to_table(
block_base.NetAppLun('%s:%s' % (clone_meta['Vserver'],
clone_meta['Path']),
new_name,
lun[0].get_child_content('size'),
clone_meta))
def _create_lun_meta(self, lun):
"""Creates LUN metadata dictionary."""
self.zapi_client.check_is_naelement(lun)
meta_dict = {}
meta_dict['Vserver'] = lun.get_child_content('vserver')
meta_dict['Volume'] = lun.get_child_content('volume')
meta_dict['Qtree'] = lun.get_child_content('qtree')
meta_dict['Path'] = lun.get_child_content('path')
meta_dict['OsType'] = lun.get_child_content('multiprotocol-type')
meta_dict['SpaceReserved'] = \
lun.get_child_content('is-space-reservation-enabled')
meta_dict['UUID'] = lun.get_child_content('uuid')
return meta_dict
def _get_fc_target_wwpns(self, include_partner=True):
return self.zapi_client.get_fc_target_wwpns()
def _update_volume_stats(self, filter_function=None,
goodness_function=None):
"""Retrieve backend stats."""
LOG.debug('Updating volume stats')
data = {}
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or self.driver_name
data['vendor_name'] = 'NetApp'
data['driver_version'] = self.VERSION
data['storage_protocol'] = self.driver_protocol
data['pools'] = self._get_pool_stats(
filter_function=filter_function,
goodness_function=goodness_function)
data['sparse_copy_volume'] = True
# Used for service state report
data['replication_enabled'] = self.replication_enabled
self._stats = data
def _get_pool_stats(self, filter_function=None, goodness_function=None):
"""Retrieve pool (Data ONTAP flexvol) stats.
Pool statistics are assembled from static driver capabilities, the
Storage Service Catalog of flexvol attributes, and real-time capacity
and controller utilization metrics. The pool name is the flexvol name.
"""
pools = []
ssc = self.ssc_library.get_ssc()
if not ssc:
return pools
# Utilization and performance metrics require cluster-scoped
# credentials
if self.using_cluster_credentials:
# Get up-to-date node utilization metrics just once
self.perf_library.update_performance_cache(ssc)
# Get up-to-date aggregate capacities just once
aggregates = self.ssc_library.get_ssc_aggregates()
aggr_capacities = self.zapi_client.get_aggregate_capacities(
aggregates)
else:
aggr_capacities = {}
for ssc_vol_name, ssc_vol_info in ssc.items():
pool = dict()
# Add storage service catalog data
pool.update(ssc_vol_info)
# Add driver capabilities and config info
pool['QoS_support'] = True
pool['multiattach'] = False
pool['consistencygroup_support'] = True
pool['consistent_group_snapshot_enabled'] = True
pool['reserved_percentage'] = self.reserved_percentage
pool['max_over_subscription_ratio'] = (
self.max_over_subscription_ratio)
# Add up-to-date capacity info
capacity = self.zapi_client.get_flexvol_capacity(
flexvol_name=ssc_vol_name)
size_total_gb = capacity['size-total'] / units.Gi
pool['total_capacity_gb'] = na_utils.round_down(size_total_gb)
size_available_gb = capacity['size-available'] / units.Gi
pool['free_capacity_gb'] = na_utils.round_down(size_available_gb)
pool['provisioned_capacity_gb'] = round(
pool['total_capacity_gb'] - pool['free_capacity_gb'], 2)
if self.using_cluster_credentials:
dedupe_used = self.zapi_client.get_flexvol_dedupe_used_percent(
ssc_vol_name)
else:
dedupe_used = 0.0
pool['netapp_dedupe_used_percent'] = na_utils.round_down(
dedupe_used)
aggregate_name = ssc_vol_info.get('netapp_aggregate')
aggr_capacity = aggr_capacities.get(aggregate_name, {})
pool['netapp_aggregate_used_percent'] = aggr_capacity.get(
'percent-used', 0)
# Add utilization data
utilization = self.perf_library.get_node_utilization_for_pool(
ssc_vol_name)
pool['utilization'] = na_utils.round_down(utilization)
pool['filter_function'] = filter_function
pool['goodness_function'] = goodness_function
# Add replication capabilities/stats
pool.update(
self.get_replication_backend_stats(self.configuration))
pools.append(pool)
return pools
def _update_ssc(self):
"""Refresh the storage service catalog with the latest set of pools."""
self.ssc_library.update_ssc(self._get_flexvol_to_pool_map())
def _get_flexvol_to_pool_map(self):
"""Get the flexvols that match the pool name search pattern.
The map is of the format suitable for seeding the storage service
catalog: {<flexvol_name> : {'pool_name': <flexvol_name>}}
"""
pool_regex = na_utils.get_pool_name_filter_regex(self.configuration)
pools = {}
flexvol_names = self.zapi_client.list_flexvols()
for flexvol_name in flexvol_names:
msg_args = {
'flexvol': flexvol_name,
'vol_pattern': pool_regex.pattern,
}
if pool_regex.match(flexvol_name):
msg = "Volume '%(flexvol)s' matches %(vol_pattern)s"
LOG.debug(msg, msg_args)
pools[flexvol_name] = {'pool_name': flexvol_name}
else:
msg = "Volume '%(flexvol)s' does not match %(vol_pattern)s"
LOG.debug(msg, msg_args)
return pools
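    # Illustrative example (hypothetical names): with a pool name search
    # pattern of '^cinder_.*$' and list_flexvols() returning
    # ['cinder_vol1', 'cinder_vol2', 'scratch'], the map built above would be
    #   {'cinder_vol1': {'pool_name': 'cinder_vol1'},
    #    'cinder_vol2': {'pool_name': 'cinder_vol2'}}
    # and 'scratch' would be skipped because it does not match the pattern.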
def delete_volume(self, volume):
"""Driver entry point for destroying existing volumes."""
super(NetAppBlockStorageCmodeLibrary, self).delete_volume(volume)
try:
qos_policy_group_info = na_utils.get_valid_qos_policy_group_info(
volume)
except exception.Invalid:
# Delete even if there was invalid qos policy specified for the
# volume.
qos_policy_group_info = None
self._mark_qos_policy_group_for_deletion(qos_policy_group_info)
msg = 'Deleted LUN with name %(name)s and QoS info %(qos)s'
LOG.debug(msg, {'name': volume['name'], 'qos': qos_policy_group_info})
def _get_preferred_target_from_list(self, target_details_list,
filter=None):
# cDOT iSCSI LIFs do not migrate from controller to controller
# in failover. Rather, an iSCSI LIF must be configured on each
# controller and the initiator has to take responsibility for
# using a LIF that is UP. In failover, the iSCSI LIF on the
# downed controller goes DOWN until the controller comes back up.
#
# Currently Nova only accepts a single target when obtaining
# target details from Cinder, so we pass back the first portal
# with an UP iSCSI LIF. There are plans to have Nova accept
# and try multiple targets. When that happens, we can and should
# remove this filter and return all targets since their operational
# state could change between the time we test here and the time
# Nova uses the target.
operational_addresses = (
self.zapi_client.get_operational_lif_addresses())
return (super(NetAppBlockStorageCmodeLibrary, self)
._get_preferred_target_from_list(target_details_list,
filter=operational_addresses))
def _setup_qos_for_volume(self, volume, extra_specs):
try:
qos_policy_group_info = na_utils.get_valid_qos_policy_group_info(
volume, extra_specs)
except exception.Invalid:
msg = _('Invalid QoS specification detected while getting QoS '
'policy for volume %s') % volume['id']
raise exception.VolumeBackendAPIException(data=msg)
self.zapi_client.provision_qos_policy_group(qos_policy_group_info)
return qos_policy_group_info
def _get_volume_model_update(self, volume):
"""Provide any updates necessary for a volume being created/managed."""
if self.replication_enabled:
return {'replication_status': fields.ReplicationStatus.ENABLED}
def _mark_qos_policy_group_for_deletion(self, qos_policy_group_info):
self.zapi_client.mark_qos_policy_group_for_deletion(
qos_policy_group_info)
def unmanage(self, volume):
"""Removes the specified volume from Cinder management.
Does not delete the underlying backend storage object.
"""
try:
qos_policy_group_info = na_utils.get_valid_qos_policy_group_info(
volume)
except exception.Invalid:
# Unmanage even if there was invalid qos policy specified for the
# volume.
qos_policy_group_info = None
self._mark_qos_policy_group_for_deletion(qos_policy_group_info)
super(NetAppBlockStorageCmodeLibrary, self).unmanage(volume)
def failover_host(self, context, volumes, secondary_id=None, groups=None):
"""Failover a backend to a secondary replication target."""
return self._failover_host(volumes, secondary_id=secondary_id)
def _get_backing_flexvol_names(self):
"""Returns a list of backing flexvol names."""
return self.ssc_library.get_ssc().keys()
def create_group(self, group):
"""Driver entry point for creating a generic volume group.
ONTAP does not maintain an actual Group construct. As a result, no
communication to the backend is necessary for generic volume group
creation.
:returns: Hard-coded model update for generic volume group model.
"""
model_update = {'status': fields.GroupStatus.AVAILABLE}
return model_update
def delete_group(self, group, volumes):
"""Driver entry point for deleting a group.
:returns: Updated group model and list of volume models
for the volumes that were deleted.
"""
model_update = {'status': fields.GroupStatus.DELETED}
volumes_model_update = []
for volume in volumes:
try:
self._delete_lun(volume['name'])
volumes_model_update.append(
{'id': volume['id'], 'status': 'deleted'})
except Exception:
volumes_model_update.append(
{'id': volume['id'],
'status': 'error_deleting'})
LOG.exception("Volume %(vol)s in the group could not be "
"deleted.", {'vol': volume})
return model_update, volumes_model_update
def update_group(self, group, add_volumes=None, remove_volumes=None):
"""Driver entry point for updating a generic volume group.
Since no actual group construct is ever created in ONTAP, it is not
        necessary to update any metadata on the backend. Because this is a
        no-op, there is guaranteed to be no change in any of the volumes'
        statuses.
"""
return None, None, None
def create_group_snapshot(self, group_snapshot, snapshots):
"""Creates a Cinder group snapshot object.
The Cinder group snapshot object is created by making use of an
ephemeral ONTAP consistency group snapshot in order to provide
write-order consistency for a set of flexvol snapshots. First, a list
of the flexvols backing the given Cinder group must be gathered. An
ONTAP group-snapshot of these flexvols will create a snapshot copy of
all the Cinder volumes in the generic volume group. For each Cinder
volume in the group, it is then necessary to clone its backing LUN from
the ONTAP cg-snapshot. The naming convention used for the clones is
what indicates the clone's role as a Cinder snapshot and its inclusion
in a Cinder group. The ONTAP cg-snapshot of the flexvols is no longer
required after having cloned the LUNs backing the Cinder volumes in
the Cinder group.
:returns: An implicit update for group snapshot and snapshots models
that is interpreted by the manager to set their models to
available.
"""
try:
if volume_utils.is_group_a_cg_snapshot_type(group_snapshot):
self._create_consistent_group_snapshot(group_snapshot,
snapshots)
else:
for snapshot in snapshots:
self._create_snapshot(snapshot)
except Exception as ex:
err_msg = (_("Create group snapshot failed (%s).") % ex)
LOG.exception(err_msg, resource=group_snapshot)
raise exception.NetAppDriverException(err_msg)
return None, None
def _create_consistent_group_snapshot(self, group_snapshot, snapshots):
flexvols = set()
for snapshot in snapshots:
flexvols.add(volume_utils.extract_host(
snapshot['volume']['host'], level='pool'))
self.zapi_client.create_cg_snapshot(flexvols, group_snapshot['id'])
for snapshot in snapshots:
self._clone_lun(snapshot['volume']['name'], snapshot['name'],
source_snapshot=group_snapshot['id'])
for flexvol in flexvols:
try:
self.zapi_client.wait_for_busy_snapshot(
flexvol, group_snapshot['id'])
self.zapi_client.delete_snapshot(
flexvol, group_snapshot['id'])
except exception.SnapshotIsBusy:
self.zapi_client.mark_snapshot_for_deletion(
flexvol, group_snapshot['id'])
def delete_group_snapshot(self, group_snapshot, snapshots):
"""Delete LUNs backing each snapshot in the group snapshot.
:returns: An implicit update for snapshots models that is interpreted
by the manager to set their models to deleted.
"""
for snapshot in snapshots:
self._delete_lun(snapshot['name'])
LOG.debug("Snapshot %s deletion successful", snapshot['name'])
return None, None
def create_group_from_src(self, group, volumes, group_snapshot=None,
snapshots=None, source_group=None,
source_vols=None):
"""Creates a group from a group snapshot or a group of cinder vols.
:returns: An implicit update for the volumes model that is
interpreted by the manager as a successful operation.
"""
LOG.debug("VOLUMES %s ", ', '.join([vol['id'] for vol in volumes]))
volume_model_updates = []
if group_snapshot:
vols = zip(volumes, snapshots)
for volume, snapshot in vols:
source = {
'name': snapshot['name'],
'size': snapshot['volume_size'],
}
volume_model_update = self._clone_source_to_destination(
source, volume)
if volume_model_update is not None:
volume_model_update['id'] = volume['id']
volume_model_updates.append(volume_model_update)
else:
vols = zip(volumes, source_vols)
for volume, old_src_vref in vols:
src_lun = self._get_lun_from_table(old_src_vref['name'])
source = {'name': src_lun.name, 'size': old_src_vref['size']}
volume_model_update = self._clone_source_to_destination(
source, volume)
if volume_model_update is not None:
volume_model_update['id'] = volume['id']
volume_model_updates.append(volume_model_update)
return None, volume_model_updates
|
{
"content_hash": "b9005c2b7cb2cd8a9180d1ce4c99995f",
"timestamp": "",
"source": "github",
"line_count": 586,
"max_line_length": 79,
"avg_line_length": 43.35665529010239,
"alnum_prop": 0.5979060888731452,
"repo_name": "eharney/cinder",
"id": "09d6f3ff0f6e8b22c00aa014688f546227e2d179",
"size": "26558",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/volume/drivers/netapp/dataontap/block_cmode.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "561"
},
{
"name": "Python",
"bytes": "19839107"
},
{
"name": "Shell",
"bytes": "6453"
}
],
"symlink_target": ""
}
|
import sys
import json
import logging
log = logging.getLogger('wikitables')
def ftag(*args):
return lambda node: node.tag in args
def jprint(obj):
if isinstance(obj, str):
obj = json.loads(obj)
print(json.dumps(obj, indent=2, sort_keys=False, cls=TableJSONEncoder))
def guess_type(value):
""" attempt to convert string value into numeric type """
num_value = value.replace(',', '') # remove comma from potential numbers
try:
return int(num_value)
except ValueError:
pass
try:
return float(num_value)
except ValueError:
pass
return value
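# Behaviour sketch for guess_type (illustrative values):
#   guess_type("1,234")  -> 1234     (comma stripped, parsed as int)
#   guess_type("3.14")   -> 3.14     (int() fails, float() succeeds)
#   guess_type("n/a")    -> "n/a"    (neither parse works; original returned)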
def ustr(value):
if sys.version_info < (3, 0):
#py2
try:
# pylint: disable=undefined-variable
return unicode(value).encode('utf-8')
except UnicodeDecodeError:
return str(value)
else:
return str(value)
class TableJSONEncoder(json.JSONEncoder):
def default(self, o):
if hasattr(o, '__json__'):
return o.__json__()
return json.JSONEncoder.default(self, o)
|
{
"content_hash": "b8a7a196d9d30e126e0a9900ab0cbba1",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 76,
"avg_line_length": 21.01923076923077,
"alnum_prop": 0.6075022872827082,
"repo_name": "bcicen/wikitables",
"id": "dc36a94ee77ee46e0e3b79440ea844534bfbb44e",
"size": "1093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wikitables/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27754"
}
],
"symlink_target": ""
}
|
"""
This module implements the functionality to take any Python expression as a
string and fix all numbers and undefined names before evaluating it, thus
    1/2
returns
    Integer(1)/Integer(2)
We use the Python ast module for that, which is in python2.6 and later. It is
well documented at docs.python.org.
Some tips to understand how this works: use dump() to get a nice
representation of any node. Then write a string of what you want to get,
e.g. "Integer(1)", parse it, dump it and you'll see that you need to do
"Call(Name('Integer', Load()), [node], [], None, None)". You don't need
to bother with lineno and col_offset, just call fix_missing_locations()
before returning the node.
If the ast module is not available (Python 2.5), we use the old compiler
module.
"""
from sympy.core.basic import Basic
from sympy.core.sympify import SympifyError
try:
from ast import parse, NodeTransformer, Call, Name, Load, \
fix_missing_locations, Str, Tuple
ast_enabled = True
except ImportError:
ast_enabled = False
if ast_enabled:
class Transform(NodeTransformer):
def __init__(self, local_dict, global_dict):
NodeTransformer.__init__(self)
self.local_dict = local_dict
self.global_dict = global_dict
def visit_Num(self, node):
if isinstance(node.n, int):
return fix_missing_locations(Call(Name('Integer', Load()),
[node], [], None, None))
elif isinstance(node.n, float):
return fix_missing_locations(Call(Name('Float', Load()),
[node], [], None, None))
return node
def visit_Name(self, node):
if node.id in self.local_dict:
return node
elif node.id in self.global_dict:
name_obj = self.global_dict[node.id]
if isinstance(name_obj, (Basic, type)) or callable(name_obj):
return node
elif node.id in ['True', 'False']:
return node
return fix_missing_locations(Call(Name('Symbol', Load()),
[Str(node.id)], [], None, None))
def visit_Lambda(self, node):
args = [self.visit(arg) for arg in node.args.args]
body = self.visit(node.body)
n = Call(Name('Lambda', Load()), [Tuple(args, Load()), body], [], None, None)
return fix_missing_locations(n)
def parse_expr(s, local_dict):
"""
Converts the string "s" to a SymPy expression, in local_dict.
It converts all numbers to Integers before feeding it to Python and
automatically creates Symbols.
"""
if ast_enabled:
global_dict = {}
exec 'from sympy import *' in global_dict
try:
a = parse(s.strip(), mode="eval")
except SyntaxError:
raise SympifyError("Cannot parse %s." %repr(s))
a = Transform(local_dict, global_dict).visit(a)
e = compile(a, "<string>", "eval")
return eval(e, global_dict, local_dict)
else:
# in Python 2.5, the "ast" module is not available, so we need
# to use our old implementation:
from ast_parser_python25 import SymPyParser
try:
return SymPyParser(local_dict=local_dict).parse_expr(s)
except SyntaxError:
raise SympifyError("Cannot parse %s." %repr(s))
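# Illustrative sketch (not part of the original module, never called): shows
# the intended use of parse_expr with an empty local dict.
def _example_parse_expr():
    """parse_expr("x + 1/2", {}) wraps 1 and 2 in Integer and turns the
    undefined name x into Symbol('x'), so the result is the SymPy expression
    x + 1/2 rather than the float 0.5."""
    return parse_expr("x + 1/2", {})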
|
{
"content_hash": "7019187e992a32b4d50930b44b36f3bb",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 89,
"avg_line_length": 35.09278350515464,
"alnum_prop": 0.604289071680376,
"repo_name": "srjoglekar246/sympy",
"id": "8b4a9f39f621001c696b1e27db2a262009ac7383",
"size": "3404",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sympy/parsing/ast_parser.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10283965"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "TeX",
"bytes": "8789"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
}
|
"""
Cost model optimizer based on simulated annealing
"""
import heapq
import logging
import time
import numpy as np
from ..util import sample_ints
from .model_based_tuner import ModelOptimizer, knob2point, point2knob
logger = logging.getLogger('autotvm')
class SimulatedAnnealingOptimizer(ModelOptimizer):
"""parallel simulated annealing optimization algorithm
Parameters
----------
task: Task
The tuning task
n_iter: int
The number of iterations of simulated annealing
temp: float or Array of float
If is a single float, then use a constant temperature.
If is an Array, then perform linear cooling from temp[0] to temp[1]
early_stop: int, optional
        Stop iteration if the optimal set does not change in `early_stop` rounds
log_interval: int, optional
Print log every `log_interval` iterations
"""
def __init__(self, task, n_iter=500, temp=(1, 0), persistent=True, parallel_size=128,
early_stop=50, log_interval=50):
super(SimulatedAnnealingOptimizer, self).__init__()
self.task = task
self.dims = [len(x) for x in self.task.config_space.space_map.values()]
self.n_iter = n_iter
self.temp = temp
self.persistent = persistent
self.parallel_size = min(parallel_size, len(self.task.config_space))
self.early_stop = early_stop or 1e9
self.log_interval = log_interval
self.points = None
def find_maximums(self, model, num, exclusive):
tic = time.time()
temp, n_iter, early_stop, log_interval = \
self.temp, self.n_iter, self.early_stop, self.log_interval
if self.persistent and self.points is not None:
points = self.points
else:
points = np.array(sample_ints(0, len(self.task.config_space), self.parallel_size))
scores = model.predict(points)
# build heap and insert initial points
heap_items = [(float('-inf'), -i) for i in range(num)]
heapq.heapify(heap_items)
in_heap = set(exclusive)
in_heap.update([-i for i in range(num)])
for s, p in zip(scores, points):
if s > heap_items[0][0] and p not in in_heap:
pop = heapq.heapreplace(heap_items, (s, p))
in_heap.remove(pop[1])
in_heap.add(p)
k = 0
k_last_modify = 0
if isinstance(temp, (tuple, list, np.ndarray)):
t = temp[0]
cool = 1.0 * (temp[0] - temp[1]) / (n_iter + 1)
else:
t = temp
cool = 0
while k < n_iter and k < k_last_modify + early_stop:
new_points = np.empty_like(points)
for i, p in enumerate(points):
new_points[i] = random_walk(p, self.dims)
new_scores = model.predict(new_points)
ac_prob = np.exp(np.minimum((new_scores - scores) / (t + 1e-5), 1))
ac_index = np.random.random(len(ac_prob)) < ac_prob
points[ac_index] = new_points[ac_index]
scores[ac_index] = new_scores[ac_index]
for s, p in zip(new_scores, new_points):
if s > heap_items[0][0] and p not in in_heap:
pop = heapq.heapreplace(heap_items, (s, p))
in_heap.remove(pop[1])
in_heap.add(p)
k_last_modify = k
k += 1
t -= cool
if log_interval and k % log_interval == 0:
t_str = "%.2f" % t
logger.debug("SA iter: %d\tlast_update: %d\tmax-0: %.2f\tmax-1: %.2f\ttemp: %s\t"
"elapsed: %.2f",
k, k_last_modify, heap_items[0][0],
np.max([v for v, _ in heap_items]), t_str,
time.time() - tic)
heap_items.sort(key=lambda item: -item[0])
logger.debug("SA iter: %d\tlast_update: %d\tmax-0: %.2f\tmax-1: %.2f\telapsed: %.2f",
k, k_last_modify, heap_items[-1][0], heap_items[0][0], time.time() - tic)
logger.debug("SA Maximums: %s", heap_items)
if self.persistent:
self.points = points
return [x[1] for x in heap_items]
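# Note on the acceptance rule above (illustrative numbers): the exponent
# (new_scores - scores) / t is clipped at 1, so any point whose predicted
# score improves is always kept, while a worse point survives with
# probability exp((new - old) / t).  For example, at t = 1.0 a score drop of
# 0.5 is still accepted roughly 61% of the time (exp(-0.5) ~= 0.61), but at
# t = 0.1 the same drop survives only about 0.7% of the time
# (exp(-5) ~= 0.0067), which is what the linear cooling schedule exploits.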
def random_walk(p, dims):
"""random walk as local transition
Parameters
----------
p: int
index of the ConfigEntity
dims: Array of int
sizes of each dimension
Returns
-------
new_p: int
new neighborhood index
"""
# transform to knob form
old = point2knob(p, dims)
new = list(old)
# mutate
while new == old:
from_i = np.random.randint(len(old))
to_v = np.random.randint(dims[from_i])
new[from_i] = to_v
# transform to index form
return knob2point(new, dims)
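# Illustrative sketch (not part of the original module, never called): the
# point/knob round trip that random_walk relies on.  The default p and dims
# below are hypothetical.
def _example_point_knob_roundtrip(p=11, dims=(4, 4, 2)):
    dims = list(dims)
    knobs = point2knob(p, dims)
    # knob2point is the inverse of point2knob
    assert knob2point(knobs, dims) == p
    neighbor = point2knob(random_walk(p, dims), dims)
    # a random walk step changes exactly one knob value
    assert sum(a != b for a, b in zip(knobs, neighbor)) == 1
    return knobs, neighbor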
|
{
"content_hash": "5673e6048df4e3c8c8f8131bcbf95bbd",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 97,
"avg_line_length": 32.57046979865772,
"alnum_prop": 0.5503812075005151,
"repo_name": "mlperf/training_results_v0.6",
"id": "77c7e919593b488a2376025dd66a988f51d06a95",
"size": "4910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/python/tvm/autotvm/tuner/sa_model_optimizer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13941"
},
{
"name": "C",
"bytes": "208630"
},
{
"name": "C++",
"bytes": "10999411"
},
{
"name": "CMake",
"bytes": "129712"
},
{
"name": "CSS",
"bytes": "64767"
},
{
"name": "Clojure",
"bytes": "396764"
},
{
"name": "Cuda",
"bytes": "2272433"
},
{
"name": "Dockerfile",
"bytes": "67820"
},
{
"name": "Groovy",
"bytes": "62557"
},
{
"name": "HTML",
"bytes": "19753082"
},
{
"name": "Java",
"bytes": "166294"
},
{
"name": "JavaScript",
"bytes": "71846"
},
{
"name": "Julia",
"bytes": "408765"
},
{
"name": "Jupyter Notebook",
"bytes": "2713169"
},
{
"name": "Lua",
"bytes": "4430"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "115694"
},
{
"name": "Perl",
"bytes": "1535873"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "PowerShell",
"bytes": "6150"
},
{
"name": "Python",
"bytes": "24905683"
},
{
"name": "R",
"bytes": "351865"
},
{
"name": "Roff",
"bytes": "293052"
},
{
"name": "Scala",
"bytes": "1189019"
},
{
"name": "Shell",
"bytes": "794096"
},
{
"name": "Smalltalk",
"bytes": "3497"
},
{
"name": "TypeScript",
"bytes": "361164"
}
],
"symlink_target": ""
}
|
import json
from social.exceptions import AuthUnknownError, AuthCanceled
from social.tests.backends.oauth import OAuth2Test
class FacebookOAuth2Test(OAuth2Test):
backend_path = 'social.backends.facebook.FacebookOAuth2'
user_data_url = 'https://graph.facebook.com/v2.3/me'
expected_username = 'foobar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'username': 'foobar',
'first_name': 'Foo',
'last_name': 'Bar',
'verified': True,
'name': 'Foo Bar',
'gender': 'male',
'updated_time': '2013-02-13T14:59:42+0000',
'link': 'http://www.facebook.com/foobar',
'id': '110011001100010'
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
class FacebookOAuth2WrongUserDataTest(FacebookOAuth2Test):
user_data_body = 'null'
def test_login(self):
with self.assertRaises(AuthUnknownError):
self.do_login()
def test_partial_pipeline(self):
with self.assertRaises(AuthUnknownError):
self.do_partial_pipeline()
class FacebookOAuth2AuthCancelTest(FacebookOAuth2Test):
access_token_status = 400
access_token_body = json.dumps({
'error': {
'message': "redirect_uri isn't an absolute URI. Check RFC 3986.",
'code': 191,
'type': 'OAuthException',
'fbtrace_id': '123Abc'
}
})
def test_login(self):
with self.assertRaises(AuthCanceled) as cm:
self.do_login()
self.assertIn('error', cm.exception.response.json())
def test_partial_pipeline(self):
with self.assertRaises(AuthCanceled) as cm:
self.do_partial_pipeline()
self.assertIn('error', cm.exception.response.json())
|
{
"content_hash": "ba4a12a1fc3215a62ad710d64209a3e5",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 77,
"avg_line_length": 28.893939393939394,
"alnum_prop": 0.6140534871525957,
"repo_name": "paulsoh/moxie",
"id": "166d75327a24acea3a270174b228abe58d62d4bb",
"size": "1907",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "moxie/social/tests/backends/test_facebook.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12850"
},
{
"name": "HTML",
"bytes": "58007"
},
{
"name": "JavaScript",
"bytes": "23689"
},
{
"name": "Makefile",
"bytes": "91"
},
{
"name": "Python",
"bytes": "719646"
},
{
"name": "Shell",
"bytes": "67"
}
],
"symlink_target": ""
}
|
__all__ = ["tabulate_prime"]
"""
Extend `tabulate` module with prime print format.
"""
import tabulate as tabulate_module
from tabulate import DataRow, Line, TableFormat
prime_format = TableFormat(
lineabove=None,
linebelowheader=Line("", "-", "-", ""),
linebetweenrows=None,
linebelow=None,
headerrow=DataRow("", " ", ""),
datarow=DataRow("", " ", ""),
padding=0, with_header_hide=None
)
tabulate_module._table_formats["prime"] = prime_format
orig_tabulate = tabulate_module.tabulate
def tabulate_prime(tabular_data):
"""
    This `tabulate_prime` function only supports the prime-table layout;
    it simply post-processes (ETL-style) the output of `tabulate`.
"""
    # insert a '|' separator as a new second column after the header column.
tabular_data = [([row[0]] + ["|"] + row[1:]) for row in tabular_data]
# print table as customized format.
output = orig_tabulate(tabular_data, headers="firstrow",
tablefmt="prime", stralign="right",)
lines = output.split("\n")
# add "+" sign to horizontal line row.
first_line = lines[0]
second_line = lines[1]
sign_idx = first_line.index("|")
chars_in_line_2 = list(second_line)
chars_in_line_2[sign_idx] = "+"
lines[1] = "".join(chars_in_line_2)
# align the second horizontal line row.
last_line = lines[-1]
max_width = len(last_line)
lines[1] = lines[1][0:max_width]
    # remove the column after "+" sign
lines = [line[0:sign_idx - 2] + line[sign_idx] + line[sign_idx + 2:]
for line in lines]
output = "\n".join(lines)
return output
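# Usage sketch (hypothetical data, never called): the first row is treated as
# the header row by tabulate_prime.
def _example_tabulate_prime():
    rows = [["", 2, 3, 5],
            [2, 4, 6, 10],
            [3, 6, 9, 15]]
    # Output is tabulate's right-aligned "prime" layout with a '|' column
    # inserted after the first column and a '+' where that column crosses the
    # rule under the header row.
    return tabulate_prime(rows)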
|
{
"content_hash": "543306ba9781072a6942bc4e8965553b",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 73,
"avg_line_length": 28.017857142857142,
"alnum_prop": 0.6163161249203314,
"repo_name": "mvj3/prints_a_multiplication_table_of_primes_numbers",
"id": "0a41f84ced61e542e61f5a87209194bebaf28b4e",
"size": "1594",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "prints_a_multiplication_table_of_primes_numbers/tabulate_ext.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9473"
}
],
"symlink_target": ""
}
|
from httplib import HTTPConnection
import os
from subprocess import Popen, PIPE
import sys
from time import sleep, time
from collections import defaultdict
import unittest
from nose import SkipTest
from swiftclient import get_auth, head_account
from swift.obj.diskfile import get_data_dir
from swift.common.ring import Ring
from swift.common.utils import readconf
from swift.common.manager import Manager
from swift.common.storage_policy import POLICIES, EC_POLICY, REPL_POLICY
from test.probe import CHECK_SERVER_TIMEOUT, VALIDATE_RSYNC
ENABLED_POLICIES = [p for p in POLICIES if not p.is_deprecated]
POLICIES_BY_TYPE = defaultdict(list)
for p in POLICIES:
POLICIES_BY_TYPE[p.policy_type].append(p)
def get_server_number(port, port2server):
server_number = port2server[port]
server, number = server_number[:-1], server_number[-1:]
try:
number = int(number)
except ValueError:
# probably the proxy
return server_number, None
return server, number
def start_server(port, port2server, pids, check=True):
server, number = get_server_number(port, port2server)
err = Manager([server]).start(number=number, wait=False)
if err:
raise Exception('unable to start %s' % (
server if not number else '%s%s' % (server, number)))
if check:
return check_server(port, port2server, pids)
return None
def check_server(port, port2server, pids, timeout=CHECK_SERVER_TIMEOUT):
server = port2server[port]
if server[:-1] in ('account', 'container', 'object'):
if int(server[-1]) > 4:
return None
path = '/connect/1/2'
if server[:-1] == 'container':
path += '/3'
elif server[:-1] == 'object':
path += '/3/4'
try_until = time() + timeout
while True:
try:
conn = HTTPConnection('127.0.0.1', port)
conn.request('GET', path)
resp = conn.getresponse()
# 404 because it's a nonsense path (and mount_check is false)
# 507 in case the test target is a VM using mount_check
if resp.status not in (404, 507):
raise Exception(
'Unexpected status %s' % resp.status)
break
except Exception as err:
if time() > try_until:
print err
print 'Giving up on %s:%s after %s seconds.' % (
server, port, timeout)
raise err
sleep(0.1)
else:
try_until = time() + timeout
while True:
try:
url, token = get_auth('http://127.0.0.1:8080/auth/v1.0',
'test:tester', 'testing')
account = url.split('/')[-1]
head_account(url, token)
return url, token, account
except Exception as err:
if time() > try_until:
print err
print 'Giving up on proxy:8080 after 30 seconds.'
raise err
sleep(0.1)
return None
def kill_server(port, port2server, pids):
server, number = get_server_number(port, port2server)
err = Manager([server]).kill(number=number)
if err:
raise Exception('unable to kill %s' % (server if not number else
'%s%s' % (server, number)))
try_until = time() + 30
while True:
try:
conn = HTTPConnection('127.0.0.1', port)
conn.request('GET', '/')
conn.getresponse()
except Exception as err:
break
if time() > try_until:
raise Exception(
'Still answering on port %s after 30 seconds' % port)
sleep(0.1)
def kill_nonprimary_server(primary_nodes, port2server, pids):
primary_ports = [n['port'] for n in primary_nodes]
for port, server in port2server.iteritems():
if port in primary_ports:
server_type = server[:-1]
break
else:
raise Exception('Cannot figure out server type for %r' % primary_nodes)
for port, server in list(port2server.iteritems()):
if server[:-1] == server_type and port not in primary_ports:
kill_server(port, port2server, pids)
return port
def build_port_to_conf(server):
# map server to config by port
port_to_config = {}
for server_ in Manager([server]):
for config_path in server_.conf_files():
conf = readconf(config_path,
section_name='%s-replicator' % server_.type)
port_to_config[int(conf['bind_port'])] = conf
return port_to_config
def get_ring(ring_name, required_replicas, required_devices,
server=None, force_validate=None):
if not server:
server = ring_name
ring = Ring('/etc/swift', ring_name=ring_name)
if not VALIDATE_RSYNC and not force_validate:
return ring
# easy sanity checks
if ring.replica_count != required_replicas:
raise SkipTest('%s has %s replicas instead of %s' % (
ring.serialized_path, ring.replica_count, required_replicas))
if len(ring.devs) != required_devices:
raise SkipTest('%s has %s devices instead of %s' % (
ring.serialized_path, len(ring.devs), required_devices))
port_to_config = build_port_to_conf(server)
for dev in ring.devs:
# verify server is exposing mounted device
conf = port_to_config[dev['port']]
for device in os.listdir(conf['devices']):
if device == dev['device']:
dev_path = os.path.join(conf['devices'], device)
full_path = os.path.realpath(dev_path)
if not os.path.exists(full_path):
raise SkipTest(
'device %s in %s was not found (%s)' %
(device, conf['devices'], full_path))
break
else:
raise SkipTest(
"unable to find ring device %s under %s's devices (%s)" % (
dev['device'], server, conf['devices']))
# verify server is exposing rsync device
if port_to_config[dev['port']].get('vm_test_mode', False):
rsync_export = '%s%s' % (server, dev['replication_port'])
else:
rsync_export = server
cmd = "rsync rsync://localhost/%s" % rsync_export
p = Popen(cmd, shell=True, stdout=PIPE)
stdout, _stderr = p.communicate()
if p.returncode:
raise SkipTest('unable to connect to rsync '
'export %s (%s)' % (rsync_export, cmd))
for line in stdout.splitlines():
if line.rsplit(None, 1)[-1] == dev['device']:
break
else:
raise SkipTest("unable to find ring device %s under rsync's "
"exported devices for %s (%s)" %
(dev['device'], rsync_export, cmd))
return ring
def get_policy(**kwargs):
kwargs.setdefault('is_deprecated', False)
# go through the policies and make sure they match the
# requirements of kwargs
for policy in POLICIES:
# TODO: for EC, pop policy type here and check it first
matches = True
for key, value in kwargs.items():
try:
if getattr(policy, key) != value:
matches = False
except AttributeError:
matches = False
if matches:
return policy
raise SkipTest('No policy matching %s' % kwargs)
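# Usage sketch (illustrative only): the probe tests below select a policy by
# type via keyword filters, e.g.
#
#   policy = get_policy(policy_type=REPL_POLICY)
#
# which returns the first non-deprecated replication policy or raises
# SkipTest when the cluster under test has none configured.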
class ProbeTest(unittest.TestCase):
"""
Don't instantiate this directly, use a child class instead.
"""
def setUp(self):
p = Popen("resetswift 2>&1", shell=True, stdout=PIPE)
stdout, _stderr = p.communicate()
print stdout
Manager(['all']).stop()
self.pids = {}
try:
self.account_ring = get_ring(
'account',
self.acct_cont_required_replicas,
self.acct_cont_required_devices)
self.container_ring = get_ring(
'container',
self.acct_cont_required_replicas,
self.acct_cont_required_devices)
self.policy = get_policy(**self.policy_requirements)
self.object_ring = get_ring(
self.policy.ring_name,
self.obj_required_replicas,
self.obj_required_devices,
server='object')
Manager(['main']).start(wait=False)
self.port2server = {}
for server, port in [('account', 6002), ('container', 6001),
('object', 6000)]:
for number in xrange(1, 9):
self.port2server[port + (number * 10)] = \
'%s%d' % (server, number)
for port in self.port2server:
check_server(port, self.port2server, self.pids)
self.port2server[8080] = 'proxy'
self.url, self.token, self.account = \
check_server(8080, self.port2server, self.pids)
self.configs = defaultdict(dict)
for name in ('account', 'container', 'object'):
for server_name in (name, '%s-replicator' % name):
for server in Manager([server_name]):
for i, conf in enumerate(server.conf_files(), 1):
self.configs[server.server][i] = conf
self.replicators = Manager(
['account-replicator', 'container-replicator',
'object-replicator'])
self.updaters = Manager(['container-updater', 'object-updater'])
self.server_port_to_conf = {}
            # get some backend daemon configs loaded up
for server in ('account', 'container', 'object'):
self.server_port_to_conf[server] = build_port_to_conf(server)
except BaseException:
try:
raise
finally:
try:
Manager(['all']).kill()
except Exception:
pass
def tearDown(self):
Manager(['all']).kill()
def device_dir(self, server, node):
conf = self.server_port_to_conf[server][node['port']]
return os.path.join(conf['devices'], node['device'])
def storage_dir(self, server, node, part=None, policy=None):
policy = policy or self.policy
device_path = self.device_dir(server, node)
path_parts = [device_path, get_data_dir(policy)]
if part is not None:
path_parts.append(str(part))
return os.path.join(*path_parts)
def config_number(self, node):
_server_type, config_number = get_server_number(
node['port'], self.port2server)
return config_number
def get_to_final_state(self):
# these .stop()s are probably not strictly necessary,
# but may prevent race conditions
self.replicators.stop()
self.updaters.stop()
self.replicators.once()
self.updaters.once()
self.replicators.once()
class ReplProbeTest(ProbeTest):
acct_cont_required_replicas = 3
acct_cont_required_devices = 4
obj_required_replicas = 3
obj_required_devices = 4
policy_requirements = {'policy_type': REPL_POLICY}
class ECProbeTest(ProbeTest):
acct_cont_required_replicas = 3
acct_cont_required_devices = 4
obj_required_replicas = 6
obj_required_devices = 8
policy_requirements = {'policy_type': EC_POLICY}
if __name__ == "__main__":
for server in ('account', 'container'):
try:
get_ring(server, 3, 4,
force_validate=True)
except SkipTest as err:
sys.exit('%s ERROR: %s' % (server, err))
print '%s OK' % server
for policy in POLICIES:
try:
get_ring(policy.ring_name, 3, 4,
server='object', force_validate=True)
except SkipTest as err:
sys.exit('object ERROR (%s): %s' % (policy.name, err))
print 'object OK (%s)' % policy.name
|
{
"content_hash": "d2faa401d7667ac230e9788075682ef9",
"timestamp": "",
"source": "github",
"line_count": 335,
"max_line_length": 79,
"avg_line_length": 36.82686567164179,
"alnum_prop": 0.5526465104968793,
"repo_name": "jungle90/Openstack-Swift-I-O-throttler",
"id": "7d1e754014451ab18983b3cb4042b72292c5790c",
"size": "12932",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "test/probe/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "673"
},
{
"name": "Python",
"bytes": "8142183"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, include, url
from darkoob.group import views as group_view
from darkoob.social import views as social_view
urlpatterns = patterns('',
url(r'^(?P<group_id>\d+)/(?P<group_slug>[-\w]+)/$', group_view.group, name='group_page'),
url(r'^new/$', group_view.create_group, name='create_group'),
url(r'^look/$', social_view.user_lookup),
url(r'^members/$', group_view.members, name='members'),
url(r'^schedules/$', group_view.schedules, name='schedules'),
url(r'^(?P<group_id>\d+)/(?P<group_slug>[-\w]+)/add-schedule/$', group_view.add_schedule, name='add_schedule'),
# url(r'^rate/$', book_views.rate, name='rate'),
)
|
{
"content_hash": "627dde9a9b1130f7997765227f244308",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 115,
"avg_line_length": 48.42857142857143,
"alnum_prop": 0.6504424778761062,
"repo_name": "s1na/darkoob",
"id": "a7148f86514a453300905d626d4624eca647fd5b",
"size": "678",
"binary": false,
"copies": "1",
"ref": "refs/heads/alpha",
"path": "darkoob/group/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "155888"
},
{
"name": "Python",
"bytes": "111600"
},
{
"name": "Shell",
"bytes": "364"
}
],
"symlink_target": ""
}
|
import pygtk
pygtk.require('2.0')
import gtk
import gobject
import cairo
import getopt
import sys
import os
import os.path
import glob
import cStringIO
import errno
import ConfigParser
import gconf
import tarfile
import tempfile
import subprocess
try:
# Try to use XDG Base Directory standard for config files.
import xdg.BaseDirectory
CONFIG_HOME = os.path.join(xdg.BaseDirectory.xdg_config_home, 'turtleart')
except ImportError as e:
# Default to `.config` per the spec.
CONFIG_HOME = os.path.expanduser(os.path.join('~', '.config', 'turtleart'))
argv = sys.argv[:] # Workaround for import behavior of gst in tagplay
sys.argv[1:] = [] # Execution of import gst cannot see '--help' or '-h'
import gettext
from gettext import gettext as _
from TurtleArt.taconstants import (OVERLAY_LAYER, DEFAULT_TURTLE_COLORS,
TAB_LAYER, SUFFIX, TMP_SVG_PATH,
TMP_ODP_PATH, PASTE_OFFSET)
from TurtleArt.tautils import (data_from_string, get_load_name,
get_path, get_save_name, is_writeable)
from TurtleArt.tapalette import default_values
from TurtleArt.tawindow import TurtleArtWindow
from TurtleArt.taexportlogo import save_logo
from TurtleArt.taexportpython import save_python
from TurtleArt.taprimitive import PyExportError
from TurtleArt.taplugin import (load_a_plugin, cancel_plugin_install,
complete_plugin_install)
from util.menubuilder import MenuBuilder
class TurtleMain():
''' Launch Turtle Art in GNOME (from outside of Sugar). '''
_INSTALL_PATH = '/usr/share/sugar/activities/TurtleArt.activity'
_ALTERNATIVE_INSTALL_PATH = \
'/usr/local/share/sugar/activities/TurtleArt.activity'
_ICON_SUBPATH = 'images/turtle.png'
_GNOME_PLUGIN_SUBPATH = 'gnome_plugins'
_HOVER_HELP = '/desktop/sugar/activities/turtleart/hoverhelp'
_ORIENTATION = '/desktop/sugar/activities/turtleart/orientation'
_COORDINATE_SCALE = '/desktop/sugar/activities/turtleart/coordinatescale'
def __init__(self):
self._setting_gconf_overrides = False
self._abspath = os.path.abspath('.')
self._execdirname = self._get_execution_dir()
if self._execdirname is not None:
os.chdir(self._execdirname)
file_activity_info = ConfigParser.ConfigParser()
activity_info_path = os.path.abspath('./activity/activity.info')
file_activity_info.read(activity_info_path)
bundle_id = file_activity_info.get('Activity', 'bundle_id')
self.version = file_activity_info.get('Activity', 'activity_version')
self.name = file_activity_info.get('Activity', 'name')
self.summary = file_activity_info.get('Activity', 'summary')
self.website = file_activity_info.get('Activity', 'website')
self.icon_name = file_activity_info.get('Activity', 'icon')
self.bundle_path = self._abspath
path = os.path.abspath('./locale/')
gettext.bindtextdomain(bundle_id, path)
gettext.textdomain(bundle_id)
global _
_ = gettext.gettext
self._HELP_MSG = 'turtleblocks.py: ' + _('usage is') + '''
\tturtleblocks.py
\tturtleblocks.py project.tb
\tturtleblocks.py --output_png project.tb
\tturtleblocks.py -o project
\tturtleblocks.py --run project.tb
\tturtleblocks.py -r project'''
self._init_vars()
self._parse_command_line()
self._ensure_sugar_paths()
self._gnome_plugins = []
self._selected_sample = None
self._sample_window = None
self.has_toolbarbox = False
if self._output_png:
            # Outputting to file, so no need for a canvas
self.canvas = None
self._build_window(interactive=False)
self._draw_and_quit()
else:
self._read_initial_pos()
self._init_gnome_plugins()
self._get_gconf_settings()
self._setup_gtk()
self._build_window()
self._run_gnome_plugins()
self._start_gtk()
def _get_gconf_settings(self):
self.client = gconf.client_get_default()
def get_config_home(self):
return CONFIG_HOME
def _get_gnome_plugin_home(self):
''' Use plugin directory associated with execution path. '''
if os.path.exists(os.path.join(self._execdirname,
self._GNOME_PLUGIN_SUBPATH)):
return os.path.join(self._execdirname, self._GNOME_PLUGIN_SUBPATH)
else:
return None
def _get_plugin_candidates(self, path):
''' Look for plugin files in plugin directory. '''
plugin_files = []
if path is not None:
candidates = os.listdir(path)
for c in candidates:
if c[-10:] == '_plugin.py' and c[0] != '#' and c[0] != '.':
plugin_files.append(c.split('.')[0])
return plugin_files
def _init_gnome_plugins(self):
''' Try launching any plugins we may have found. '''
for p in self._get_plugin_candidates(self._get_gnome_plugin_home()):
P = p.capitalize()
f = "def f(self): from gnome_plugins.%s import %s; \
return %s(self)" % (p, P, P)
plugin = {}
try:
exec f in globals(), plugin
self._gnome_plugins.append(plugin.values()[0](self))
except ImportError as e:
print 'failed to import %s: %s' % (P, str(e))
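    # Illustrative expansion (hypothetical plugin name): for a candidate file
    # 'fibonacci_plugin.py', p is 'fibonacci_plugin', P is 'Fibonacci_plugin',
    # and the exec'd string above amounts to
    #
    #   def f(self):
    #       from gnome_plugins.fibonacci_plugin import Fibonacci_plugin
    #       return Fibonacci_plugin(self)
    #
    # so plugin.values()[0](self) imports the module and returns an instance
    # of the plugin class bound to this TurtleMain object.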
def _run_gnome_plugins(self):
''' Tell the plugin about the TurtleWindow instance. '''
for p in self._gnome_plugins:
p.set_tw(self.tw)
def _mkdir_p(self, path):
'''Create a directory in a fashion similar to `mkdir -p`.'''
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
def _makepath(self, path):
''' Make a path if it doesn't previously exist '''
from os import makedirs
from os.path import normpath, dirname, exists
dpath = normpath(dirname(path))
if not exists(dpath):
makedirs(dpath)
def _start_gtk(self):
''' Get a main window set up. '''
self.win.connect('configure_event', self.tw.update_overlay_position)
self.tw.parent = self.win
self.init_complete = True
if self._ta_file is None:
self.tw.load_start()
else:
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
gobject.idle_add(self._project_loader, self._ta_file)
self._set_gconf_overrides()
gtk.main()
def _project_loader(self, file_name):
self.tw.load_start(self._ta_file)
self.tw.lc.trace = 0
if self._run_on_launch:
self._do_run_cb()
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
def _draw_and_quit(self):
''' Non-interactive mode: run the project, save it to a file
and quit. '''
self.tw.load_start(self._ta_file)
self.tw.lc.trace = 0
self.tw.run_button(0)
self.tw.save_as_image(self._ta_file)
def _build_window(self, interactive=True):
''' Initialize the TurtleWindow instance. '''
if interactive:
win = self.canvas.get_window()
cr = win.cairo_create()
surface = cr.get_target()
else:
img_surface = cairo.ImageSurface(cairo.FORMAT_RGB24,
1024, 768)
cr = cairo.Context(img_surface)
surface = cr.get_target()
self.turtle_canvas = surface.create_similar(
cairo.CONTENT_COLOR,
# max(1024, gtk.gdk.screen_width() * 2),
# max(768, gtk.gdk.screen_height() * 2))
gtk.gdk.screen_width() * 2,
gtk.gdk.screen_height() * 2)
# Make sure the autosave directory is writeable
if is_writeable(self._execdirname):
self._autosavedirname = self._execdirname
else:
self._autosavedirname = os.path.expanduser('~')
self.tw = TurtleArtWindow(self.canvas, self._execdirname,
turtle_canvas=self.turtle_canvas,
activity=self, running_sugar=False)
self.tw.save_folder = self._abspath # os.path.expanduser('~')
if hasattr(self, 'client'):
if self.client.get_int(self._HOVER_HELP) == 1:
self.tw.no_help = True
self.hover.set_active(False)
self._do_hover_help_off_cb()
if not self.client.get_int(self._COORDINATE_SCALE) in [0, 1]:
self.tw.coord_scale = 1
else:
self.tw.coord_scale = 0
if self.client.get_int(self._ORIENTATION) == 1:
self.tw.orientation = 1
def _set_gconf_overrides(self):
if self.tw.coord_scale == 0:
self.tw.coord_scale = 1
else:
self._do_rescale_cb(None)
if self.tw.coord_scale != 1:
self._setting_gconf_overrides = True
self.coords.set_active(True)
self._setting_gconf_overrides = False
def _init_vars(self):
''' If we are invoked to start a project from Gnome, we should make
sure our current directory is TA's source dir. '''
self._ta_file = None
self._output_png = False
self._run_on_launch = False
self.current_palette = 0
self.scale = 2.0
self.tw = None
self.init_complete = False
def _parse_command_line(self):
''' Try to make sense of the command-line arguments. '''
try:
opts, args = getopt.getopt(argv[1:], 'hor',
['help', 'output_png', 'run'])
except getopt.GetoptError as err:
print str(err)
print self._HELP_MSG
sys.exit(2)
self._run_on_launch = False
for o, a in opts:
if o in ('-h', '--help'):
print self._HELP_MSG
sys.exit()
if o in ('-o', '--output_png'):
self._output_png = True
elif o in ('-r', '--run'):
self._run_on_launch = True
else:
assert False, _('No option action:') + ' ' + o
if args:
self._ta_file = args[0]
if len(args) > 1 or self._output_png and self._ta_file is None:
print self._HELP_MSG
sys.exit()
if self._ta_file is not None:
if not self._ta_file.endswith(SUFFIX):
self._ta_file += '.tb'
if not os.path.exists(self._ta_file):
self._ta_file = os.path.join(self._abspath, self._ta_file)
if not os.path.exists(self._ta_file):
assert False, ('%s: %s' %
(self._ta_file, _('File not found')))
def _ensure_sugar_paths(self):
''' Make sure Sugar paths are present. '''
tapath = os.path.join(os.environ['HOME'], '.sugar', 'default',
'org.laptop.TurtleArtActivity')
map(self._makepath, (os.path.join(tapath, 'data/'),
os.path.join(tapath, 'instance/')))
def _read_initial_pos(self):
''' Read saved configuration. '''
try:
data_file = open(os.path.join(CONFIG_HOME, 'turtleartrc'), 'r')
except IOError:
# Opening the config file failed
# We'll assume it needs to be created
try:
self._mkdir_p(CONFIG_HOME)
data_file = open(os.path.join(CONFIG_HOME, 'turtleartrc'),
'a+')
except IOError as e:
# We can't write to the configuration file, use
# a faux file that will persist for the length of
# the session.
print _('Configuration directory not writable: %s') % (e)
data_file = cStringIO.StringIO()
data_file.write(str(50) + '\n')
data_file.write(str(50) + '\n')
data_file.write(str(800) + '\n')
data_file.write(str(550) + '\n')
data_file.seek(0)
try:
self.x = int(data_file.readline())
self.y = int(data_file.readline())
self.width = int(data_file.readline())
self.height = int(data_file.readline())
except ValueError:
self.x = 50
self.y = 50
self.width = 800
self.height = 550
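        # For reference, the turtleartrc file read above is four plain-text
        # lines, matching the defaults written to the faux file:
        #   50    <- window x position
        #   50    <- window y position
        #   800   <- window width
        #   550   <- window height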
def _fixed_resize_cb(self, widget=None, rect=None):
''' If a toolbar opens or closes, we need to resize the vbox
        holding our scrolling window. '''
self.vbox.set_size_request(rect[2], rect[3])
self.menu_height = self.menu_bar.size_request()[1]
def restore_cursor(self):
''' No longer copying or sharing, so restore standard cursor. '''
self.tw.copying_blocks = False
self.tw.sharing_blocks = False
self.tw.saving_blocks = False
self.tw.deleting_blocks = False
if hasattr(self, 'get_window'):
if hasattr(self.get_window(), 'get_cursor'):
self.get_window().set_cursor(self._old_cursor)
else:
self.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
def _setup_gtk(self):
''' Set up a scrolled window in which to run Turtle Blocks. '''
win = gtk.Window(gtk.WINDOW_TOPLEVEL)
win.set_default_size(self.width, self.height)
win.move(self.x, self.y)
win.maximize()
win.set_title('%s %s' % (self.name, str(self.version)))
if os.path.exists(os.path.join(self._execdirname, self._ICON_SUBPATH)):
win.set_icon_from_file(os.path.join(self._execdirname,
self._ICON_SUBPATH))
win.show()
win.connect('delete_event', self._quit_ta)
''' Create a scrolled window to contain the turtle canvas. We
add a Fixed container in order to position text Entry widgets
on top of string and number blocks.'''
self.fixed = gtk.Fixed()
self.fixed.connect('size-allocate', self._fixed_resize_cb)
width = gtk.gdk.screen_width() - 80
height = gtk.gdk.screen_height() - 80
self.fixed.set_size_request(width, height)
self.vbox = gtk.VBox(False, 0)
self.vbox.show()
self.menu_bar = self._get_menu_bar()
self.vbox.pack_start(self.menu_bar, False, False)
self.menu_bar.show()
self.menu_height = self.menu_bar.size_request()[1]
self.sw = gtk.ScrolledWindow()
self.sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.sw.show()
canvas = gtk.DrawingArea()
width = gtk.gdk.screen_width() * 2
height = gtk.gdk.screen_height() * 2
canvas.set_size_request(width, height)
self.sw.add_with_viewport(canvas)
canvas.show()
self.vbox.pack_end(self.sw, True, True)
self.fixed.put(self.vbox, 0, 0)
self.fixed.show()
win.add(self.fixed)
win.show_all()
self.win = win
self.canvas = canvas
def _get_menu_bar(self):
''' Instead of Sugar toolbars, use GNOME menus. '''
menu = gtk.Menu()
MenuBuilder.make_menu_item(menu, _('New'), self._do_new_cb)
MenuBuilder.make_menu_item(menu, _('Show sample projects'),
self._create_store)
MenuBuilder.make_menu_item(menu, _('Open'), self._do_open_cb)
MenuBuilder.make_menu_item(menu, _('Add project'), self._do_load_cb)
MenuBuilder.make_menu_item(menu, _('Load plugin'),
self._do_load_plugin_cb)
MenuBuilder.make_menu_item(menu, _('Save'), self._do_save_cb)
MenuBuilder.make_menu_item(menu, _('Save as'), self._do_save_as_cb)
# export submenu
export_submenu = gtk.Menu()
export_menu = MenuBuilder.make_sub_menu(export_submenu, _('Export as'))
menu.append(export_menu)
MenuBuilder.make_menu_item(export_submenu, _('image'),
self._do_save_picture_cb)
MenuBuilder.make_menu_item(export_submenu, _('SVG'),
self._do_save_svg_cb)
MenuBuilder.make_menu_item(export_submenu, _('icon'),
self._do_save_as_icon_cb)
# TRANS: ODP is Open Office presentation
MenuBuilder.make_menu_item(export_submenu, _('ODP'),
self._do_save_as_odp_cb)
MenuBuilder.make_menu_item(export_submenu, _('Logo'),
self._do_save_logo_cb)
MenuBuilder.make_menu_item(export_submenu, _('Python'),
self._do_save_python_cb)
MenuBuilder.make_menu_item(menu, _('Quit'), self._quit_ta)
activity_menu = MenuBuilder.make_sub_menu(menu, _('File'))
menu = gtk.Menu()
MenuBuilder.make_menu_item(menu, _('Cartesian coordinates'),
self._do_cartesian_cb)
MenuBuilder.make_menu_item(menu, _('Polar coordinates'),
self._do_polar_cb)
self.coords = MenuBuilder.make_checkmenu_item(
menu, _('Rescale coordinates'),
self._do_rescale_cb, status=False)
MenuBuilder.make_menu_item(menu, _('Grow blocks'),
self._do_resize_cb, 1.5)
MenuBuilder.make_menu_item(menu, _('Shrink blocks'),
self._do_resize_cb, 0.667)
MenuBuilder.make_menu_item(menu, _('Reset block size'),
self._do_resize_cb, -1)
self.hover = MenuBuilder.make_checkmenu_item(
menu, _('Turn on hover help'),
self._do_toggle_hover_help_cb, status=True)
view_menu = MenuBuilder.make_sub_menu(menu, _('View'))
menu = gtk.Menu()
MenuBuilder.make_menu_item(menu, _('Copy'), self._do_copy_cb)
MenuBuilder.make_menu_item(menu, _('Paste'), self._do_paste_cb)
MenuBuilder.make_menu_item(menu, _('Save stack'),
self._do_save_macro_cb)
MenuBuilder.make_menu_item(menu, _('Delete stack'),
self._do_delete_macro_cb)
edit_menu = MenuBuilder.make_sub_menu(menu, _('Edit'))
menu = gtk.Menu()
MenuBuilder.make_menu_item(menu, _('Show palette'),
self._do_palette_cb)
MenuBuilder.make_menu_item(menu, _('Hide palette'),
self._do_hide_palette_cb)
MenuBuilder.make_menu_item(menu, _('Show/hide blocks'),
self._do_hideshow_cb)
tool_menu = MenuBuilder.make_sub_menu(menu, _('Tools'))
menu = gtk.Menu()
MenuBuilder.make_menu_item(menu, _('Clean'), self._do_eraser_cb)
MenuBuilder.make_menu_item(menu, _('Run'), self._do_run_cb)
MenuBuilder.make_menu_item(menu, _('Step'), self._do_step_cb)
MenuBuilder.make_menu_item(menu, _('Debug'), self._do_trace_cb)
MenuBuilder.make_menu_item(menu, _('Stop'), self._do_stop_cb)
turtle_menu = MenuBuilder.make_sub_menu(menu, _('Turtle'))
menu = gtk.Menu()
MenuBuilder.make_menu_item(menu, _('About...'), self._do_about_cb)
help_menu = MenuBuilder.make_sub_menu(menu, _('Help'))
menu_bar = gtk.MenuBar()
menu_bar.append(activity_menu)
menu_bar.append(edit_menu)
menu_bar.append(view_menu)
menu_bar.append(tool_menu)
menu_bar.append(turtle_menu)
# Add menus for plugins
for p in self._gnome_plugins:
menu_item = p.get_menu()
if menu_item is not None:
menu_bar.append(menu_item)
menu_bar.append(help_menu)
return menu_bar
def _quit_ta(self, widget=None, e=None):
''' Save changes on exit '''
project_empty = self.tw.is_project_empty()
if not project_empty:
resp = self._show_save_dialog(e is None)
if resp == gtk.RESPONSE_YES:
if self.tw.is_new_project():
self._save_as()
else:
if self.tw.project_has_changed():
self._save_changes()
elif resp == gtk.RESPONSE_CANCEL:
return
if hasattr(self, 'client'):
self.client.set_int(self._ORIENTATION, self.tw.orientation)
for plugin in self.tw.turtleart_plugins:
if hasattr(plugin, 'quit'):
plugin.quit()
# Clean up temporary files
if os.path.exists(TMP_SVG_PATH):
os.remove(TMP_SVG_PATH)
if os.path.exists(TMP_ODP_PATH):
os.remove(TMP_ODP_PATH)
gtk.main_quit()
exit()
def _show_save_dialog(self, add_cancel=False):
''' Dialog for save project '''
dlg = gtk.MessageDialog(parent=None, type=gtk.MESSAGE_INFO,
buttons=gtk.BUTTONS_YES_NO,
message_format=_('You have unsaved work. \
Would you like to save before quitting?'))
dlg.set_default_response(gtk.RESPONSE_YES)
if add_cancel:
dlg.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
dlg.set_title(_('Save project?'))
dlg.set_property('skip-taskbar-hint', False)
resp = dlg.run()
dlg.destroy()
return resp
def _reload_plugin_alert(self, tmp_dir, tmp_path, plugin_path, plugin_name,
file_info):
print "Already installed"
title = _('Plugin %s already installed') % plugin_name
msg = _('Do you want to reinstall %s?') % plugin_name
dlg = gtk.MessageDialog(parent=None, type=gtk.MESSAGE_INFO,
buttons=gtk.BUTTONS_YES_NO,
message_format=title)
dlg.format_secondary_text(msg)
dlg.set_title(title)
dlg.set_property('skip-taskbar-hint', False)
resp = dlg.run()
dlg.destroy()
        # The dialog above only offers Yes/No, so test for those responses.
        if resp == gtk.RESPONSE_YES:
            complete_plugin_install(tmp_dir, tmp_path, plugin_path,
                                    plugin_name, file_info)
        elif resp == gtk.RESPONSE_NO:
            cancel_plugin_install(tmp_dir)
def _do_new_cb(self, widget):
''' Callback for new project. '''
self.tw.new_project()
self.tw.load_start()
def _do_open_cb(self, widget):
''' Callback for open project. '''
self.tw.load_file_from_chooser(True)
def _do_load_cb(self, widget):
''' Callback for load project (add to current project). '''
self.tw.load_file_from_chooser(False)
def _do_load_plugin_cb(self, widget):
self.tw.load_save_folder = self._get_execution_dir()
file_path, loaddir = get_load_name('.tar.gz', self.tw.load_save_folder)
if file_path is None:
return
try:
# Copy to tmp file since some systems had trouble
# with gunzip directly from datastore
datapath = get_path(None, 'instance')
if not os.path.exists(datapath):
os.makedirs(datapath)
tmpfile = os.path.join(datapath, 'tmpfile.tar.gz')
subprocess.call(['cp', file_path, tmpfile])
status = subprocess.call(['gunzip', tmpfile])
if status == 0:
tar_fd = tarfile.open(tmpfile[:-3], 'r')
else:
tar_fd = tarfile.open(tmpfile, 'r')
except:
tar_fd = tarfile.open(file_path, 'r')
tmp_dir = tempfile.mkdtemp()
try:
tar_fd.extractall(tmp_dir)
load_a_plugin(self, tmp_dir)
self.restore_cursor()
except:
self.restore_cursor()
finally:
tar_fd.close()
# Remove tmpfile.tar
subprocess.call(['rm',
os.path.join(datapath, 'tmpfile.tar')])
def _do_save_cb(self, widget):
''' Callback for save project. '''
self.tw.save_file(self._ta_file)
def _do_save_as_cb(self, widget):
''' Callback for save-as project. '''
self._save_as()
def autosave(self):
        ''' Autosave is called each time the run button is pressed '''
temp_load_save_folder = self.tw.load_save_folder
temp_save_folder = self.tw.save_folder
self.tw.load_save_folder = self._autosavedirname
self.tw.save_folder = self._autosavedirname
self.tw.save_file(file_name=os.path.join(
self._autosavedirname, 'autosave.tb'))
self.tw.save_folder = temp_save_folder
self.tw.load_save_folder = temp_load_save_folder
def _save_as(self):
        ''' Save as is called from the save-as callback and from quit '''
self.tw.save_file_name = self._ta_file
self.tw.save_file()
def _save_changes(self):
''' Save changes to current project '''
self.tw.save_file_name = self._ta_file
self.tw.save_file(self.tw._loaded_project)
def _do_save_picture_cb(self, widget):
''' Callback for save canvas. '''
self.tw.save_as_image()
def _do_save_svg_cb(self, widget):
''' Callback for save canvas as SVG. '''
self.tw.save_as_image(svg=True)
def _do_save_as_icon_cb(self, widget):
''' Callback for save canvas. '''
self.tw.write_svg_operation()
self.tw.save_as_icon()
def _do_save_as_odp_cb(self, widget):
''' Callback for save canvas. '''
self.tw.save_as_odp()
def _do_save_logo_cb(self, widget):
''' Callback for save project to Logo. '''
logocode = save_logo(self.tw)
if len(logocode) == 0:
return
save_type = '.lg'
self.tw.load_save_folder = self._get_execution_dir()
filename, self.tw.load_save_folder = get_save_name(
save_type, self.tw.load_save_folder, 'logosession')
if isinstance(filename, unicode):
filename = filename.encode('utf-8')
if filename is not None:
f = file(filename, 'w')
f.write(logocode)
f.close()
def _do_save_python_cb(self, widget):
''' Callback for saving the project as Python code. '''
# catch PyExportError and display a user-friendly message instead
try:
pythoncode = save_python(self.tw)
except PyExportError as pyee:
if pyee.block is not None:
pyee.block.highlight()
self.tw.showlabel('status', str(pyee))
print pyee
return
if not pythoncode:
return
# use name of TA project if it has been saved already
default_name = self.tw.save_file_name
if default_name is None:
default_name = _("myproject")
elif default_name.endswith(".ta") or default_name.endswith(".tb"):
default_name = default_name[:-3]
save_type = '.py'
self.tw.load_save_folder = self._get_execution_dir()
filename, self.tw.load_save_folder = get_save_name(
save_type, self.tw.load_save_folder, default_name)
if isinstance(filename, unicode):
filename = filename.encode('utf-8')
if filename is not None:
f = file(filename, 'w')
f.write(pythoncode)
f.close()
def _do_resize_cb(self, widget, factor):
''' Callback to resize blocks. '''
if factor == -1:
self.tw.block_scale = 2.0
else:
self.tw.block_scale *= factor
self.tw.resize_blocks()
def _do_cartesian_cb(self, button):
''' Callback to display/hide Cartesian coordinate overlay. '''
self.tw.set_cartesian(True)
def _do_polar_cb(self, button):
''' Callback to display/hide Polar coordinate overlay. '''
self.tw.set_polar(True)
def _do_rescale_cb(self, button):
''' Callback to rescale coordinate space. '''
if self._setting_gconf_overrides:
return
if self.tw.coord_scale == 1:
self.tw.coord_scale = self.tw.height / 40
self.tw.update_overlay_position()
if self.tw.cartesian is True:
self.tw.overlay_shapes['Cartesian_labeled'].hide()
self.tw.overlay_shapes['Cartesian'].set_layer(OVERLAY_LAYER)
default_values['forward'] = [10]
default_values['back'] = [10]
default_values['arc'] = [90, 10]
default_values['setpensize'] = [1]
self.tw.turtles.get_active_turtle().set_pen_size(1)
else:
self.tw.coord_scale = 1
if self.tw.cartesian is True:
self.tw.overlay_shapes['Cartesian'].hide()
self.tw.overlay_shapes['Cartesian_labeled'].set_layer(
OVERLAY_LAYER)
default_values['forward'] = [100]
default_values['back'] = [100]
default_values['arc'] = [90, 100]
default_values['setpensize'] = [5]
self.tw.turtles.get_active_turtle().set_pen_size(5)
if hasattr(self, 'client'):
self.client.set_int(self._COORDINATE_SCALE,
int(self.tw.coord_scale))
self.tw.recalculate_constants()
def _do_toggle_hover_help_cb(self, button):
''' Toggle hover help on/off '''
self.tw.no_help = not(button.get_active())
if self.tw.no_help:
self._do_hover_help_off_cb()
else:
self._do_hover_help_on_cb()
def _do_hover_help_on_cb(self):
''' Turn hover help on '''
if hasattr(self, 'client'):
self.client.set_int(self._HOVER_HELP, 0)
def _do_hover_help_off_cb(self):
''' Turn hover help off '''
self.tw.last_label = None
if self.tw.status_spr is not None:
self.tw.status_spr.hide()
if hasattr(self, 'client'):
self.client.set_int(self._HOVER_HELP, 1)
def _do_palette_cb(self, widget):
''' Callback to show/hide palette of blocks. '''
self.tw.show_palette(self.current_palette)
self.current_palette += 1
if self.current_palette == len(self.tw.palettes):
self.current_palette = 0
def _do_hide_palette_cb(self, widget):
''' Hide the palette of blocks. '''
self.tw.hide_palette()
def _do_hideshow_cb(self, widget):
''' Hide/show the blocks. '''
self.tw.hideshow_button()
def _do_eraser_cb(self, widget):
''' Callback for eraser button. '''
self.tw.eraser_button()
return
def _do_run_cb(self, widget=None):
''' Callback for run button (rabbit). '''
self.tw.lc.trace = 0
self.tw.hideblocks()
self.tw.display_coordinates(clear=True)
self.tw.toolbar_shapes['stopiton'].set_layer(TAB_LAYER)
self.tw.run_button(0, running_from_button_push=True)
return
def _do_step_cb(self, widget):
''' Callback for step button (turtle). '''
self.tw.lc.trace = 1
self.tw.run_button(3, running_from_button_push=True)
return
def _do_trace_cb(self, widget):
''' Callback for debug button (bug). '''
self.tw.lc.trace = 1
self.tw.run_button(9, running_from_button_push=True)
return
def _do_stop_cb(self, widget):
''' Callback for stop button. '''
if self.tw.running_blocks:
self.tw.toolbar_shapes['stopiton'].hide()
if self.tw.hide:
self.tw.showblocks()
self.tw.stop_button()
self.tw.display_coordinates()
def _do_save_macro_cb(self, widget):
''' Callback for save stack button. '''
self.tw.copying_blocks = False
self.tw.deleting_blocks = False
if self.tw.saving_blocks:
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
self.tw.saving_blocks = False
else:
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.HAND1))
self.tw.saving_blocks = True
def _do_delete_macro_cb(self, widget):
''' Callback for delete stack button. '''
self.tw.copying_blocks = False
self.tw.saving_blocks = False
if self.tw.deleting_blocks:
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
self.tw.deleting_blocks = False
else:
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.HAND1))
self.tw.deleting_blocks = True
def _do_copy_cb(self, button):
''' Callback for copy button. '''
self.tw.saving_blocks = False
self.tw.deleting_blocks = False
if self.tw.copying_blocks:
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
self.tw.copying_blocks = False
else:
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.HAND1))
self.tw.copying_blocks = True
def _do_paste_cb(self, button):
''' Callback for paste button. '''
self.tw.copying_blocks = False
self.tw.saving_blocks = False
self.tw.deleting_blocks = False
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
clipboard = gtk.Clipboard()
text = clipboard.wait_for_text()
if text is not None:
if self.tw.selected_blk is not None and \
self.tw.selected_blk.name == 'string' and \
text[0:2] != '[[': # Don't paste block data into a string
self.tw.paste_text_in_block_label(text)
self.tw.selected_blk.resize()
else:
self.tw.process_data(data_from_string(text),
self.tw.paste_offset)
self.tw.paste_offset += PASTE_OFFSET
def _do_about_cb(self, widget):
about = gtk.AboutDialog()
about.set_program_name(_(self.name))
about.set_version(self.version)
about.set_comments(_(self.summary))
about.set_website(self.website)
about.set_logo(
gtk.gdk.pixbuf_new_from_file(
'activity/' + self.icon_name + '.svg'))
about.run()
about.destroy()
def _window_event(self, event, data):
''' Callback for resize event. '''
        with open('.turtleartrc', 'w') as data_file:
            data_file.write(str(data.x) + '\n')
            data_file.write(str(data.y) + '\n')
            data_file.write(str(data.width) + '\n')
            data_file.write(str(data.height) + '\n')
def nick_changed(self, nick):
''' TODO: Rename default turtle in dictionary '''
pass
def color_changed(self, colors):
''' Reskin turtle with collaboration colors '''
turtle = self.tw.turtles.get_turtle(self.tw.default_turtle_name)
try:
turtle.colors = colors.split(',')
except:
turtle.colors = DEFAULT_TURTLE_COLORS
turtle.custom_shapes = True # Force regeneration of shapes
turtle.reset_shapes()
turtle.show()
def _get_execution_dir(self):
''' From whence is the program being executed? '''
dirname = os.path.dirname(__file__)
if dirname == '':
            # '~' must be expanded for the existence check to succeed.
            activity_path = os.path.expanduser(
                os.path.join('~', 'Activities', 'TurtleArt.activity'))
            if os.path.exists(activity_path):
                return activity_path
elif os.path.exists(self._INSTALL_PATH):
return self._INSTALL_PATH
elif os.path.exists(self._ALTERNATIVE_INSTALL_PATH):
return self._ALTERNATIVE_INSTALL_PATH
else:
return os.path.abspath('.')
else:
return os.path.abspath(dirname)
def restore_state(self):
''' Anything that needs restoring after a clear screen can go here '''
pass
def hide_store(self, widget=None):
if self._sample_window is not None:
self._sample_box.hide()
def _create_store(self, widget=None):
if self._sample_window is None:
self._sample_box = gtk.EventBox()
self._sample_window = gtk.ScrolledWindow()
self._sample_window.set_policy(gtk.POLICY_NEVER,
gtk.POLICY_AUTOMATIC)
width = gtk.gdk.screen_width() / 2
height = gtk.gdk.screen_height() / 2
self._sample_window.set_size_request(width, height)
self._sample_window.show()
store = gtk.ListStore(gtk.gdk.Pixbuf, str)
icon_view = gtk.IconView()
icon_view.set_model(store)
icon_view.set_selection_mode(gtk.SELECTION_SINGLE)
icon_view.connect('selection-changed', self._sample_selected,
store)
icon_view.set_pixbuf_column(0)
icon_view.grab_focus()
self._sample_window.add_with_viewport(icon_view)
icon_view.show()
self._fill_samples_list(store)
width = gtk.gdk.screen_width() / 4
height = gtk.gdk.screen_height() / 4
self._sample_box.add(self._sample_window)
self.fixed.put(self._sample_box, width, height)
self._sample_window.show()
self._sample_box.show()
def _get_selected_path(self, widget, store):
try:
iter_ = store.get_iter(widget.get_selected_items()[0])
image_path = store.get(iter_, 1)[0]
return image_path, iter_
except:
return None
def _sample_selected(self, widget, store):
selected = self._get_selected_path(widget, store)
if selected is None:
self._selected_sample = None
self._sample_window.hide()
return
        image_path, _iter = selected
self._selected_sample = image_path
self._sample_window.hide()
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
gobject.idle_add(self._sample_loader)
def _sample_loader(self):
# Convert from thumbnail path to sample path
basename = os.path.basename(self._selected_sample)[:-4]
for suffix in ['.ta', '.tb']:
file_path = os.path.join(self._execdirname,
'samples', basename + suffix)
if os.path.exists(file_path):
self.tw.load_files(file_path)
break
self.tw.load_save_folder = os.path.join(self._get_execution_dir(),
'samples')
self.win.get_window().set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))
def _fill_samples_list(self, store):
'''
        Append sample thumbnail images to the store.
'''
for filepath in self._scan_for_samples():
pixbuf = None
pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(
filepath, 100, 100)
store.append([pixbuf, filepath])
def _scan_for_samples(self):
samples = sorted(
glob.glob(
os.path.join(
self._get_execution_dir(),
'samples',
'thumbnails',
'*.png')))
return samples
if __name__ == '__main__':
TurtleMain()
|
{
"content_hash": "fde5cad6c76a99bce0ff2b8b7c64dfb5",
"timestamp": "",
"source": "github",
"line_count": 1037,
"max_line_length": 79,
"avg_line_length": 38.56894889103182,
"alnum_prop": 0.553055305530553,
"repo_name": "nvazquez/Turtlebots",
"id": "614fbed70ebc2940ef4747d3d94d4b6e5cbbc7c9",
"size": "41225",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "turtleblocks.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "11536"
},
{
"name": "C++",
"bytes": "673"
},
{
"name": "Makefile",
"bytes": "1519"
},
{
"name": "Python",
"bytes": "3582442"
},
{
"name": "Shell",
"bytes": "356"
}
],
"symlink_target": ""
}
|
import json
import datetime
from decimal import Decimal
from django.db import transaction
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from boto.mturk.connection import MTurkConnection, MTurkRequestError
from boto.mturk.qualification import Qualifications, \
NumberHitsApprovedRequirement, PercentAssignmentsAbandonedRequirement, \
PercentAssignmentsApprovedRequirement, PercentAssignmentsRejectedRequirement, \
PercentAssignmentsReturnedRequirement, PercentAssignmentsSubmittedRequirement, \
Requirement
from common.utils import has_foreign_key
from accounts.models import UserProfile
def aws_str_to_datetime(s):
""" Parse Amazon date-time string """
return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%SZ')
def get_mturk_connection():
return MTurkConnection(
aws_access_key_id=settings.MTURK_AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.MTURK_AWS_SECRET_ACCESS_KEY,
host=settings.MTURK_HOST,
debug=settings.MTURK_SANDBOX)
def get_mturk_balance():
return Decimal(get_mturk_connection().get_account_balance()[0].amount)
def extract_mturk_attr(result_set, attr):
""" Extracts an attribute from a boto ResultSet """
if hasattr(result_set, attr):
return getattr(result_set, attr)
try:
for r in result_set:
if hasattr(r, attr):
return getattr(r, attr)
except TypeError:
pass
raise MTurkRequestError(status=0, reason='Missing %s in response' % attr)
def get_or_create_mturk_worker(mturk_worker_id):
""" Returns a UserProfile object for the associated mturk_worker_id """
if not mturk_worker_id:
return None
try:
return UserProfile.objects.get(mturk_worker_id=mturk_worker_id)
except UserProfile.DoesNotExist:
user = User.objects.get_or_create(
username='mturk_' + mturk_worker_id)[0]
profile = user.get_profile()
profile.mturk_worker_id = mturk_worker_id
profile.save()
return profile
def get_or_create_mturk_worker_from_request(request):
if 'workerId' in request.GET:
return get_or_create_mturk_worker(request.GET['workerId'])
else:
return None
def qualification_to_boto(*args):
""" Convert a qualification to the format required by boto """
if len(args) == 2:
name = args[0]
value = args[1]
elif len(args) == 1:
name = args[0].name
value = args[0].value
else:
raise ValueError("Invalid arguments")
from mturk.models import MtQualification
try:
q = MtQualification.objects.get(slug=name)
return Requirement(
qualification_type_id=q.id,
comparator="EqualTo",
integer_value=value)
except MtQualification.DoesNotExist:
pass
if name == 'num_approved':
return NumberHitsApprovedRequirement(
comparator='GreaterThanOrEqualTo', integer_value=value)
elif name == 'perc_abandoned':
return PercentAssignmentsAbandonedRequirement(
comparator='LessThanOrEqualTo', integer_value=value)
elif name == 'perc_approved':
return PercentAssignmentsApprovedRequirement(
comparator='GreaterThanOrEqualTo', integer_value=value)
elif name == 'perc_rejected':
return PercentAssignmentsRejectedRequirement(
comparator='LessThanOrEqualTo', integer_value=value)
elif name == 'perc_returned':
return PercentAssignmentsReturnedRequirement(
comparator='LessThanOrEqualTo', integer_value=value)
elif name == 'perc_submitted':
return PercentAssignmentsSubmittedRequirement(
comparator='GreaterThanOrEqualTo', integer_value=value)
else:
raise ValueError("Unknown name: %s" % name)
def qualification_dict_to_boto(quals):
if not quals:
return None
quals_boto = filter(
None, [qualification_to_boto(k, v) for k, v in quals.iteritems()])
return Qualifications(quals_boto) if quals_boto else None
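# Illustrative sketch (not part of the original module): the default
# qualification dict used by configure_experiment below would be converted
# roughly as follows, assuming no custom MtQualification shares these slugs;
# the requirement classes come from the boto imports at the top of this file.
#
#   quals = {'num_approved': 1000, 'perc_approved': 97}
#   qualification_dict_to_boto(quals)
#   # -> Qualifications([
#   #        NumberHitsApprovedRequirement('GreaterThanOrEqualTo', 1000),
#   #        PercentAssignmentsApprovedRequirement('GreaterThanOrEqualTo', 97),
#   #    ])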
def get_content_model_prefetch(content_model, content_attr='content'):
""" Returns the fields that should be prefetched, for a relation that
starts with '<content_attr>__'. If the model has MTURK_PREFETCH, then that
is used. Otherwise, some common attributes are tested (photo, shape) and
used if those foreign keys exist. """
if hasattr(content_model, 'MTURK_PREFETCH'):
return ['%s__%s' % (content_attr, k)
for k in content_model.MTURK_PREFETCH]
else:
# guess if there is no default
prefetch = []
if has_foreign_key(content_model, 'photo'):
prefetch.append('%s__photo' % content_attr)
if has_foreign_key(content_model, 'shape'):
prefetch.append('%s__shape' % content_attr)
prefetch.append('%s__shape__photo' % content_attr)
return prefetch
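# Illustrative note (hypothetical model, not from this repo): a content model
# can skip the foreign-key guessing above by declaring MTURK_PREFETCH itself,
# e.g.
#
#   class ShapeLabel(models.Model):
#       shape = models.ForeignKey(Shape)
#       MTURK_PREFETCH = ['shape', 'shape__photo']
#
# get_content_model_prefetch(ShapeLabel) would then return
# ['content__shape', 'content__shape__photo'] rather than probing for
# 'photo'/'shape' foreign keys.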
def get_model_prefetch(content_model):
""" Returns the fields that should be prefetched, for a generic relation """
if hasattr(content_model, 'MTURK_PREFETCH'):
return content_model.MTURK_PREFETCH
else:
# guess if there is no default
prefetch = []
if has_foreign_key(content_model, 'photo'):
prefetch.append('photo')
if has_foreign_key(content_model, 'shape'):
prefetch.append('shape')
prefetch.append('shape__photo')
return prefetch
def fetch_content_tuples(content_tuples):
""" Fetch a list of generic items, given as a list of
```[(content_type_id, object_id), ...]``` """
# group by content type
ct_to_ids = {}
for (ct, id) in content_tuples:
if ct in ct_to_ids:
ct_to_ids[ct].append(id)
else:
ct_to_ids[ct] = [id]
# bulk fetch for each content type
ct_to_values = {}
for (ct, ids) in ct_to_ids.iteritems():
model = ContentType.objects.get_for_id(ct).model_class()
prefetch = get_model_prefetch(model)
ct_to_values[ct] = model.objects \
.select_related(*prefetch).in_bulk(ids)
# match original ordering
return [ct_to_values[ct][id] for (ct, id) in content_tuples]
def fetch_hit_contents(hit):
""" Fetch the contents (the items shown the user) efficiently in a small
number of queries """
prefetch = ['content']
content_type_ids = hit.contents.all() \
.order_by().distinct('content_type') \
.values_list('content_type', flat=True)
if len(content_type_ids) == 1:
content_model = ContentType.objects.get_for_id(
content_type_ids[0]).model_class()
prefetch += get_content_model_prefetch(content_model)
#else: TODO: handle this case efficiently (still works, just slow)
contents = [p.content
for p in hit.contents.all()
.prefetch_related(*prefetch).order_by()]
return filter(None, contents)
def configure_experiment(slug, variant='', **kwargs):
""" Configures an experiment in the database
(:class:`mturk.models.Experiment`). To be called by
:meth:`configure_experiments`.
:param slug: unique human-readable ID (must be valid Python variable name).
The ``slug`` and ``variant`` are together unique.
:param variant: optional string that may be used to include multiple
variations on the same experiment, where the same template and user
interface is used across all variants. The ``slug`` and ``variant``
together are unique.
Example: you want to perform object labeling, with different lists of
allowed object names (see ``shapes.experiments`` for this example).
:param completed_id: optional string that may be used in place of ``slug``
when determining whether an experiment has been completed. If two
experiments share this field, then an item completed under one
experiment will count as completed under the other experiment.
:param template_dir: directory for templates, usually
``'<app>/experiments'``.
The templates for each experiment are constructed as follows:
::
{template_dir}/{slug}.html -- mturk task
{template_dir}/{slug}_inst_content.html -- instructions page (just the
content)
{template_dir}/{slug}_inst.html -- instructions (includes
_inst_content.html)
{template_dir}/{slug}_tut.html -- tutorial (if there is one)
:param module: module containing the ``experiments.py`` file, usually
``'<app>.experiments'``
:param examples_group_attr: the attribute used to group examples together.
Example: if you have good and bad BRDFs for a shape, and the BRDF
        points to the shape with the name 'shape', then this field could be
        set to 'shape'.
:param version: should be the value ``2`` (note that ``1`` is for the
original OpenSurfaces publication).
:param reward: payment per HIT, as an instance of ``decimal.Decimal``
:param num_outputs_max: the number of output items that each input item
will produce. Usually this is ``1``. An example of another value: for
OpenSurfaces material segmentation, 1 photo will produce 6
segmentations.
:param contents_per_hit: the number of contents to include in each HIT
:param test_contents_per_assignment: if specified, the number of
secret test items to be added (on top of ``contents_per_hit``)
to each HIT.
:param has_tutorial: ``True`` if this experiment has a special
tutorial (see ``intrinsic/experiments.py`` for an example).
:param content_type_model: the model class for input content (content that
is shown to the user)
:param out_content_type_model: the model class for output (user responses)
:param out_content_attr: on the output model class, the name of the
attribute that gives the input for that output. For example,
for a material segmentation, a ``Photo`` is the input and a
``SubmittedShape`` is the output, and ``SubmittedShape.photo``
gives the input photo.
:param content_filter: a dictionary of filters to be applied
to the input content to determine which items should be labeled.
Example for labeling BRDFs:
.. code-block:: py
{
'invalid': False,
'pixel_area__gt': Shape.MIN_PIXEL_AREA,
'num_vertices__gte': 10,
'correct': True,
'substance__isnull': False,
'substance__fail': False,
'photo__whitebalanced': True,
'photo__scene_category_correct': True,
}
:param title: string shown in the MTurk marketplace as the title of the
task.
:param description: string shown in the MTurk marketplace describing the
task.
:param keywords: comma-separated string listing the keywords, e.g.
``'keyword1,keyword2,keyword3'``.
:param frame_height: height in pixels used to display the iframe for
workers. Most workers have 1024x768 or 800x600 screen resolutions, so I
recommend setting this to at most **668** pixels. Alternatively,
you could set it to a very large number and avoid an inner scroll bar.
:param requirements: [deprecated feature] dictionary of requirements that
users must satisfy to submit a task, or ``{}`` if there are no
requirements. These requirements are passed as context variables. This is
an old feature and is implemented very inefficiently. There
are better ways of getting data into the experiment context,
such as :meth:`external_task_extra_context`.
:param auto_add_hits: if ``True``, dispatch new HITs of this type.
"""
# import locally to avoid circular import
from mturk.models import Experiment, ExperimentExample, \
ExperimentTestContent
# create experiment object
if not isinstance(variant, basestring):
variant = json.dumps(variant)
experiment, __ = Experiment.objects.get_or_create(
slug=slug, variant=variant)
# variables on experiment object
experiment_dirty = False
for k in ['name', 'completed_id', 'version', 'has_tutorial',
'template_dir', 'module', 'examples_group_attr',
'test_contents_per_assignment']:
if k in kwargs:
setattr(experiment, k, kwargs[k])
del kwargs[k]
experiment_dirty = True
if experiment_dirty:
experiment.save()
# allow specifying objects instead of json strings
for k in ('content_filter', 'requirements'):
if k in kwargs and kwargs[k]:
if isinstance(kwargs[k], basestring):
# make sure it's valid json and remove any extra whitespace
kwargs[k] = json.dumps(json.loads(kwargs[k]))
else:
kwargs[k] = json.dumps(kwargs[k])
else:
kwargs[k] = '{}'
# allow specifying models instead of contenttype objects
if 'content_type_model' in kwargs:
kwargs['content_type'] = ContentType.objects.get_for_model(
kwargs['content_type_model'])
del kwargs['content_type_model']
# make sure that the out_content_type also satisfies certain requirements
if 'out_content_type_model' in kwargs:
model = kwargs['out_content_type_model']
# make sure certain methods and fields are implemented
if not hasattr(model, 'mturk_submit'):
raise NotImplementedError(
"Model %s does not have a mturk_submit method" % model)
if 'mturk_assignment' not in [f.name for f in model._meta.fields]:
raise NotImplementedError(
"Model %s does not have a mturk_assignment field" % model)
kwargs['out_content_type'] = ContentType.objects.get_for_model(model)
del kwargs['out_content_type_model']
if 'examples_good' in kwargs:
example_ids = []
for obj in kwargs['examples_good']:
example, _ = experiment.examples.get_or_create(
content_type=ContentType.objects.get_for_model(obj.__class__),
object_id=obj.id)
example_ids.append(example.id)
ExperimentExample.objects.filter(id__in=example_ids).update(good=True)
del kwargs['examples_good']
if 'examples_bad' in kwargs:
example_ids = []
for obj in kwargs['examples_bad']:
example, _ = experiment.examples.get_or_create(
content_type=ContentType.objects.get_for_model(obj.__class__),
object_id=obj.id)
example_ids.append(example.id)
ExperimentExample.objects.filter(id__in=example_ids).update(good=False)
del kwargs['examples_bad']
if 'test_contents' in kwargs:
for obj in kwargs['test_contents']:
t, _ = experiment.test_contents.get_or_create(
content_type=ContentType.objects.get_for_model(obj.__class__),
object_id=obj.id)
ExperimentTestContent.objects.filter(id=t.id).update(
priority=experiment.content_priority(obj))
del kwargs['test_contents']
# default parameters
new_hit_settings = {
'frame_height': 768, # almost all workers have a screen height of 768
'duration': 60 * 60,
'lifetime': 3600 * 24 * 31,
'auto_approval_delay': 259200,
'feedback_bonus': Decimal('0.02'),
'auto_add_hits': False,
'max_active_hits': 200,
'max_total_hits': 100000,
'out_count_ratio': 1,
'qualifications': '{ "num_approved": 1000, "perc_approved": 97 }',
'requirements': '{}',
'content_filter': '{}',
}
if settings.MTURK_SANDBOX:
new_hit_settings.update({
'qualifications': '{}',
'min_output_consensus': 1,
'duration': 60 * 60 * 24 * 7,
#'max_active_hits': 50,
})
new_hit_settings.update(kwargs)
if 'min_output_consensus' not in new_hit_settings:
new_hit_settings['min_output_consensus'] = (
new_hit_settings['num_outputs_max'])
# sanity check
if new_hit_settings['reward'] > Decimal('0.2'):
raise ValueError("Reward too high")
experiment.set_new_hit_settings(**new_hit_settings)
def configure_all_experiments(show_progress=False):
""" Configure all experiments by searching for modules of the form
'<app>.experiments' (where "app" is an installed app). The method
configure_experiment() is then invoked for each such module found. """
from mturk.models import Experiment
with transaction.atomic():
# turn off all experiments unless turned on by configure_experiments
for exp in Experiment.objects.all():
if exp.new_hit_settings:
exp.new_hit_settings.auto_add_hits = False
exp.new_hit_settings.save()
from common.utils import import_modules
modules = import_modules(settings.MTURK_MODULES)
for mt in modules:
if show_progress:
print ' Running %s.configure_experiments()' % mt.__name__
mt.configure_experiments()
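# Minimal sketch (assumptions, not from this repo): each module listed in
# settings.MTURK_MODULES is expected to expose a configure_experiments()
# function that calls configure_experiment() with the parameters documented
# above. The app, models, slug and template paths below are hypothetical.
#
# def configure_experiments():
#     from decimal import Decimal
#     from shapes.models import Shape, ShapeLabel
#     configure_experiment(
#         slug='label_shape',
#         template_dir='shapes/experiments',
#         module='shapes.experiments',
#         version=2,
#         reward=Decimal('0.05'),
#         num_outputs_max=1,
#         contents_per_hit=10,
#         content_type_model=Shape,
#         out_content_type_model=ShapeLabel,
#         out_content_attr='shape',
#         content_filter={'invalid': False, 'correct': True},
#         title='Label the material of shapes',
#         description='Click on each shape and pick its material name.',
#         keywords='image,shape,material,label',
#         frame_height=668,
#         auto_add_hits=False,
#     )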
|
{
"content_hash": "e20de48bb51c8b3aeb4d43872b9e7ecf",
"timestamp": "",
"source": "github",
"line_count": 464,
"max_line_length": 84,
"avg_line_length": 37.831896551724135,
"alnum_prop": 0.6344992594280506,
"repo_name": "seanbell/opensurfaces",
"id": "5fa57ccadc2fdf2c3ef394522329fd69825a224b",
"size": "17554",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/mturk/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2134399"
},
{
"name": "C++",
"bytes": "944309"
},
{
"name": "CMake",
"bytes": "1314"
},
{
"name": "CSS",
"bytes": "332038"
},
{
"name": "CoffeeScript",
"bytes": "245856"
},
{
"name": "HTML",
"bytes": "286807"
},
{
"name": "JavaScript",
"bytes": "395211"
},
{
"name": "Lua",
"bytes": "4605"
},
{
"name": "M",
"bytes": "43"
},
{
"name": "Makefile",
"bytes": "9862"
},
{
"name": "Matlab",
"bytes": "69652"
},
{
"name": "Objective-C",
"bytes": "547"
},
{
"name": "Python",
"bytes": "2161982"
},
{
"name": "Shell",
"bytes": "54309"
},
{
"name": "TeX",
"bytes": "35639"
}
],
"symlink_target": ""
}
|
"""Analysis plugin that labels events according to rules in a tagging file."""
from plaso.analysis import interface
from plaso.analysis import manager
from plaso.engine import tagging_file
class TaggingAnalysisPlugin(interface.AnalysisPlugin):
"""Analysis plugin that labels events according to rules in a tagging file."""
NAME = 'tagging'
def __init__(self):
"""Initializes a tagging analysis plugin."""
super(TaggingAnalysisPlugin, self).__init__()
self._tagging_rules = None
def ExamineEvent(self, mediator, event, event_data, event_data_stream):
"""Labels events according to the rules in a tagging file.
Args:
mediator (AnalysisMediator): mediates interactions between analysis
plugins and other components, such as storage and dfvfs.
event (EventObject): event to examine.
event_data (EventData): event data.
event_data_stream (EventDataStream): event data stream.
"""
matched_label_names = []
for label_name, filter_objects in self._tagging_rules.items():
for filter_object in filter_objects:
# Note that tagging events based on existing labels is currently
# not supported.
if filter_object.Match(event, event_data, event_data_stream, None):
matched_label_names.append(label_name)
break
if matched_label_names:
event_tag = self._CreateEventTag(event, matched_label_names)
mediator.ProduceEventTag(event_tag)
for label_name in matched_label_names:
self._analysis_counter[label_name] += 1
self._analysis_counter['event_tags'] += 1
def SetAndLoadTagFile(self, tagging_file_path):
"""Sets the tagging file to be used by the plugin.
Args:
tagging_file_path (str): path of the tagging file.
"""
tagging_file_object = tagging_file.TaggingFile(tagging_file_path)
self._tagging_rules = tagging_file_object.GetEventTaggingRules()
manager.AnalysisPluginManager.RegisterPlugin(TaggingAnalysisPlugin)
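# Illustrative sketch (not part of plaso): the file handed to
# SetAndLoadTagFile is a plain-text tagging file in which each label is
# followed by one or more indented event-filter expressions, for example:
#
#   application_execution
#     data_type is 'windows:prefetch:execution'
#
#   browser_search
#     data_type is 'chrome:history:page_visited' AND url contains 'search'
#
# Every event matching any expression under a label is tagged with that
# label by ExamineEvent; the attribute names above are examples only.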
|
{
"content_hash": "d2a5fd78a638157015a48c0e25eed5f9",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 80,
"avg_line_length": 35.07017543859649,
"alnum_prop": 0.704352176088044,
"repo_name": "kiddinn/plaso",
"id": "3133e721e62406f2f195eb6e90ecb1d8d243fc76",
"size": "2023",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plaso/analysis/tagging.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1047"
},
{
"name": "Makefile",
"bytes": "68"
},
{
"name": "PowerShell",
"bytes": "9560"
},
{
"name": "Python",
"bytes": "4878625"
},
{
"name": "Ruby",
"bytes": "926"
},
{
"name": "Shell",
"bytes": "26453"
}
],
"symlink_target": ""
}
|
from typing import Dict, List, NewType
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.models.team import Team
from backend.common.queries.dict_converters.converter_base import ConverterBase
TeamDict = NewType("TeamDict", Dict)
class TeamConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
ApiMajorVersion.API_V3: 4,
}
@classmethod
def _convert_list(
cls, model_list: List[Team], version: ApiMajorVersion
) -> List[TeamDict]:
CONVERTERS = {
ApiMajorVersion.API_V3: cls.teamsConverter_v3,
}
return CONVERTERS[version](model_list)
@classmethod
def teamsConverter_v3(cls, teams: List[Team]) -> List[TeamDict]:
return list(map(cls.teamConverter_v3, teams))
@classmethod
def teamConverter_v3(cls, team: Team) -> TeamDict:
default_name = "Team {}".format(team.team_number)
team_dict = {
"key": team.key.id(),
"team_number": team.team_number,
"nickname": team.nickname if team.nickname else default_name,
"name": team.name if team.name else default_name,
"website": team.website,
"rookie_year": team.rookie_year,
"motto": None,
# "home_championship": team.championship_location, # TODO: event not ported yet
"school_name": team.school_name,
}
team_dict.update(cls.constructLocation_v3(team))
return TeamDict(team_dict)
@staticmethod
def dictToModel_v3(data: Dict) -> Team:
team = Team(id=data["key"])
team.team_number = data["team_number"]
team.nickname = data["nickname"]
team.name = data["name"]
team.website = data["website"]
team.rookie_year = data["rookie_year"]
team.motto = data["motto"]
team.city = data["city"]
team.state_prov = data["state_prov"]
team.country = data["country"]
team.school_name = data["school_name"]
return team
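# Minimal usage sketch (assumes an active NDB client context; values are
# illustrative, not from this repo):
#
#   team = Team(id="frc254", team_number=254, rookie_year=1999)
#   team_dict = TeamConverter.teamConverter_v3(team)
#   # With no stored nickname or name, both fall back to "Team 254"; the
#   # city/state_prov/country keys are merged in by constructLocation_v3.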
|
{
"content_hash": "bc231322472b0cfb384b6fdfe2a916d1",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 92,
"avg_line_length": 35.62068965517241,
"alnum_prop": 0.6205227492739593,
"repo_name": "the-blue-alliance/the-blue-alliance",
"id": "bffc4722c1ec653f036f0849306213317e34ec9c",
"size": "2066",
"binary": false,
"copies": "1",
"ref": "refs/heads/py3",
"path": "src/backend/common/queries/dict_converters/team_converter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "359032"
},
{
"name": "Dockerfile",
"bytes": "2503"
},
{
"name": "HTML",
"bytes": "5877313"
},
{
"name": "JavaScript",
"bytes": "755910"
},
{
"name": "Less",
"bytes": "244218"
},
{
"name": "PHP",
"bytes": "10727"
},
{
"name": "Pug",
"bytes": "1857"
},
{
"name": "Python",
"bytes": "4321885"
},
{
"name": "Ruby",
"bytes": "4677"
},
{
"name": "Shell",
"bytes": "27698"
}
],
"symlink_target": ""
}
|
""""""
import importlib
import os
import traceback
from collections import defaultdict
from pathlib import Path
from typing import Any, Callable
from datetime import datetime, timedelta
from concurrent.futures import ThreadPoolExecutor
from copy import copy
from tzlocal import get_localzone
from vnpy.event import Event, EventEngine
from vnpy.trader.engine import BaseEngine, MainEngine
from vnpy.trader.object import (
OrderRequest,
SubscribeRequest,
HistoryRequest,
LogData,
TickData,
BarData,
ContractData
)
from vnpy.trader.event import (
EVENT_TICK,
EVENT_ORDER,
EVENT_TRADE,
EVENT_POSITION
)
from vnpy.trader.constant import (
Direction,
OrderType,
Interval,
Exchange,
Offset,
Status
)
from vnpy.trader.utility import load_json, save_json, extract_vt_symbol, round_to
from vnpy.trader.rqdata import rqdata_client
from vnpy.trader.converter import OffsetConverter
from vnpy.trader.database import database_manager
from .base import (
APP_NAME,
EVENT_CTA_LOG,
EVENT_CTA_STRATEGY,
EVENT_CTA_STOPORDER,
EngineType,
StopOrder,
StopOrderStatus,
STOPORDER_PREFIX
)
from .template import CtaTemplate
STOP_STATUS_MAP = {
Status.SUBMITTING: StopOrderStatus.WAITING,
Status.NOTTRADED: StopOrderStatus.WAITING,
Status.PARTTRADED: StopOrderStatus.TRIGGERED,
Status.ALLTRADED: StopOrderStatus.TRIGGERED,
Status.CANCELLED: StopOrderStatus.CANCELLED,
Status.REJECTED: StopOrderStatus.CANCELLED
}
class CtaEngine(BaseEngine):
""""""
engine_type = EngineType.LIVE # live trading engine
setting_filename = "cta_strategy_setting.json"
data_filename = "cta_strategy_data.json"
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
""""""
super(CtaEngine, self).__init__(
main_engine, event_engine, APP_NAME)
self.strategy_setting = {} # strategy_name: dict
self.strategy_data = {} # strategy_name: dict
        self.classes = {}           # class_name: strategy_class
self.strategies = {} # strategy_name: strategy
self.symbol_strategy_map = defaultdict(
list) # vt_symbol: strategy list
self.orderid_strategy_map = {} # vt_orderid: strategy
self.strategy_orderid_map = defaultdict(
set) # strategy_name: orderid list
self.stop_order_count = 0 # for generating stop_orderid
self.stop_orders = {} # stop_orderid: stop_order
self.init_executor = ThreadPoolExecutor(max_workers=1)
self.rq_client = None
self.rq_symbols = set()
self.vt_tradeids = set() # for filtering duplicate trade
self.offset_converter = OffsetConverter(self.main_engine)
def init_engine(self):
"""
"""
self.init_rqdata()
self.load_strategy_class()
self.load_strategy_setting()
self.load_strategy_data()
self.register_event()
self.write_log("CTA策略引擎初始化成功")
def close(self):
""""""
self.stop_all_strategies()
def register_event(self):
""""""
self.event_engine.register(EVENT_TICK, self.process_tick_event)
self.event_engine.register(EVENT_ORDER, self.process_order_event)
self.event_engine.register(EVENT_TRADE, self.process_trade_event)
self.event_engine.register(EVENT_POSITION, self.process_position_event)
def init_rqdata(self):
"""
Init RQData client.
"""
result = rqdata_client.init()
if result:
self.write_log("RQData数据接口初始化成功")
def query_bar_from_rq(
self, symbol: str, exchange: Exchange, interval: Interval, start: datetime, end: datetime
):
"""
Query bar data from RQData.
"""
req = HistoryRequest(
symbol=symbol,
exchange=exchange,
interval=interval,
start=start,
end=end
)
data = rqdata_client.query_history(req)
return data
def process_tick_event(self, event: Event):
""""""
tick = event.data
strategies = self.symbol_strategy_map[tick.vt_symbol]
if not strategies:
return
self.check_stop_order(tick)
for strategy in strategies:
if strategy.inited:
self.call_strategy_func(strategy, strategy.on_tick, tick)
def process_order_event(self, event: Event):
""""""
order = event.data
self.offset_converter.update_order(order)
strategy = self.orderid_strategy_map.get(order.vt_orderid, None)
if not strategy:
return
# Remove vt_orderid if order is no longer active.
vt_orderids = self.strategy_orderid_map[strategy.strategy_name]
if order.vt_orderid in vt_orderids and not order.is_active():
vt_orderids.remove(order.vt_orderid)
# For server stop order, call strategy on_stop_order function
if order.type == OrderType.STOP:
so = StopOrder(
vt_symbol=order.vt_symbol,
direction=order.direction,
offset=order.offset,
price=order.price,
volume=order.volume,
stop_orderid=order.vt_orderid,
strategy_name=strategy.strategy_name,
status=STOP_STATUS_MAP[order.status],
vt_orderids=[order.vt_orderid],
)
self.call_strategy_func(strategy, strategy.on_stop_order, so)
# Call strategy on_order function
self.call_strategy_func(strategy, strategy.on_order, order)
def process_trade_event(self, event: Event):
""""""
trade = event.data
# Filter duplicate trade push
if trade.vt_tradeid in self.vt_tradeids:
return
self.vt_tradeids.add(trade.vt_tradeid)
self.offset_converter.update_trade(trade)
strategy = self.orderid_strategy_map.get(trade.vt_orderid, None)
if not strategy:
return
# Update strategy pos before calling on_trade method
if trade.direction == Direction.LONG:
strategy.pos += trade.volume
else:
strategy.pos -= trade.volume
self.call_strategy_func(strategy, strategy.on_trade, trade)
# Sync strategy variables to data file
self.sync_strategy_data(strategy)
# Update GUI
self.put_strategy_event(strategy)
def process_position_event(self, event: Event):
""""""
position = event.data
self.offset_converter.update_position(position)
def check_stop_order(self, tick: TickData):
""""""
for stop_order in list(self.stop_orders.values()):
if stop_order.vt_symbol != tick.vt_symbol:
continue
long_triggered = (
stop_order.direction == Direction.LONG and tick.last_price >= stop_order.price
)
short_triggered = (
stop_order.direction == Direction.SHORT and tick.last_price <= stop_order.price
)
if long_triggered or short_triggered:
strategy = self.strategies[stop_order.strategy_name]
                # To get executed immediately after stop order is
# triggered, use limit price if available, otherwise
# use ask_price_5 or bid_price_5
if stop_order.direction == Direction.LONG:
if tick.limit_up:
price = tick.limit_up
else:
price = tick.ask_price_5
else:
if tick.limit_down:
price = tick.limit_down
else:
price = tick.bid_price_5
contract = self.main_engine.get_contract(stop_order.vt_symbol)
vt_orderids = self.send_limit_order(
strategy,
contract,
stop_order.direction,
stop_order.offset,
price,
stop_order.volume,
stop_order.lock
)
# Update stop order status if placed successfully
if vt_orderids:
# Remove from relation map.
self.stop_orders.pop(stop_order.stop_orderid)
strategy_vt_orderids = self.strategy_orderid_map[strategy.strategy_name]
if stop_order.stop_orderid in strategy_vt_orderids:
strategy_vt_orderids.remove(stop_order.stop_orderid)
                    # Change stop order status to triggered and update to strategy.
stop_order.status = StopOrderStatus.TRIGGERED
stop_order.vt_orderids = vt_orderids
self.call_strategy_func(
strategy, strategy.on_stop_order, stop_order
)
self.put_stop_order_event(stop_order)
def send_server_order(
self,
strategy: CtaTemplate,
contract: ContractData,
direction: Direction,
offset: Offset,
price: float,
volume: float,
type: OrderType,
lock: bool
):
"""
Send a new order to server.
"""
# Create request and send order.
original_req = OrderRequest(
symbol=contract.symbol,
exchange=contract.exchange,
direction=direction,
offset=offset,
type=type,
price=price,
volume=volume,
reference=f"{APP_NAME}_{strategy.strategy_name}"
)
# Convert with offset converter
req_list = self.offset_converter.convert_order_request(original_req, lock)
# Send Orders
vt_orderids = []
for req in req_list:
vt_orderid = self.main_engine.send_order(
req, contract.gateway_name)
# Check if sending order successful
if not vt_orderid:
continue
vt_orderids.append(vt_orderid)
self.offset_converter.update_order_request(req, vt_orderid)
# Save relationship between orderid and strategy.
self.orderid_strategy_map[vt_orderid] = strategy
self.strategy_orderid_map[strategy.strategy_name].add(vt_orderid)
return vt_orderids
def send_limit_order(
self,
strategy: CtaTemplate,
contract: ContractData,
direction: Direction,
offset: Offset,
price: float,
volume: float,
lock: bool
):
"""
Send a limit order to server.
"""
return self.send_server_order(
strategy,
contract,
direction,
offset,
price,
volume,
OrderType.LIMIT,
lock
)
def send_server_stop_order(
self,
strategy: CtaTemplate,
contract: ContractData,
direction: Direction,
offset: Offset,
price: float,
volume: float,
lock: bool
):
"""
Send a stop order to server.
Should only be used if stop order supported
on the trading server.
"""
return self.send_server_order(
strategy,
contract,
direction,
offset,
price,
volume,
OrderType.STOP,
lock
)
def send_local_stop_order(
self,
strategy: CtaTemplate,
direction: Direction,
offset: Offset,
price: float,
volume: float,
lock: bool
):
"""
Create a new local stop order.
"""
self.stop_order_count += 1
stop_orderid = f"{STOPORDER_PREFIX}.{self.stop_order_count}"
stop_order = StopOrder(
vt_symbol=strategy.vt_symbol,
direction=direction,
offset=offset,
price=price,
volume=volume,
stop_orderid=stop_orderid,
strategy_name=strategy.strategy_name,
lock=lock
)
self.stop_orders[stop_orderid] = stop_order
vt_orderids = self.strategy_orderid_map[strategy.strategy_name]
vt_orderids.add(stop_orderid)
self.call_strategy_func(strategy, strategy.on_stop_order, stop_order)
self.put_stop_order_event(stop_order)
return [stop_orderid]
def cancel_server_order(self, strategy: CtaTemplate, vt_orderid: str):
"""
Cancel existing order by vt_orderid.
"""
order = self.main_engine.get_order(vt_orderid)
if not order:
self.write_log(f"撤单失败,找不到委托{vt_orderid}", strategy)
return
req = order.create_cancel_request()
self.main_engine.cancel_order(req, order.gateway_name)
def cancel_local_stop_order(self, strategy: CtaTemplate, stop_orderid: str):
"""
Cancel a local stop order.
"""
stop_order = self.stop_orders.get(stop_orderid, None)
if not stop_order:
return
strategy = self.strategies[stop_order.strategy_name]
# Remove from relation map.
self.stop_orders.pop(stop_orderid)
vt_orderids = self.strategy_orderid_map[strategy.strategy_name]
if stop_orderid in vt_orderids:
vt_orderids.remove(stop_orderid)
# Change stop order status to cancelled and update to strategy.
stop_order.status = StopOrderStatus.CANCELLED
self.call_strategy_func(strategy, strategy.on_stop_order, stop_order)
self.put_stop_order_event(stop_order)
def send_order(
self,
strategy: CtaTemplate,
direction: Direction,
offset: Offset,
price: float,
volume: float,
stop: bool,
lock: bool
):
"""
"""
contract = self.main_engine.get_contract(strategy.vt_symbol)
if not contract:
self.write_log(f"委托失败,找不到合约:{strategy.vt_symbol}", strategy)
return ""
# Round order price and volume to nearest incremental value
price = round_to(price, contract.pricetick)
volume = round_to(volume, contract.min_volume)
if stop:
if contract.stop_supported:
return self.send_server_stop_order(strategy, contract, direction, offset, price, volume, lock)
else:
return self.send_local_stop_order(strategy, direction, offset, price, volume, lock)
else:
return self.send_limit_order(strategy, contract, direction, offset, price, volume, lock)
def cancel_order(self, strategy: CtaTemplate, vt_orderid: str):
"""
"""
if vt_orderid.startswith(STOPORDER_PREFIX):
self.cancel_local_stop_order(strategy, vt_orderid)
else:
self.cancel_server_order(strategy, vt_orderid)
def cancel_all(self, strategy: CtaTemplate):
"""
Cancel all active orders of a strategy.
"""
vt_orderids = self.strategy_orderid_map[strategy.strategy_name]
if not vt_orderids:
return
for vt_orderid in copy(vt_orderids):
self.cancel_order(strategy, vt_orderid)
def get_engine_type(self):
""""""
return self.engine_type
def get_pricetick(self, strategy: CtaTemplate):
"""
Return contract pricetick data.
"""
contract = self.main_engine.get_contract(strategy.vt_symbol)
if contract:
return contract.pricetick
else:
return None
def load_bar(
self,
vt_symbol: str,
days: int,
interval: Interval,
callback: Callable[[BarData], None],
use_database: bool
):
""""""
symbol, exchange = extract_vt_symbol(vt_symbol)
end = datetime.now(get_localzone())
start = end - timedelta(days)
bars = []
# Pass gateway and RQData if use_database set to True
if not use_database:
# Query bars from gateway if available
contract = self.main_engine.get_contract(vt_symbol)
if contract and contract.history_data:
req = HistoryRequest(
symbol=symbol,
exchange=exchange,
interval=interval,
start=start,
end=end
)
bars = self.main_engine.query_history(req, contract.gateway_name)
# Try to query bars from RQData, if not found, load from database.
else:
bars = self.query_bar_from_rq(symbol, exchange, interval, start, end)
if not bars:
bars = database_manager.load_bar_data(
symbol=symbol,
exchange=exchange,
interval=interval,
start=start,
end=end,
)
for bar in bars:
callback(bar)
def load_tick(
self,
vt_symbol: str,
days: int,
callback: Callable[[TickData], None]
):
""""""
symbol, exchange = extract_vt_symbol(vt_symbol)
end = datetime.now()
start = end - timedelta(days)
ticks = database_manager.load_tick_data(
symbol=symbol,
exchange=exchange,
start=start,
end=end,
)
for tick in ticks:
callback(tick)
def call_strategy_func(
self, strategy: CtaTemplate, func: Callable, params: Any = None
):
"""
Call function of a strategy and catch any exception raised.
"""
try:
if params:
func(params)
else:
func()
except Exception:
strategy.trading = False
strategy.inited = False
msg = f"触发异常已停止\n{traceback.format_exc()}"
self.write_log(msg, strategy)
def add_strategy(
self, class_name: str, strategy_name: str, vt_symbol: str, setting: dict
):
"""
Add a new strategy.
"""
if strategy_name in self.strategies:
self.write_log(f"创建策略失败,存在重名{strategy_name}")
return
strategy_class = self.classes.get(class_name, None)
if not strategy_class:
self.write_log(f"创建策略失败,找不到策略类{class_name}")
return
if "." not in vt_symbol:
self.write_log("创建策略失败,本地代码缺失交易所后缀")
return
_, exchange_str = vt_symbol.split(".")
if exchange_str not in Exchange.__members__:
self.write_log("创建策略失败,本地代码的交易所后缀不正确")
return
strategy = strategy_class(self, strategy_name, vt_symbol, setting)
self.strategies[strategy_name] = strategy
# Add vt_symbol to strategy map.
strategies = self.symbol_strategy_map[vt_symbol]
strategies.append(strategy)
# Update to setting file.
self.update_strategy_setting(strategy_name, setting)
self.put_strategy_event(strategy)
def init_strategy(self, strategy_name: str):
"""
Init a strategy.
"""
self.init_executor.submit(self._init_strategy, strategy_name)
def _init_strategy(self, strategy_name: str):
"""
Init strategies in queue.
"""
strategy = self.strategies[strategy_name]
if strategy.inited:
self.write_log(f"{strategy_name}已经完成初始化,禁止重复操作")
return
self.write_log(f"{strategy_name}开始执行初始化")
# Call on_init function of strategy
self.call_strategy_func(strategy, strategy.on_init)
# Restore strategy data(variables)
data = self.strategy_data.get(strategy_name, None)
if data:
for name in strategy.variables:
value = data.get(name, None)
if value:
setattr(strategy, name, value)
# Subscribe market data
contract = self.main_engine.get_contract(strategy.vt_symbol)
if contract:
req = SubscribeRequest(
symbol=contract.symbol, exchange=contract.exchange)
self.main_engine.subscribe(req, contract.gateway_name)
else:
self.write_log(f"行情订阅失败,找不到合约{strategy.vt_symbol}", strategy)
# Put event to update init completed status.
strategy.inited = True
self.put_strategy_event(strategy)
self.write_log(f"{strategy_name}初始化完成")
def start_strategy(self, strategy_name: str):
"""
Start a strategy.
"""
strategy = self.strategies[strategy_name]
if not strategy.inited:
self.write_log(f"策略{strategy.strategy_name}启动失败,请先初始化")
return
if strategy.trading:
self.write_log(f"{strategy_name}已经启动,请勿重复操作")
return
self.call_strategy_func(strategy, strategy.on_start)
strategy.trading = True
self.put_strategy_event(strategy)
def stop_strategy(self, strategy_name: str):
"""
Stop a strategy.
"""
strategy = self.strategies[strategy_name]
if not strategy.trading:
return
# Call on_stop function of the strategy
self.call_strategy_func(strategy, strategy.on_stop)
# Change trading status of strategy to False
strategy.trading = False
# Cancel all orders of the strategy
self.cancel_all(strategy)
# Sync strategy variables to data file
self.sync_strategy_data(strategy)
# Update GUI
self.put_strategy_event(strategy)
def edit_strategy(self, strategy_name: str, setting: dict):
"""
Edit parameters of a strategy.
"""
strategy = self.strategies[strategy_name]
strategy.update_setting(setting)
self.update_strategy_setting(strategy_name, setting)
self.put_strategy_event(strategy)
def remove_strategy(self, strategy_name: str):
"""
Remove a strategy.
"""
strategy = self.strategies[strategy_name]
if strategy.trading:
self.write_log(f"策略{strategy.strategy_name}移除失败,请先停止")
return
# Remove setting
self.remove_strategy_setting(strategy_name)
# Remove from symbol strategy map
strategies = self.symbol_strategy_map[strategy.vt_symbol]
strategies.remove(strategy)
# Remove from active orderid map
if strategy_name in self.strategy_orderid_map:
vt_orderids = self.strategy_orderid_map.pop(strategy_name)
# Remove vt_orderid strategy map
for vt_orderid in vt_orderids:
if vt_orderid in self.orderid_strategy_map:
self.orderid_strategy_map.pop(vt_orderid)
# Remove from strategies
self.strategies.pop(strategy_name)
return True
def load_strategy_class(self):
"""
Load strategy class from source code.
"""
path1 = Path(__file__).parent.joinpath("strategies")
self.load_strategy_class_from_folder(
path1, "vnpy.app.cta_strategy.strategies")
path2 = Path.cwd().joinpath("strategies")
self.load_strategy_class_from_folder(path2, "strategies")
def load_strategy_class_from_folder(self, path: Path, module_name: str = ""):
"""
Load strategy class from certain folder.
"""
for dirpath, dirnames, filenames in os.walk(str(path)):
for filename in filenames:
if filename.split(".")[-1] in ("py", "pyd", "so"):
strategy_module_name = ".".join([module_name, filename.split(".")[0]])
self.load_strategy_class_from_module(strategy_module_name)
def load_strategy_class_from_module(self, module_name: str):
"""
Load strategy class from module file.
"""
try:
module = importlib.import_module(module_name)
for name in dir(module):
value = getattr(module, name)
if (isinstance(value, type) and issubclass(value, CtaTemplate) and value is not CtaTemplate):
self.classes[value.__name__] = value
except: # noqa
msg = f"策略文件{module_name}加载失败,触发异常:\n{traceback.format_exc()}"
self.write_log(msg)
def load_strategy_data(self):
"""
Load strategy data from json file.
"""
self.strategy_data = load_json(self.data_filename)
def sync_strategy_data(self, strategy: CtaTemplate):
"""
Sync strategy data into json file.
"""
data = strategy.get_variables()
data.pop("inited") # Strategy status (inited, trading) should not be synced.
data.pop("trading")
self.strategy_data[strategy.strategy_name] = data
save_json(self.data_filename, self.strategy_data)
def get_all_strategy_class_names(self):
"""
Return names of strategy classes loaded.
"""
return list(self.classes.keys())
def get_strategy_class_parameters(self, class_name: str):
"""
Get default parameters of a strategy class.
"""
strategy_class = self.classes[class_name]
parameters = {}
for name in strategy_class.parameters:
parameters[name] = getattr(strategy_class, name)
return parameters
def get_strategy_parameters(self, strategy_name):
"""
Get parameters of a strategy.
"""
strategy = self.strategies[strategy_name]
return strategy.get_parameters()
def init_all_strategies(self):
"""
"""
for strategy_name in self.strategies.keys():
self.init_strategy(strategy_name)
def start_all_strategies(self):
"""
"""
for strategy_name in self.strategies.keys():
self.start_strategy(strategy_name)
def stop_all_strategies(self):
"""
"""
for strategy_name in self.strategies.keys():
self.stop_strategy(strategy_name)
def load_strategy_setting(self):
"""
Load setting file.
"""
self.strategy_setting = load_json(self.setting_filename)
for strategy_name, strategy_config in self.strategy_setting.items():
self.add_strategy(
strategy_config["class_name"],
strategy_name,
strategy_config["vt_symbol"],
strategy_config["setting"]
)
def update_strategy_setting(self, strategy_name: str, setting: dict):
"""
Update setting file.
"""
strategy = self.strategies[strategy_name]
self.strategy_setting[strategy_name] = {
"class_name": strategy.__class__.__name__,
"vt_symbol": strategy.vt_symbol,
"setting": setting,
}
save_json(self.setting_filename, self.strategy_setting)
def remove_strategy_setting(self, strategy_name: str):
"""
Update setting file.
"""
if strategy_name not in self.strategy_setting:
return
self.strategy_setting.pop(strategy_name)
save_json(self.setting_filename, self.strategy_setting)
def put_stop_order_event(self, stop_order: StopOrder):
"""
Put an event to update stop order status.
"""
event = Event(EVENT_CTA_STOPORDER, stop_order)
self.event_engine.put(event)
def put_strategy_event(self, strategy: CtaTemplate):
"""
Put an event to update strategy status.
"""
data = strategy.get_data()
event = Event(EVENT_CTA_STRATEGY, data)
self.event_engine.put(event)
def write_log(self, msg: str, strategy: CtaTemplate = None):
"""
Create cta engine log event.
"""
if strategy:
msg = f"{strategy.strategy_name}: {msg}"
log = LogData(msg=msg, gateway_name=APP_NAME)
event = Event(type=EVENT_CTA_LOG, data=log)
self.event_engine.put(event)
def send_email(self, msg: str, strategy: CtaTemplate = None):
"""
Send email to default receiver.
"""
if strategy:
subject = f"{strategy.strategy_name}"
else:
subject = "CTA策略引擎"
self.main_engine.send_email(subject, msg)
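# Minimal bootstrap sketch (not part of this module): wiring the engine to a
# MainEngine/EventEngine pair and running a single strategy. The strategy
# class name, symbol and settings below are assumptions.
#
#   from vnpy.event import EventEngine
#   from vnpy.trader.engine import MainEngine
#
#   event_engine = EventEngine()
#   main_engine = MainEngine(event_engine)
#   cta_engine = CtaEngine(main_engine, event_engine)
#   cta_engine.init_engine()          # load classes, settings and saved data
#   cta_engine.add_strategy(
#       class_name="AtrRsiStrategy",
#       strategy_name="atr_rsi_demo",
#       vt_symbol="IF2106.CFFEX",
#       setting={},
#   )
#   cta_engine.init_strategy("atr_rsi_demo")   # runs asynchronously
#   cta_engine.start_strategy("atr_rsi_demo")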
|
{
"content_hash": "0f219c9541c3c19701e0c871c54f174a",
"timestamp": "",
"source": "github",
"line_count": 937,
"max_line_length": 110,
"avg_line_length": 30.76947705442903,
"alnum_prop": 0.5719537997294579,
"repo_name": "bigdig/vnpy",
"id": "64bbee9664eb90b3f9ca8b1c6289d59007083677",
"size": "29227",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vnpy/app/cta_strategy/engine.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "390"
},
{
"name": "C",
"bytes": "1652953"
},
{
"name": "C++",
"bytes": "13737810"
},
{
"name": "Objective-C",
"bytes": "1200"
},
{
"name": "Python",
"bytes": "2979947"
},
{
"name": "Shell",
"bytes": "6050"
}
],
"symlink_target": ""
}
|
from bigbuild.models import PageList
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Tests if page directories are valid'
def handle(self, *args, **options):
page_list = PageList()
self.stdout.write(
self.style.SUCCESS('All %s pages are valid' % len(page_list))
)
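# Usage sketch (assuming 'bigbuild' is installed in the Django project): the
# command runs like any other management command, e.g.
#
#   $ python manage.py validatepages
#   All 12 pages are valid
#
# The success line is only printed if PageList() constructs without raising,
# so an invalid page directory surfaces as a traceback instead.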
|
{
"content_hash": "09d35e73b01e5eb3e3e66ad861ce4d2e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 73,
"avg_line_length": 29.25,
"alnum_prop": 0.6638176638176638,
"repo_name": "datadesk/django-bigbuild",
"id": "717447d095e0e92f9fce176db1239b337e79364a",
"size": "397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bigbuild/management/commands/validatepages.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9999"
},
{
"name": "HTML",
"bytes": "169360"
},
{
"name": "JavaScript",
"bytes": "470862"
},
{
"name": "Makefile",
"bytes": "383"
},
{
"name": "Python",
"bytes": "84514"
}
],
"symlink_target": ""
}
|
import setuptools
import os.path
import datetime
setuptools.setup(
name = "doublecheck",
version = "0.1pre" + datetime.datetime.utcnow().replace(microsecond=0).isoformat(),
url = "https://github.com/kennknowles/python-doublecheck",
maintainer = "Kenn Knowles",
maintainer_email = "kenn.knowles@gmail.com",
license = 'Apache 2.0',
packages = setuptools.find_packages(),
description = "Pythonic library for QuickCheck-style randomized testing and SmallCheck-style exhaustive testing of the same test suite.",
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Software Development :: Testing",
"License :: OSI Approved :: Apache Software License",
],
)
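# Illustrative note (not part of the original file): because the version is
# stamped at build time, a package built at 2013-05-01 12:00:00 UTC would
# carry a version string like "0.1pre2013-05-01T12:00:00".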
|
{
"content_hash": "54befd9fc13e1c849d1fbd5adb7c4fa0",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 141,
"avg_line_length": 40.75,
"alnum_prop": 0.6785276073619632,
"repo_name": "kennknowles/python-doublecheck",
"id": "eb7b6f05925faa046972f2a52d38e5fe5db847a4",
"size": "815",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14743"
}
],
"symlink_target": ""
}
|
import fileinput, optparse, sys
# Command-line parser
parser = optparse.OptionParser(
usage="""
%prog [options] [files]
Expand tabs to spaces, printing to the standard output by default.
When no files are given, read from the standard input.
Examples:
expand in one file
% expand_tabs.py -t 4 file.txt
expand tabs in Python source files
% find . -name "*.py" | xargs expand_tabs.py -it 4
""".strip(),
formatter=optparse.IndentedHelpFormatter(max_help_position=30)
)
parser.add_option("-t", "--tabsize", type="int", metavar="SIZE")
parser.add_option("-i", "--inplace", action="store_true", help="change the files in-place (don't print)")
parser.add_option("-b", "--backupext", default="", metavar="EXT", help="backup extension to use (default: no backup)")
options, args = parser.parse_args()
if options.tabsize is None:
parser.error("tab size not specified")
# Do the work
for line in fileinput.input(files=args, inplace=options.inplace, backup=options.backupext):
sys.stdout.write( line.expandtabs(options.tabsize) )
|
{
"content_hash": "e7cc1521ab239d25537db5de62877f6e",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 118,
"avg_line_length": 36.55172413793103,
"alnum_prop": 0.7047169811320755,
"repo_name": "ActiveState/code",
"id": "014ba47ef8b4a0abb152e703b7757be40f5add87",
"size": "1082",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipes/Python/498179_expandtabspy__exptabs_files/recipe-498179.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "35894"
},
{
"name": "C",
"bytes": "56048"
},
{
"name": "C++",
"bytes": "90880"
},
{
"name": "HTML",
"bytes": "11656"
},
{
"name": "Java",
"bytes": "57468"
},
{
"name": "JavaScript",
"bytes": "181218"
},
{
"name": "PHP",
"bytes": "250144"
},
{
"name": "Perl",
"bytes": "37296"
},
{
"name": "Perl 6",
"bytes": "9914"
},
{
"name": "Python",
"bytes": "17387779"
},
{
"name": "Ruby",
"bytes": "40233"
},
{
"name": "Shell",
"bytes": "190732"
},
{
"name": "Tcl",
"bytes": "674650"
}
],
"symlink_target": ""
}
|
import logging
import os
from pathlib import Path
from atom.api import Event
# Set up a verbose debugger level for tracing
TRACE_LEVEL_NUM = 5
logging.addLevelName(TRACE_LEVEL_NUM, "TRACE")
def trace(self, message, *args, **kws):
# Yes, logger takes its '*args' as 'args'.
if self.isEnabledFor(TRACE_LEVEL_NUM):
self._log(TRACE_LEVEL_NUM, message, args, **kws)
logging.Logger.trace = trace
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
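# Illustrative note (not part of the original module): with the TRACE level
# registered above, every logger gains a ``trace`` method, e.g.
# ``log.trace('entering %s', 'main loop')``, which only emits when the
# effective logging level is TRACE_LEVEL_NUM (5) or lower.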
# Flag indicating whether the user configuration file was loaded.
CONFIG_LOADED = False
exclude = ['_d_storage', '_d_engine', '_flags', '_parent', '_children']
class SimpleState(object):
def __getstate__(self):
state = super(SimpleState, self).__getstate__()
for k, v in self.members().items():
if isinstance(v, Event):
del state[k]
elif k in exclude:
del state[k]
elif v.metadata and v.metadata.get('transient', False):
del state[k]
return state
def __setstate__(self, state):
for key, value in state.items():
setattr(self, key, value)
def get_config_folder():
user_path = Path('~') / 'psi'
return user_path.expanduser()
def get_config_file():
default = get_config_folder() / 'config.py'
return Path(os.environ.get('PSI_CONFIG', default))
def create_config(base_directory=None):
config_template = Path(__file__).parent / 'templates' / 'config.txt'
target = get_config_file()
target.parent.mkdir(exist_ok=True, parents=True)
if base_directory is None:
base_directory = str(target.parent)
config_text = config_template.read_text()
config_text = config_text.format(base_directory)
target.write_text(config_text)
def create_io_manifest(template):
io_template = Path(__file__).parent / 'templates' / 'io' / template
io_template = io_template.with_suffix('.enaml')
io = Path(get_config('IO_ROOT')) / template
io = io.with_suffix('.enaml')
io.parent.mkdir(exist_ok=True, parents=True)
io_text = io_template.read_text()
io.write_text(io_text)
def create_config_dirs():
config = load_config()
for name, value in vars(config).items():
if name.endswith('_ROOT'):
Path(value).mkdir(exist_ok=True, parents=True)
def load_config():
# Load the default settings
global CONFIG_LOADED
import importlib.util
from os import environ
from . import config
config_path = get_config_file()
if config_path.exists():
try:
spec = importlib.util.spec_from_file_location('settings', config_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
for name, value in vars(module).items():
if name == name.upper():
setattr(config, name, value)
CONFIG_LOADED = True
except Exception as e:
log.exception(e)
for name, value in vars(config).items():
if name == name.upper():
log.debug('CONFIG %s : %r', name, value)
return config
_config = load_config()
def set_config(setting, value):
'''
Set value of setting
'''
setattr(_config, setting, value)
CFG_ERR_MESG = '''
Could not find setting "{}" in configuration. This may be because the
configuration file is missing. Please run psi-config to create it.
'''
def get_config(setting=None):
'''
Get value of setting
'''
if setting is not None:
try:
return getattr(_config, setting)
except AttributeError as e:
if CONFIG_LOADED:
raise
mesg = CFG_ERR_MESG.strip().format(setting)
raise SystemError(mesg) from e
else:
setting_names = [s for s in dir(_config) if s.upper() == s]
setting_values = [getattr(_config, s) for s in setting_names]
return dict(zip(setting_names, setting_values))
# Monkeypatch built-in JSON library to better handle special types. The
# json-tricks library handles quite a few different types of Python objects
# fairly well. This ensures that third-party libraries (e.g., bcolz) that see
# psiexperiment data structures can properly deal with them.
import json
import json_tricks
for fn_name in ('dump', 'dumps', 'load', 'loads'):
fn = getattr(json_tricks, fn_name)
setattr(json, fn_name, fn)
log.debug('Monkeypatched system JSON')
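# --- Illustrative usage sketch (not part of the original module) ---
# Settings are plain UPPERCASE attributes on the loaded config, read and
# written through get_config()/set_config(); 'IO_ROOT' is one of the *_ROOT
# settings referenced above, and the path used here is made up:
#
#     from psi import get_config, set_config
#     io_root = get_config('IO_ROOT')       # read one setting
#     set_config('IO_ROOT', '/tmp/psi-io')  # override it at runtime
#     everything = get_config()             # dict of every UPPERCASE setting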
|
{
"content_hash": "aa22f9ca55f78ec4b84c5ed21ce9591d",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 82,
"avg_line_length": 29.71794871794872,
"alnum_prop": 0.6084987057808455,
"repo_name": "bburan/psiexperiment",
"id": "3a29bfa9ca80ad21d74b8dc151329fa5ecd6e1b0",
"size": "4636",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "psi/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "468917"
}
],
"symlink_target": ""
}
|
from unittest.mock import MagicMock, patch
import orjson
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import Message, Stream, get_realm, get_user
class TestIntegrationsDevPanel(ZulipTestCase):
zulip_realm = get_realm("zulip")
def test_check_send_webhook_fixture_message_for_error(self) -> None:
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
url = f"/api/v1/external/airbrake?api_key={bot.api_key}"
target_url = "/devtools/integrations/check_send_webhook_fixture_message"
body = "{}" # This empty body should generate a KeyError on the webhook code side.
data = {
"url": url,
"body": body,
"custom_headers": "{}",
"is_json": "true",
}
with self.assertLogs(level="ERROR") as logs:
response = self.client_post(target_url, data)
self.assertEqual(response.status_code, 500) # Since the response would be forwarded.
expected_response = {"result": "error", "msg": "Internal server error"}
self.assertEqual(orjson.loads(response.content), expected_response)
            # The intent of this test is to trigger a KeyError, so we just
            # check that a KeyError and a traceback appear in the logs.
self.assertTrue("KeyError" in logs.output[0])
self.assertTrue("Traceback (most recent call last)" in logs.output[0])
self.assertEqual(logs.output[1], "ERROR:django.request:Internal Server Error: /api/v1/external/airbrake")
def test_check_send_webhook_fixture_message_for_success_without_headers(self) -> None:
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
url = f"/api/v1/external/airbrake?api_key={bot.api_key}&stream=Denmark&topic=Airbrake Notifications"
target_url = "/devtools/integrations/check_send_webhook_fixture_message"
with open("zerver/webhooks/airbrake/fixtures/error_message.json") as f:
body = f.read()
data = {
"url": url,
"body": body,
"custom_headers": "{}",
"is_json": "true",
}
response = self.client_post(target_url, data)
expected_response = {'responses': [{'status_code': 200, 'message': {"result": "success", "msg": ""}}], 'result': 'success', 'msg': ''}
response_content = orjson.loads(response.content)
response_content["responses"][0]["message"] = orjson.loads(response_content["responses"][0]["message"])
self.assertEqual(response.status_code, 200)
self.assertEqual(response_content, expected_response)
latest_msg = Message.objects.latest('id')
expected_message = "[ZeroDivisionError](https://zulip.airbrake.io/projects/125209/groups/1705190192091077626): \"Error message from logger\" occurred."
self.assertEqual(latest_msg.content, expected_message)
self.assertEqual(Stream.objects.get(id=latest_msg.recipient.type_id).name, "Denmark")
self.assertEqual(latest_msg.topic_name(), "Airbrake Notifications")
def test_check_send_webhook_fixture_message_for_success_with_headers(self) -> None:
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
url = f"/api/v1/external/github?api_key={bot.api_key}&stream=Denmark&topic=GitHub Notifications"
target_url = "/devtools/integrations/check_send_webhook_fixture_message"
with open("zerver/webhooks/github/fixtures/ping__organization.json") as f:
body = f.read()
data = {
"url": url,
"body": body,
"custom_headers": orjson.dumps({"X_GITHUB_EVENT": "ping"}).decode(),
"is_json": "true",
}
response = self.client_post(target_url, data)
self.assertEqual(response.status_code, 200)
latest_msg = Message.objects.latest('id')
expected_message = "GitHub webhook has been successfully configured by eeshangarg."
self.assertEqual(latest_msg.content, expected_message)
self.assertEqual(Stream.objects.get(id=latest_msg.recipient.type_id).name, "Denmark")
self.assertEqual(latest_msg.topic_name(), "GitHub Notifications")
def test_check_send_webhook_fixture_message_for_success_with_headers_and_non_json_fixtures(self) -> None:
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
url = f"/api/v1/external/wordpress?api_key={bot.api_key}&stream=Denmark&topic=WordPress Notifications"
target_url = "/devtools/integrations/check_send_webhook_fixture_message"
with open("zerver/webhooks/wordpress/fixtures/publish_post_no_data_provided.txt") as f:
body = f.read()
data = {
"url": url,
"body": body,
"custom_headers": orjson.dumps({"Content-Type": "application/x-www-form-urlencoded"}).decode(),
"is_json": "false",
}
response = self.client_post(target_url, data)
self.assertEqual(response.status_code, 200)
latest_msg = Message.objects.latest('id')
expected_message = "New post published:\n* [New WordPress Post](WordPress Post URL)"
self.assertEqual(latest_msg.content, expected_message)
self.assertEqual(Stream.objects.get(id=latest_msg.recipient.type_id).name, "Denmark")
self.assertEqual(latest_msg.topic_name(), "WordPress Notifications")
def test_get_fixtures_for_nonexistant_integration(self) -> None:
target_url = "/devtools/integrations/somerandomnonexistantintegration/fixtures"
response = self.client_get(target_url)
expected_response = {'msg': '"somerandomnonexistantintegration" is not a valid webhook integration.', 'result': 'error'}
self.assertEqual(response.status_code, 404)
self.assertEqual(orjson.loads(response.content), expected_response)
@patch("zerver.views.development.integrations.os.path.exists")
def test_get_fixtures_for_integration_without_fixtures(self, os_path_exists_mock: MagicMock) -> None:
os_path_exists_mock.return_value = False
target_url = "/devtools/integrations/airbrake/fixtures"
response = self.client_get(target_url)
expected_response = {'msg': 'The integration "airbrake" does not have fixtures.', 'result': 'error'}
self.assertEqual(response.status_code, 404)
self.assertEqual(orjson.loads(response.content), expected_response)
def test_get_fixtures_for_success(self) -> None:
target_url = "/devtools/integrations/airbrake/fixtures"
response = self.client_get(target_url)
self.assertEqual(response.status_code, 200)
self.assertIsNotNone(orjson.loads(response.content)["fixtures"])
def test_get_dev_panel_page(self) -> None:
# Just to satisfy the test suite.
target_url = "/devtools/integrations/"
response = self.client_get(target_url)
self.assertEqual(response.status_code, 200)
def test_send_all_webhook_fixture_messages_for_success(self) -> None:
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
url = f"/api/v1/external/appfollow?api_key={bot.api_key}&stream=Denmark&topic=Appfollow Bulk Notifications"
target_url = "/devtools/integrations/send_all_webhook_fixture_messages"
data = {
"url": url,
"custom_headers": "{}",
"integration_name": "appfollow",
}
response = self.client_post(target_url, data)
expected_responses = [
{
"fixture_name": "sample.json",
"status_code": 200,
"message": {"msg": "", "result": "success"},
},
{
"fixture_name": "review.json",
"status_code": 200,
"message": {"msg": "", "result": "success"},
},
]
responses = orjson.loads(response.content)["responses"]
for r in responses:
r["message"] = orjson.loads(r["message"])
self.assertEqual(response.status_code, 200)
for r in responses:
# We have to use this roundabout manner since the order may vary each time.
# This is not an issue.
self.assertTrue(r in expected_responses)
expected_responses.remove(r)
new_messages = Message.objects.order_by('-id')[0:2]
expected_messages = ["Webhook integration was successful.\nTest User / Acme (Google Play)", "Acme - Group chat\nApp Store, Acme Technologies, Inc.\n★★★★★ United States\n**Great for Information Management**\nAcme enables me to manage the flow of information quite well. I only wish I could create and edit my Acme Post files in the iOS app.\n*by* **Mr RESOLUTIONARY** *for v3.9*\n[Permalink](http://appfollow.io/permalink) · [Add tag](http://watch.appfollow.io/add_tag)"]
for msg in new_messages:
# new_messages -> expected_messages or expected_messages -> new_messages shouldn't make
# a difference since equality is commutative.
self.assertTrue(msg.content in expected_messages)
expected_messages.remove(msg.content)
self.assertEqual(Stream.objects.get(id=msg.recipient.type_id).name, "Denmark")
self.assertEqual(msg.topic_name(), "Appfollow Bulk Notifications")
def test_send_all_webhook_fixture_messages_for_success_with_non_json_fixtures(self) -> None:
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
url = f"/api/v1/external/wordpress?api_key={bot.api_key}&stream=Denmark&topic=WordPress Bulk Notifications"
target_url = "/devtools/integrations/send_all_webhook_fixture_messages"
data = {
"url": url,
"custom_headers": "{}",
"integration_name": "wordpress",
}
response = self.client_post(target_url, data)
expected_responses = [
{
"message": {'msg': 'Unknown WordPress webhook action: WordPress Action', 'result': 'error'},
"fixture_name": "user_register.txt",
"status_code": 400,
},
{
"message": {'msg': 'Unknown WordPress webhook action: WordPress Action', 'result': 'error'},
"fixture_name": "publish_post_no_data_provided.txt",
"status_code": 400,
},
{
"message": {'msg': 'Unknown WordPress webhook action: WordPress Action', 'result': 'error'},
"fixture_name": "unknown_action_no_data.txt",
"status_code": 400,
},
{
"message": {'msg': 'Unknown WordPress webhook action: WordPress Action', 'result': 'error'},
"fixture_name": "publish_page.txt",
"status_code": 400,
},
{
"message": {'msg': 'Unknown WordPress webhook action: WordPress Action', 'result': 'error'},
"fixture_name": "unknown_action_no_hook_provided.txt",
"status_code": 400,
},
{
"message": {'msg': 'Unknown WordPress webhook action: WordPress Action', 'result': 'error'},
"fixture_name": "publish_post_type_not_provided.txt",
"status_code": 400,
},
{
"message": {'msg': 'Unknown WordPress webhook action: WordPress Action', 'result': 'error'},
"fixture_name": "wp_login.txt",
"status_code": 400,
},
{
"message": {'msg': 'Unknown WordPress webhook action: WordPress Action', 'result': 'error'},
"fixture_name": "publish_post.txt",
"status_code": 400,
},
]
responses = orjson.loads(response.content)["responses"]
for r in responses:
r["message"] = orjson.loads(r["message"])
self.assertEqual(response.status_code, 200)
for r in responses:
# We have to use this roundabout manner since the order may vary each time. This is not
# an issue. Basically, we're trying to compare 2 lists and since we're not resorting to
# using sets or a sorted order, we're sticking with O(n*m) time complexity for this
# comparison (where n and m are the lengths of the two lists respectively). But since
# this is just a unit test and more importantly n = m = some-low-number we don't really
# care about the time complexity being what it is.
self.assertTrue(r in expected_responses)
expected_responses.remove(r)
@patch("zerver.views.development.integrations.os.path.exists")
def test_send_all_webhook_fixture_messages_for_missing_fixtures(self, os_path_exists_mock: MagicMock) -> None:
os_path_exists_mock.return_value = False
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
url = f"/api/v1/external/appfollow?api_key={bot.api_key}&stream=Denmark&topic=Appfollow Bulk Notifications"
data = {
"url": url,
"custom_headers": "{}",
"integration_name": "appfollow",
}
response = self.client_post("/devtools/integrations/send_all_webhook_fixture_messages", data)
expected_response = {'msg': 'The integration "appfollow" does not have fixtures.', 'result': 'error'}
self.assertEqual(response.status_code, 404)
self.assertEqual(orjson.loads(response.content), expected_response)
|
{
"content_hash": "54887d7a74ff25a8ecc187fa3cc13505",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 478,
"avg_line_length": 51.28030303030303,
"alnum_prop": 0.6193677057172403,
"repo_name": "showell/zulip",
"id": "bd2155402ee44cc394061c6dfffcf6f023524c46",
"size": "13549",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zerver/tests/test_integrations_dev_panel.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "433235"
},
{
"name": "Dockerfile",
"bytes": "2941"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "634357"
},
{
"name": "Handlebars",
"bytes": "235334"
},
{
"name": "JavaScript",
"bytes": "3341135"
},
{
"name": "Perl",
"bytes": "8594"
},
{
"name": "Puppet",
"bytes": "79720"
},
{
"name": "Python",
"bytes": "8120030"
},
{
"name": "Ruby",
"bytes": "8480"
},
{
"name": "Shell",
"bytes": "133132"
},
{
"name": "TypeScript",
"bytes": "20603"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns
from django.conf.urls import url
from rest_framework_swagger.views import SwaggerResourcesView, SwaggerApiView, SwaggerUIView, \
SwaggerEditorView, SwaggerAPISettingView, SaveSwaggerAPISettingView, ManageSwaggerEditorView, \
SwaggerAPIJsonView
urlpatterns = patterns(
'',
url(r'^$', SwaggerUIView.as_view(), name="django.swagger.base.view"),
url(r'^editor/$', SwaggerEditorView.as_view(), name="django.swagger.editor.view"),
url(r'^manage-editor/$', ManageSwaggerEditorView.as_view(), name="django.swagger.manage.editor.view"),
url(r'^api-setting/$', SwaggerAPISettingView.as_view(), name="django.swagger.api.setting.view"),
url(r'^save-api-setting/$', SaveSwaggerAPISettingView.as_view(), name="django.swagger.save.api.setting.view"),
url(r'^api-docs/$', SwaggerResourcesView.as_view(), name="django.swagger.resources.view"),
url(r'^api-docs/(?P<path>.*)/?$', SwaggerApiView.as_view(), name='django.swagger.api.view'),
url(r'^api-json/(?P<id>.*)/?$', SwaggerAPIJsonView.as_view(), name='django.swagger.api.json.view'),
)
|
{
"content_hash": "5cae154936f491f6fb200221127dd050",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 114,
"avg_line_length": 60.94444444444444,
"alnum_prop": 0.7283500455788514,
"repo_name": "yoku2010/api-specification-system",
"id": "931b376f38cdc357ca59f6f38c0a5a063530def4",
"size": "1097",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apiss/rest_framework_swagger/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "49289"
},
{
"name": "HTML",
"bytes": "12976"
},
{
"name": "JavaScript",
"bytes": "353178"
},
{
"name": "Python",
"bytes": "201566"
}
],
"symlink_target": ""
}
|
"""
WSGI config for opencodereview project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencodereview.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
{
"content_hash": "e4ff66b9e4c98c04e3ebbd08a8e5ef9a",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 78,
"avg_line_length": 27.555555555555557,
"alnum_prop": 0.7963709677419355,
"repo_name": "lukasmartinelli/opencodereview",
"id": "5ef8d25db4e8d4ff0d4be810140563f19d6516b7",
"size": "496",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opencodereview/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3273"
},
{
"name": "HTML",
"bytes": "14014"
},
{
"name": "Python",
"bytes": "13847"
},
{
"name": "Shell",
"bytes": "215"
}
],
"symlink_target": ""
}
|
"""## Functions for copying elements from one graph to another.
These functions allow for recursive copying of elements (ops and variables)
from one graph to another. The copied elements are initialized inside a
user-specified scope in the other graph. There are separate functions to
copy ops and variables.
There is also a function to retrieve the copied version of an op from the
first graph inside a scope in the second graph.
@@copy_op_to_graph
@@copy_variable_to_graph
@@get_copied_op
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from copy import deepcopy
from tensorflow.python.ops.variables import Variable
from tensorflow.python.client.session import Session
from tensorflow.python.framework import ops
__all__ = ['copy_op_to_graph', 'copy_variable_to_graph', 'get_copied_op']
def copy_variable_to_graph(org_instance, to_graph, scope=''):
"""Given a `Variable` instance from one `Graph`, initializes and returns
a copy of it from another `Graph`, under the specified scope
(default `""`).
Args:
org_instance: A `Variable` from some `Graph`.
to_graph: The `Graph` to copy the `Variable` to.
scope: A scope for the new `Variable` (default `""`).
Returns:
The copied `Variable` from `to_graph`.
Raises:
TypeError: If `org_instance` is not a `Variable`.
"""
if not isinstance(org_instance, Variable):
raise TypeError(str(org_instance) + ' is not a Variable')
#The name of the new variable
if scope != '':
new_name = (scope + '/' + org_instance.name[:org_instance.name.index(':')])
else:
new_name = org_instance.name[:org_instance.name.index(':')]
#Get the collections that the new instance needs to be added to.
#The new collections will also be a part of the given scope,
#except the special ones required for variable initialization and
#training.
collections = []
for name, collection in org_instance.graph._collections.items():
if org_instance in collection:
if (name == ops.GraphKeys.GLOBAL_VARIABLES or
name == ops.GraphKeys.TRAINABLE_VARIABLES or scope == ''):
collections.append(name)
else:
collections.append(scope + '/' + name)
#See if it's trainable.
trainable = (
org_instance in org_instance.graph.get_collection(
ops.GraphKeys.TRAINABLE_VARIABLES))
#Get the initial value
with org_instance.graph.as_default():
temp_session = Session()
init_value = temp_session.run(org_instance.initialized_value())
#Initialize the new variable
with to_graph.as_default():
new_var = Variable(
init_value,
trainable,
name=new_name,
collections=collections,
validate_shape=False)
return new_var
def copy_op_to_graph(org_instance, to_graph, variables, scope=''):
"""Returns a copy of an operation from another Graph under a specified scope.
Given an `Operation` `org_instance` from one `Graph`,
initializes and returns a copy of it from another `Graph`,
under the specified scope (default `""`).
The copying is done recursively, so any `Operation` whose output
is required to evaluate the `org_instance`, is also copied (unless
already done).
Since `Variable` instances are copied separately, those required
to evaluate `org_instance` must be provided as input.
Args:
org_instance: An `Operation` from some `Graph`. Could be a
`Placeholder` as well.
to_graph: The `Graph` to copy `org_instance` to.
    variables: An iterable of already-copied `Variable` instances that are
      needed to evaluate `org_instance`.
    scope: A scope for the copied `Operation` (default `""`).
Returns:
The copied `Operation` from `to_graph`.
Raises:
TypeError: If `org_instance` is not an `Operation` or `Tensor`.
"""
#The name of the new instance
if scope != '':
new_name = scope + '/' + org_instance.name
else:
new_name = org_instance.name
#Extract names of variables
copied_variables = dict((x.name, x) for x in variables)
#If a variable by the new name already exists, return the
  #corresponding tensor that will act as an input
if new_name in copied_variables:
return to_graph.get_tensor_by_name(copied_variables[new_name].name)
#If an instance of the same name exists, return appropriately
try:
already_present = to_graph.as_graph_element(
new_name, allow_tensor=True, allow_operation=True)
return already_present
except:
pass
#Get the collections that the new instance needs to be added to.
#The new collections will also be a part of the given scope.
collections = []
for name, collection in org_instance.graph._collections.items():
if org_instance in collection:
if scope == '':
collections.append(name)
else:
collections.append(scope + '/' + name)
#Take action based on the class of the instance
if isinstance(org_instance, ops.Tensor):
#If it's a Tensor, it is one of the outputs of the underlying
#op. Therefore, copy the op itself and return the appropriate
#output.
op = org_instance.op
new_op = copy_op_to_graph(op, to_graph, variables, scope)
output_index = op.outputs.index(org_instance)
new_tensor = new_op.outputs[output_index]
#Add to collections if any
for collection in collections:
to_graph.add_to_collection(collection, new_tensor)
return new_tensor
elif isinstance(org_instance, ops.Operation):
op = org_instance
#If it has an original_op parameter, copy it
if op._original_op is not None:
new_original_op = copy_op_to_graph(op._original_op, to_graph, variables,
scope)
else:
new_original_op = None
#If it has control inputs, call this function recursively on each.
new_control_inputs = [
copy_op_to_graph(x, to_graph, variables, scope)
for x in op.control_inputs
]
#If it has inputs, call this function recursively on each.
new_inputs = [
copy_op_to_graph(x, to_graph, variables, scope) for x in op.inputs
]
#Make a new node_def based on that of the original.
#An instance of tensorflow.core.framework.node_def_pb2.NodeDef, it
#stores String-based info such as name, device and type of the op.
#Unique to every Operation instance.
new_node_def = deepcopy(op.node_def)
#Change the name
new_node_def.name = new_name
#Copy the other inputs needed for initialization
output_types = op._output_types[:]
input_types = op._input_types[:]
#Make a copy of the op_def too.
#Its unique to every _type_ of Operation.
op_def = deepcopy(op.op_def)
#Initialize a new Operation instance
new_op = ops.Operation(new_node_def, to_graph, new_inputs, output_types,
new_control_inputs, input_types, new_original_op,
op_def)
#Use Graph's hidden methods to add the op
to_graph._record_op_seen_by_control_dependencies(new_op)
for device_function in reversed(to_graph._device_function_stack):
new_op._set_device(device_function(new_op))
return new_op
else:
raise TypeError('Could not copy instance: ' + str(org_instance))
def get_copied_op(org_instance, graph, scope=''):
"""Given an `Operation` instance from some `Graph`, returns
its namesake from `graph`, under the specified scope
(default `""`).
If a copy of `org_instance` is present in `graph` under the given
`scope`, it will be returned.
Args:
org_instance: An `Operation` from some `Graph`.
    graph: The `Graph` to be searched for a copy of `org_instance`.
scope: The scope `org_instance` is present in.
Returns:
The `Operation` copy from `graph`.
"""
#The name of the copied instance
if scope != '':
new_name = scope + '/' + org_instance.name
else:
new_name = org_instance.name
return graph.as_graph_element(
new_name, allow_tensor=True, allow_operation=True)
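# --- Illustrative usage sketch (not part of the original module) ---
# Assuming TensorFlow 1.x graph mode, variables are copied first and the ops
# that consume them afterwards; the names below ('v', 'out', the 'copied'
# scope) are placeholders for this sketch only:
#
#   import tensorflow as tf
#   g1, g2 = tf.Graph(), tf.Graph()
#   with g1.as_default():
#     v = tf.Variable(1.0, name='v')
#     out = tf.add(v, 2.0, name='out')
#   copied_v = copy_variable_to_graph(v, g2, scope='copied')
#   copied_out = copy_op_to_graph(out, g2, [copied_v], scope='copied')
#   same_op = get_copied_op(out.op, g2, scope='copied')  # look the copy up later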
|
{
"content_hash": "323519e8a56804eded23b59fd795876e",
"timestamp": "",
"source": "github",
"line_count": 241,
"max_line_length": 79,
"avg_line_length": 33.087136929460584,
"alnum_prop": 0.6798344620015049,
"repo_name": "jart/tensorflow",
"id": "5931c8a27996534cca80797e8b840559c124297c",
"size": "8663",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/copy_graph/python/util/copy_elements.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "310149"
},
{
"name": "C++",
"bytes": "44871792"
},
{
"name": "CMake",
"bytes": "206735"
},
{
"name": "Go",
"bytes": "1163781"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "799574"
},
{
"name": "Jupyter Notebook",
"bytes": "2455980"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "52050"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99265"
},
{
"name": "PHP",
"bytes": "2140"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "38792793"
},
{
"name": "Ruby",
"bytes": "533"
},
{
"name": "Shell",
"bytes": "447966"
},
{
"name": "Smarty",
"bytes": "6870"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('projects', '0001_initial_after_reset'),
]
operations = [
migrations.AlterField(
model_name='project',
name='catalog',
field=models.ForeignKey(help_text='The catalog which will be used for this project.', on_delete=django.db.models.deletion.CASCADE, related_name='+', to='questions.Catalog', verbose_name='catalog'),
),
migrations.AlterField(
model_name='project',
name='description',
field=models.TextField(blank=True, help_text='You can use markdown syntax in the description.', verbose_name='description'),
),
migrations.AlterField(
model_name='project',
name='title',
field=models.CharField(max_length=256, verbose_name='title'),
),
]
|
{
"content_hash": "d2c306688f4606c745ea57e0664e52f4",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 209,
"avg_line_length": 34.310344827586206,
"alnum_prop": 0.6211055276381909,
"repo_name": "DMPwerkzeug/DMPwerkzeug",
"id": "def99e39879865c0b5cf9f102453e6289f488410",
"size": "1065",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rdmo/projects/migrations/0002_meta.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "9735"
},
{
"name": "HTML",
"bytes": "126570"
},
{
"name": "JavaScript",
"bytes": "46177"
},
{
"name": "Python",
"bytes": "120676"
}
],
"symlink_target": ""
}
|
"""Common pathname manipulations, OS/2 EMX version.
Instead of importing this module directly, import os and refer to this
module as os.path.
"""
import os
import stat
__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
"basename","dirname","commonprefix","getsize","getmtime",
"getatime","getctime", "islink","exists","isdir","isfile","ismount",
"walk","expanduser","expandvars","normpath","abspath","splitunc",
"curdir","pardir","sep","pathsep","defpath","altsep","extsep",
"realpath","supports_unicode_filenames"]
# strings representing various path-related bits and pieces
curdir = '.'
pardir = '..'
extsep = '.'
sep = '/'
altsep = '\\'
pathsep = ';'
defpath = '.;C:\\bin'
# Normalize the case of a pathname and map slashes to backslashes.
# Other normalizations (such as optimizing '../' away) are not done
# (this is done by normpath).
def normcase(s):
"""Normalize case of pathname.
Makes all characters lowercase and all altseps into seps."""
return s.replace('\\', '/').lower()
# Return whether a path is absolute.
# Trivial in Posix, harder on the Mac or MS-DOS.
# For DOS it is absolute if it starts with a slash or backslash (current
# volume), or if a pathname after the volume letter and colon / UNC resource
# starts with a slash or backslash.
def isabs(s):
"""Test whether a path is absolute"""
s = splitdrive(s)[1]
return s != '' and s[:1] in '/\\'
# Join two (or more) paths.
def join(a, *p):
"""Join two or more pathname components, inserting sep as needed"""
path = a
for b in p:
if isabs(b):
path = b
elif path == '' or path[-1:] in '/\\:':
path = path + b
else:
path = path + '/' + b
return path
# Split a path in a drive specification (a drive letter followed by a
# colon) and the path specification.
# It is always true that drivespec + pathspec == p
def splitdrive(p):
"""Split a pathname into drive and path specifiers. Returns a 2-tuple
"(drive,path)"; either part may be empty"""
if p[1:2] == ':':
return p[0:2], p[2:]
return '', p
# Parse UNC paths
def splitunc(p):
"""Split a pathname into UNC mount point and relative path specifiers.
Return a 2-tuple (unc, rest); either part may be empty.
If unc is not empty, it has the form '//host/mount' (or similar
using backslashes). unc+rest is always the input path.
Paths containing drive letters never have an UNC part.
"""
if p[1:2] == ':':
return '', p # Drive letter present
firstTwo = p[0:2]
if firstTwo == '/' * 2 or firstTwo == '\\' * 2:
# is a UNC path:
# vvvvvvvvvvvvvvvvvvvv equivalent to drive letter
# \\machine\mountpoint\directories...
# directory ^^^^^^^^^^^^^^^
normp = normcase(p)
index = normp.find('/', 2)
if index == -1:
##raise RuntimeError, 'illegal UNC path: "' + p + '"'
return ("", p)
index = normp.find('/', index + 1)
if index == -1:
index = len(p)
return p[:index], p[index:]
return '', p
# Split a path in head (everything up to the last '/') and tail (the
# rest). After the trailing '/' is stripped, the invariant
# join(head, tail) == p holds.
# The resulting head won't end in '/' unless it is the root.
def split(p):
"""Split a pathname.
Return tuple (head, tail) where tail is everything after the final slash.
Either part may be empty."""
d, p = splitdrive(p)
# set i to index beyond p's last slash
i = len(p)
while i and p[i-1] not in '/\\':
i = i - 1
head, tail = p[:i], p[i:] # now tail has no slashes
# remove trailing slashes from head, unless it's all slashes
head2 = head
while head2 and head2[-1] in '/\\':
head2 = head2[:-1]
head = head2 or head
return d + head, tail
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
def splitext(p):
"""Split the extension from a pathname.
Extension is everything from the last dot to the end.
Return (root, ext), either part may be empty."""
root, ext = '', ''
for c in p:
if c in ['/','\\']:
root, ext = root + ext + c, ''
elif c == '.':
if ext:
root, ext = root + ext, c
else:
ext = c
elif ext:
ext = ext + c
else:
root = root + c
return root, ext
# Return the tail (basename) part of a path.
def basename(p):
"""Returns the final component of a pathname"""
return split(p)[1]
# Return the head (dirname) part of a path.
def dirname(p):
"""Returns the directory component of a pathname"""
return split(p)[0]
# Return the longest prefix of all list elements.
def commonprefix(m):
"Given a list of pathnames, returns the longest common leading component"
if not m: return ''
prefix = m[0]
for item in m:
for i in range(len(prefix)):
if prefix[:i+1] != item[:i+1]:
prefix = prefix[:i]
if i == 0: return ''
break
return prefix
# Get size, mtime, atime of files.
def getsize(filename):
"""Return the size of a file, reported by os.stat()"""
return os.stat(filename).st_size
def getmtime(filename):
"""Return the last modification time of a file, reported by os.stat()"""
return os.stat(filename).st_mtime
def getatime(filename):
"""Return the last access time of a file, reported by os.stat()"""
return os.stat(filename).st_atime
def getctime(filename):
"""Return the creation time of a file, reported by os.stat()."""
return os.stat(filename).st_ctime
# Is a path a symbolic link?
# This will always return false on systems where posix.lstat doesn't exist.
def islink(path):
"""Test for symbolic link. On OS/2 always returns false"""
return False
# Does a path exist?
# This is false for dangling symbolic links.
def exists(path):
"""Test whether a path exists"""
try:
st = os.stat(path)
except os.error:
return False
return True
# Is a path a directory?
def isdir(path):
"""Test whether a path is a directory"""
try:
st = os.stat(path)
except os.error:
return False
return stat.S_ISDIR(st.st_mode)
# Is a path a regular file?
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path.
def isfile(path):
"""Test whether a path is a regular file"""
try:
st = os.stat(path)
except os.error:
return False
return stat.S_ISREG(st.st_mode)
# Is a path a mount point? Either a root (with or without drive letter)
# or an UNC path with at most a / or \ after the mount point.
def ismount(path):
"""Test whether a path is a mount point (defined as root of drive)"""
unc, rest = splitunc(path)
if unc:
return rest in ("", "/", "\\")
p = splitdrive(path)[1]
return len(p) == 1 and p[0] in '/\\'
# Directory tree walk.
# For each directory under top (including top itself, but excluding
# '.' and '..'), func(arg, dirname, filenames) is called, where
# dirname is the name of the directory and filenames is the list
# of files (and subdirectories etc.) in the directory.
# The func may modify the filenames list, to implement a filter,
# or to impose a different order of visiting.
def walk(top, func, arg):
"""Directory tree walk whth callback function.
walk(top, func, arg) calls func(arg, d, files) for each directory d
in the tree rooted at top (including top itself); files is a list
of all the files and subdirs in directory d."""
try:
names = os.listdir(top)
except os.error:
return
func(arg, top, names)
exceptions = ('.', '..')
for name in names:
if name not in exceptions:
name = join(top, name)
if isdir(name):
walk(name, func, arg)
# Expand paths beginning with '~' or '~user'.
# '~' means $HOME; '~user' means that user's home directory.
# If the path doesn't begin with '~', or if the user or $HOME is unknown,
# the path is returned unchanged (leaving error reporting to whatever
# function is called with the expanded path as argument).
# See also module 'glob' for expansion of *, ? and [...] in pathnames.
# (A function should also be defined to do full *sh-style environment
# variable expansion.)
def expanduser(path):
"""Expand ~ and ~user constructs.
If user or $HOME is unknown, do nothing."""
if path[:1] != '~':
return path
i, n = 1, len(path)
while i < n and path[i] not in '/\\':
i = i + 1
if i == 1:
if 'HOME' in os.environ:
userhome = os.environ['HOME']
elif not 'HOMEPATH' in os.environ:
return path
else:
try:
drive = os.environ['HOMEDRIVE']
except KeyError:
drive = ''
userhome = join(drive, os.environ['HOMEPATH'])
else:
return path
return userhome + path[i:]
# Expand paths containing shell variable substitutions.
# The following rules apply:
# - no expansion within single quotes
# - no escape character, except for '$$' which is translated into '$'
# - ${varname} is accepted.
# - varnames can be made out of letters, digits and the character '_'
# XXX With COMMAND.COM you can use any characters in a variable name,
# XXX except '^|<>='.
def expandvars(path):
"""Expand shell variables of form $var and ${var}.
Unknown variables are left unchanged."""
if '$' not in path:
return path
import string
varchars = string.letters + string.digits + '_-'
res = ''
index = 0
pathlen = len(path)
while index < pathlen:
c = path[index]
if c == '\'': # no expansion within single quotes
path = path[index + 1:]
pathlen = len(path)
try:
index = path.index('\'')
res = res + '\'' + path[:index + 1]
except ValueError:
res = res + path
index = pathlen - 1
elif c == '$': # variable or '$$'
if path[index + 1:index + 2] == '$':
res = res + c
index = index + 1
elif path[index + 1:index + 2] == '{':
path = path[index+2:]
pathlen = len(path)
try:
index = path.index('}')
var = path[:index]
if var in os.environ:
res = res + os.environ[var]
except ValueError:
res = res + path
index = pathlen - 1
else:
var = ''
index = index + 1
c = path[index:index + 1]
while c != '' and c in varchars:
var = var + c
index = index + 1
c = path[index:index + 1]
if var in os.environ:
res = res + os.environ[var]
if c != '':
res = res + c
else:
res = res + c
index = index + 1
return res
# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
def normpath(path):
"""Normalize path, eliminating double slashes, etc."""
path = path.replace('\\', '/')
prefix, path = splitdrive(path)
while path[:1] == '/':
prefix = prefix + '/'
path = path[1:]
comps = path.split('/')
i = 0
while i < len(comps):
if comps[i] == '.':
del comps[i]
elif comps[i] == '..' and i > 0 and comps[i-1] not in ('', '..'):
del comps[i-1:i+1]
i = i - 1
elif comps[i] == '' and i > 0 and comps[i-1] != '':
del comps[i]
else:
i = i + 1
# If the path is now empty, substitute '.'
if not prefix and not comps:
comps.append('.')
return prefix + '/'.join(comps)
# Return an absolute path.
def abspath(path):
"""Return the absolute version of a path"""
if not isabs(path):
path = join(os.getcwd(), path)
return normpath(path)
# realpath is a no-op on systems without islink support
realpath = abspath
supports_unicode_filenames = False
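# Illustrative examples (not part of the original module); the expected
# results follow directly from the functions above:
#
#     splitdrive('C:/foo/bar')      ->  ('C:', '/foo/bar')
#     splitunc('//host/mount/dir')  ->  ('//host/mount', '/dir')
#     join('C:/foo', 'bar', 'baz')  ->  'C:/foo/bar/baz'
#     normpath('A//B/./C/../D')     ->  'A/B/D'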
|
{
"content_hash": "aa5f6287f04d721caac20862aeaefefd",
"timestamp": "",
"source": "github",
"line_count": 420,
"max_line_length": 79,
"avg_line_length": 30.042857142857144,
"alnum_prop": 0.5688698684419083,
"repo_name": "OS2World/APP-INTERNET-torpak_2",
"id": "09982aadc1656a77dd3f9da44f1b26720de93f1c",
"size": "12679",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Lib/os2emxpath.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
from __future__ import unicode_literals
"""Utilities for testing Motor with Tornado."""
from tornado import gen
from test import version, SkipTest
def one(s):
"""Get one element of a set"""
return next(iter(s))
def delay(sec):
# Javascript sleep() available in MongoDB since version ~1.9
return 'sleep(%s * 1000); return true' % sec
@gen.coroutine
def get_command_line(client):
command_line = yield client.admin.command('getCmdLineOpts')
assert command_line['ok'] == 1, "getCmdLineOpts() failed"
raise gen.Return(command_line['argv'])
@gen.coroutine
def server_is_master_with_slave(client):
command_line = yield get_command_line(client)
raise gen.Return('--master' in command_line)
@gen.coroutine
def server_is_mongos(client):
ismaster_response = yield client.admin.command('ismaster')
raise gen.Return(ismaster_response.get('msg') == 'isdbgrid')
@gen.coroutine
def skip_if_mongos(client):
is_mongos = yield server_is_mongos(client)
if is_mongos:
raise SkipTest("connected to mongos")
@gen.coroutine
def remove_all_users(db):
version_check = yield version.at_least(db.connection, (2, 5, 4))
if version_check:
yield db.command({"dropAllUsersFromDatabase": 1})
else:
yield db.system.users.remove({})
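# --- Illustrative usage sketch (not part of the original module) ---
# The helpers above are coroutines, so they must be yielded from inside
# another coroutine; ``client`` is assumed to be an already-connected
# Motor client.
@gen.coroutine
def _example_usage(client):
    # Skip the calling test on mongos, then report the argv the server
    # was started with.
    yield skip_if_mongos(client)
    argv = yield get_command_line(client)
    raise gen.Return(argv)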
|
{
"content_hash": "0af1a874524cd5b18fc2dc53fe7e0596",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 68,
"avg_line_length": 25.057692307692307,
"alnum_prop": 0.6891788181120491,
"repo_name": "asvetlov/motor",
"id": "e43a73cd982b2118feb6a2ef331be9763d208148",
"size": "1882",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "542195"
}
],
"symlink_target": ""
}
|