import unittest
from seedpod_ground_risk.pathfinding.a_star import *
from seedpod_ground_risk.pathfinding.heuristic import *
from seedpod_ground_risk.pathfinding.rjps_a_star import *
from tests.pathfinding.test_data import SMALL_TEST_GRID, LARGE_TEST_GRID, SMALL_DEADEND_TEST_GRID
class BaseAStarTestCase(unittest.TestCase):
def setUp(self) -> None:
super().setUp()
self.small_deadend_environment = GridEnvironment(SMALL_DEADEND_TEST_GRID, diagonals=True)
self.small_diag_environment = GridEnvironment(SMALL_TEST_GRID, diagonals=True)
self.small_no_diag_environment = GridEnvironment(SMALL_TEST_GRID, diagonals=False)
self.large_diag_environment = GridEnvironment(LARGE_TEST_GRID, diagonals=True)
self.large_no_diag_environment = GridEnvironment(LARGE_TEST_GRID, diagonals=False)
self.start = Node((0, 0))
self.end = Node((4, 4))
def test_start_is_goal(self):
"""
Test case of start and goal being the same node
"""
# Do not test base class!
if self.__class__ is BaseAStarTestCase:
return
path = self.algo.find_path(self.small_diag_environment, self.start, self.start)
self.assertEqual(path, [self.start])
def test_goal_unreachable(self):
"""
Test behaviour when path is impossible due to obstacles
"""
# Do not test base class!
if self.__class__ is BaseAStarTestCase:
return
path = self.algo.find_path(self.small_deadend_environment, self.start, self.end)
        self.assertIsNone(path, "Impossible path should be None")
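# Concrete algorithms are exercised by subclassing BaseAStarTestCase and
# assigning `self.algo` in setUp(), as RiskGridAStarTestCase does below.
# A hypothetical further subclass for the rjps_a_star import might look like:
#
# class RJPSAStarTestCase(BaseAStarTestCase):
#     def setUp(self):
#         super().setUp()
#         self.algo = ...  # construct the RJPS A* implementation here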
class RiskGridAStarTestCase(BaseAStarTestCase):
def setUp(self) -> None:
super().setUp()
self.algo = RiskGridAStar(heuristic=ManhattanRiskHeuristic(self.small_no_diag_environment))
def test_direct_no_diagonals(self):
"""
Test simplest case of direct path on small grid with no diagonals ignoring node values
"""
path = self.algo.find_path(self.small_no_diag_environment, self.start, self.end)
self.assertEqual(path[0], self.start, 'Start node not included in path')
self.assertEqual(path[-1], self.end, 'Goal node not included in path')
        # The path could take either zigzag route; both are correct and have the same length.
        # No other path has length 9, so asserting on the length accepts either zigzag.
        self.assertLess(len(path), 10, 'Path wrong length (not direct?)')
def test_direct_with_diagonals(self):
"""
Test simplest case of direct path on small grid with diagonals ignoring node values
"""
path = self.algo.find_path(self.small_diag_environment, self.start, self.end)
self.assertEqual(path[0], self.start, "Start node not included in path")
self.assertEqual(path[-1], self.end, 'Goal node not included in path')
self.assertEqual(path, [
Node((0, 0)),
Node((2, 0)),
Node((3, 1)),
Node((4, 2)),
Node((4, 3)),
Node((4, 4))
],
"Incorrect path")
def test_large_env_with_diagonals(self):
"""
Test on realistic costmap. Used mainly for profiling code
"""
algo = RiskGridAStar(ManhattanRiskHeuristic(self.large_diag_environment,
risk_to_dist_ratio=1))
path = algo.find_path(self.large_diag_environment,
Node((10, 10)),
Node((490, 490)))
self.assertIsNotNone(path, 'Failed to find possible path')
def test_repeatability(self):
import matplotlib.pyplot as mpl
import numpy as np
start, end = Node((450, 50)), Node((100, 450))
repeats = 2
equal_paths = []
rdrs = np.linspace(100, 10000, 10)
risk_sums = []
def make_path(start, end, rdr):
algo = RiskGridAStar(ManhattanRiskHeuristic(self.large_diag_environment,
risk_to_dist_ratio=rdr))
return algo.find_path(self.large_diag_environment, start, end)
# def run_params(rdr):
# paths = [make_path(start, end, rdr) for _ in range(repeats)]
# equal_paths.append(all([p == paths[0] for p in paths]))
# if not paths[0]:
# return [rdr, np.inf]
# risk_sum = sum([self.large_diag_environment.grid[n[0], n[1]] for n in paths[0]])
# return [rdr, risk_sum]
#
# pool = ProcessPool(nodes=8)
# params = np.array(rdrs)
# risk_sums = pool.map(run_params, params)
# pool.close()
for rdr in rdrs:
paths = [make_path(start, end, rdr) for _ in range(repeats)]
equal_paths.append(all([p == paths[0] for p in paths]))
if not paths[0]:
risk_sums.append([rdr, np.inf])
continue
risk_sum = sum([self.large_diag_environment.grid[n.position[0], n.position[1]] for n in paths[0]])
risk_sums.append([rdr, risk_sum])
fig = mpl.figure()
ax = fig.add_subplot(111)
for path in paths:
ax.plot([n.position[1] for n in path], [n.position[0] for n in path], color='red')
            # show the grid the paths were actually computed on
            im = ax.imshow(self.large_diag_environment.grid)
fig.colorbar(im, ax=ax, label='Population')
ax.set_title(f'RiskA* with RDR={rdr:.4g} \n Risk sum={risk_sum:.4g}')
fig.show()
risk_sums = np.array(risk_sums)
rdr_fig = mpl.figure()
ax = rdr_fig.add_subplot(111)
ax.scatter(risk_sums[:, 0], risk_sums[:, 1])
# ax.set_xscale('log')
ax.set_yscale('symlog')
ax.set_xlabel('Risk-Distance Ratio')
ax.set_ylabel('Path Risk sum')
ax.set_title('Risk Grid A*')
rdr_fig.show()
self.assertTrue(all(equal_paths), 'Paths are not generated repeatably')
if __name__ == '__main__':
unittest.main()
|
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
import seaborn as sns  # seaborn.apionly was removed from seaborn; the top-level API is equivalent here
from scipy import stats
def fitMVN():
# set plotting style
sns.set_style("darkgrid")
# load PM dataset
    pm_daily = np.loadtxt('../data/epa_hourly/alabamatest.csv', delimiter=',')
    # Assumes 365 rows (days) per year, matching the indexing used in calc_monthly_std
    Nyears = int(np.shape(pm_daily)[0] / 365)
    Nsites = np.shape(pm_daily)[1]
    Months = ['May', 'June', 'July', 'August', 'September', 'October', 'November', 'December', 'January', 'February',
              'March', 'April']
    # calculate standard deviation in daily flows each month and squared Mahalanobis distances
    StdMonthly = calc_monthly_std(pm_daily, Nyears, Nsites)
    D2 = calcD2(Nyears, Nsites, np.log(StdMonthly))
# calculate theoretical quantiles for a chi^2 distribution with dof = Nsites, and for the standard normal distribution
m = np.array(range(1, Nyears + 1))
p = (m - 0.5) / Nyears
chi2 = stats.chi2.ppf(p, Nsites)
norm = stats.norm.ppf(p, 0, 1)
# initialize matrices to store correlation coefficients and significance levels for marginal normal distributions and chi^2 distributions
normCorr = np.zeros([Nsites, 12])
norm_sigLevel = np.zeros([Nsites, 12])
chi2Corr = np.zeros([12])
chi2_sigLevel = np.zeros([12])
for i in range(len(Months)):
# plot histograms of standard deviation of daily flows each month, and of their logs
plotHistograms(Nsites, StdMonthly[:, :, i], 'Standard Deviation of Daily ' + Months[i] + ' Flows',
Months[i] + 'Hist.png')
plotHistograms(Nsites, np.log(StdMonthly[:, :, i]), 'log(Standard Deviation of Daily ' + Months[i] + ' Flows)', \
'Log' + Months[i] + 'Hist.png')
# plot QQ plots of standard deviation of daily flows each month, and of their logs
plotNormQQ(Nsites, StdMonthly[:, :, i], norm, 'Standard Deviation of Daily ' + Months[i] + ' Flows',
Months[i] + 'QQ.png')
normCorr[:, i] = plotNormQQ(Nsites, np.log(StdMonthly[:, :, i]), norm,
'log(Standard Deviation of Daily ' + Months[i] + ' Flows)',
'Log' + Months[i] + 'QQ.png')
# plot QQ plot of Chi Squared distribution of log of standard deviation in daily flows each month
        chi2Corr[i] = plotChi2QQ(Nsites, D2[:, i], chi2,
                                 r'D$\mathregular{^2}\!$ of log(Standard Deviation of Daily ' + Months[i] + ' Flows)',
                                 'Log' + Months[i] + 'Chi2QQ.png')
# find significance levels
chi2_sigLevel[i] = chi2_MC(Nsites, Nyears, chi2, chi2Corr[i])
norm_sigLevel[:, i] = norm_MC(Nsites, Nyears, norm, normCorr[:, i])
np.savetxt('Norm_sigLevels.txt', np.transpose(norm_sigLevel))
np.savetxt('Norm_corr.txt', np.transpose(normCorr))
np.savetxt('Chi2_sigLevels.txt', chi2_sigLevel)
np.savetxt('Chi2_corr.txt', chi2Corr)
return None
def calc_monthly_std(Qdaily, Nyears, Nsites):
Nmonths = 12
# first month = May (1st month of water year)
DaysPerMonth = np.array([31, 30, 31, 31, 30, 31, 30, 31, 31, 28, 31, 30])
Qmonthly = np.zeros([Nsites, Nyears, Nmonths])
StdMonthly = np.zeros([Nsites, Nyears, Nmonths])
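    # StdMonthly[i, year, month] ends up as the sample standard deviation of the
    # daily volumes (86400 * Qdaily) within that month:
    #   sqrt( sum_d (v_d - v_mean)^2 / (DaysPerMonth[month] - 1) )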
for year in range(Nyears):
for month in range(Nmonths):
start = year * 365 + np.sum(DaysPerMonth[0:month])
for i in range(Nsites):
# find total flow each month
Qmonthly[i, year, month] = 86400 * np.sum(Qdaily[start:start + DaysPerMonth[month], i])
# find standard deviation in daily flows each month
for i in range(Nsites):
for j in range(DaysPerMonth[month]):
StdMonthly[i, year, month] = StdMonthly[i, year, month] + \
(
86400 * Qdaily[start + j, i] - Qmonthly[i, year, month] / DaysPerMonth[
month]) ** 2
StdMonthly[i, year, month] = np.sqrt((1 / (DaysPerMonth[month] - 1)) * StdMonthly[i, year, month])
return StdMonthly
def plotHistograms(Nsites, data, xlabel, filename):
fig = plt.figure()
for i in range(Nsites):
ax = fig.add_subplot(1, Nsites, i + 1)
ax.hist(data[i, :], bins=10, color='navy', alpha=0.8)
ax.set_title('Site ' + str(i + 1), fontsize=16)
fig.text(0.1, 0.5, 'Frequency', va='center', rotation='vertical', fontsize=14)
fig.text(0.5, 0.04, xlabel, ha='center', fontsize=14)
fig.subplots_adjust(bottom=0.15)
fig.set_size_inches([22.525, 4.825])
fig.savefig('Hists/' + filename)
fig.clf()
return None
def plotNormQQ(Nsites, data, norm, title, filename):
corr = np.zeros([Nsites])
fig = plt.figure()
for i in range(Nsites):
corr[i] = np.corrcoef(np.sort(data[i, :]), norm)[0, 1]
z = (data[i, :] - np.mean(data[i, :])) / np.std(data[i, :])
ax = fig.add_subplot(1, Nsites, i + 1)
ax.scatter(norm, np.sort(z))
ax.plot([-3, 3], [-3, 3], c='r')
ax.set_title('Site ' + str(i + 1), fontsize=16)
ax.set_xlim([-3, 3])
ax.set_ylim([-3, 3])
fig.text(0.1, 0.5, 'Sample Quantiles', va='center', rotation='vertical', fontsize=14)
fig.text(0.5, 0.04, 'Theoretical Quantiles', ha='center', fontsize=14)
fig.suptitle('Normal Q-Q Plot of ' + title, fontsize=16)
fig.subplots_adjust(bottom=0.15, top=0.85)
fig.set_size_inches([22.525, 4.825])
fig.savefig('QQplots/' + filename)
fig.clf()
return corr
def calcD2(Nyears, Nsites, data):
    # data has shape (Nsites, Nyears, 12), e.g. np.log(StdMonthly)
D2 = np.zeros([Nyears, 12])
X = np.zeros([Nyears, Nsites])
Xprime = np.zeros([Nyears, Nsites])
S = np.zeros(Nsites)
for i in range(12):
# fill data matrix, X, for ith month
for j in range(Nsites):
X[:, j] = data[j, :, i]
# calculate covariance matrix, S, for ith month
Xprime = X - (1 / Nyears) * np.dot(np.ones([Nyears, Nyears]), X)
S = (1 / (Nyears - 1)) * np.dot(np.transpose(Xprime), Xprime)
        # calculate squared Mahalanobis distance for each year's ith month:
        #   D2[j, i] = (x_j - xbar) S^{-1} (x_j - xbar)^T
for j in range(Nyears):
D2[j, i] = np.dot(np.dot((X[j, :] - np.mean(X, 0)), np.linalg.inv(S)),
(np.transpose(X[j, :] - np.mean(X, 0))))
return D2
def plotChi2QQ(Nsites, data, chi2, title, filename):
corr = np.corrcoef(np.sort(data), chi2)[0, 1]
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.scatter(chi2, np.sort(data))
ax.plot([0, 1.1 * np.max(chi2)], [0, 1.1 * np.max(chi2)], c='r')
ax.set_xlabel('Theoretical Quantiles', fontsize=16)
ax.set_xlim([0, 1.1 * np.max(chi2)])
ax.set_ylabel('Sample Quantiles', fontsize=16)
ax.set_ylim([0, 1.1 * np.max(data)])
ax.tick_params(axis='both', labelsize=14)
ax.set_title(r'$\chi^2$' + ' Q-Q Plot of ' + title, fontsize=16)
fig.savefig('QQplots/' + filename)
fig.clf()
return corr
def chi2_MC(Nsites, Nyears, theoretical, dataCorr):
corr = np.zeros(10000)
for i in range(10000): # 10,000 MC simulations
simulated = stats.chi2.rvs(Nsites, size=Nyears)
corr[i] = np.corrcoef(np.sort(simulated), theoretical)[0, 1]
    # find significance level: the fraction of simulated correlations below the observed one
    corr = np.sort(corr)
    sigLevel = 0.0  # guard against dataCorr falling below every simulated correlation
    for i in range(10000):
        if dataCorr > corr[i]:
            sigLevel = (i + 0.5) / 10000
    return sigLevel
def norm_MC(Nsites, Nyears, theoretical, dataCorr):
sigLevel = np.zeros(Nsites)
corr = np.zeros([10000])
for i in range(10000): # 10,000 MC simulations
simulated = stats.norm.rvs(0, 1, size=Nyears)
corr[i] = np.corrcoef(np.sort(simulated), theoretical)[0, 1]
# find significance levels
corr = np.sort(corr)
for i in range(10000):
for j in range(Nsites):
if dataCorr[j] > corr[i]:
sigLevel[j] = (i + 0.5) / 10000
return sigLevel
fitMVN()
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
from os import makedirs
from os.path import isdir, join
import logging
import csv
from .base import BaseProvider
class CsvProvider(BaseProvider):
'''
Core provider for records configured in csv files on disk.
config:
class: octodns.provider.csv.CsvProvider
        # The directory to write csv files to (required)
directory: ./config
'''
SUPPORTS_GEO = True
SUPPORTS = set(('A', 'AAAA', 'ALIAS', 'CAA', 'CNAME', 'MX', 'NAPTR', 'NS',
'PTR', 'SSHFP', 'SPF', 'SRV', 'TXT'))
def __init__(self, id, directory, *args, **kwargs):
self.log = logging.getLogger('CsvProvider[{}]'.format(id))
self.log.debug('__init__: id=%s, directory=%s', id, directory)
super(CsvProvider, self).__init__(id, *args, **kwargs)
self.directory = directory
def populate(self, zone, target=False, lenient=False):
self.log.debug('populate: name=%s, target=%s, lenient=%s', zone.name,
target, lenient)
self.log.info('populate: found %s records',
len(zone.records))
def _apply(self, plan):
desired = plan.desired
changes = plan.changes
self.log.debug('_apply: zone=%s, len(changes)=%d', desired.name,
len(changes))
csv_columns = ['zone',
'type',
'record',
'ttl',
'value',
'geo',
'healthcheck']
# Since we don't have existing we'll only see creates
records = [c.new for c in changes]
dict_data = []
for record in records:
d = record.data
if 'values' in d:
d['value'] = d['values']
del d['values']
d['type'] = record._type
record_key = "{}.{}".format(record.name, desired.name)
d['record'] = record_key
d['zone'] = desired.name
dict_data.append(d)
if not isdir(self.directory):
makedirs(self.directory)
filename = join(self.directory, '{}csv'.format(desired.name))
self.log.debug('_apply: writing filename=%s', filename)
with open(filename, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=csv_columns)
writer.writeheader()
for data in dict_data:
writer.writerow(data)
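# Usage sketch (hypothetical wiring; octodns normally builds providers from its
# YAML config and drives apply() through a plan produced by the manager):
#   provider = CsvProvider('csv', './csv-output')
#   provider.apply(plan)
# Zone names carry a trailing dot, so '{}csv'.format(desired.name) yields
# filenames like 'example.com.csv'.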
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import logging
import pytest
import datetime
import msrest
from azure.servicebus.aio.management import ServiceBusAdministrationClient
from azure.servicebus.management import SubscriptionProperties
from utilities import get_logger
from azure.core.exceptions import HttpResponseError, ResourceExistsError
from devtools_testutils import AzureMgmtTestCase, CachedResourceGroupPreparer
from servicebus_preparer import (
CachedServiceBusNamespacePreparer,
ServiceBusNamespacePreparer
)
from mgmt_test_utilities_async import async_pageable_to_list, clear_topics
_logger = get_logger(logging.DEBUG)
class ServiceBusAdministrationClientSubscriptionAsyncTests(AzureMgmtTestCase):
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_async_mgmt_subscription_create_by_name(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = "topic_testaddf"
subscription_name = "sub_testkkk"
try:
await mgmt_service.create_topic(topic_name)
await mgmt_service.create_subscription(topic_name, subscription_name)
subscription = await mgmt_service.get_subscription(topic_name, subscription_name)
assert subscription.name == subscription_name
assert subscription.availability_status == 'Available'
assert subscription.status == 'Active'
finally:
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_async_mgmt_subscription_create_with_subscription_description(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = "iweidk"
subscription_name = "kdosako"
try:
await mgmt_service.create_topic(topic_name)
await mgmt_service.create_subscription(
topic_name,
subscription_name=subscription_name,
auto_delete_on_idle=datetime.timedelta(minutes=10),
dead_lettering_on_message_expiration=True,
default_message_time_to_live=datetime.timedelta(minutes=11),
enable_batched_operations=True,
lock_duration=datetime.timedelta(seconds=13),
max_delivery_count=14,
requires_session=True
)
subscription = await mgmt_service.get_subscription(topic_name, subscription_name)
assert subscription.name == subscription_name
assert subscription.auto_delete_on_idle == datetime.timedelta(minutes=10)
assert subscription.dead_lettering_on_message_expiration == True
assert subscription.default_message_time_to_live == datetime.timedelta(minutes=11)
assert subscription.enable_batched_operations == True
assert subscription.lock_duration == datetime.timedelta(seconds=13)
assert subscription.max_delivery_count == 14
assert subscription.requires_session == True
finally:
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_async_mgmt_subscription_create_with_forward_to(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = "iweidkforward"
subscription_name = "kdosakoforward"
queue_name = "dkfthj"
try:
await mgmt_service.create_queue(queue_name)
await mgmt_service.create_topic(topic_name)
await mgmt_service.create_subscription(
topic_name,
subscription_name=subscription_name,
forward_dead_lettered_messages_to=queue_name,
forward_to=queue_name,
)
subscription = await mgmt_service.get_subscription(topic_name, subscription_name)
# Test forward_to (separately, as it changes auto_delete_on_idle when you enable it.)
            # Note: we use endswith because the servicebus_namespace_name is replaced locally but not in the properties bag, and we still want to test this.
assert subscription.forward_to.endswith(".servicebus.windows.net/{}".format(queue_name))
assert subscription.forward_dead_lettered_messages_to.endswith(".servicebus.windows.net/{}".format(queue_name))
finally:
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
await mgmt_service.delete_queue(queue_name)
            await mgmt_service.close()  # the aio client's close() is a coroutine
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_async_mgmt_subscription_create_duplicate(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = "dqkodq"
subscription_name = 'kkaqo'
try:
await mgmt_service.create_topic(topic_name)
await mgmt_service.create_subscription(topic_name, subscription_name)
with pytest.raises(ResourceExistsError):
await mgmt_service.create_subscription(topic_name, subscription_name)
finally:
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_async_mgmt_subscription_update_success(self, servicebus_namespace_connection_string, servicebus_namespace, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = "fjrui"
subscription_name = "eqkovc"
queue_name = "dfkla"
try:
await mgmt_service.create_queue(queue_name)
topic_description = await mgmt_service.create_topic(topic_name)
subscription_description = await mgmt_service.create_subscription(topic_description.name, subscription_name)
# Try updating one setting.
subscription_description.lock_duration = datetime.timedelta(minutes=2)
await mgmt_service.update_subscription(topic_description.name, subscription_description)
subscription_description = await mgmt_service.get_subscription(topic_name, subscription_name)
assert subscription_description.lock_duration == datetime.timedelta(minutes=2)
# Now try updating all settings.
subscription_description.auto_delete_on_idle = datetime.timedelta(minutes=10)
subscription_description.dead_lettering_on_message_expiration = True
subscription_description.default_message_time_to_live = datetime.timedelta(minutes=11)
subscription_description.lock_duration = datetime.timedelta(seconds=12)
subscription_description.max_delivery_count = 14
# topic_description.enable_partitioning = True # Cannot be changed after creation
# topic_description.requires_session = True # Cannot be changed after creation
await mgmt_service.update_subscription(topic_description.name, subscription_description)
subscription_description = await mgmt_service.get_subscription(topic_description.name, subscription_name)
assert subscription_description.auto_delete_on_idle == datetime.timedelta(minutes=10)
assert subscription_description.dead_lettering_on_message_expiration == True
assert subscription_description.default_message_time_to_live == datetime.timedelta(minutes=11)
assert subscription_description.max_delivery_count == 14
assert subscription_description.lock_duration == datetime.timedelta(seconds=12)
# assert topic_description.enable_partitioning == True
# assert topic_description.requires_session == True
# Finally, test forward_to (separately, as it changes auto_delete_on_idle when you enable it.)
subscription_description.forward_to = "sb://{}.servicebus.windows.net/{}".format(servicebus_namespace.name, topic_name)
subscription_description.forward_dead_lettered_messages_to = "sb://{}.servicebus.windows.net/{}".format(servicebus_namespace.name, topic_name)
await mgmt_service.update_subscription(topic_description.name, subscription_description)
subscription_description = await mgmt_service.get_subscription(topic_description.name, subscription_name)
            # Note: we use endswith because the servicebus_namespace_name is replaced locally but not in the properties bag, and we still want to test this.
assert subscription_description.forward_to.endswith(".servicebus.windows.net/{}".format(topic_name))
assert subscription_description.forward_dead_lettered_messages_to.endswith(".servicebus.windows.net/{}".format(topic_name))
# Update forward_to with entity name
subscription_description.forward_to = queue_name
subscription_description.forward_dead_lettered_messages_to = queue_name
await mgmt_service.update_subscription(topic_description.name, subscription_description)
subscription_description = await mgmt_service.get_subscription(topic_description.name, subscription_name)
            # Note: we use endswith because the servicebus_namespace_name is replaced locally but not in the properties bag, and we still want to test this.
assert subscription_description.forward_to.endswith(".servicebus.windows.net/{}".format(queue_name))
assert subscription_description.forward_dead_lettered_messages_to.endswith(".servicebus.windows.net/{}".format(queue_name))
# Update forward_to with None
subscription_description.forward_to = None
subscription_description.forward_dead_lettered_messages_to = None
await mgmt_service.update_subscription(topic_description.name, subscription_description)
subscription_description = await mgmt_service.get_subscription(topic_description.name, subscription_name)
            # Note: we use endswith because the servicebus_namespace_name is replaced locally but not in the properties bag, and we still want to test this.
assert subscription_description.forward_to is None
assert subscription_description.forward_dead_lettered_messages_to is None
finally:
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
await mgmt_service.delete_queue(queue_name)
            await mgmt_service.close()  # the aio client's close() is a coroutine
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_async_mgmt_subscription_update_invalid(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = "dfjfj"
subscription_name = "kwqxc"
try:
topic_description = await mgmt_service.create_topic(topic_name)
subscription_description = await mgmt_service.create_subscription(topic_name, subscription_name)
# handle a null update properly.
with pytest.raises(TypeError):
await mgmt_service.update_subscription(topic_name, None)
# handle an invalid type update properly.
with pytest.raises(TypeError):
await mgmt_service.update_subscription(topic_name, Exception("test"))
            # change the name to a subscription that doesn't exist; should fail.
            subscription_description.name = "iewdm"
            with pytest.raises(HttpResponseError):
                await mgmt_service.update_subscription(topic_name, subscription_description)
            subscription_description.name = subscription_name
            # change the name to an invalid (empty) name; should fail.
            subscription_description.name = ''
            with pytest.raises(msrest.exceptions.ValidationError):
                await mgmt_service.update_subscription(topic_name, subscription_description)
            subscription_description.name = subscription_name  # restore the real subscription name
# change to a setting with an invalid value; should still fail.
subscription_description.lock_duration = datetime.timedelta(days=25)
with pytest.raises(HttpResponseError):
await mgmt_service.update_subscription(topic_name, subscription_description)
subscription_description.lock_duration = datetime.timedelta(minutes=5)
finally:
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
    async def test_async_mgmt_subscription_delete(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = 'test_topicgda'
subscription_name_1 = 'test_sub1da'
subscription_name_2 = 'test_sub2gcv'
await mgmt_service.create_topic(topic_name)
await mgmt_service.create_subscription(topic_name, subscription_name_1)
subscriptions = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
assert len(subscriptions) == 1
await mgmt_service.create_subscription(topic_name, subscription_name_2)
subscriptions = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
assert len(subscriptions) == 2
description = await mgmt_service.get_subscription(topic_name, subscription_name_1)
await mgmt_service.delete_subscription(topic_name, description.name)
subscriptions = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
assert len(subscriptions) == 1 and subscriptions[0].name == subscription_name_2
await mgmt_service.delete_subscription(topic_name, subscription_name_2)
subscriptions = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
assert len(subscriptions) == 0
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_async_mgmt_subscription_list(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = 'lkoqxc'
subscription_name_1 = 'testsub1'
subscription_name_2 = 'testsub2'
await mgmt_service.create_topic(topic_name)
subscriptions = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
assert len(subscriptions) == 0
await mgmt_service.create_subscription(topic_name, subscription_name_1)
await mgmt_service.create_subscription(topic_name, subscription_name_2)
subscriptions = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
assert len(subscriptions) == 2
assert subscriptions[0].name == subscription_name_1
assert subscriptions[1].name == subscription_name_2
await mgmt_service.delete_subscription(topic_name, subscription_name_1)
await mgmt_service.delete_subscription(topic_name, subscription_name_2)
subscriptions = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
assert len(subscriptions) == 0
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_async_mgmt_subscription_list_runtime_properties(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = 'dkoamv'
subscription_name = 'cxqplc'
await mgmt_service.create_topic(topic_name)
subs = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
subs_infos = await async_pageable_to_list(mgmt_service.list_subscriptions_runtime_properties(topic_name))
assert len(subs) == len(subs_infos) == 0
await mgmt_service.create_subscription(topic_name, subscription_name)
subs = await async_pageable_to_list(mgmt_service.list_subscriptions(topic_name))
subs_infos = await async_pageable_to_list(mgmt_service.list_subscriptions_runtime_properties(topic_name))
assert len(subs) == 1 and len(subs_infos) == 1
assert subs[0].name == subs_infos[0].name == subscription_name
info = subs_infos[0]
assert info.accessed_at_utc is not None
assert info.updated_at_utc is not None
assert info.created_at_utc is not None
assert info.total_message_count == 0
assert info.active_message_count == 0
assert info.dead_letter_message_count == 0
assert info.transfer_dead_letter_message_count == 0
assert info.transfer_message_count == 0
await mgmt_service.delete_subscription(topic_name, subscription_name)
subs_infos = await async_pageable_to_list(mgmt_service.list_subscriptions_runtime_properties(topic_name))
assert len(subs_infos) == 0
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
    async def test_async_mgmt_subscription_get_runtime_properties_basic(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = 'dcvxqa'
subscription_name = 'xvazzag'
await mgmt_service.create_topic(topic_name)
await mgmt_service.create_subscription(topic_name, subscription_name)
sub_runtime_properties = await mgmt_service.get_subscription_runtime_properties(topic_name, subscription_name)
assert sub_runtime_properties
assert sub_runtime_properties.name == subscription_name
assert sub_runtime_properties.created_at_utc is not None
assert sub_runtime_properties.accessed_at_utc is not None
assert sub_runtime_properties.updated_at_utc is not None
assert sub_runtime_properties.total_message_count == 0
assert sub_runtime_properties.active_message_count == 0
assert sub_runtime_properties.dead_letter_message_count == 0
assert sub_runtime_properties.transfer_dead_letter_message_count == 0
assert sub_runtime_properties.transfer_message_count == 0
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_mgmt_subscription_async_update_dict_success(self, servicebus_namespace_connection_string, servicebus_namespace, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = "fjrui"
subscription_name = "eqkovc"
try:
topic_description = await mgmt_service.create_topic(topic_name)
subscription_description = await mgmt_service.create_subscription(topic_description.name, subscription_name)
subscription_description_dict = dict(subscription_description)
# Try updating one setting.
subscription_description_dict["lock_duration"] = datetime.timedelta(minutes=2)
await mgmt_service.update_subscription(topic_description.name, subscription_description_dict)
subscription_description = await mgmt_service.get_subscription(topic_name, subscription_name)
assert subscription_description.lock_duration == datetime.timedelta(minutes=2)
# Now try updating all settings.
subscription_description_dict = dict(subscription_description)
subscription_description_dict["auto_delete_on_idle"] = datetime.timedelta(minutes=10)
subscription_description_dict["dead_lettering_on_message_expiration"] = True
subscription_description_dict["default_message_time_to_live"] = datetime.timedelta(minutes=11)
subscription_description_dict["lock_duration"] = datetime.timedelta(seconds=12)
subscription_description_dict["max_delivery_count"] = 14
# topic_description.enable_partitioning = True # Cannot be changed after creation
# topic_description.requires_session = True # Cannot be changed after creation
await mgmt_service.update_subscription(topic_description.name, subscription_description_dict)
subscription_description = await mgmt_service.get_subscription(topic_description.name, subscription_name)
assert subscription_description.auto_delete_on_idle == datetime.timedelta(minutes=10)
assert subscription_description.dead_lettering_on_message_expiration == True
assert subscription_description.default_message_time_to_live == datetime.timedelta(minutes=11)
assert subscription_description.max_delivery_count == 14
assert subscription_description.lock_duration == datetime.timedelta(seconds=12)
# assert topic_description.enable_partitioning == True
# assert topic_description.requires_session == True
# Finally, test forward_to (separately, as it changes auto_delete_on_idle when you enable it.)
subscription_description_dict = dict(subscription_description)
subscription_description_dict["forward_to"] = "sb://{}.servicebus.windows.net/{}".format(servicebus_namespace.name, topic_name)
subscription_description_dict["forward_dead_lettered_messages_to"] = "sb://{}.servicebus.windows.net/{}".format(servicebus_namespace.name, topic_name)
await mgmt_service.update_subscription(topic_description.name, subscription_description_dict)
subscription_description = await mgmt_service.get_subscription(topic_description.name, subscription_name)
            # Note: we use endswith because the servicebus_namespace_name is replaced locally but not in the properties bag, and we still want to test this.
assert subscription_description.forward_to.endswith(".servicebus.windows.net/{}".format(topic_name))
assert subscription_description.forward_dead_lettered_messages_to.endswith(".servicebus.windows.net/{}".format(topic_name))
finally:
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
@CachedResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_mgmt_subscription_async_update_dict_error(self, servicebus_namespace_connection_string, **kwargs):
mgmt_service = ServiceBusAdministrationClient.from_connection_string(servicebus_namespace_connection_string)
await clear_topics(mgmt_service)
topic_name = "fjrui"
subscription_name = "eqkovc"
try:
topic_description = await mgmt_service.create_topic(topic_name)
subscription_description = await mgmt_service.create_subscription(topic_description.name, subscription_name)
# send in subscription dict without non-name keyword args
subscription_description_only_name = {"name": topic_name}
with pytest.raises(TypeError):
await mgmt_service.update_subscription(topic_description.name, subscription_description_only_name)
finally:
await mgmt_service.delete_subscription(topic_name, subscription_name)
await mgmt_service.delete_topic(topic_name)
|
from crownstone_core.packets.behaviour.BehaviourBase import BehaviourBase
from crownstone_core.packets.behaviour.BehaviourTypes import BehaviourType
from crownstone_core.packets.behaviour.PresenceDescription import BehaviourPresence, BehaviourPresenceType, \
DEFAULT_PRESENCE_DELAY
DEFAULT_PRESENCE = BehaviourPresence().setSpherePresence(BehaviourPresenceType.someoneInSphere)
class SwitchBehaviour(BehaviourBase):
"""
Implements packet generation for SwitchBehaviours
"""
def __init__(self, profileIndex=0, behaviourType=BehaviourType.behaviour, intensity=None, activeDays=None,
time=None, presence=None, endCondition=None, idOnCrownstone=None):
super().__init__(profileIndex, behaviourType, intensity, activeDays, time, presence, endCondition,
idOnCrownstone)
def ignorePresence(self):
self.presence = None
return self
def setPresenceIgnore(self):
return self.ignorePresence()
def setPresenceSomebody(self):
self.setPresenceSomebodyInSphere()
return self
def setPresenceNobody(self):
self.setPresenceNobodyInSphere()
return self
def setPresenceSomebodyInSphere(self):
self.presence = BehaviourPresence().setSpherePresence(BehaviourPresenceType.someoneInSphere)
return self
def setPresenceNobodyInSphere(self):
self.presence = BehaviourPresence().setSpherePresence(BehaviourPresenceType.nobodyInSphere)
return self
def setPresenceInSphere(self):
self.setPresenceSomebodyInSphere()
return self
def setPresenceInLocations(self, locationIds):
self.setPresenceSomebodyInLocations(locationIds)
return self
def setPresenceSomebodyInLocations(self, locationIds, delay=DEFAULT_PRESENCE_DELAY):
self.presence = BehaviourPresence().setLocationPresence(BehaviourPresenceType.somoneInLocation, locationIds,
delay)
return self
def setPresenceNobodyInLocations(self, locationIds, delay=DEFAULT_PRESENCE_DELAY):
self.presence = BehaviourPresence().setLocationPresence(BehaviourPresenceType.nobodyInLocation, locationIds,
delay)
return self
def serialize(self):
arr = super().serialize()
if self.presence is not None:
arr += self.presence.serialize()
else:
anypresence = BehaviourPresence()
arr += anypresence.serialize()
return arr
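# Usage sketch (illustrative; assumes BehaviourBase accepts the defaults passed
# in __init__ above and that its serialize() yields the packet prefix):
#   behaviour = SwitchBehaviour().setPresenceSomebodyInSphere()
#   packet = behaviour.serialize()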
|
import os
import sys
from util import running
from util.logging import *
from util.files import *
import subprocess
FLASH_SCRIPT_PATH = "/META-INF/com/google/android/flash-script.sh"
XPOSED_TRAIL = "_xposed"
START_PARSING = "- Placing files"
STOP_PARSING = "if [ $IS64BIT ]; then"
INSTALL_AND_LINK = "install_and_link"
XPOSED_INSTALLER = "/system/app/XposedInstaller/XposedInstaller_3.0_alpha4.apk"
def run_command(cmd):
if isinstance(cmd, basestring):
cmd = cmd.split(" ")
try:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
e = None
success = True
except OSError as e:
out = None
err = None
success = False
return ({"success":success, "out":out, "err":err, "exception":e})
def parse_flash_script(xposed_path):
startParsing = False
full_path = xposed_path + FLASH_SCRIPT_PATH
xposed_files = []
print_debug("Parsing " + full_path)
with open(full_path) as script:
for line in script:
line = line.strip()
if not startParsing:
if line.find(START_PARSING) != -1:
startParsing = True
else:
if line.find(STOP_PARSING) != -1:
break
elif line == "":
continue
else:
                    print_debug(line)
                    lineInfo = line.split(" ")
                    cmd = lineInfo[0]
                    path = lineInfo[1]
                    if cmd == INSTALL_AND_LINK:
                        path = path + XPOSED_TRAIL
                    if not os.path.exists(xposed_path + path):
                        print_error("File %s does not exist!" % (xposed_path + path))
else:
xposed_files.append(path)
# Add xposed installation file
xposed_files.append(XPOSED_INSTALLER)
return xposed_files
def copy_xposed_file(args, xposed_files):
sys_dir = get_real_path(args.sys_dir)
xpo_dir = get_real_path(args.xpo_dir)
print_info("Copying %s ---> %s..." % (xpo_dir, sys_dir))
    for src_file in xposed_files:
        if src_file.endswith(XPOSED_TRAIL):
            # str.strip() removes a set of characters, not a suffix; slice the suffix off instead
            dst_file = src_file[:-len(XPOSED_TRAIL)]
        else:
            dst_file = src_file
# Check dir
check_missing_dir(sys_dir+ dst_file, args.simulation)
cmd = "cp %s %s" %(xpo_dir+src_file, sys_dir+ dst_file)
ret = running.run_command(cmd, args.simulation)
if not ret["success"]:
print "Error in running command " + cmd
else:
print_debug(cmd)
def add_files(args):
print_info("Adding xposed files")
try:
xpo_dir = get_real_path(args.xpo_dir)
except Exception, e:
print_error("Error: " + e.message)
sys.exit()
xposed_files = parse_flash_script(xpo_dir)
copy_xposed_file(args, xposed_files)
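# Usage sketch (hypothetical; `args` would normally come from argparse and must
# provide .sys_dir, .xpo_dir and .simulation):
#   add_files(args)  # parses the flash script, then copies the listed files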
|
import csv
import os
from utils import *
class SongList():
def __init__(self, fname):
self.songs = {}
self.fname = fname
self.current_key = 0
def save(self):
with open(self.fname, 'wb') as csvfile:
writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_ALL)
for k, song in self.songs.iteritems():
writer.writerow(song.toCSV())
def load(self):
self.songs = {}
if not os.path.exists(self.fname):
log("CSV File Doesn't Exist")
return
log("Opening CSV File and Loading Database")
with open(self.fname, 'r') as csvfile:
dialect = csv.Sniffer().sniff(csvfile.readline())
csvfile.seek(0)
reader = csv.reader(csvfile, dialect)
for line in reader:
temp = SongRow()
temp.fromCSV(line)
self.add(temp, log_new=False)
log("Loaded %d Songs from Database." % self.current_key)
def add(self, song, log_new=True, log_dup=True):
if not self.checkDuplicate(song.title, song.artist):
self.songs[self.current_key] = song
self.songs[self.current_key].key = self.current_key
self.current_key += 1
if log_new:
log("Added %s to list" % song.name())
else:
if log_dup:
log("%s is already in the list" % song.name())
def remove(self, i):
del self.songs[i]
    def checkDuplicate(self, title, artist):
        # normalise quotes the same way SongRow.__init__ does before comparing
        title = title.replace('"', "'")
        artist = artist.replace('"', "'")
        for k, song in self.songs.iteritems():
            if song.title == title and song.artist == artist:
                return True
        return False
def getSongsByProperty(self, property, value):
matches = []
for k, song in self.songs.iteritems():
if getattr(song, property) == value:
matches.append(song)
return matches
class SongRow():
def __init__(self, key=0, title="", artist="", album="", artwork="", source="Unknown", status="Unknown", url="", filename=""):
self.key = key
self.title = title.replace('"', "'")
self.artist = artist.replace('"', "'")
self.album = album
self.artwork = artwork
self.source = source
self.status = status
self.url = url
self.filename = filename
def toCSV(self):
return [self.key, self.title, self.artist, self.album, self.artwork, self.source, self.status, self.url, self.filename]
def fromCSV(self, list):
try:
self.key = list[0]
self.title = list[1]
self.artist = list[2]
self.album = list[3]
self.artwork = list[4]
self.source = list[5]
self.status = list[6]
self.url = list[7]
            self.filename = list[8]  # toCSV writes 9 fields, so the last index is 8
        except IndexError:
            pass
def name(self):
return "%s by %s" % (self.title, self.artist)
|
import sys, os, subprocess, fnmatch, traceback
import datetime, arcpy, json, logging, pythonaddins
from arcpy import env
from arcpy.sa import *
from Delineation import Delineation as Delineation
from BasinParameters import BasinParameters as BasinParameters
from UpdateS3 import Main as UpdateS3
from PullFromS3 import Main as PullFromS3
import time
import json
class Toolbox(object):
def __init__(self):
self.label = "StreamStats Data Tools"
self.alias = "ss-tools"
# List of tool classes associated with this toolbox
self.tools = [basinDelin, basinParams, updateS3Bucket, pullS3]
class updateS3Bucket(object):
def __init__(self):
self.label = "Update S3 Bucket"
self.description = ""
def getParameterInfo(self):
#Define parameter definitions
log_Note = arcpy.Parameter(
displayName = "Describe changes made in this update (limit 50 chars)",
name = "log_Note",
datatype="GPString",
parameterType="Required",
direction="Input")
editor_name = arcpy.Parameter(
displayName="Your name",
name="editor_name",
datatype="GPString",
parameterType="Required",
direction="Input")
workspace = arcpy.Parameter(
displayName = "Temporary Workspace",
name="workspace",
datatype="DEFolder",
parameterType="Required",
direction="Input"
)
state_folder = arcpy.Parameter(
displayName="Select state/region folder",
name="state_folder",
datatype="DEFolder",
parameterType="Optional",
direction="Input")
xml_file = arcpy.Parameter(
displayName="Select xml file",
name="xml_files",
datatype="DEFile",
parameterType="Required",
direction="Input")
copy_bc_layers = arcpy.Parameter(
displayName="Copy 'bc_layers' folder",
name="copy_data_bc_layers",
datatype="GPBoolean",
parameterType="Optional",
direction="Input")
copy_archydro = arcpy.Parameter(
displayName="Copy entire 'archydro' folder",
name="copy_data_archydro",
datatype="GPBoolean",
parameterType="Optional",
direction="Input")
copy_global = arcpy.Parameter(
displayName="Copy 'global.gdb' folder",
name="copy_global",
datatype="GPBoolean",
parameterType="Optional",
direction="Input")
huc_folders = arcpy.Parameter(
displayName="Input huc folders",
name="huc_folders",
datatype=["DEFolder", "GPString"],
parameterType="Optional",
direction="Input",
multiValue="True")
schema_file = arcpy.Parameter(
displayName="Select schema FGDB file",
name="schema_files",
datatype="DEType",
parameterType="Optional",
direction="Input")
parameters = [log_Note, editor_name, workspace, state_folder, xml_file, copy_bc_layers, copy_archydro, copy_global, huc_folders, schema_file]
return parameters
def isLicensed(self): #optional
return True
def updateParameters(self, parameters):
if not parameters[2].altered:
staging = 'E:/staging/data'
if (os.path.isdir(staging)):
parameters[2].value = staging
return
def updateMessages(self, parameters):
if parameters[0].altered:
logNote = parameters[0].valueAsText
if len(logNote) > 50:
pythonaddins.MessageBox('Note cannot exceed 50 characters', 'WARNING', 0)
if not parameters[8].altered:
parameters[8].value = ''
if parameters[7].value == True or parameters[8].valueAsText:
parameters[6].value = False
return
def execute(self, parameters, messages):
updates3 = UpdateS3(parameters)
class basinDelin(object):
# region Constructor
def __init__(self):
self.label = "Basin Delineation"
self.description = ""
def getParameterInfo(self):
# Define parameter definitions
state_folder = arcpy.Parameter(
displayName="Select state/region folder",
name="state_folder",
datatype="DEType",
parameterType="Required",
direction="Input")
schema_file = arcpy.Parameter(
displayName="Regional schema FGDB file",
name="schema_file",
datatype="DEType",
parameterType="Required",
direction="Input")
xml_file = arcpy.Parameter(
displayName="Regional xml",
name="xml_file",
datatype="DEFile",
parameterType="Required",
direction="Input")
workspaceID = arcpy.Parameter(
displayName="Workspace folder",
name="workspaceID",
datatype="DEFolder",
parameterType="Required",
direction="Input")
pourpoint = arcpy.Parameter(
displayName="Pour point",
name="pourpoint",
datatype="GPString",
parameterType="Required",
direction="Input")
basin_params = arcpy.Parameter(
displayName="Calculate All Basin Characteristics",
name="basin_params",
datatype="GPBoolean",
parameterType="Optional",
direction="Input")
parameters_list = arcpy.Parameter(
displayName="Characteristics",
name="parameters_list",
datatype="GPString",
parameterType="Optional",
direction="Input")
parameters = [state_folder, schema_file, xml_file, workspaceID, pourpoint, basin_params, parameters_list]
return parameters
def isLicensed(self):
return True
def updateParameters(self, parameters):
if not parameters[5].altered:
parameters[5].value = '4326'
return
    def updateMessages(self, parameters):  # arcpy expects lowercase 'updateMessages'
return
def execute(self, parameters, messages):
state_folder = parameters[0].valueAsText
schema_file = parameters[1].valueAsText
xml_file = parameters[2].valueAsText
workspaceID = parameters[3].valueAsText
pourpoint = parameters[4].valueAsText
basin_params = parameters[5].valueAsText
parameters_list = parameters[6].valueAsText
arcpy.env.overwriteOutput = True
stabbr = os.path.basename(state_folder)
workspace_name = os.path.basename(workspaceID)
GW_location = os.path.join(workspaceID, workspace_name + '.gdb', 'Layers')
GW_file = os.path.join(GW_location, 'GlobalWatershed')
GWP_file = os.path.join(GW_location, 'GlobalWatershedPoint')
def validatePourPoint(ppoint):
"""validatePourPoint(ppoint=None)
Determines if input pourpoint is a valid json point
"""
if ppoint.startswith('[') and ppoint.endswith(']'):
messages.addMessage('Found a valid pourpoint: ' + ppoint)
return ppoint
else:
messages.addErrorMessage('You did not select a valid pourpoint. Make sure it is contained within square brackets.')
sys.exit()
def validateXML(xml):
"""validateStreamStatsXML(xml=None)
Determines if input xml is a valid streamstats XML file
"""
#get filename
filename = xml.replace('\\','/').split('/')[-1]
#validate xml file
if fnmatch.fnmatch(filename, 'StreamStats*.xml'):
messages.addMessage('Found a valid .xml file: ' + filename)
return xml
else:
messages.addErrorMessage('You did not select a valid xml file: ' + filename)
sys.exit()
def validateSchema(item):
"""validateSchema(item=None)
Determines if input schema is either a valid .prj file or a valid file geodatabse
"""
filename = item.replace('\\','/').split('/')[-1]
#validate prj file
if os.path.isfile(item) and fnmatch.fnmatch(filename, '*.prj'):
try:
                arcpy.SpatialReference(item)  # validate using the full path, not the bare filename
except:
messages.addErrorMessage('You did not select a valid prj file: ' + filename)
else:
messages.addMessage('Found a valid .prj file: ' + filename)
return item
#validate file gdb
            elif os.path.isdir(item) and filename.find('gdb') != -1:
try:
desc = arcpy.Describe(item)
if desc.dataType == 'Workspace':
messages.addMessage('Found a valid file geodatabase: ' + filename + ', item: ' + item )
return item
else:
messages.addErrorMessage('You did not select a valid file geodatabase: ' + filename)
except:
messages.addErrorMessage('You did not select a valid file geodatabase: ' + filename)
else:
messages.addErrorMessage('You did not select a valid schema: ' + item)
sys.exit()
wsID = os.path.basename(workspaceID)
wshed = os.path.join(workspaceID, wsID + '.gdb', 'Layers', 'GlobalWatershed')
if arcpy.Exists(wshed):
val = pythonaddins.MessageBox(wshed + ' already exists. Would you like to overwrite it?', 'Warning', 4)
if val == 'No':
messages.addWarningMessage('Script cancelled due to existing watershed.')
sys.exit()
messages.addMessage('Delineating Basin')
messages.addMessage('schema: ' + schema_file)
schemaCheck = validateSchema(schema_file)
xmlCheck = validateXML(xml_file)
ppoint = validatePourPoint(pourpoint)
try:
ssdel = Delineation(stabbr, schemaCheck, xmlCheck, workspaceID, state_folder)
ppoint = ssdel._buildAHPourpoint(ppoint, '4326')
ssdel.Delineate(ppoint)
except:
tb = traceback.format_exc()
if "SystemExit" not in tb:
messages.addErrorMessage(tb)
if arcpy.Exists(GW_file):
messages.addMessage('Placing on Map')
mxd = arcpy.mapping.MapDocument("CURRENT")
df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
newlayer = arcpy.mapping.Layer(GW_file)
arcpy.mapping.AddLayer(df, newlayer,"BOTTOM")
if basin_params == "true" or parameters_list:
if not parameters_list:
parameters_list = ''
try:
messages.addMessage('Calculating Basin Characteristics')
ssBp = BasinParameters(stabbr, workspaceID, parameters_list, "none")
if ssBp.isComplete:
params = []
for parameter in ssBp.ParameterList:
params.append(parameter['code'])
messages.addMessage("Parameters: " + (',').join(params))
except:
tb = traceback.format_exc()
if "SystemExit" not in tb:
messages.addErrorMessage(tb)
else:
if ssdel.error != "":
messages.addErrorMessage('Delineation Error ' + ssdel.error)
if "no cataloguing unit" in ssdel.error:
                messages.addErrorMessage('Delineation failed. Please make sure the point is in the given region. If delineation still fails, try again in another map document or ArcMap session.')
arcMessages = arcpy.GetMessages()
if arcMessages.find('ERROR') > -1 or arcMessages.find('Failed') > -1:
messages.addGPMessages()
arcpy.ResetEnvironments()
arcpy.ClearEnvironment("workspace")
class basinParams(object):
# region Constructor
def __init__(self):
self.label = "Calculate Basin Characteristics"
self.description = ""
def getParameterInfo(self):
# Define parameter definitions
state_folder = arcpy.Parameter(
displayName="Select state/region folder",
name="state_folder",
datatype="DEFolder",
parameterType="Required",
direction="Input")
parameters_list = arcpy.Parameter(
displayName="Characteristics",
name="parameters_list",
datatype="GPString",
parameterType="Optional",
direction="Input")
input_basin = arcpy.Parameter(
displayName="Input Watershed",
name="input_basin",
datatype="DEFeatureClass",
parameterType="Required",
direction="Input")
parameters = [state_folder, parameters_list, input_basin]
return parameters
def isLicensed(self):
return True
def updateParameters(self, parameters):
return
    def updateMessages(self, parameters):  # arcpy expects lowercase 'updateMessages'
return
def execute(self, parameters, messages):
state_folder = parameters[0].valueAsText
parameters_list = parameters[1].valueAsText
input_basin = parameters[2].valueAsText
arcpy.env.overwriteOutput = True
if not parameters_list:
parameters_list = ''
workspace_gdb_name = os.path.dirname(os.path.dirname(input_basin))
workspaceID = os.path.dirname(workspace_gdb_name)
stabbr = os.path.basename(state_folder)
tempDir = os.path.join(workspaceID, 'Temp')
try:
messages.addMessage('Calculating Basin Characteristics')
ssBp = BasinParameters(stabbr, workspaceID, parameters_list, input_basin)
if ssBp.isComplete:
params = []
for parameter in ssBp.ParameterList:
params.append(parameter['code'])
messages.addMessage("Parameters: " + (',').join(params))
except:
tb = traceback.format_exc()
if "SystemExit" not in tb:
messages.addErrorMessage(tb)
finally:
arcMessages = arcpy.GetMessages()
if arcMessages.find('ERROR') > -1 or arcMessages.find('Failed') > -1:
messages.addGPMessages()
elif arcMessages.find('Raster not found') > -1:
messages.addWarningMessage('Raster not found for one or more characteristics. Please make sure the data for each characteristic is in the "bc_layers" folder.')
elif arcMessages.find('Cataloging Unit') > -1:
messages.addGPMessages()
messages.addErrorMessage('Please make sure the basin is in the given region. If computation still fails, try again in another map document or ArcMap session.')
arcpy.ResetEnvironments()
arcpy.ClearEnvironment("workspace")
class pullS3(object):
def __init__(self):
self.label = "Pull Regional Data From S3"
self.description = ""
def getParameterInfo(self):
#Define parameter definitions
region_id = arcpy.Parameter(
displayName="Region ID/Abbreviation",
name="region_id",
datatype="GPString",
parameterType="Required",
direction="Input"
)
workspace = arcpy.Parameter(
displayName = "Destination Folder",
name="workspace",
datatype="DEFolder",
parameterType="Required",
direction="Input"
)
copy_whole = arcpy.Parameter(
displayName="Copy whole regional folder",
name="copy_whole",
datatype="GPBoolean",
parameterType="Optional",
direction="Input"
)
copy_whole_archydro = arcpy.Parameter(
displayName="Copy whole 'archydro' folder",
name="copy_whole_archydro",
datatype="GPBoolean",
parameterType="Optional",
direction="Input"
)
copy_global = arcpy.Parameter(
displayName="Copy 'global.gdb' from 'archydro' folder",
name="copy_global",
datatype="GPBoolean",
parameterType="Optional",
direction="Input"
)
huc_folders = arcpy.Parameter(
displayName="Input huc IDs",
name="huc_folders",
datatype="GPString",
parameterType="Optional",
direction="Input")
copy_bc_layers = arcpy.Parameter(
displayName="Copy 'bc_layers' folder",
name="copy_bc_layers",
datatype="GPBoolean",
parameterType="Optional",
direction="Input"
)
copy_xml = arcpy.Parameter(
displayName="Copy '.xml' file",
name="copy_xml",
datatype="GPBoolean",
parameterType="Optional",
direction="Input"
)
copy_schema = arcpy.Parameter(
displayName="Copy 'schema' folder",
name="copy_schema",
datatype="GPBoolean",
parameterType="Optional",
direction="Input"
)
parameters = [region_id, workspace, copy_whole, copy_whole_archydro, copy_global, huc_folders, copy_bc_layers, copy_xml, copy_schema]
return parameters
def isLicensed(self): #optional
return True
def updateParameters(self, parameters): #optional
return
def updateMessages(self, parameters):
if not parameters[5].altered:
parameters[5].value = ''
if parameters[4].value == True or parameters[5].valueAsText:
parameters[3].value = False
if any([parameters[3].value == True, parameters[4].value == True, parameters[5].valueAsText, parameters[6].value == True, parameters[7].value == True, parameters[8].value == True]):
parameters[2].value = False
return
def execute(self, parameters, messages):
try:
PullFromS3(parameters)
except:
tb = traceback.format_exc()
if "SystemExit" not in tb:
messages.addErrorMessage(tb)
|
expected_output = {
"vrf": {
"default": {
"address_family": {
"ipv4": {
"prefix": {
"10.16.2.2/32": {
"epoch": 2,
"nexthop": {
"10.0.0.13": {
"outgoing_interface": {
"GigabitEthernet4": {
"local_label": 16002,
"outgoing_label": ["16002"],
"outgoing_label_backup": "16002",
"repair": "attached-nexthop 10.0.0.5 GigabitEthernet2",
}
}
},
"10.0.0.25": {
"outgoing_interface": {
"GigabitEthernet5": {
"local_label": 16002,
"outgoing_label": ["16002"],
"outgoing_label_backup": "16002",
"repair": "attached-nexthop 10.0.0.13 GigabitEthernet4",
}
}
},
"10.0.0.5": {
"outgoing_interface": {
"GigabitEthernet2": {
"local_label": 16002,
"outgoing_label": ["16002"],
"outgoing_label_backup": "16002",
"repair": "attached-nexthop 10.0.0.9 GigabitEthernet3",
}
}
},
"10.0.0.9": {
"outgoing_interface": {
"GigabitEthernet3": {
"local_label": 16002,
"outgoing_label": ["16002"],
"outgoing_label_backup": "16002",
"repair": "attached-nexthop 10.0.0.25 GigabitEthernet5",
}
}
},
},
"per_destination_sharing": True,
"sr_local_label_info": "global/16002 [0x1B]",
}
}
}
}
}
}
}
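# A minimal usage sketch (hypothetical device/parser names): a fixture shaped
# like this is typically compared against parsed CLI output in a unit test,
# e.g.
#     parsed = device.parse('show ip cef 10.16.2.2/32 detail')
#     assert parsed == expected_output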
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
__author__ = 'bibow'
import json, uuid, os
from datetime import datetime, date
from decimal import Decimal
import logging
logger = logging.getLogger()
# LOGGINGLEVEL is expected to hold a logging-level expression such as
# "logging.INFO"; eval resolves it against the imported logging module.
logger.setLevel(eval(os.environ["LOGGINGLEVEL"]))
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
sqs = boto3.resource('sqs')
configData = dynamodb.Table('config_data')
response = configData.get_item(
Key={
'key': "FRONTENDAPI"
}
)
FRONTENDAPI = response["Item"]["value"]
# Helper class to convert a DynamoDB item to JSON.
class JSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
elif isinstance(o, (datetime, date)):
return o.strftime("%Y-%m-%d %H:%M:%S")
elif isinstance(o, (bytes, bytearray)):
return str(o)
else:
return super(JSONEncoder, self).default(o)
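# Usage sketch for the encoder above: DynamoDB items surface numbers as
# Decimal, which json.dumps cannot serialize on its own.
#
#     json.dumps({"qty": Decimal("3"), "price": Decimal("9.99")}, cls=JSONEncoder)
#     # -> '{"qty": 3, "price": 9.99}'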
class InvoicesModel(object):
def __init__(self):
self._invoices = dynamodb.Table('invoices')
@property
def invoices(self):
return self._invoices
def _getInvoice(self, frontend, boInvoiceId):
response = self.invoices.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('bo_invoice_id').eq(boInvoiceId),
Limit=1
)
return response
def getInvoices(self):
pass
def getInvoice(self, frontend, boInvoiceId):
invoice = {}
response = self._getInvoice(frontend, boInvoiceId)
if response['Count'] != 0:
invoice = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(invoice, indent=4, cls=JSONEncoder))
}
def insertInvoice(self, backoffice, frontend, boInvoiceId, invoice):
invoice["tx_status"] = invoice.get("tx_status", "N")
invoice["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
invoice["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getInvoice(frontend, boInvoiceId)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if invoice['data'] != item['data']:
createDt = item["create_dt"]
invoice["id"] = _id
invoice["create_dt"] = createDt
self.invoices.put_item(Item=invoice)
log = "Successfully update invoice: {0}/{1}".format(frontend, boInvoiceId)
logger.info(log)
else:
log = "No update invoice: {0}/{1}".format(frontend, boInvoiceId)
logger.info(log)
response = self.invoices.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
invoice["id"] = _id
self.invoices.put_item(Item=invoice)
log = "Successfully insert invoice: {0}/{1}".format(frontend, boInvoiceId)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"bo_invoice_id": boInvoiceId
})
}
def updateInvoiceStatus(self, id, invoiceStatus):
response = self.invoices.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_invoice_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': invoiceStatus['tx_status'],
':val2': invoiceStatus['tx_note'],
':val3': invoiceStatus['fe_invoice_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
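# Hypothetical call sketch for the upsert flow above (field names as expected
# by insertInvoice; 'data' is the payload compared against the stored item):
#
#     model = InvoicesModel()
#     model.insertInvoice('NetSuite', 'magento', 'INV-001',
#                         {'frontend': 'magento', 'bo_invoice_id': 'INV-001',
#                          'data': {'total': 100}})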
class PurchaseOrdersModel(object):
def __init__(self):
self._purchaseOrders = dynamodb.Table('purchaseorders')
@property
def purchaseOrders(self):
return self._purchaseOrders
def _getPurchaseOrder(self, frontend, boPONum):
response = self.purchaseOrders.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('bo_po_num').eq(boPONum),
Limit=1
)
return response
def getPurchaseOrders(self):
pass
def getPurchaseOrder(self, frontend, boPONum):
purchaseOrder = {}
response = self._getPurchaseOrder(frontend, boPONum)
if response['Count'] != 0:
purchaseOrder = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(purchaseOrder, indent=4, cls=JSONEncoder))
}
def insertPurchaseOrder(self, backoffice, frontend, boPONum, purchaseOrder):
purchaseOrder["tx_status"] = purchaseOrder.get("tx_status", "N")
purchaseOrder["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
purchaseOrder["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getPurchaseOrder(frontend, boPONum)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if purchaseOrder['data'] != item['data']:
createDt = item["create_dt"]
purchaseOrder["id"] = _id
purchaseOrder["create_dt"] = createDt
self.purchaseOrders.put_item(Item=purchaseOrder)
log = "Successfully update purchase order: {0}/{1}".format(frontend, boPONum)
logger.info(log)
else:
log = "No update purchase order: {0}/{1}".format(frontend, boPONum)
logger.info(log)
response = self.purchaseOrders.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
purchaseOrder["id"] = _id
self.purchaseOrders.put_item(Item=purchaseOrder)
log = "Successfully insert purchase order: {0}/{1}".format(frontend, boPONum)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"bo_po_num": boPONum
})
}
def updatePurchaseOrderStatus(self, id, purchaseOrderStatus):
response = self.purchaseOrders.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_po_num=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': purchaseOrderStatus['tx_status'],
':val2': purchaseOrderStatus['tx_note'],
':val3': purchaseOrderStatus['fe_po_num']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ProductsModel(object):
def __init__(self):
self._products = dynamodb.Table('products')
@property
def products(self):
return self._products
def _getProduct(self, frontend, sku):
response = self.products.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('sku').eq(sku),
Limit=1
)
return response
def getProducts(self):
pass
def getProduct(self, frontend, sku):
product = {}
response = self._getProduct(frontend, sku)
if response['Count'] != 0:
product = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(product, indent=4, cls=JSONEncoder))
}
def insertProduct(self, backoffice, frontend, sku, product):
product["tx_status"] = product.get("tx_status", "N")
product["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
product["tx_note"] = '{0} -> DataWald'.format(backoffice)
product["old_data"] = {}
response = self._getProduct(frontend, sku)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if product['data'] != item['data']:
createDt = item["create_dt"]
product["id"] = _id
for k,v in item["data"].items():
if v != product["data"].get(k, None):
product["old_data"][k] = v
product["create_dt"] = createDt
self.products.put_item(Item=product)
log = "Successfully update product: {0}/{1}".format(frontend, sku)
logger.info(log)
else:
log = "No update product: {0}/{1}".format(frontend, sku)
logger.info(log)
response = self.products.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
product["id"] = _id
self.products.put_item(Item=product)
log = "Successfully insert product: {0}/{1}".format(frontend, sku)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"sku": sku
})
}
def updateProductStatus(self, id, productStatus):
response = self.products.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_product_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': productStatus['tx_status'],
':val2': productStatus['tx_note'],
':val3': productStatus['fe_product_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ProductsCustomOptionModel(object):
def __init__(self):
self._productsCustomOption = dynamodb.Table('products-customoption')
@property
def productsCustomOption(self):
return self._productsCustomOption
def _getProductCustomOption(self, frontend, sku):
response = self.productsCustomOption.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('sku').eq(sku),
Limit=1
)
return response
def getProductsCustomOption(self):
pass
def getProductCustomOption(self, frontend, sku):
productCustomOption = {}
response = self._getProductCustomOption(frontend, sku)
if response['Count'] != 0:
productCustomOption = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(productCustomOption, indent=4, cls=JSONEncoder))
}
def insertProductCustomOption(self, backoffice, frontend, sku, productCustomOption):
productCustomOption["tx_status"] = productCustomOption.get("tx_status", "N")
productCustomOption["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
productCustomOption["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getProductCustomOption(frontend, sku)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if productCustomOption['data'] != item['data']:
createDt = item["create_dt"]
productCustomOption["id"] = _id
productCustomOption["create_dt"] = createDt
self.productsCustomOption.put_item(Item=productCustomOption)
log = "Successfully update product custom option: {0}/{1}".format(frontend, sku)
logger.info(log)
else:
log = "No update product custom option: {0}/{1}".format(frontend, sku)
logger.info(log)
response = self.productsCustomOption.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
productCustomOption["id"] = _id
self.productsCustomOption.put_item(Item=productCustomOption)
log = "Successfully insert product custom option: {0}/{1}".format(frontend, sku)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"sku": sku
})
}
def updateProductCustomOptionStatus(self, id, productCustomOptionStatus):
response = self.productsCustomOption.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_product_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': productCustomOptionStatus['tx_status'],
':val2': productCustomOptionStatus['tx_note'],
':val3': productCustomOptionStatus['fe_product_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ProductsInventoryModel(object):
def __init__(self):
self._productsInventory = dynamodb.Table('products-inventory')
@property
def productsInventory(self):
return self._productsInventory
def _getProductInventory(self, frontend, sku):
response = self.productsInventory.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('sku').eq(sku),
Limit=1
)
return response
def getProductsInventory(self):
pass
def getProductInventory(self, frontend, sku):
productInventory = {}
response = self._getProductInventory(frontend, sku)
if response['Count'] != 0:
productInventory = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(productInventory, indent=4, cls=JSONEncoder))
}
    def setInventory(self, inData, data):
        """Merge incoming inventory lines into the stored ones, warehouse by
        warehouse: a non-'full' line is a delta on the stored on-hand
        quantity, while a 'full' line replaces it outright."""
        for line in inData:
            rows = list(filter(lambda t: (t["warehouse"]==line["warehouse"]), data))
            if len(rows) > 0:
                line["past_on_hand"] = rows[0]["on_hand"]
                if not line["full"]:
                    line["on_hand"] = line["past_on_hand"] + line["qty"]
                else:
                    line["on_hand"] = line["qty"]
                if line["on_hand"] > 0:
                    line["in_stock"] = True
def insertProductInventory(self, backoffice, frontend, sku, productInventory):
productInventory["tx_status"] = productInventory.get("tx_status", "N")
productInventory["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
productInventory["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getProductInventory(frontend, sku)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if productInventory['data'] != item['data']:
self.setInventory(productInventory["data"], item["data"])
createDt = item["create_dt"]
productInventory["id"] = _id
productInventory["create_dt"] = createDt
self.productsInventory.put_item(Item=productInventory)
log = "Successfully update product inventory: {0}/{1}".format(frontend, sku)
logger.info(log)
else:
log = "No update product inventory: {0}/{1}".format(frontend, sku)
logger.info(log)
response = self.productsInventory.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N", # if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
productInventory["id"] = _id
self.productsInventory.put_item(Item=productInventory)
log = "Successfully insert product inventory: {0}/{1}".format(frontend, sku)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"sku": sku
})
}
def updateProductInventoryStatus(self, id, productInventoryStatus):
response = self.productsInventory.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_product_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': productInventoryStatus['tx_status'],
':val2': productInventoryStatus['tx_note'],
':val3': productInventoryStatus['fe_product_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ProductsImageGalleryModel(object):
def __init__(self):
self._productsImageGallery = dynamodb.Table('products-imagegallery')
@property
def productsImageGallery(self):
return self._productsImageGallery
def _getProductImageGallery(self, frontend, sku):
response = self.productsImageGallery.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('sku').eq(sku),
Limit=1
)
return response
def getProductsImageGallery(self):
pass
def getProductImageGallery(self, frontend, sku):
productImageGallery = {}
response = self._getProductImageGallery(frontend, sku)
if response['Count'] != 0:
productImageGallery = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(productImageGallery, indent=4, cls=JSONEncoder))
}
def insertProductImageGallery(self, backoffice, frontend, sku, productImageGallery):
productImageGallery["tx_status"] = productImageGallery.get("tx_status", "N")
productImageGallery["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
productImageGallery["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getProductImageGallery(frontend, sku)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if productImageGallery['data'] != item['data']:
createDt = item["create_dt"]
productImageGallery["id"] = _id
productImageGallery["create_dt"] = createDt
self.productsImageGallery.put_item(Item=productImageGallery)
log = "Successfully update product image gallery: {0}/{1}".format(frontend, sku)
logger.info(log)
else:
log = "No update product image gallery: {0}/{1}".format(frontend, sku)
logger.info(log)
response = self.productsImageGallery.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
productImageGallery["id"] = _id
self.productsImageGallery.put_item(Item=productImageGallery)
log = "Successfully insert product image gallery: {0}/{1}".format(frontend, sku)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"sku": sku
})
}
def updateProductImageGalleryStatus(self, id, productImageGalleryStatus):
response = self.productsImageGallery.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_product_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': productImageGalleryStatus['tx_status'],
':val2': productImageGalleryStatus['tx_note'],
':val3': productImageGalleryStatus['fe_product_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ProductsLinksModel(object):
def __init__(self):
self._productsLinks = dynamodb.Table('products-links')
@property
def productsLinks(self):
return self._productsLinks
def _getProductLinks(self, frontend, sku):
response = self.productsLinks.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('sku').eq(sku),
Limit=1
)
return response
def getProductsLinks(self):
pass
def getProductLinks(self, frontend, sku):
productLinks = {}
response = self._getProductLinks(frontend, sku)
if response['Count'] != 0:
productLinks = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(productLinks, indent=4, cls=JSONEncoder))
}
def insertProductLinks(self, backoffice, frontend, sku, productLinks):
productLinks["tx_status"] = productLinks.get("tx_status", "N")
productLinks["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
productLinks["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getProductLinks(frontend, sku)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if productLinks['data'] != item['data']:
createDt = item["create_dt"]
productLinks["id"] = _id
productLinks["create_dt"] = createDt
self.productsLinks.put_item(Item=productLinks)
log = "Successfully update product links: {0}/{1}".format(frontend, sku)
logger.info(log)
else:
log = "No update product links: {0}/{1}".format(frontend, sku)
logger.info(log)
response = self.productsLinks.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
productLinks["id"] = _id
self.productsLinks.put_item(Item=productLinks)
log = "Successfully insert product links: {0}/{1}".format(frontend, sku)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"sku": sku
})
}
def updateProductLinksStatus(self, id, productLinksStatus):
response = self.productsLinks.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_product_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': productLinksStatus['tx_status'],
':val2': productLinksStatus['tx_note'],
':val3': productLinksStatus['fe_product_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ProductsCategoriesModel(object):
def __init__(self):
self._productsCategories = dynamodb.Table('products-categories')
@property
def productsCategories(self):
return self._productsCategories
def _getProductCategories(self, frontend, sku):
response = self.productsCategories.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('sku').eq(sku),
Limit=1
)
return response
def getProductsCategories(self):
pass
def getProductCategories(self, frontend, sku):
productCategories = {}
response = self._getProductCategories(frontend, sku)
if response['Count'] != 0:
productCategories = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(productCategories, indent=4, cls=JSONEncoder))
}
def insertProductCategories(self, backoffice, frontend, sku, productCategories):
productCategories["tx_status"] = productCategories.get("tx_status", "N")
productCategories["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
productCategories["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getProductCategories(frontend, sku)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if productCategories['data'] != item['data']:
createDt = item["create_dt"]
productCategories["id"] = _id
productCategories["create_dt"] = createDt
self.productsCategories.put_item(Item=productCategories)
log = "Successfully update product categories: {0}/{1}".format(frontend, sku)
logger.info(log)
else:
log = "No update product categories: {0}/{1}".format(frontend, sku)
logger.info(log)
response = self.productsCategories.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
productCategories["id"] = _id
self.productsCategories.put_item(Item=productCategories)
log = "Successfully insert product categories: {0}/{1}".format(frontend, sku)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"sku": sku
})
}
def updateProductCategoriesStatus(self, id, productCategoriesStatus):
response = self.productsCategories.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_product_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': productCategoriesStatus['tx_status'],
':val2': productCategoriesStatus['tx_note'],
':val3': productCategoriesStatus['fe_product_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ProductsPriceLevelsModel(object):
def __init__(self):
self._productsPriceLevels = dynamodb.Table('products-pricelevels')
@property
def productsPriceLevels(self):
return self._productsPriceLevels
def _getProductPriceLevels(self, frontend, sku):
response = self.productsPriceLevels.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('sku').eq(sku),
Limit=1
)
return response
def getProductsPriceLevels(self):
pass
def getProductPriceLevels(self, frontend, sku):
productPriceLevels = {}
response = self._getProductPriceLevels(frontend, sku)
if response['Count'] != 0:
productPriceLevels = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(productPriceLevels, indent=4, cls=JSONEncoder))
}
def insertProductPriceLevels(self, backoffice, frontend, sku, productPriceLevels):
productPriceLevels["tx_status"] = productPriceLevels.get("tx_status", "N")
productPriceLevels["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
productPriceLevels["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getProductPriceLevels(frontend, sku)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if productPriceLevels['data'] != item['data']:
createDt = item["create_dt"]
productPriceLevels["id"] = _id
productPriceLevels["create_dt"] = createDt
self.productsPriceLevels.put_item(Item=productPriceLevels)
log = "Successfully update product pricelevels: {0}/{1}".format(frontend, sku)
logger.info(log)
else:
log = "No update product pricelevels: {0}/{1}".format(frontend, sku)
logger.info(log)
response = self.productsPriceLevels.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
productPriceLevels["id"] = _id
self.productsPriceLevels.put_item(Item=productPriceLevels)
log = "Successfully insert product pricelevels: {0}/{1}".format(frontend, sku)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"sku": sku
})
}
def updateProductPriceLevelsStatus(self, id, productPriceLevelsStatus):
response = self.productsPriceLevels.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_product_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': productPriceLevelsStatus['tx_status'],
':val2': productPriceLevelsStatus['tx_note'],
':val3': productPriceLevelsStatus['fe_product_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ProductsVariantsModel(object):
def __init__(self):
self._productsVariants = dynamodb.Table('products-variants')
@property
def productsVariants(self):
return self._productsVariants
def _getProductVariants(self, frontend, sku):
response = self.productsVariants.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('sku').eq(sku),
Limit=1
)
return response
def getProductsVariants(self):
pass
def getProductVariants(self, frontend, sku):
productVariants = {}
response = self._getProductVariants(frontend, sku)
if response['Count'] != 0:
productVariants = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(productVariants, indent=4, cls=JSONEncoder))
}
def insertProductVariants(self, backoffice, frontend, sku, productVariants):
productVariants["tx_status"] = productVariants.get("tx_status", "N")
productVariants["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
productVariants["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getProductVariants(frontend, sku)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if productVariants['data'] != item['data']:
createDt = item["create_dt"]
productVariants["id"] = _id
productVariants["create_dt"] = createDt
self.productsVariants.put_item(Item=productVariants)
log = "Successfully update product variants: {0}/{1}".format(frontend, sku)
logger.info(log)
else:
log = "No update product variants: {0}/{1}".format(frontend, sku)
logger.info(log)
response = self.productsVariants.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
productVariants["id"] = _id
self.productsVariants.put_item(Item=productVariants)
log = "Successfully insert product variants: {0}/{1}".format(frontend, sku)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"sku": sku
})
}
def updateProductVariantsStatus(self, id, productVariantsStatus):
response = self.productsVariants.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_product_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': productVariantsStatus['tx_status'],
':val2': productVariantsStatus['tx_note'],
':val3': productVariantsStatus['fe_product_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class CustomersModel(object):
def __init__(self):
self._customers = dynamodb.Table('customers-fe')
@property
def customers(self):
return self._customers
def _getCustomer(self, frontend, boCustomerId):
response = self.customers.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('bo_customer_id').eq(boCustomerId),
Limit=1
)
return response
def getCustomers(self):
pass
def getCustomer(self, frontend, boCustomerId):
customer = {}
response = self._getCustomer(frontend, boCustomerId)
if response['Count'] != 0:
customer = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(customer, indent=4, cls=JSONEncoder))
}
def insertCustomer(self, backoffice, frontend, boCustomerId, customer):
customer["tx_status"] = customer.get("tx_status", "N")
customer["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
customer["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getCustomer(frontend, boCustomerId)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if customer['data'] != item['data']:
createDt = item["create_dt"]
customer["id"] = _id
customer["create_dt"] = createDt
self.customers.put_item(Item=customer)
log = "Successfully update customer: {0}/{1}".format(frontend, boCustomerId)
logger.info(log)
else:
log = "No update customer: {0}/{1}".format(frontend, boCustomerId)
logger.info(log)
response = self.customers.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
customer["id"] = _id
self.customers.put_item(Item=customer)
log = "Successfully insert customer: {0}/{1}".format(frontend, boCustomerId)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"bo_customer_id": boCustomerId
})
}
def updateCustomerStatus(self, id, customerStatus):
response = self.customers.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_customer_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': customerStatus['tx_status'],
':val2': customerStatus['tx_note'],
':val3': customerStatus['fe_customer_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
class ShipmentsModel(object):
def __init__(self):
self._shipments = dynamodb.Table('shipments')
@property
def shipments(self):
return self._shipments
def _getShipment(self, frontend, boShipmentId):
response = self.shipments.query(
IndexName="frontend_index",
KeyConditionExpression=Key('frontend').eq(frontend) & Key('bo_shipment_id').eq(boShipmentId),
Limit=1
)
return response
def getShipments(self):
pass
def getShipment(self, frontend, boShipmentId):
shipment = {}
response = self._getShipment(frontend, boShipmentId)
if response['Count'] != 0:
shipment = response["Items"][0]
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(shipment, indent=4, cls=JSONEncoder))
}
def insertShipment(self, backoffice, frontend, boShipmentId, shipment):
shipment["tx_status"] = shipment.get("tx_status", "N")
shipment["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
shipment["tx_note"] = '{0} -> DataWald'.format(backoffice)
response = self._getShipment(frontend, boShipmentId)
_id = str(uuid.uuid1())
if response['Count'] != 0:
item = response["Items"][0]
_id = item["id"]
if shipment['data'] != item['data']:
createDt = item["create_dt"]
shipment["id"] = _id
shipment["create_dt"] = createDt
self.shipments.put_item(Item=shipment)
log = "Successfully update shipment: {0}/{1}".format(frontend, boShipmentId)
logger.info(log)
else:
log = "No update shipment: {0}/{1}".format(frontend, boShipmentId)
logger.info(log)
response = self.shipments.update_item(
Key={
'id': _id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': "N" if item['tx_status'] in ('N', 'F') else 'I',
':val2': log
},
ReturnValues="UPDATED_NEW"
)
else:
shipment["id"] = _id
self.shipments.put_item(Item=shipment)
log = "Successfully insert shipment: {0}/{1}".format(frontend, boShipmentId)
logger.info(log)
return {
"statusCode": 200,
"headers": {},
"body": json.dumps({
"id": _id,
"frontend": frontend,
"bo_shipment_id": boShipmentId
})
}
def updateShipmentStatus(self, id, shipmentStatus):
response = self.shipments.update_item(
Key={
'id': id
},
UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, fe_shipment_id=:val3",
ExpressionAttributeValues={
':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
':val1': shipmentStatus['tx_status'],
':val2': shipmentStatus['tx_note'],
':val3': shipmentStatus['fe_shipment_id']
},
ReturnValues="UPDATED_NEW"
)
return {
"statusCode": 200,
"headers": {},
"body": (json.dumps(response, indent=4, cls=JSONEncoder))
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def string(zahl):
    # Map a digit value to its character: 0-9 as-is, 10 and above as 'A', 'B', ...
    if zahl <= 9:
        return str(zahl)
    else:
        return chr(55 + zahl)
def horner(b, Z):
    ergebnis = ''
    while Z > 0:
        rest = Z % b
        ergebnis = string(rest) + ergebnis
        Z = (Z - rest) // b  # integer division keeps Z an int under Python 3
    return ergebnis
if __name__ == "__main__":
r = horner(16, 31562)
print("Result:" + str(r))
|
import taichi as ti
import numpy as np
from material import get_texture_value, scatter
@ti.data_oriented
class Renderer:
def __init__(self, image_width, image_height, scene, camera, spp=16, max_depth=10, p_RR=0.8):
self.image_width = image_width
self.image_height = image_height
self.canvas = ti.Vector.field(3, ti.f32, shape=(image_width, image_height))
self.scene = scene
self.camera = camera
self.spp = spp
self.cnt = ti.field(ti.i32, shape=())
self.reset()
self.max_depth = max_depth
self.p_RR = p_RR
@ti.func
def ray_color(self, ray, si, sj, max_depth=10, p_RR=0.8):
color = ti.Vector([0.0, 0.0, 0.0])
attenuation = ti.Vector([1.0, 1.0, 1.0])
        for i in range(max_depth):
            # Russian roulette: terminate with probability 1 - p_RR and
            # compensate surviving paths by 1 / p_RR (applied below).
            if ti.random() > p_RR:
                break
is_hit, t, p, u, v, n, f, m = self.scene.intersect_p(ray, si, sj)
if is_hit:
if m.m_type == 0:
color = get_texture_value(m, p, u, v) * attenuation
break
else:
is_scatter, scattered, attenuation_tmp = scatter(ray, p, u, v, n, f, m)
if not is_scatter:
break
attenuation = attenuation * attenuation_tmp
ray = scattered
else:
t = 0.5 * (ray.direction[1] + 1.0)
attenuation_tmp = (1.0 - t) * ti.Vector([1.0, 1.0, 1.0]) + t * ti.Vector([0.5, 0.7, 1.0])
color = attenuation * attenuation_tmp / p_RR
break
attenuation /= p_RR
return color
# @ti.func
# def ray_color(self, ray, si, sj, max_depth=10, p_RR=0.8):
# """
# Color Only.
# """
# is_hit, t, p, u, v, n, f, m = self.scene.intersect_p(ray, si, sj)
# color = ti.Vector([0.0, 0.0, 0.0])
# if is_hit:
# color = get_texture_value(m, p, u, v)
# else:
# t = 0.5 * (ray.direction[1] + 1.0)
# color = (1.0 - t) * ti.Vector([1.0, 1.0, 1.0]) + t * ti.Vector([0.5, 0.7, 1.0])
# return color
@ti.kernel
    def render(self):
        for i, j in self.canvas:
            # One jittered primary ray per pixel per frame; the spp loop
            # averages stochastic path samples traced along that ray.
            u = (i + ti.random()) / self.image_width
            v = (j + ti.random()) / self.image_height
            ray = self.camera.get_ray(u, v)
            color = ti.Vector([0.0, 0.0, 0.0])
            for n in range(self.spp):
                color += self.ray_color(ray, i, j, self.max_depth, self.p_RR)
            color /= self.spp
            self.canvas[i, j] += color
        self.cnt[None] += 1
    def get_canvas_numpy(self):
        # Average the accumulated frames, then gamma-correct (gamma = 2).
        return np.sqrt(self.canvas.to_numpy() / self.cnt[None])
def reset(self):
self.canvas.fill(0.0)
self.cnt[None] = 0
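# A minimal driver sketch (hypothetical `scene`/`camera` objects compatible
# with the constructor above):
#
#     ti.init(arch=ti.gpu)
#     renderer = Renderer(800, 600, scene, camera, spp=4)
#     gui = ti.GUI('render', res=(800, 600))
#     while gui.running:
#         renderer.render()
#         gui.set_image(renderer.get_canvas_numpy())
#         gui.show()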
|
import unittest
# Testing support.
from test.stub_flickrapi import StubFlickrAPI
# Officially exported names.
from flickrsyncr import Config
from flickrsyncr import flickrwrapper
class TestFlickrWrapper(unittest.TestCase):
"""Exercise the methods in flickrwrapper.
"""
def setUp(self):
self.config = Config('albumname', '/my/dir', push=True, api_key='apikey',
api_secret='apisecret', tag='tag2')
self.stub_api = StubFlickrAPI()
self.apiwrapper = flickrwrapper.FlickrWrapper(self.stub_api, 'userid')
# Patch urlopen in module flickrwrapper.
flickrwrapper.urllib.request.urlopen = self.stub_api.stubURLOpenner()
def testGetAlbumID(self):
self.stub_api.stubAddAlbum('albumname', 123)
self.assertEqual(self.apiwrapper.getAlbumID('albumname'), 123)
def testCreateAlbum(self):
self.assertNotEqual(self.apiwrapper.createAlbum('albumname', 'userid'), 0)
def testListAlbum(self):
"""Seed the stub with album and photos and list the album.
"""
self.stub_api.stubAddAlbum('albumname', 123)
self.stub_api.stubAddPhoto(123, 'Photo 1', '1111', 'tag1 tag2 tag3', b'')
self.stub_api.stubAddPhoto(123, 'Photo 2', '2222', 'tag1', b'')
self.stub_api.stubAddPhoto(123, 'Photo 3', '3333', 'tag2 tag3', b'')
self.stub_api.stubAddPhoto(123, 'Photo 4', '4444', '', b'')
want = [
{
'title': 'Photo 1',
'id': '1111',
'tags': 'tag1 tag2 tag3',
},
{
'title': 'Photo 2',
'id': '2222',
'tags': 'tag1',
},
{
'title': 'Photo 3',
'id': '3333',
'tags': 'tag2 tag3',
},
{
'title': 'Photo 4',
'id': '4444',
'tags': '',
},
]
got = self.apiwrapper.listAlbum(123)
sort_key = lambda p: p['title']
self.assertEqual(sorted(got, key=sort_key), sorted(want, key=sort_key))
def testUploadExistingAlbum(self):
"""Upload to an existing album. Errors raise exceptions.
"""
self.apiwrapper.upload('/tmp/filename1', 'Photo Title 1', 'tag1 tag2', album_name='albumname', album_id=123)
self.assertEqual(self.stub_api.uploaded, ['/tmp/filename1'])
def testUploadNonexistingAlbum(self):
"""Uplaod to no album, so one is created. Errors raise exceptions.
"""
self.apiwrapper.upload('/tmp/filename1', 'Photo Title 1', 'tag1 tag2', album_name='albumname')
self.assertEqual(self.stub_api.uploaded, ['/tmp/filename1'])
def testDownload(self):
"""Seed the stub with file content and download it.
"""
self.stub_api.stubAddAlbum('albumname', 123)
self.stub_api.stubAddPhoto(123, 'Photo 1', 'photoid123', 'tag1 tag2 tag3', b'filecontent')
self.assertEqual(self.apiwrapper.download(photo_id='photoid123'), b'filecontent')
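# These tests run against StubFlickrAPI only, e.g. (module path assumed):
#     python -m unittest test.test_flickrwrapper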
|
# -*- coding: utf-8 -*-
{
'name': 'Standard Accounting Report',
'version': '12.0.1.0.0',
'category': 'Accounting',
'author': 'Florent de Labarre',
'summary': 'Standard Accounting Report',
'website': 'https://github.com/fmdl',
'depends': ['account', 'report_xlsx'],
'data': [
'security/ir.model.access.csv',
'data/report_paperformat.xml',
'data/data_account_standard_report.xml',
'data/res_currency_data.xml',
'report/report_account_standard_report.xml',
'views/account_view.xml',
'views/account_standard.xml',
'views/account_standard_report_template_view.xml',
'views/res_currency_views.xml',
'wizard/account_standard_report_view.xml',
],
'demo': [],
'license': 'LGPL-3',
'support': 'https://github.com/fmdl',
'installable': True,
'auto_install': False,
'price': 0.0,
'currency': 'EUR',
'images': ['images/main_screenshot.png'],
}
|
class Wrapper:
def __init__(self, snakemake, command, input, output, params=None, threads=None):
self.snakemake = snakemake
self.input = input
self.output = output
self.params = params
self.threads = threads
self.command = command
self.input.assign(snakemake.input)
self.output.assign(snakemake.output)
if self.params:
self.params.assign(snakemake)
if self.threads:
self.threads.assign(snakemake)
def run(self):
pass
    def shell_command(self):
        # -1 denotes absolute priority; such commands should come first in the
        # command line.
        options = [('(', -1), (self.command, -1)] if self.snakemake.log else [(self.command, -1)]
if self.input:
options.extend(self.input.get_options())
if self.output:
options.extend(self.output.get_options())
if self.params:
options.extend(self.params.get_options())
if self.threads:
options.extend(self.threads.get_options())
# Sort commands according to the priorities.
options_with_high_priority = list(sorted([opt for opt, pr in options if pr is not None and pr < 100]))
options_with_low_priority = list(sorted([opt for opt, pr in options if pr is not None and pr >= 100]))
options_without_priority = [opt for opt, pr in options if pr is None]
options = options_with_high_priority + options_without_priority + options_with_low_priority
options.append(self.output.redirect_command())
rename_command = self.output.rename_command()
if rename_command:
options.append('&& ' + rename_command)
if self.snakemake.log:
options.append(')' + self.snakemake.log_fmt_shell(stdout=False, stderr=True))
return ' '.join(filter(lambda x: x, options)) # Discard empty strings, then join.
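# A hypothetical subclass sketch: concrete wrappers are expected to supply a
# tool name plus input/output objects implementing assign(), get_options(),
# redirect_command() and rename_command() as used above (BwaInput/BwaOutput
# are assumed names, not part of this module):
#
#     from snakemake.shell import shell
#     class BwaMemWrapper(Wrapper):
#         def __init__(self, snakemake):
#             super().__init__(snakemake, 'bwa mem', BwaInput(), BwaOutput())
#         def run(self):
#             shell(self.shell_command())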
|
# -*- coding: utf-8 -*-
"""
@author: Yi Zhang.
Department of Aerodynamics
Faculty of Aerospace Engineering
TU Delft, Delft, Netherlands
"""
import sys
if './' not in sys.path: sys.path.append('./')
from root.config.main import *
from root.save import read
from scipy.interpolate import NearestNDInterpolator
from screws.quadrature import Quadrature
from objects.CSCG._3d.forms.trace.base.main import _3dCSCG_Standard_Trace
from objects.CSCG._3d.forms.trace._2tr.discretize.main import _3dCSCG_2Trace_Discretize
from objects.CSCG._3d.forms.trace._2tr.visualize import _3dCSCG_2Trace_Visualize
class _3dCSCG_2Trace(_3dCSCG_Standard_Trace):
"""
Trace 2-form.
:param mesh:
:param space:
:param orientation:
:param numbering_parameters:
:param name:
"""
def __init__(self, mesh, space, orientation='outer',
numbering_parameters='Naive', name='outer-oriented-2-trace-form'):
super().__init__(mesh, space, orientation, numbering_parameters, name)
self._k_ = 2
self.standard_properties.___PRIVATE_add_tag___('3dCSCG_trace_2form')
self.___PRIVATE_reset_cache___()
self._discretize_ = _3dCSCG_2Trace_Discretize(self)
self._visualize_ = None
self._freeze_self_()
def ___PRIVATE_reset_cache___(self):
super().___PRIVATE_reset_cache___()
def ___PRIVATE_TW_FUNC_body_checker___(self, func_body):
assert func_body.mesh.domain == self.mesh.domain
assert func_body.ndim == self.ndim == 3
if func_body.__class__.__name__ == '_3dCSCG_ScalarField':
assert func_body.ftype in ('standard',), \
f"3dCSCG 2-trace FUNC cannot accommodate _3dCSCG_ScalarField of ftype {func_body.ftype}."
elif func_body.__class__.__name__ == '_3dCSCG_VectorField':
assert func_body.ftype in ('standard',), \
f"3dCSCG 2-trace FUNC cannot accommodate _3dCSCG_VectorField of ftype {func_body.ftype}."
else:
raise NotImplementedError(
f"3d CSCG 2-trace form FUNC cannot accommodate {func_body}.")
def ___PRIVATE_TW_BC_body_checker___(self, func_body):
assert func_body.mesh.domain == self.mesh.domain
assert func_body.ndim == self.ndim == 3
if func_body.__class__.__name__ == '_3dCSCG_ScalarField':
assert func_body.ftype in ('standard', 'boundary-wise'), \
f"3dCSCG 2-trace BC cannot accommodate _3dCSCG_ScalarField of ftype {func_body.ftype}."
elif func_body.__class__.__name__ == '_3dCSCG_VectorField':
assert func_body.ftype in ('standard', 'boundary-wise'), \
f"3dCSCG 2-trace BC cannot accommodate _3dCSCG_VectorField of ftype {func_body.ftype}."
else:
raise NotImplementedError(
f"3d CSCG 2-trace form BC cannot accommodate {func_body}.")
@property
def visualize(self):
if self._visualize_ is None:
self._visualize_ = _3dCSCG_2Trace_Visualize(self)
return self._visualize_
@property
def discretize(self):
return self._discretize_
def reconstruct(self, xi, eta, sigma, ravel=False, i=None):
"""
Do the reconstruction.
:param xi: A 1d iterable object of floats between -1 and 1.
:param eta: A 1d iterable object of floats between -1 and 1.
:param sigma: A 1d iterable object of floats between -1 and 1.
:param bool ravel: (`default`:``False``) If we return 1d data?
:type xi: list, tuple, numpy.ndarray
:type eta: list, tuple, numpy.ndarray
:type sigma: list, tuple, numpy.ndarray
:param i: (`default`:``None``) Do the reconstruction for these
trace elements. if it is ``None``, then do it for all trace
elements.
:type i: int, None, list, tuple
"""
if i is None:
indices = self.mesh.trace.elements._elements_.keys()
else:
if not isinstance(i, (list, tuple)):
indices = [i,]
else:
indices = i
xietasigma, pb = self.do.evaluate_basis_at_meshgrid(xi, eta, sigma)
ii, jj, kk = np.size(xi), np.size(eta), np.size(sigma)
xyz = dict()
v = dict()
for key in indices:
if key in self.mesh.trace.elements:
te = self.mesh.trace.elements[key]
side = te.CHARACTERISTIC_side
ele = te.CHARACTERISTIC_element
xyz_i = te.coordinate_transformation.mapping(*xietasigma[side], from_element=ele, side=side)
g = te.coordinate_transformation.metric(*xietasigma[side])
prime_cochain = self.cochain.local_TEW[key]
                # The same contraction applies on every side pair (N/S, W/E, B/F).
                if side in 'NSWEBF':
                    vi = np.einsum('i, j, ij -> j', prime_cochain, 1 / np.sqrt(g),
                                   pb[side][0], optimize='greedy')
                else:
                    raise Exception()
if ravel:
xyz[key] = xyz_i
v[key] = [vi,]
else:
if side in 'NS':
xyz[key] = [xyz_i[m].reshape(jj, kk, order='F') for m in range(3)]
v[key] = [vi.reshape((jj, kk), order='F'),]
elif side in 'WE':
xyz[key] = [xyz_i[m].reshape(ii, kk, order='F') for m in range(3)]
v[key] = [vi.reshape((ii, kk), order='F'),]
elif side in 'BF':
xyz[key] = [xyz_i[m].reshape(ii, jj, order='F') for m in range(3)]
v[key] = [vi.reshape((ii, jj), order='F'),]
else:
raise Exception
return xyz, v
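    # Hypothetical call sketch: reconstruct a discretized 2-trace form `t2`
    # (like the one built in __main__ below) on a coarse grid over all trace
    # elements:
    #     nodes = np.linspace(-1, 1, 5)
    #     xyz, v = t2.reconstruct(nodes, nodes, nodes)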
def ___PRIVATE_generate_TEW_mass_matrices___(self):
"""Generate the trace-element-wise mass matrices."""
p = [self.dqp[i]+2 for i in range(self.ndim)] # +2 for safety, the mass matrices of standard forms use dqp
quad_nodes, quad_weights = Quadrature(p, category='Gauss').quad
qw = dict()
qw['NS'] = np.kron(quad_weights[2], quad_weights[1])
qw['WE'] = np.kron(quad_weights[2], quad_weights[0])
qw['BF'] = np.kron(quad_weights[1], quad_weights[0])
xietasigma, pb = self.do.evaluate_basis_at_meshgrid(*quad_nodes)
local_cache = dict()
MD = dict()
for i in self.mesh.trace.elements:
te = self.mesh.trace.elements[i]
side = te.CHARACTERISTIC_side
mark = te.type_wrt_metric.mark
if isinstance(mark, str) and mark in local_cache: # not an id (chaotic) mark, could be cached.
MD[i] = local_cache[mark]
else:
g = te.coordinate_transformation.metric(*xietasigma[side])
b = pb[side][0]
if side in 'NS':
M = np.einsum('im, jm, m -> ij', b, b, np.reciprocal(np.sqrt(g)) * qw['NS'], optimize='greedy')
elif side in 'WE':
M = np.einsum('im, jm, m -> ij', b, b, np.reciprocal(np.sqrt(g)) * qw['WE'], optimize='greedy')
elif side in 'BF':
M = np.einsum('im, jm, m -> ij', b, b, np.reciprocal(np.sqrt(g)) * qw['BF'], optimize='greedy')
else:
raise Exception()
if isinstance(mark, str): local_cache[mark] = M
MD[i] = M
return MD
def ___PRIVATE_do_resemble___(self, obj_or_filename):
"""
:param obj_or_filename:
:return:
"""
if isinstance(obj_or_filename, str):
ot = read(obj_or_filename)
else:
ot = obj_or_filename
assert ot.mesh.domain == self.mesh.domain, "domain must be same."
assert self.__class__.__name__ == ot.__class__.__name__
assert self.mesh.__class__.__name__ == ot.mesh.__class__.__name__
bp = int(np.ceil((20000 / self.mesh.elements.GLOBAL_num) ** (1/3)))
p = [bp + self.p[i] for i in range(3)]
gap = [1 / (p[i]+1) for i in range(3)]
r = np.linspace(-1 + gap[0], 1 - gap[0], p[0])
s = np.linspace(-1 + gap[1], 1 - gap[1], p[1])
t = np.linspace(-1 + gap[2], 1 - gap[2], p[2])
xyz, V = ot.reconstruct(r, s, t, ravel=True)
xyz = cOmm.gather(xyz, root=mAster_rank)
V = cOmm.gather(V, root=mAster_rank)
tep = dict()
for i in ot.mesh.trace.elements:
TEi = ot.mesh.trace.elements[i]
tep[i] = TEi.CHARACTERISTIC_side
tep = cOmm.gather(tep, root=mAster_rank)
if rAnk == mAster_rank:
XYZ, VVV, TEP = dict(), dict(), dict()
for i in range(len(xyz)):
XYZ.update(xyz[i])
VVV.update(V[i])
TEP.update(tep[i])
del xyz, V, tep
V_x, x_x, x_y, x_z, V_y, y_x, y_y, y_z, V_z, z_x, z_y, z_z = \
[np.array([]) for _ in range(12)]
I_func = dict()
for i in range(ot.mesh.trace.elements.GLOBAL_num):
ele_side = TEP[i]
if ele_side in 'NS':
x_x = np.append(x_x, XYZ[i][0])
x_y = np.append(x_y, XYZ[i][1])
x_z = np.append(x_z, XYZ[i][2])
V_x = np.append(V_x, VVV[i][0])
elif ele_side in 'WE':
y_x = np.append(y_x, XYZ[i][0])
y_y = np.append(y_y, XYZ[i][1])
y_z = np.append(y_z, XYZ[i][2])
V_y = np.append(V_y, VVV[i][0])
elif ele_side in 'BF':
z_x = np.append(z_x, XYZ[i][0])
z_y = np.append(z_y, XYZ[i][1])
z_z = np.append(z_z, XYZ[i][2])
V_z = np.append(V_z, VVV[i][0])
else:
raise Exception()
I_func['NS'] = NearestNDInterpolator((x_x, x_y, x_z), V_x)
I_func['WE'] = NearestNDInterpolator((y_x, y_y, y_z), V_y)
I_func['BF'] = NearestNDInterpolator((z_x, z_y, z_z), V_z)
else:
I_func = None
I_func = cOmm.bcast(I_func, root=mAster_rank)
func = (I_func['NS'], I_func['WE'], I_func['BF'])
self.func._body_ = func
self.discretize._standard_vector_()
self.func._body_ = None
if __name__ == '__main__':
# mpiexec -n 5 python objects\CSCG\_3d\forms\trace\_2_trace\main.py
from objects.CSCG._3d.master import MeshGenerator, SpaceInvoker, FormCaller#, ExactSolutionSelector
mesh = MeshGenerator('crazy', c=0.0)([2,2,2])
space = SpaceInvoker('polynomials')([('Lobatto',5), ('Lobatto',5), ('Lobatto',5)])
FC = FormCaller(mesh, space)
def p(t, x, y, z): return t + np.cos(2*np.pi*x) * np.cos(2*np.pi*y) * np.cos(2*np.pi*z)
S = FC('scalar', p)
t2 = FC('2-t')
t2.TW.func.body = S
t2.TW.current_time = 0
t2.TW.do.push_all_to_instant()
t2.discretize()
T = t2.matrices.trace
S = t2.matrices.selective
M = t2.matrices.mass
t2.visualize()
|
import requests
from bs4 import BeautifulSoup
import numpy as np
import pymysql
import time
from random import randint
from time import sleep
import re
# defining user-agents
headers = {"User-Agent": "Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11"}
# Database connection
conn = pymysql.connect(host='127.0.0.1', user='root', passwd=None, db='mysql',
charset='utf8')
cur = conn.cursor()
cur.execute("USE houzz")
# storing data
def store(Name, Details, Email, Number, Profile_url, Website, Address, Catagory, City, State, Country, Facebook,
Twitter,
Last_count):
cur.execute(
        'INSERT IGNORE INTO lead_master_twmp (Name, Details, Email, Number, Profile_url, Website, Address, Catagory, City, State, Country, Facebook, Twitter, Last_count) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
(Name, Details, Email, Number, Profile_url, Website, Address, Catagory, City, State, Country, Facebook, Twitter,
Last_count))
cur.connection.commit()
# counter
n = 0
# make array
pages = np.arange(n, 9115, 1)
for p in pages:
a = p
try:
# make request to target website
page = requests.get(
"https://www.chamberofcommerce.com/retail-stores/clothing-and-accessories/?pg=" + str(
p),
headers=headers)
print(page)
# if connection error occurs then,
except requests.exceptions.ConnectionError:
print("Connection refused by the server..")
print("Let me sleep for 5 seconds")
print("ZZzzzz...")
time.sleep(15)
print("Was a nice sleep, now let me continue...")
continue
soup = BeautifulSoup(page.text, 'html.parser')
# retrieve all containers of profiles
containers = soup.find_all('div', class_="list_businesses")
# sleep for better scraping
sleep(randint(2, 10))
# loop through each container
for container in containers:
try:
sites1 = container.find('div', class_="bussiness_name")
except:
continue
try:
sites2 = sites1.find('a').get('href') # slave link
except:
continue
# defining master link
site = "https://www.chamberofcommerce.com"
# slave link and master link will make original profile url
profile_url = site + sites2
print(profile_url)
# splitting slave link to get city, state, country
location = sites2.split('/')
try:
country = location[1]
except:
country = 'NULL'
try:
state = location[2]
except:
state = 'NULL'
try:
city = location[3]
except:
city = 'NULL'
print(city, state, country)
try: # making request to profile url
url = requests.get(profile_url)
print(url)
# if connection error occurs then,
except requests.exceptions.ConnectionError:
print("Connection refused by the server..")
print("Let me sleep for 5 seconds")
print("ZZzzzz...")
time.sleep(15)
print("Was a nice sleep, now let me continue...")
continue
details = BeautifulSoup(url.text, 'html.parser')
# retrieving name of the destination profile
try:
name = details.find('div', class_="profile_business_name").text
except:
name = 'NULL'
print(name)
# retrieving address
try:
raw_add = details.find_all('div', class_='detail_text')
address = raw_add[0].text.strip()
except:
address = 'NULL'
print(address)
# retrieving contact number
try:
number = details.find('span', class_="d-none d-sm-block phone-align").text.strip()
except:
number = 'NULL'
print(number)
# retrieving main information or description
try:
info = details.find('div', class_='about_p_text').text
except:
info = 'NULL'
print(info)
# retrieving contact info
try:
contact_info = details.find('ul', class_='info_list')
contact_list = contact_info.find_all('li')
# retrieving website
try:
website = contact_info.select_one('.info_list .spr-web-icon+ a')
website = website.get('href')
except:
website = 'NULL'
print(website)
# retrieving email-id
try:
email = contact_info.find_all("a", href=re.compile(r"^mailto:"))
email = email[0].text
except:
email = 'NULL'
print(email)
# retrieving facebook profile link
try:
facebook = contact_info.select_one('.spr-fb-icon+ a')
facebook = facebook.get('href')
except:
facebook = 'NULL'
print(facebook)
# retrieving twitter profile link
try:
twitter = contact_info.select_one('img+ a')
twitter = twitter.get('href')
except:
twitter = 'NULL'
print(twitter)
except:
facebook = 'NULL'
twitter = 'NULL'
website = 'NULL'
email = 'NULL'
# storing last page no. which is just scraped
n = n + 1
last_count = int(a)
category = 'clothes'
store(name, info, email, number, profile_url, website, address, category, city, state, country, facebook,
twitter, last_count)
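# Note: `last_count` persists the most recent page index with each row, so a
# later run can resume by seeding `n` (and hence np.arange) from that value.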
|
import responses
from requests.exceptions import ConnectionError
from transcriptic.config import Connection
from transcriptic.sampledata.project import sample_project_attr
from transcriptic.sampledata.run import sample_run_attr
from transcriptic.util import load_sampledata_json
class MockConnection(Connection):
"""
    MockConnection object used for previewing Jupyter objects without establishing a
connection.
Example Usage:
.. code-block:: python
mock_connection = MockConnection()
mock_connection.projects()
myRun = Run('r123')
"""
def __init__(self, *args, organization_id="sample-org", **kwargs):
super().__init__(*args, organization_id=organization_id, **kwargs)
@responses.activate
def _req_call(self, method, route, **kwargs):
self._register_mocked_responses()
try:
return super()._req_call(method, route, **kwargs)
except ConnectionError:
# Default raised exception lists all routes which is very verbose
if self.verbose:
raise
else:
raise ConnectionError(f"Mocked route not implemented: {route}")
def _register_mocked_responses(self):
# TODO: Everything is hardcoded right now. Move to Jinja
# Register Project routes
responses.add(
responses.GET,
self.get_route("get_project", project_id="p123"),
json=sample_project_attr,
status=200,
)
responses.add(
responses.GET,
self.get_route("deref_route", obj_id="p123"),
json=sample_project_attr,
status=200,
)
responses.add(
responses.GET,
self.get_route("get_projects", org_id="sample-org"),
json=load_sampledata_json("sample-org-projects.json"),
status=200,
)
responses.add(
responses.GET,
self.get_route("get_project_runs", org_id="sample-org", project_id="p123"),
json=load_sampledata_json("p123-runs.json"),
)
# Register Run routes
responses.add(
responses.GET,
self.get_route("deref_route", obj_id="r123"),
json=sample_run_attr,
status=200,
)
# Register Container routes
responses.add(
responses.GET,
self.get_route("deref_route", obj_id="ct123"),
json=load_sampledata_json("ct123.json"),
status=200,
)
responses.add(
responses.GET,
self.get_route("deref_route", obj_id="ct124"),
json=load_sampledata_json("ct124.json"),
status=200,
)
# Register Dataset routes
for data_id in ["d123", "d124", "d125", "d126", "d127"]:
# Note: `match_querystring` is important for correct resolution
responses.add(
responses.GET,
self.get_route("dataset_short", data_id=data_id),
json=load_sampledata_json(f"{data_id}.json"),
status=200,
match_querystring=True,
)
responses.add(
responses.GET,
self.get_route("dataset", data_id=data_id, key="*"),
json=load_sampledata_json(f"{data_id}-raw.json"),
status=200,
match_querystring=True,
)
|
import os, sys
import time
import re
import argparse
import configparser
import ast
import glob
import pandas as pd
import matplotlib
matplotlib.use("TKAgg")
import matplotlib.pyplot as plt
import seaborn as sns
sys.path.append('../src/')
from python import helper as hp
from python import fixed_parameters as FP
parser = argparse.ArgumentParser(description='Do descriptor plot')
parser.add_argument('-fn','--filename', type=str, help='Path to the fine-tuning txt file', required=True)
parser.add_argument('-v','--verbose', action='store_true', help='Verbose')
def do_combined_boxplot(df, desc_to_plot, epochs_to_plot, save_path, te):
fig, ax = plt.subplots(figsize=(12,8))
start_palette = ['#1575A4']
epoch_palette = ['#F5F5F5']*len(epochs_to_plot)
end_palette = ['#5A5A5A', '#D55E00']
sns.boxplot(x="seq_time", y="value", data=df, palette=start_palette+epoch_palette+end_palette,
showfliers=False, width=0.35)
sns.despine(offset=15, trim=True)
tick_font_sz = FP.PAPER_FONT['tick_font_sz']
label_font_sz = FP.PAPER_FONT['label_font_sz']
legend_sz = FP.PAPER_FONT['legend_sz']
ax.set_ylim(0,1)
plt.xlabel('')
plt.ylabel(desc_to_plot, fontsize=label_font_sz)
# +3 for: source space, target space and the transfer-learning set
ax.set_xticks([y for y in range(len(epochs_to_plot)+3)])
start_xticklabels = ['Source space']
epoch_xticklabels = [f'epoch {e}' for e in epochs_to_plot]
end_xticklabels = ['Transfer\nlearning\nset', 'Target space']
ax.set_xticklabels(start_xticklabels + epoch_xticklabels + end_xticklabels,
rotation=30,
fontsize=tick_font_sz)
plt.yticks(fontsize=tick_font_sz)
plt.savefig(f'{save_path}{desc_to_plot}_{te}.png', bbox_inches='tight')
def update_df(df, dict_temp):
df_temp = pd.DataFrame.from_dict(dict_temp)
frames = [df, df_temp]
df = pd.concat(frames)
return df
def get_dict_with_data(data_name, min_len, max_len, aug):
data_des = hp.load_obj(f'results/data/{data_name}/{min_len}_{max_len}_x{aug}/desc.pkl')
des = data_des[desc_to_plot]
dict_temp = {'seq_time': [data_name]*len(des),
'value': des}
return dict_temp
if __name__ == '__main__':
start = time.time()
####################################
# get back parameters
args = vars(parser.parse_args())
verbose = args['verbose']
filename = args['filename']
name_data = filename.split('/')[-1].replace('.txt','')
config = configparser.ConfigParser()
config.read('parameters.ini')
temp = float(config['EXPERIMENTS']['temp'])
min_len = int(config['PROCESSING']['min_len'])
max_len = int(config['PROCESSING']['max_len'])
desc_to_plot = FP.DESCRIPTORS['names']
desc_to_plot = re.search(r'\((.*?)\)', desc_to_plot).group(1)
if verbose: print('\nSTART DESCRIPTOR PLOT')
####################################
####################################
# Path to the descriptors
path_des = f'results/{name_data}/analysis/'
# Path to save the novo analysis
save_path = f'results/{name_data}/plot_descriptor/'
os.makedirs(save_path, exist_ok=True)
####################################
####################################
# get back data
df = pd.DataFrame(columns=['seq_time', 'value'])
# get back dataset descriptor
src_space_name = config['DATA']['source_space']
src_space_name = src_space_name.replace('.txt','')
dict_temp = get_dict_with_data(src_space_name, min_len, max_len,
int(config['AUGMENTATION']['source_space']))
df = update_df(df, dict_temp)
for fname in sorted(os.listdir(path_des)):
if fname.endswith('.pkl'):
if 'desc' in fname and str(temp) in fname:
name = fname.replace('.pkl', '')
epoch = int(name.split('_')[1])
seq_time = f'epoch {epoch}'
# get values
data = hp.load_obj(path_des + fname)
values = data[desc_to_plot]
# add to dataframe
dict_temp = {'seq_time': [seq_time]*len(values),
'value': values}
df = update_df(df, dict_temp)
dict_temp = get_dict_with_data(name_data, min_len, max_len,
int(config['AUGMENTATION']['fine_tuning']))
df = update_df(df, dict_temp)
tgt_space_name = config['DATA']['target_space']
tgt_space_name = tgt_space_name.replace('.txt','')
dict_temp = get_dict_with_data(tgt_space_name, min_len, max_len,
int(config['AUGMENTATION']['target_space']))
df = update_df(df, dict_temp)
# we get back the epoch sampled from the saved models
all_models = glob.glob(f'results/{name_data}/models/*.h5')
epochs_to_plot = sorted([x.split('/')[-1].replace('.h5', '') for x in all_models], key=int)
do_combined_boxplot(df, desc_to_plot, epochs_to_plot, save_path, temp)
end = time.time()
if verbose: print(f'DESCRIPTOR PLOT DONE in {end - start:.04} seconds')
####################################
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
import json
from bs4 import BeautifulSoup
class Crawler:
def __init__(self, key_set, count):
self.key_set = key_set
self.count = count
self.images = []
self.header = {'User-Agent':"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36"}
# empty image placeholders
self.null_set = []
for i in range(self.count):
self.null_set.append((None, None))
# split on whitespace and join with '+' (for Google queries)
def key_setting(self, key):
key = key.split()
key = '+'.join(key)
return key
def get_soup(self, url):
return BeautifulSoup(urllib2.urlopen(urllib2.Request(url,headers=self.header)),'html.parser')
# Google crawling (i = array index)
def img_crawl(self):
for a in range(len(self.key_set)):
if self.key_set[a].key is not None:
key = self.key_setting(self.key_set[a].key)
url = "https://www.google.de/search?q="+key+"&tbm=isch&source=lnt&tbs=itp:face"
# Normal
# https://www.google.com/search?q="+key+"&source=lnms&tbm=isch
# line art & black-and-white
# https://www.google.de/search?q=+key+&tbs=ic:gray,itp:lineart&tbm=isch
# face, in color
# https://www.google.de/search?q=geometry&tbm=isch&source=lnt&tbs=itp:face
while True:
try:
soup = self.get_soup(url)
for b in soup.find_all("div", {"class":"rg_meta"}, limit=int(self.count)):
link , Type = json.loads(b.text)["tu"], json.loads(b.text)["ity"]
try:
self.images.append((link,Type))
except IOError as e:
return -2
break
except Exception as e:
continue
else:
self.images.extend(self.null_set)
return self.images
|
import infiltrate.scheduling
if __name__ == "__main__":
infiltrate.scheduling.recurring_update()
|
import json
import hashlib
from urllib.parse import urlparse
from dojo.models import Endpoint, Finding
__author__ = 'dr3dd589'
class WpscanJSONParser(object):
def __init__(self, file, test):
self.dupes = dict()
self.items = ()
if file is None:
return
data = file.read()
try:
tree = json.loads(str(data, 'utf-8'))
except (TypeError, UnicodeDecodeError):
tree = json.loads(data)
for content in tree:
node = tree[content]
vuln_arr = []
try:
vuln_arr = node['vulnerabilities']
except (KeyError, TypeError):
pass
if 'plugins' in content:
for plugin_content in node:
vuln_arr = node[plugin_content]['vulnerabilities']
target_url = tree['target_url']
parsedUrl = urlparse(target_url)
protocol = parsedUrl.scheme
query = parsedUrl.query
fragment = parsedUrl.fragment
path = parsedUrl.path
port = ''
try:
(host, port) = parsedUrl.netloc.split(':')
except ValueError:
host = parsedUrl.netloc
for vul in vuln_arr:
title = vul['title']
references = '\n'.join(vul['references']['url']) + '\n' \
+ '**wpvulndb : **' + str(vul['references']['wpvulndb'])
try:
mitigation = 'fixed in : ' + vul['fixed_in']
except KeyError:
mitigation = 'N/A'
severity = 'Info'
description = '**Title : **' + title
dupe_key = hashlib.md5(str(references + title).encode('utf-8')).hexdigest()
if dupe_key in self.dupes:
# Duplicate entry: keep the finding that was already recorded.
finding = self.dupes[dupe_key]
else:
self.dupes[dupe_key] = True
finding = Finding(
title=title,
test=test,
active=False,
verified=False,
description=description,
severity=severity,
numerical_severity=Finding.get_numerical_severity(severity),
mitigation=mitigation,
references=references,
dynamic_finding=True,)
finding.unsaved_endpoints = list()
self.dupes[dupe_key] = finding
if target_url is not None:
finding.unsaved_endpoints.append(Endpoint(
host=host,
port=port,
path=path,
protocol=protocol,
query=query,
fragment=fragment,))
self.items = self.dupes.values()
|
"""
Helper functions for retrieving vocabulary aliases
"""
import argparse
import re
from typing import Any, Dict, List, Optional, Tuple
import requests
import urllib3 # type: ignore
ESCAPE_CHARS = [",", ":", "#"]
def unescape(s: str) -> str:
"""
Unescape
"""
for char in ESCAPE_CHARS:
s = s.replace("\\{}".format(char), char)
return s
def escape(s: str) -> str:
"""
Escape
"""
for char in ESCAPE_CHARS:
s = s.replace(char, "\\{}".format(char))
return s
class UnknownResult(Exception):
"""UnknownResult is used in API request (not 200 result)"""
def __init__(self, *args: Any, **kwargs: Dict[Any, Any]) -> None:
Exception.__init__(self, *args, **kwargs)
def parse_aliases(line: str) -> Tuple[str, List[str]]:
"""
Parse line on this form:
name:alias1, alias2, alias3
Supports escaping of ":", "," and "#"
returns:
name and list of aliases
"""
# Split on ":" unless they are escaped
vocab, aliases_str = re.split(r'(?<!\\):', line, maxsplit=1)
# Split aliases on ",", unless they are escaped
aliases = [unescape(a.strip())
for a in re.split(r'(?<!\\),', aliases_str)
if a.strip()]
return unescape(vocab.strip()), aliases
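# Illustrative example of the escaping behaviour (hypothetical input, not
# part of the module):
# >>> parse_aliases(r"foo\:bar:a, b\, c")
# ('foo:bar', ['a', 'b, c'])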
def parseargs() -> argparse.ArgumentParser:
""" Parse arguments """
parser = argparse.ArgumentParser(description="Fetch ISO 3166 data")
parser.add_argument('--http-timeout', dest='timeout', type=int,
default=120, help="Timeout")
parser.add_argument(
'--proxy-string',
dest='proxy_string',
help="Proxy to use for external queries")
parser.add_argument(
'--data-dir',
dest='data_dir',
required=True,
help="Output path for data")
return parser
def output_alias(filename: str, alias_map: Dict, exclude: Optional[List] = None) -> None:
"""
Output alias to file
"""
if not exclude:
exclude = []
with open(filename, "w") as f:
for key in sorted(alias_map.keys()):
aliases = alias_map[key]
aliases = [escape(alias).strip() for alias in sorted(aliases)
if alias not in exclude and alias != key]
f.write("{}:{}\n".format(escape(key).strip(), ",".join(aliases)))
def merge(*lists: Dict, lower: bool = False) -> Dict:
"""
Combine multiple alias lists in order.
If key exists as one of the previous alias lists, add aliases to the previous
main key
"""
combined: Dict = {}
for alias_list in lists:
for key, aliases in alias_list.items():
if lower:
key = key.lower()
aliases = [alias.lower() for alias in aliases]
# One of our aliases is already main group name in ATT&CK.
# Merge aliases from MISP (including "main" group name with ATT&CK)
alias_in_keys = [alias for alias in aliases if combined.get(alias)]
if alias_in_keys:
key = alias_in_keys[0]
combined[key] = list(set(combined.get(key, []) + aliases))
return combined
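# Sketch of the merge behaviour (hypothetical data): a later list keyed on
# "Comment Crew" whose aliases include the existing main key "APT1" is
# folded into "APT1":
# merge({"APT1": ["Comment Crew"]}, {"Comment Crew": ["APT1", "Unit 61398"]})
# -> {"APT1": [...union of "Comment Crew", "APT1" and "Unit 61398"...]}
# (alias ordering is unspecified because of the set() union).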
def fetch_json(
url: str,
proxy_string: str = "",
timeout: int = 60,
verify_https: bool = True) -> Dict:
"""Fetch remote URL as JSON
url (string): URL to fetch
proxy_string (string, optional): Optional proxy string on format host:port
timeout (int, optional): Timeout value for query (default=60 seconds)
"""
proxies = {
'http': proxy_string,
'https': proxy_string
}
options = {
"verify": verify_https,
"timeout": timeout,
"proxies": proxies,
"params": {}
}
if not verify_https:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Unable to infer type
req = requests.get(url, **options) # type: ignore
if not req.status_code == 200:
errmsg = "status_code: {0.status_code}: {0.content}"
raise UnknownResult(errmsg.format(req))
# Unable to infer type of json()
return req.json() # type: ignore
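# Minimal usage sketch (hypothetical URL): fetch JSON through a proxy with a
# short timeout; any non-200 response raises UnknownResult.
# data = fetch_json("https://example.com/data.json",
#                   proxy_string="proxy.local:8080", timeout=30)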
|
from __future__ import annotations
import random
from abc import ABC, abstractmethod
from typing import FrozenSet, Type, List
import casino.main
import casino.odds
class Player(ABC):
"""`Player` places bets in a game.
This is an abstract class, with no body for the `Player.place_bets()` method.
However, this class does implement the basic `Player.win()` method used by
all subclasses.
Attributes:
stake: The player's current stake represented as multiples of the table's
minimum bet. Set to the player's starting budget by the overall
simulation control.
rounds_to_go: Number of rounds left to play. Set by the overall
simulation control to the maximum number of rounds to play.
table: The `Table` object used to place individual `Bet` instances. The
`Table` object contains the `Game` object which contains a
`RandomEventFactory` object from which the player can get `Outcome`
objects used to build `Bet` instances.
"""
stake: int
rounds_to_go: int
def __init__(self, table: casino.main.Table) -> None:
"""Constructs the `Player` instance with a specific `Table` object for
placing `Bet` instances.
"""
self.table = table
self.stake = 0
self.rounds_to_go = 0
@abstractmethod
def place_bets(self) -> None:
"""Must be overridden in subclass as each `Player` will have different
betting strategy.
"""
pass
def reset(self, duration: int, stake: int) -> None:
"""Sets `stake` and `rounds_to_go` according to the values passed by the
overall simulation control. Called before the start of a new session.
Args:
duration: The number of `rounds_to_go` for the next session.
stake: The initial stake to begin the next session with.
"""
self.rounds_to_go = duration
self.stake = stake
def playing(self) -> bool:
"""Returns `True` while the player is still active.
A player is still active when they have a stake greater than 0
"""
return (self.rounds_to_go > 0 and self.stake > 0) or bool(self.table.bets)
def win(self, bet: casino.main.Bet) -> None:
"""Notification from the `Game` object that the `Bet` instance was a
winner. Increases `Player.stake` accordingly.
Args:
bet: The `Bet` which won.
"""
self.stake += bet.win_amount()
def lose(self, bet: casino.main.Bet) -> None:
"""Notification from the `Game` object that the `Bet` instance was a loser.
Does nothing by default, some subclassed players will take particular actions
on losses.
"""
pass
def winners(self, outcomes: FrozenSet[casino.main.Outcome]) -> None:
"""This is notification from the `Game` class of all the winning outcomes.
Some subclasses will process this information.
Args:
outcomes: The `Outcome` set from a `Bin`.
"""
pass
def __str__(self):
return (
f"{self.__class__.__name__} has {self.rounds_to_go} rounds to"
f" go with a stake of {self.stake}."
)
def __repr__(self):
return (
f"{self.__class__.__name__}(stake={self.stake},"
f" rounds_to_go={self.rounds_to_go}"
)
class RoulettePlayer(Player):
"""A `Player` who places bets in Roulette. This is an abstract class all other
roulette player subclasses will inherit from.
Attributes:
stake: The player's current stake represented as multiples of the table's
minimum bet. Set to the player's starting budget by the overall
simulation control.
rounds_to_go: Number of rounds left to play. Set by the overall
simulation control to the maximum number of rounds to play.
table: The `Table` that is used to place individual `Bet` instances.
"""
def __init__(self, table: casino.main.Table) -> None:
"""Constructs the `Player` instance with a specific `Table` and `Wheel
for creating and resolving bets. Also creates the 'black' `Outcome` for
use in creating bets.
"""
super(RoulettePlayer, self).__init__(table)
@abstractmethod
def place_bets(self) -> None:
"""Places various `Bet` instances on the `Table` instance.
Must be overridden in subclass as each `Player` will have different
betting strategy.
"""
pass
class RouletteMartingale(RoulettePlayer):
"""`Martingale` is a `Player` subclass who places bets in Roulette. This player
doubles their bet on every loss and resets their bet to a base amount on each win.
Attributes:
table: The `Table` that is used to place individual `Bet` instances.
loss_count: The number of losses. This is the number of times to double
the bet.
bet_multiple: The bet multiplier, based on the number of losses. This
starts at 1, and is reset to 1 on each win. It is doubled with each
loss. This is always equal to 2**`loss_count`.
"""
bet_multiple: int
loss_count: int
def __init__(self, table: casino.main.Table) -> None:
super().__init__(table)
self.bet_multiple = 1
self.loss_count = 0
def reset(self, duration: int, stake: int) -> None:
"""Calls parent class reset method and also resets `Martingale` specific
attributes for a new session.
Args:
duration: The number of `rounds_to_go` for the next session.
stake: The initial stake to begin the next session with.
"""
super(RouletteMartingale, self).reset(duration, stake)
self.bet_multiple = 1
self.loss_count = 0
def place_bets(self) -> None:
"""Updates the `Table` object with a bet on 'black'. The amount bet is
2**`loss_count`, which is the value of `bet_multiple`.
If `bet_amount` exceeds `self.stake`, bet entire remaining stake. If
`bet_amount` exceeds `table.limit`, restart the betting strategy.
"""
assert self.table.game is not None, "table.game not set"
bet_amount = 2 ** self.loss_count
if bet_amount > self.stake:
bet_amount = self.stake
current_bet = casino.main.Bet(
bet_amount, self.table.game.event_factory.get_outcome("black"), self
)
try:
self.table.place_bet(current_bet)
except casino.main.InvalidBet:
self.reset(self.rounds_to_go, self.stake)
self.place_bets()
def win(self, bet: casino.main.Bet) -> None:
"""Uses the superclass `Player.win()` method to update the stake with an
amount won. Then resets `loss_count` to zero and resets `bet_multiple`
to 1.
Args:
bet: The winning bet.
"""
super(RouletteMartingale, self).win(bet)
self.loss_count = 0
self.bet_multiple = 1
def lose(self, bet: casino.main.Bet) -> None:
"""Uses the superclass `Player.lose()` to do whatever bookkeeping the
superclass already does. Increments `loss_count` by 1 and doubles
`bet_multiple`.
Args:
bet: The losing bet.
"""
super(RouletteMartingale, self).lose(bet)
self.loss_count += 1
self.bet_multiple *= 2
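# Illustrative Martingale progression (not executed): with a base bet of 1,
# three straight losses leave loss_count == 3, so the next bet is 2**3 = 8;
# a single win resets loss_count and the next bet drops back to 1.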
class RouletteSevenReds(RouletteMartingale):
"""This is a `Player` subclass who places bets in roulette. They wait until
the wheel has spun red seven times in a row before betting on black.
Attributes:
table: The `Table` that is used to place individual `Bet` instances.
red_count: The number of reds yet to go. Inits to 7, and is reset to 7 on
each non-red outcome, and decrements by 1 on each red outcome.
"""
def __init__(self, table: casino.main.Table) -> None:
"""Initialise parent class attributes and set `red_count` to it's starting
value of 7.
"""
super().__init__(table)
self.red_count = 7
def place_bets(self) -> None:
"""Places a bet on black using the Martingale betting system if we have
seen seven reds in a row.
"""
if self.red_count == 0:
super(RouletteSevenReds, self).place_bets()
def winners(self, outcomes: FrozenSet[casino.main.Outcome]) -> None:
"""This is notification from the `Game` class of all the winning outcomes.
If this includes red, then `red_count` is decremented. Otherwise, `red_count`
is reset to 7.
Args:
outcomes: The `Outcome` set from a `Bin`.
"""
assert self.table.game is not None, "table.game not set"
if self.table.game.event_factory.get_outcome("red") in outcomes:
self.red_count -= 1
else:
self.red_count = 7
class RouletteRandom(Player):
"""A `Player` subclass who places bets in roulette. This player makes random bets
around the layout.
Attributes:
rng: A random number generator for selecting outcomes to bet on.
table: The `Table` object which will accept the bets. It also provides
access to the `wheel.all_outcomes` structure to pick from.
"""
def __init__(self, table: casino.main.Table) -> None:
"""Invokes superclass constructor and and initialise the rng."""
super().__init__(table)
self.rng = random.Random()
def place_bets(self) -> None:
"""Updates the `Table` object with a randomly placed `Bet` instance."""
assert self.table.game is not None, "table.game not set"
random_outcome = self.rng.choice(
list(self.table.game.event_factory.all_outcomes.values())
)
current_bet = casino.main.Bet(1, random_outcome, self)
self.table.place_bet(current_bet)
class Roulette1326(RoulettePlayer):
""" "A `Player` subclass who follows the 1-3-2-6 betting system. The player has a preferred
`Outcome` instance. This should be an even money bet. The player also has a
current betting state that determines the current bet to place, and what next
state applies when the bet has won or lost.
Attributes:
table: The `Table` object which will accept the bets.
outcome: This is the player's preferred `Outcome` instance, fetched from the
`Table.Wheel` object.
state: The current state of the 1-3-2-6 betting system. It will be an instance
of the `Roulette1326State` class. This will be one of the four states:
no wins, one win, two wins or three wins.
"""
state: "Roulette1326State"
outcome: casino.main.Outcome
def __init__(self, table: casino.main.Table) -> None:
"""Invokes the superclass constructor and initialises the state and outcome."""
super().__init__(table)
assert self.table.game is not None, "table.game not set"
self.outcome = self.table.game.event_factory.get_outcome("Black")
self.state = Roulette1326NoWins(self)
def place_bets(self) -> None:
"""Updates the `Table` with a bet created by the current state. Delegates
`Bet` creation to the `self.state.current_bet` method.
"""
current_bet = self.state.current_bet()
if current_bet.amount > self.stake:
current_bet.amount = self.stake
self.table.place_bet(current_bet)
def win(self, bet: casino.main.Bet) -> None:
"""Uses the superclass method to update stake with the amount won. Uses
the current state to transition to the next state.
Args:
bet: The `Bet` which won.
"""
super(Roulette1326, self).win(bet)
self.state = self.state.next_won()
def lose(self, bet: casino.main.Bet) -> None:
"""Uses the current state to transition to the next state.
Args:
bet: The `Bet` which lost.
"""
self.state = self.state.next_lost()
class Roulette1326State:
"""Superclass for all of the states in the 1-3-2-6 betting system.
Attributes:
player: The `Roulette1326` player currently in this state. This object will
be used to provide the `Outcome` object used in creating the `Bet`
instance.
next_state_win: The next state to transition to if the bet was a winner.
bet_amount: The amount bet in this state.
"""
def __init__(
self,
player: Roulette1326,
next_state_win: Type["Roulette1326State"],
bet_amount: int,
) -> None:
"""Initialise class with arguments provided by the subclass state constructors."""
self.player = player
self.next_state_win = next_state_win
self.bet_amount = bet_amount
def current_bet(self) -> casino.main.Bet:
"""Constructs a new `Bet` object from the ``player``'s preferred `Outcome`
instance. Each subclass provides a different multiplier used when creating
this `Bet` object.
Returns:
The `Bet` to be placed when in this state.
"""
return casino.main.Bet(self.bet_amount, self.player.outcome, self.player)
def next_won(self) -> "Roulette1326State":
"""Constructs the new `Roulette1326State` instance to be used when the bet
was a winner.
Returns:
The `Roulette1326State` to transition to on a winning bet.
"""
return self.next_state_win(self.player) # type: ignore
def next_lost(self) -> "Roulette1326State":
"""Constructs the new `Roulette1326State` instance to be used when the bet
was a loser. This method is the same for each subclass.
Returns:
The `Roulette1326State` to transition to on a losing bet.
"""
return Roulette1326NoWins(self.player)
class Roulette1326NoWins(Roulette1326State):
"""Defines bet and state transition rules in the 1-3-2-6 betting system
for when there are no wins.
"""
def __init__(self, player: Roulette1326):
super(Roulette1326NoWins, self).__init__(player, Roulette1326OneWin, 1)
class Roulette1326OneWin(Roulette1326State):
"""Defines bet and state transition rules in the 1-3-2-6 betting system
for when there is one win.
"""
def __init__(self, player: Roulette1326):
super(Roulette1326OneWin, self).__init__(player, Roulette1326TwoWins, 3)
class Roulette1326TwoWins(Roulette1326State):
"""Defines bet and state transition rules in the 1-3-2-6 betting system
for when there are two wins.
"""
def __init__(self, player: Roulette1326):
super(Roulette1326TwoWins, self).__init__(player, Roulette1326ThreeWins, 2)
class Roulette1326ThreeWins(Roulette1326State):
"""Defines bet and state transition rules in the 1-3-2-6 betting system
for when there are three wins.
"""
def __init__(self, player: Roulette1326):
super(Roulette1326ThreeWins, self).__init__(player, Roulette1326NoWins, 6)
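# Illustrative 1-3-2-6 walk (not executed): NoWins bets 1 -> win -> OneWin
# bets 3 -> win -> TwoWins bets 2 -> win -> ThreeWins bets 6 -> winning or
# losing that bet both return to NoWins; any earlier loss also resets to NoWins.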
class RouletteCancellation(RoulettePlayer):
"""A `Player` subclass who uses the cancellation betting system. This player allocates
their available budget into a sequence of bets that have an accelerating potential
gain as well as recouping any losses.
Attributes:
sequence: This `List` keeps the bet amounts. Wins are removed from this list
and losses are appended to this list. The current bet is the first value
plus the last value.
outcome: The player's preferred `Outcome` instance to bet on.
table: The `Table` object which will accept the bets.
"""
sequence: List[int]
outcome: casino.main.Outcome
def __init__(self, table: casino.main.Table) -> None:
"""Uses the `RouletteCancellation.reset_sequence` method to initialise the
sequences of numbers used to establish the bet amount. This also picks a
suitable even money `Outcome`.
"""
super().__init__(table)
assert self.table.game is not None, "table.game not set"
self.sequence = []
self.outcome = self.table.game.event_factory.get_outcome("Black")
def reset(self, duration, stake):
"""Sets `stake`, `rounds_to_go` and `sequence` back to their initial values."""
super(RouletteCancellation, self).reset(duration, stake)
self.reset_sequence()
def reset_sequence(self):
"""Puts the initial sequence of 6 values into the `self.sequence` attribute."""
self.sequence = [1, 2, 3, 4, 5, 6]
def place_bets(self) -> None:
"""Creates a bet from the sum of the first and last values of `self.sequence`
and the preferred outcome.
Reset the sequence once we have completed the betting strategy and
`self.sequence` is empty. Stop playing if a bet exceeds `table.limit`.
"""
if len(self.sequence) > 1:
current_bet = casino.main.Bet(
self.sequence[0] + self.sequence[-1], self.outcome, self
)
if current_bet.amount > self.stake:
current_bet.amount = self.stake
try:
self.table.place_bet(current_bet)
except casino.main.InvalidBet:
self.rounds_to_go = 0
else:
self.reset_sequence()
def win(self, bet: casino.main.Bet) -> None:
"""Uses the superclass method to update the stake with an amount won. It
then removes the first and last element from `self.sequence`.
Args:
bet: The `Bet` which won.
"""
super(RouletteCancellation, self).win(bet)
self.sequence = self.sequence[1:-1]
def lose(self, bet: casino.main.Bet) -> None:
"""Uses the superclass method to update the stake with an amount lose. It
then appends the sum of the first and last elements of `self.sequence` to
the end of `self.sequence`.
Args:
bet: The `Bet` which lost.
"""
super(RouletteCancellation, self).lose(bet)
self.sequence.append(self.sequence[0] + self.sequence[-1])
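# Illustrative cancellation walk (not executed): with the initial sequence
# [1, 2, 3, 4, 5, 6] the first bet is 1 + 6 = 7; a win shrinks the sequence
# to [2, 3, 4, 5], while a loss appends 7 to give [1, 2, 3, 4, 5, 6, 7].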
class RouletteFibonacci(RoulettePlayer):
"""A `Player` subclass who uses the Fibonacci betting system. This player allocates
their available budget into a sequence of bets that have an accelerating
potential gain.
Attributes:
recent: The most recent bet amount. Initially set to 1.
previous: The bet amount previous to the most recent bet. Initially set
to 0.
table: The `Table` object which will accept the bets.
"""
recent: int
previous: int
def __init__(self, table: casino.main.Table) -> None:
"""Initialise the Fibonacci player."""
super().__init__(table)
self.recent = 1
self.previous = 0
def reset(self, duration: int, stake: int) -> None:
super(RouletteFibonacci, self).reset(duration, stake)
self.reset_bet_state()
def reset_bet_state(self):
"""Reset `recent` and `previous` to their initial state."""
self.recent, self.previous = 1, 0
def place_bets(self) -> None:
"""Create and place a `Bet` of a value according to `recent` + `previous`."""
assert self.table.game is not None, "table.game not set"
current_bet = casino.main.Bet(
self.recent, self.table.game.event_factory.get_outcome("Black"), self
)
if current_bet.amount > self.stake:
current_bet.amount = self.stake
try:
self.table.place_bet(current_bet)
except casino.main.InvalidBet:
self.rounds_to_go = 0
def win(self, bet: casino.main.Bet) -> None:
"""Users the superclass method to update the stake with an amount won.
It also resets the betting system state.
Args:
bet: The `Bet` which won.
"""
super(RouletteFibonacci, self).win(bet)
self.reset_bet_state()
def lose(self, bet: casino.main.Bet) -> None:
"""Updates `recent` and `previous` to their values for the next step in
the betting strategy.
Args:
bet: The `Bet` which lost.
"""
super(RouletteFibonacci, self).lose(bet)
next_ = self.recent + self.previous
self.previous = self.recent
self.recent = next_
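# Illustrative Fibonacci progression (not executed): consecutive losses
# place bets of 1, 1, 2, 3, 5, 8, ...; the first win resets the pair
# (recent, previous) back to (1, 0).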
class CrapsPlayer(Player):
"""A `Player` who places bets in Craps. This is an abstract class all other
craps player subclasses will inherit from.
Attributes:
table: The `Table` used to place individual `Bet` instances.
stake: The player's current stake. Initialised to the player's starting
budget.
rounds_to_go: The number of rounds left to play. Initialised by the overall
simulation control to the maximum number of rounds to play. In Craps,
this is the number of throws of the dice, which may be a large number of
quick games or a small number of long-running games.
"""
table: casino.main.Table
stake: int
rounds_to_go: int
def __init__(self, table: casino.main.Table) -> None:
"""Constructs the `CrapsPlayer` instance with a specific table for placing
bets.
"""
super(CrapsPlayer, self).__init__(table)
@abstractmethod
def place_bets(self) -> None:
"""Places various `Bet` instances on the `Table` instance.
Must be overridden in subclass as each `Player` will have different
betting strategy.
"""
pass
class CrapsPass(CrapsPlayer):
"""A `CrapsPlayer` who places a Pass Line bet in Craps.
Attributes:
table: The `Table` used to place individual `Bet` instances.
"""
def __init__(self, table: casino.main.Table) -> None:
super(CrapsPass, self).__init__(table)
def place_bets(self) -> None:
"""Places a Pass Line bet on the `Table` if no Pass Line bet is present."""
if self.rounds_to_go > 0:
if not self.table.contains_outcome("Pass Line"):
self.table.place_bet(
casino.main.Bet(
1, casino.main.Outcome("Pass Line", casino.odds.PASS_COME), self
)
)
class CrapsMartingale(CrapsPlayer):
"""A `CrapsPlayer` who places bets in Craps. This player doubles their Pass
Line Odds bet on every loss and resets their Pass Line Odds bet to a base
amount on each win.
Attributes:
table: The `Table` used to place individual `Bet` instances.
loss_count: The number of losses. This is the number of times to double
the Pass Line Odds bet.
bet_multiple: The bet multiplier based on the number of losses.
"""
bet_multiple: int
loss_count: int
def __init__(self, table: casino.main.Table) -> None:
super(CrapsMartingale, self).__init__(table)
self.loss_count = 0
self.bet_multiple = 1
def place_bets(self) -> None:
"""If no Pass Line bet is present, this will update the `Table` with
a bet on the Pass Line at the base bet amount.
If no Pass Line Odds bet is present, this will update the `Table` with
a Pass Line Odds bet. The amount is the base amount times `self.bet_multiple`.
"""
if self.stake > 0 and self.table.game is not None:
if not self.table.contains_outcome("Pass Line"):
self.table.place_bet(
casino.main.Bet(
1, casino.main.Outcome("Pass Line", casino.odds.PASS_COME), self
)
)
elif not self.table.contains_outcome("Pass Odds"):
bet_amount = 2 ** self.loss_count
if bet_amount > self.stake:
bet_amount = self.stake
if bet_amount >= self.table.limit:
bet_amount = (
self.table.limit - 1
) # -1 to account for initial Pass Bet.
self.table.place_bet(
casino.main.Bet(
bet_amount,
casino.main.Outcome("Pass Odds", self.table.game.point_odds()), # type: ignore
self,
)
)
def win(self, bet: casino.main.Bet) -> None:
"""Uses the superclass `Player.win()` method to update the stake with an
amount won. Then resets `loss_count` to zero and resets `bet_multiple`
to 1 for Pass Odds bets only.
Args:
bet: The winning bet.
"""
super(CrapsMartingale, self).win(bet)
if bet.outcome.name == "Pass Odds":
self.loss_count = 0
self.bet_multiple = 1
def lose(self, bet: casino.main.Bet) -> None:
"""Uses the superclass `Player.lose()` to do whatever bookkeeping the
superclass already does. Increments `loss_count` by 1 and doubles
`bet_multiple` for Pass Odds bets only.
Args:
bet: The losing bet.
"""
super(CrapsMartingale, self).lose(bet)
if bet.outcome.name == "Pass Odds":
self.loss_count += 1
self.bet_multiple *= 2
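# Illustrative CrapsMartingale behaviour (not executed): the Pass Line bet
# stays at 1 unit, while the Pass Odds bet doubles after each Pass Odds
# loss (1, 2, 4, ...) and is capped by both the stake and table.limit - 1.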
|
import math
import numpy as np
import pandas as pd
def voyage_finder(in_df, datetime='BaseDateTime', date_format='%Y-%m-%d %H:%M:%S', lead_cols=['lead_date_time', 'lead_LON', 'lead_LAT'],
stopped_speed=1, stopped_sec_req=2600, keep_calcs=False):
# don't mutate by accident
df = in_df.copy()
if df.shape[0] <= 1:
return in_df
# parse datetime
try:
df[datetime] = pd.to_datetime(df[datetime], format=date_format)
except (ValueError, TypeError):
raise ValueError('datetime field of in_df does not match date_format.')
df['date_timestamp'] = df[datetime].astype(np.int64)//10**9 # sec
# calc diffs
# leads
df[lead_cols] = df.sort_values(by=datetime).groupby(['MMSI'])[
['date_timestamp', 'LON', 'LAT']].shift(-1)
# time diff
df = df.assign(delta_sec=lambda x: x[lead_cols[0]] - x['date_timestamp'])
# dist diff
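# Equirectangular approximation in nautical miles: one degree of latitude
# spans 60 NM, and longitude degrees are scaled by the cosine of the mean
# latitude of the two points.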
df = df.assign(dist=lambda x: ((60*abs(x['LAT']-x[lead_cols[2]]))**2 +
(60*abs(x['LON']-x[lead_cols[1]])*np.cos(math.pi*(x['LAT']+x[lead_cols[2]])/360))**2)**0.5)
# speed and stopped time
df = df.assign(speed_knots=lambda x: (x['dist']/x['delta_sec'])*3600)
df = df.assign(is_stopped=lambda x: x['speed_knots'] <= stopped_speed)
df = df.astype({'is_stopped': int})
df = df.assign(stopped_secs=lambda x: x['is_stopped'] * x['delta_sec'])
# time stopped in window
df['cumsum_window_stopped'] = df.groupby('MMSI')[['stopped_secs', datetime]].rolling(str(stopped_sec_req) + 's', on=datetime).sum().reset_index(level=0, drop=True)['stopped_secs']
# sort after index reset
df = df.sort_values(by=datetime)
# count voyages
df = df.assign(increment_voyage=lambda x: (x['cumsum_window_stopped'] >= stopped_sec_req) & (x['is_stopped']))
df = df.astype({'increment_voyage': int})
# shift increments
df['increment_voyage'] = df.sort_values(by=datetime).groupby(['MMSI'])['increment_voyage'].shift(1)
df['increment_voyage'] = df['increment_voyage'].fillna(value=0)
df = df.astype({'increment_voyage': int})
df['voyage_id'] = df[['increment_voyage', 'MMSI']].groupby('MMSI').increment_voyage.cumsum()
if not keep_calcs:
df = df.drop([lead_cols[0], lead_cols[1], lead_cols[2], 'increment_voyage', 'cumsum_window_stopped', 'is_stopped', 'stopped_secs', 'date_timestamp'], axis=1)
return df
def remove_dupes(df):
# drop rows whose next position for the same vessel is identical
df[['lead_LON', 'lead_LAT']] = df.sort_values(by='BaseDateTime').groupby(['MMSI'])[['LON', 'LAT']].shift(-1)
df = df.loc[~((df['LAT'] == df['lead_LAT']) & (df['LON'] == df['lead_LON']))]
df = df.drop(columns=['lead_LAT', 'lead_LON'])
return df
def run_vf(df):
# run vf
df = voyage_finder(df)
df = df.sort_values(by='BaseDateTime', ascending=False)
df['BaseDateTime'] = pd.to_datetime(df['BaseDateTime'], unit='ms').dt.strftime('%Y-%m-%d %H:%M:%S.%f')
df = df.assign(vid=df['MMSI'].astype('str') + '#' + df['voyage_id'].astype('str'))
return df
def assign_id(df):
# remove short voyages
antidata = df['vid'].value_counts()
antidata = antidata.loc[antidata <= 2]
df = df.loc[~df['vid'].isin([val for val in antidata.keys()])]
# id
df['BaseDateTime'] = pd.to_datetime(df['BaseDateTime']).values.astype(np.int64)//10**6
voy_group = df.groupby(['vid'])['BaseDateTime'].min()
voy_group = voy_group.to_frame()
voy_group.reset_index(level=0, inplace=True)
voy_group['MMSI'] = [vid.split('#')[0] for vid in voy_group['vid']]
voy_group = voy_group.assign(voyage_id=lambda x: x['MMSI'] + '#' + x['BaseDateTime'].astype(str))
voy_group = voy_group.drop(['BaseDateTime', 'MMSI'], axis=1)
df = df.drop(['voyage_id'], axis=1)
df = df.merge(voy_group, on='vid', how='left')
df = df.drop(['vid'], axis=1)
df['BaseDateTime'] = pd.to_datetime(df['BaseDateTime'], unit='ms')
return df
def calc_accel(df, speed_col='SOG'):
df[speed_col] = df[speed_col].fillna(0)
# diff over 2
df[['lead_speed']] = df.sort_values(by='BaseDateTime').groupby(['MMSI'])[[speed_col]].shift(-1)
df['acceleration'] = (df['lead_speed'] - df[speed_col]) / df['delta_sec']
df = df.drop(['lead_speed'],axis=1)
return df
def calc_bearing_rate(df, bearing_col='Heading'):
df[bearing_col] = df[bearing_col].fillna(0)
# diff over 2
df[['lead_bearing']] = df.sort_values(by='BaseDateTime').groupby(['MMSI'])[[bearing_col]].shift(-1)
df['bearing_rate'] = (df['lead_bearing'] - df[bearing_col]) / df['delta_sec']
df = df.drop(['lead_bearing'],axis=1)
return df
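# Typical end-to-end sketch over an AIS DataFrame (assumes columns MMSI,
# BaseDateTime, LAT, LON, SOG and Heading; the input variable is hypothetical):
# df = remove_dupes(df)
# df = run_vf(df)
# df = assign_id(df)
# df = calc_accel(df)
# df = calc_bearing_rate(df)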
|
from rest_framework import serializers
from observations_api import models
class SpeciesSerializer(serializers.ModelSerializer):
"""Serializer for species data"""
class Meta:
model = models.Species
fields = ['id','name','sc_name']
class ObservationSerializer(serializers.ModelSerializer):
"""Serializer for observation related data"""
class Meta:
model = models.Observation
fields = ['id','species','observer','country','state','location','lat','long']
extra_kwargs = {'observer': {'read_only': True}}
|
#!/usr/bin/env python3
import sys
import argparse
import ngram_tools
def to_int(ngrams):
for ngram, prob in ngrams:
freq = int(prob)
if freq == 0:
freq = 1
yield ngram, freq
def scale_ngrams(ngrams, scale):
for ngram, freq in ngrams:
yield ngram, freq * scale
def main():
parser = argparse.ArgumentParser()
parser.add_argument('scale', type=float)
args = parser.parse_args()
ngrams = ngram_tools.parse_ngrams_float(sys.stdin)
ngrams = scale_ngrams(ngrams, args.scale)
ngrams = to_int(ngrams)
ngram_tools.print_ngrams(ngrams, sys.stdout)
if __name__ == '__main__':
main()
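# Example invocation (assuming the ngram_tools stdin/stdout format; the file
# name is hypothetical): cat ngrams.float | ./scale.py 1000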
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
from helpers.execution_timer import measure
from typing import Tuple
""" ANAGRAM CHECK
Problem:
Given two strings, check to see if they are anagrams. An anagram is when the two strings can be written using
the exact same letters (so you can just rearrange the letters to get a different phrase or word).
Example:
"public relations" is an anagram of "crap built on lies"
"clint eastwood" is an anagram of "old west action"
Usage:
python anagram_check.py <first string> <second string> <number of solution to run>
e.g.:
python anagram_check.py "baba" "abba" 1
"""
def solution(first: str, second: str, solution_number: int) -> None:
""" Runs the program that determines if two given strings are anagrams
and measures the execution time using one of 3 solutions
Args:
first - first string
second - second string
solution_number - [1..3] selector of the problem solution
Returns:
prints the result and execution time into console
"""
solution_switch = {
1: is_anagram_one,
2: is_anagram_two,
3: is_anagram_three
}
print(rf'Given strings: "{first}" and "{second}"')
# prepare strings
try:
first, second = _prepare_strings(first, second)
except (TypeError, ValueError) as e:
print(str(e))
return
result = measure(solution_switch[solution_number], first, second)
if result:
print('These strings are anagrams!')
else:
print('These strings are NOT anagrams')
return
def _prepare_strings(*strings) -> Tuple[str, ...]:
""" Does sanitation of the given strings as parameters:
- removing spaces
- change all upper-case to lower-case
Args:
strings - a tuple of strings that has to be pre-checked
Returns:
A tuple of strings after all sanitation is done
Raises:
TypeError if a parameter is not a string
ValueError if a string is empty after sanitation
"""
result = []
for string in strings:
if not isinstance(string, str):
raise TypeError('One of the given parameters is not of a string-type')
string = string.replace(' ', '').lower()
if len(string) == 0:
raise ValueError('String shall contain at least one non-space symbol')
result.append(string)
return tuple(result)
def _size_pre_check(*strings) -> bool:
""" Checks if given strings have the same number of elements
Args:
strings - a tuple of strings that has to be pre-checked
Returns:
True if all strings passed as params have the same number of elements
False in other case
Raises:
ValueError if arguments is not a list of strings
"""
length = None
for string in strings:
if not isinstance(string, str):
raise ValueError(f'Expected strings as arguments! {type(string)} is given')
if length is None:
length = len(string)
else:
if len(string) != length:
return False
return True
def is_anagram_one(first: str, second: str) -> bool:
""" First variant of solution if two strings are anagrams.
For every symbol in first string find and remove it's occurrence in the second.
Args:
first - first given string
second - second given string
Returns:
True if the two strings are anagrams
False otherwise
"""
if not _size_pre_check(first, second):
return False
for char in first:
try:
i = second.index(char)
except ValueError:
return False
second = second[:i] + second[i+1:]
return True
def is_anagram_two(first: str, second: str) -> bool:
""" Second variant of solution if two strings are anagrams.
Sort and compare lists of symbols
Args:
first - first given string
second - second given string
Returns:
True if the two strings are anagrams
False otherwise
"""
if not _size_pre_check(first, second):
return False
return sorted(first) == sorted(second)
def is_anagram_three(first: str, second: str) -> bool:
""" Third variant of solution if two strings are anagrams.
Counting symbols while hashing them
Args:
first - first given string
second - second given string
Returns:
True if the two strings are anagrams
False otherwise
"""
if not _size_pre_check(first, second):
return False
letter_register = {}
for letter in first:
if letter in letter_register:
letter_register[letter] += 1
else:
letter_register[letter] = 1
for letter in second:
if letter in letter_register:
letter_register[letter] -= 1
else:
return False
for letter in letter_register:
if letter_register[letter] != 0:
return False
return True
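# Illustrative trace of the counting approach (not executed): for "listen"
# and "silent" every letter's count nets out to zero, so the result is True;
# for "aab" and "abb" the residual counts are non-zero and the result is False.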
if __name__ == "__main__":
command_line_arg_parser = argparse.ArgumentParser()
command_line_arg_parser.add_argument(
'first_string',
help='give 2 strings to check if they are an anagram one of another',
type=str
)
command_line_arg_parser.add_argument(
'second_string',
help='give 2 strings to check if they are an anagram one of another',
type=str
)
command_line_arg_parser.add_argument(
'solution_number',
help='give a number to check the exact implementation or leave empty. Debug only',
type=int,
nargs='?',
default=1,
)
args = command_line_arg_parser.parse_args()
solution(args.first_string, args.second_string, args.solution_number)
|
def usages_helper():
print('Hello Anaconda!')
|
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud certificate-manager maps entries delete` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.certificate_manager import certificate_map_entries
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.certificate_manager import flags
from googlecloudsdk.command_lib.certificate_manager import resource_args
from googlecloudsdk.command_lib.certificate_manager import util
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Delete(base.DeleteCommand):
"""Delete a certificate map entry.
Delete a certificate map entry resource.
## EXAMPLES
To delete the certificate map entry with name simple-entry, run:
$ {command} simple-entry --map=simple-map
"""
@staticmethod
def Args(parser):
resource_args.AddCertificateMapEntryResourceArg(parser, 'to delete')
flags.AddAsyncFlagToParser(parser)
def Run(self, args):
client = certificate_map_entries.CertificateMapEntryClient()
entry_ref = args.CONCEPTS.entry.Parse()
console_io.PromptContinue(
'You are about to delete certificate map entry \'{}\' from certificate map \'{}\''
.format(entry_ref.certificateMapEntriesId, entry_ref.certificateMapsId),
throw_if_unattended=True,
cancel_on_no=True)
response = client.Delete(entry_ref)
response = util.WaitForOperation(response, is_async=args.async_)
log.DeletedResource(entry_ref.Name(), 'map entry', is_async=args.async_)
return response
|
# Generated by Django 3.0.3 on 2020-03-10 12:45
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('blog', '0005_auto_20200310_1745'),
('blog', '0005_auto_20200310_1700'),
]
operations = [
]
|
import json
import pandas as pd
import math
tiers = {4:list(range(0, 23)), 3:list(range(23, 30)), 2:list(range(30, 37)), 1:list(range(37, 46))}
def ivs(name, cp_num, highest=None, level=None, hp=None, tier=None):
if level is not None:
lvlmax, lvlmin = level+1, level
else:
lvlmin, lvlmax = 1, 41
for a in range(16):
for d in range(16):
for s in range(16):
for lvl in range(lvlmin, lvlmax):
if cp(a, d, s, lvl, name) == cp_num:
if hp and hp_calc(s, name, lvl) != hp:
continue
if tier and sum((a,d,s)) not in tiers[tier]:
continue
string = f'{lvl} {ff(a, d, s)}'
if highest is not None:
highest = set(highest)
if highest == {'a'} and a > d and a > s:
print(string)
if highest == {'d'} and d > a and d > s:
print(string)
if highest == {'s'} and s > d and s > a:
print(string)
if highest == {'s', 'a'} and s == a and s > d:
print(string)
if highest == {'s', 'd'} and s == d and s > a:
print(string)
if highest == {'a', 'd'} and a == d and a > s:
print(string)
if highest == {'a', 'd', 's'} and a == d and a == s:
print(string)
else:
print(string)
def mm(l):
return (l[0], l[-1])
def ff(a, d, s):
return f'{a}/{d}/{s} {round(sum((a,d,s))*100/45)}%'
class Combo:
def __init__(self, a, d, s, lvl):
self.a = a
self.d = d
self.s = s
self.lvl = lvl
with open('simple.json', 'r') as f:
stats = json.load(f)
data = pd.read_csv('cpm.csv').values.tolist()
cpm = {k:v for (k,v) in data}
def hp_calc(iv, name, level):
base = stats[name]['stamina']
return int(cpm[level] * (iv + base))
def cp(a, d, s, level, name):
base = stats[name]
attack = a + base['attack']
defense = d + base['defense']
stamina = s + base['stamina']
return math.floor((attack * (defense**0.5) * (stamina**0.5) * (cpm[level]**2)) / 10)
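# cp() follows the familiar Pokemon GO combat-power formula:
# floor(Attack * sqrt(Defense) * sqrt(Stamina) * CPM(level)^2 / 10),
# with base stats read from simple.json and the CP multiplier from cpm.csv.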
l = []
for lvl in range(1, 35):
for a in range(16):
for d in range(16):
for s in range(16):
power = cp(a, d, s, lvl, "Blaziken")
# power = cp(a, d, s, lvl, "Melmetal")
if power < 1501 and power > 1450:
if power > 1499:
pass
# print(a, d, s, lvl)
l.append(Combo(a, d, s, lvl))
# print(l)
l2 = []
for x in l:
# power = cp(x.a, x.d, x.s, x.lvl, "Meltan")
power = cp(x.a, x.d, x.s, x.lvl, "Torchic")
l2.append(power)
# print(power)
# print(mm(sorted(set(l2))))
# print(cp(15, 15, 3, 19, "Blaziken"))
# print(cp(15, 15, 15, ))
for a in range(14, 16):
for d in range(14, 16):
for s in range(14, 16):
for lvl in range(1, 35):
power = cp(a, d, s, lvl, 'Torchic')
# print(power, ff(a,d,s))
# print(stats['Sneasel'])
ivs('Shieldon', 654, highest=['d'], hp=79, tier=1)
# print(cp(13, 10, 15, 40, 'Bastiodon'))
# print(cp(15, 15, 15, 15, 'Zubat'))
# print(hp_calc(10, 'Rhyhorn', 31))
|
from django import template
from ..service.get_base_template_cache_time import get_base_template_cache_time_by_part
register = template.Library()
@register.inclusion_tag('base_template/cached_body.html')
def cached_body():
cached_body_time = get_base_template_cache_time_by_part('BODY')
return {"cached_body_time": cached_body_time}
|
import numpy as np
import csv
import pandas as pd
from scipy.integrate import quad
# # Import the data(leave the head)
# sourcenamelist=csv.reader(open('/Users/dingding/Desktop/Final5.11.csv','r'))
# GRBname=[column[0]for column in sourcenamelist]
# Znamelist=csv.reader(open('/Users/dingding/Desktop/Final5.11.csv','r'))
# Z=[column[1]for column in Znamelist]
# Epeaknamelist=csv.reader(open('/Users/dingding/Desktop/Final5.11.csv','r'))
# Epeak=[column[2]for column in Epeaknamelist]
# Enamelist=csv.reader(open('/Users/dingding/Desktop/Final5.11.csv','r'))
# Eiso=[column[3]for column in Enamelist]
# T90namelist=csv.reader(open('/Users/dingding/Desktop/Final5.11.csv','r'))
# T90=[column[4]for column in T90namelist]
# Snamelist=csv.reader(open('/Users/dingding/Desktop/Final5.11.csv','r'))
# S=[column[5]for column in Snamelist]
#
#
# # calculate the k-corrention:
# alpha=-1
# beita=-2.5
# def NE(E,Epeak):
# if (alpha-beita)*Epeak/(2+alpha)>=E:
# NE=(E/100)**alpha*np.exp(-E*(2+alpha)/Epeak)
# return NE
# elif (alpha-beita)*Epeak/(2+alpha)<=E:
# NE=(((alpha-beita)*Epeak/(100*(2+alpha)))**(alpha-beita)*np.exp(beita-alpha)*(E/100)**beita)
# return NE
# def k(Epeak,Z):
# a1=quad(lambda E:E*NE(E,Epeak),1/(1+Z),10**4/(1+Z))
# a2=quad(lambda E:E*NE(E,Epeak),15,350)
# k=a1[0]/a2[0]
# return k
#
# # calculate the luminosity distance
# omegal=0.734
# omegam=0.266
# h=0.71
# H0=1/(3.09*10**17)
# c=2.99792458*10**8
# def dl(Z):
# integrateportion=quad(lambda x:1/np.sqrt(omegam*(1+x)**3+omegal),0,Z)
# dl=c*(1+Z)/H0*integrateportion[0]
# return dl
print(np.pi)
|
import unittest
from click.testing import CliRunner
from ...create_cmd import create_cmd
from core.substitution import Substitution
from core.cardanopy_config import CardanoPyConfig
from pathlib import Path
import tempfile
import shutil
import os
class TestSubstitution(unittest.TestCase):
def setUp(self):
self.test_dir = Path(tempfile.mkdtemp())
os.chdir(self.test_dir)
# print(f"test_dir='{self.test_dir}'")
def tearDown(self):
shutil.rmtree(self.test_dir)
def test_substitution(self):
app_dir = self.test_dir.joinpath('test-app')
runner = CliRunner()
create_result = runner.invoke(create_cmd, ['--template',
'basic',
'--network',
'testnet',
str(app_dir)])
assert create_result.exit_code == 0
target_config_dir = CardanoPyConfig.try_get_valid_config_dir(app_dir)
target_config_file = CardanoPyConfig.try_get_valid_config_file(app_dir)
cardanopy_config = CardanoPyConfig()
cardanopy_config.load(target_config_file, ("_NAME=test_app", "_TAG=test_tag"))
Substitution.generate(False, target_config_dir, cardanopy_config)
|
import glob
import json
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
import seaborn as sns
from visgrid.utils import get_parser
parser = get_parser()
# yapf: disable
parser.add_argument('--pretrain-steps', type=str, default='3k',
choices=['3k','30k'], help='Number of pretraining steps')
parser.add_argument('--smoothing', type=int, default=5,
help='Number of data points for sliding window average')
# yapf: enable
args = parser.parse_args()
def load_experiment(path):
logfiles = sorted(glob.glob(os.path.join(path, 'scores-*.txt')))
agents = [path.split('/')[-2] for f in logfiles]
seeds = [int(f.split('-')[-1].split('.')[0]) for f in logfiles]
logs = [open(f, 'r').read().splitlines() for f in logfiles]
def read_log(log):
results = [json.loads(item) for item in log]
data = smooth(pd.DataFrame(results), args.smoothing)
return data
results = [read_log(log) for log in logs]
keys = list(zip(agents, seeds))
data = pd.concat(results, join='outer', keys=keys,
names=['agent',
'seed']).sort_values(by='seed',
kind='mergesort').reset_index(level=[0, 1])
return data #[data['episode']<=100]
def smooth(data, n):
numeric_dtypes = data.dtypes.apply(pd.api.types.is_numeric_dtype)
numeric_cols = numeric_dtypes.index[numeric_dtypes]
data[numeric_cols] = data[numeric_cols].rolling(n).mean()
return data
pretrain_experiments = 'pretrain_3k' if args.pretrain_steps == '3k' else 'pretrain_30k'
experiments = ['pretrain_0k', pretrain_experiments]
agents = [
'markov',
'inv-only',
'contr-only',
'autoenc',
'truestate',
'end-to-end',
'pixel-pred',
# 'random',
# 'rearrange_xy',
]
root = 'results/scores/'
unfiltered_paths = [(root + e + '/' + a + '/', (e, a)) for e in experiments for a in agents]
experiments = [experiment for path, experiment in unfiltered_paths if os.path.exists(path)]
paths = [path for path, _ in unfiltered_paths if os.path.exists(path)]
labels = ['tag', 'features']
data = pd.concat([load_experiment(p) for p in paths],
join='outer',
keys=(experiments),
names=labels).reset_index(level=list(range(len(labels))))
def plot(data, x, y, hue, style, col=None):
print("Plotting using hue={hue}, style={style}".format(hue=hue, style=style))
assert not data.empty, "DataFrame is empty, please check query"
# print(data.query('episode==99').groupby('agent', as_index=False)['total_reward'].mean())
# print(data.query('episode==99').groupby('agent', as_index=False)['total_reward'].std())
data = data.replace('markov', 'Markov')
data = data.replace('end-to-end', 'visual')
data = data.replace('truestate', 'xy-position')
print(data.groupby('agent', as_index=False)['reward'].mean())
print(data.groupby('agent', as_index=False)['reward'].std())
# If asking for multiple envs, use facetgrid and adjust height
height = 4 if col is not None and len(data[col].unique()) > 1 else 5
if col:
col_wrap = 2 if len(data[col].unique()) > 1 else 1
else:
col_wrap = None
# data = data[data['episode'] < 97]
dashes = {
'Markov': '',
'inv-only': (1, 1),
'contr-only': (1, 2, 5, 2),
'autoenc': (2, 2, 1, 2),
'visual': (5, 2, 5, 2),
'xy-position': (7, 2, 3, 2),
'pixel-pred': (7, 1, 1, 1),
'random': (1, 2, 3, 2),
}
algs = [
'Markov',
'autoenc',
'inv-only',
'pixel-pred',
'contr-only',
'visual',
'xy-position',
'random',
]
labels = [
'Markov',
'Autoenc',
'Inverse',
'Pixel-Pred',
'Ratio',
'Visual',
'Expert (x,y)',
'Random',
]
colormap = [
'Markov',
'inv-only',
'autoenc',
'visual',
'contr-only',
'xy-position',
'pixel-pred',
]
p = sns.color_palette('Set1', n_colors=2)
red, _ = p
p = sns.color_palette('Set1', n_colors=9, desat=0.5)
_, blue, green, purple, orange, yellow, brown, pink, gray = p
palette = [red, blue, brown, purple, orange, yellow, pink]
palette = dict(zip(colormap, palette))
palette['random'] = gray
data = data.append({'agent': 'random', 'reward': -84.8, 'seed': 0, 'episode': 0},
ignore_index=True)# yapf: disable
g = sns.relplot(
x=x,
y=y,
data=data,
hue=hue,
hue_order=algs,
style=style,
kind='line',
# legend='full',
legend=False,
dashes=dashes,
height=height,
aspect=1.2,
col=col,
col_wrap=col_wrap,
# col_order=col_order,
palette=palette,
linewidth=2,
facet_kws={
'sharey': False,
'sharex': False
})
g.set_titles('{col_name}')
ax = g.axes.flat[0]
ax.set_ylim((-90, 0))
ax.set_xlim((0, 100))
ax.axhline(-84.8, dashes=dashes['random'], color=palette['random'], linewidth=2)
leg = ax.legend(labels, loc='upper center', ncol=4, bbox_to_anchor=(0.43, -0.17), fontsize=12, frameon=False)
leg.set_draggable(True)
for axis in ['bottom','left']:
ax.spines[axis].set_linewidth(2)
ax.tick_params(width=2)
ax.tick_params(labelsize=16)
ax.set_ylabel('Reward',fontsize=18)
ax.set_xlabel('Episode',fontsize=18)
plt.tight_layout()
plt.subplots_adjust(bottom=0.25)
plt.show()
plot(data, x='episode', y='reward', hue='agent', style='agent')
|
#!/usr/bin/python3
# coding=utf-8
# -------------------------------------------------------------------------------
# This file is part of Phobos, a Blender Add-On to edit robot models.
# Copyright (C) 2020 University of Bremen & DFKI GmbH Robotics Innovation Center
#
# You should have received a copy of the 3-Clause BSD License in the LICENSE file.
# If not, see <https://opensource.org/licenses/BSD-3-Clause>.
# -------------------------------------------------------------------------------
import bpy
from phobos.utils.selection import selectObjects as select
import phobos.utils.editing as eUtils
root = bpy.context.active_object
links = [link for link in bpy.context.selected_objects if link.phobostype == 'link']
visuals = [v for v in bpy.context.selected_objects if v.phobostype == 'visual' and v.parent]
# rename all bones so they are not all named 'Bone' in the joint armature
for link in links:
select([link], clear=True, active=0)
link.data.bones[0].name = link.name
bpy.ops.object.mode_set(mode='EDIT')
print(link.name, len(link.data.bones), len(link.data.edit_bones))
link.data.edit_bones[0].name = link.name
bpy.ops.object.mode_set(mode='OBJECT')
# save list of all parent joints for all visuals
vparents = {}
lparents = {}
for v in visuals:
vparents[v.name] = v.parent.name
for l in links:
try:
lparents[l.name] = l.parent.name
except AttributeError:
pass # root link
select(visuals, clear=True, active=0)
bpy.ops.object.parent_clear(type='CLEAR_KEEP_TRANSFORM')
select(links, clear=True, active=links.index(root))
bpy.context.scene.objects.active = root
bpy.ops.object.join()
bpy.ops.object.mode_set(mode='EDIT')
for key, value in lparents.items():
root.data.edit_bones[key].parent = root.data.edit_bones[value]
bpy.ops.object.mode_set(mode='OBJECT')
for v in visuals:
select([root], clear=True, active=0)
bpy.ops.object.join()
bpy.ops.object.mode_set(mode='EDIT')
root.data.edit_bones.active = root.data.edit_bones[vparents[v.name]]
bpy.ops.object.mode_set(mode='OBJECT')
eUtils.parentObjectsTo(v, root, 1)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
def main():
print("hello world")
if __name__ == "__main__":
args = sys.argv
main()
|
import os
os_env_prefix = 'KAKATUA'
cmd_prefix = os.getenv(f'{os_env_prefix}_CMD_PREFIX')
bot_token = os.getenv(f'{os_env_prefix}_BOT_TOKEN')
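# Illustrative shell setup for the two variables read above (values assumed):
#   export KAKATUA_CMD_PREFIX='!'
#   export KAKATUA_BOT_TOKEN='<bot token>'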
|
import io
from setuptools import setup
VERSION = '0.2'
with io.open('README.md', encoding='utf-8') as f:
long_description = '\n' + f.read()
setup(
name='file-permissions',
version=VERSION,
description='A tiny wrapper to get information about file permissions',
long_description=long_description,
long_description_content_type='text/markdown',
author='Andrii Rusanov',
author_email='andrey@rusanov.me',
url='https://github.com/andreyrusanov/permissions',
py_modules=['permissions'],
include_package_data=True,
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# test.py
# Improved version of the aging grep test with blktrace, backups, and config files. Running without a config file prints instructions for creating one from defconfig.sh and exits.
import os.path
import subprocess
import shlex
import time
import os
import sys
print("[HZY0] iostat -d /dev/sdb1 | awk '$1 ~ /sdb1/ {print $6}'")
subprocess.check_call("iostat -d /dev/sdb1 | awk '$1 ~ /sdb1/ {print $6}'", shell=True, stderr=subprocess.STDOUT)
# the profile namedtuple (similar to a C struct) contains all the info in a
# profile
from collections import namedtuple
profile = namedtuple("profile", ["name", "mntpnt", "partition"])
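# e.g. an aged-disk profile might look like (illustrative values):
#   profile(name="aged", mntpnt="/mnt/aged", partition="/dev/sdb1")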
# FNULL is used to reroute output of certain subprocesses to /dev/null
FNULL = open(os.devnull, 'w')
# tcolors is used to change the terminal color
class tcolors:
bold = '\033[1m'
pullnumber = '\033[94m'
initialization = '\033[92m'
firsttimesetup = '\033[36m'
end = '\033[0m'
################################################################################
# greptest
# runs a wall-timed test of how long it takes to grep a fixed random string
# recursively through the research directory. testno is used to distinguish the
# traces; profile distinguishes the profile (aged/clean/etc)
def greptest(testno, profile):
if clear_cache == True:
subprocess.check_call(shlex.split("bash remount.sh " + profile.name))
time.sleep(1)
print('blktrace -a read -d ' + profile.partition.rstrip(
'0123456789') + ' -o ' + test_name + '/' + profile.name + 'blktrace' + str(testno).zfill(4))
subprocess.Popen('blktrace -a read -d ' + profile.partition.rstrip(
'0123456789') + ' -o ' + test_name + '/' + profile.name + 'blktrace' + str(testno).zfill(4), shell=True,
stdout=FNULL, stderr=subprocess.STDOUT)
time.sleep(2)
print('grep -r --binary-files=text "' + grep_random + '" ' + profile.mntpnt)
start = time.time()
subprocess.call(shlex.split('grep -r --binary-files=text "' + grep_random + '" ' + profile.mntpnt),
stderr=subprocess.STDOUT)
stop = time.time()
time.sleep(2)
    blkpid = subprocess.check_output(["pidof", "-s", "blktrace"]).decode().strip()
    print('kill -15 ' + blkpid)
    subprocess.call('kill -15 ' + blkpid, shell=True, stdout=FNULL,
                    stderr=subprocess.STDOUT)
time.sleep(4)
return (stop - start)
################################################################################
# process_layout
# processes the given blktrace file and computes the layout score
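# The score is 1 - (discontiguous reads / total blocks): blkparse reports
# 512-byte sectors, so length/8 below converts to 4KiB blocks, and a read is
# discontiguous when it does not start at the sector following the previous
# read (the counter starts at -1 so the first read is free). A perfectly
# sequential trace scores 1.0; e.g. 5 discontiguities over 100 blocks -> 0.95.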
def process_layout(filename):
print("processing " + filename + " to compute layout score")
print('blkparse -a issue -f "%S %n\\n" -i ' + filename)
while os.path.isfile("{}.blktrace.0".format(filename)) == False:
time.sleep(1)
time.sleep(1)
# traceoutputfile = open("{}".format(filename.replace("blktrace","")),"w")
    blktrace_output = subprocess.check_output(shlex.split('blkparse -a issue -f "%S %n\n" -i ' + filename)).decode()
    blktrace_lines = blktrace_output.split('\n')
blktrace_lines.pop() # removes the trailing empty string
# print(blktrace_lines[-1])
discont = -1 # number of discontiguous blocks
total_blocks = 0 # total number of blocks
last_sector = -1 # the last sector read
for line in blktrace_lines:
splitline = line.split()
if len(splitline) != 0 and splitline[
0].isdigit(): # this makes sure we're not at one of blkparse's trailing non-data lines
sector = int(splitline[0])
length = int(splitline[1])
# traceoutputfile.write("{} {}\n".format(sector, length))
if last_sector != sector:
discont = discont + 1
last_sector = sector + length
total_blocks = total_blocks + length / 8
# traceoutputfile.close()
if total_blocks != 0:
return float(1) - float(discont) / float(total_blocks)
else:
return float(-1)
################################################################################
# initialization procedure
print(tcolors.initialization + "initializing test")
# check if first_time_setup.sh has been run and run it if not
try:
subprocess.check_call('git config --global --list | grep "allowreachablesha1inwant"', shell=True, stdout=FNULL,
stderr=subprocess.STDOUT)
except Exception as e:
print(tcolors.firsttimesetup + "improper git configuration detected, running first time setup")
subprocess.check_call(shlex.split('bash first_time_setup.sh'))
print(tcolors.end)
# check if config.sh exists, and exit if not
if os.path.isfile('config.sh') == False:
    print("********************")
    print("config.sh doesn't exist")
    print("edit defconfig.sh and save as config.sh")
    print("exiting script")
    print("********************")
exit()
# load variables from config.sh using the printconfig.sh helper script
print("loading configuration parameters from config.sh")
config = subprocess.check_output(shlex.split("bash printconfig.sh")).decode()
config = config.split('\n')
config.pop() # pop the trailing empty string
# set optional profiles to empty
clean = profile._make(["", "", ""])
cleaner = profile._make(["", "", ""])
for item in config:
tmp = item.split(" ", 1)
param = tmp[0]
value = tmp[1]
if param == 'test_name':
test_name = value
if param == 'total_pulls':
total_pulls = int(value)
if param == 'pulls_per_grep':
pulls_per_grep = int(value)
if param == 'grep_random':
grep_random = value
if param == 'gc_on':
if value == "True":
gc_on = True
else:
gc_on = False
if param == 'keep_traces':
if value == "True":
keep_traces = True
else:
keep_traces = False
if param == 'clear_cache':
if value == "True":
clear_cache = True
else:
clear_cache = False
if param == 'aged':
aged = profile._make(["aged", value.split()[0], value.split()[1]])
if param == 'clean':
clean = profile._make(["clean", value.split()[0], value.split()[1]])
if param == 'cleaner':
cleaner = profile._make(["cleaner", value.split()[0], value.split()[1]])
# set the git gc config option
if gc_on == True:
print("enabling git gc:")
print("git config --global --unset gc.auto")
subprocess.call(shlex.split("git config --global --unset gc.auto"))
else:
print("disabling git gc:")
print("git config --global gc.auto 0")
subprocess.check_call(shlex.split("git config --global gc.auto 0"))
# format the partitions
subprocess.check_call(shlex.split("bash format.sh aged"))
if (clean.name != ""):
subprocess.check_call(shlex.split("bash format.sh clean"))
if (cleaner.name != ""):
subprocess.check_call(shlex.split("bash format.sh cleaner"))
# trim the SSD
print("fstrim -v /home/iamzeyuanhu/hzy/")
subprocess.check_call(shlex.split("fstrim -v /home/iamzeyuanhu/hzy/"))
# create a dir to hold the results and traces
print("mkdir -p " + test_name)
subprocess.check_call(shlex.split("mkdir -p " + test_name))
# load the revision file, initialize the source location, and load the results file
rev = open('linuxrev.txt')
# point to the linux kernel repo that I cloned beforehand
source = os.path.abspath('/home/iamzeyuanhu/linux')
#source = os.path.abspath('linux')
resultfile = open(test_name + '/' + test_name + 'results.csv', 'w')
resultfile.write("pulls_performed filesystem_size aged_time aged_layout_score")
if clean.mntpnt != '':
resultfile.write(" clean_time clean_layout_score")
if cleaner.mntpnt != '':
resultfile.write(" cleaner_time cleaner_layout_score")
resultfile.write("\n")
# initialize the target repo on the aged drive
print("initializing repo on target drive")
print("mkdir -p " + "linux")
subprocess.check_call(shlex.split("mkdir -p " + aged.mntpnt + "/linux"))
# print("cp -r " + "$HOME/linux " + aged.mntpnt)
# subprocess.check_call(shlex.split("cp -r " + "/home/iamzeyuanhu/linux " + aged.mntpnt))
print("git init")
subprocess.check_call(shlex.split("git init"), cwd=aged.mntpnt + "/linux", stdout=FNULL, stderr=subprocess.STDOUT)
# make sure blktrace isn't running
try:
    subprocess.call('kill -15 ' + subprocess.check_output(["pidof", "-s", "blktrace"]).decode(), shell=True)
except Exception as e:
pass
print('initialization complete' + tcolors.end)
print("[HZY1] iostat -d /dev/sdb1 | awk '$1 ~ /sdb1/ {print $6}'")
subprocess.check_call("iostat -d /dev/sdb1 | awk '$1 ~ /sdb1/ {print $6}'", shell=True, stderr=subprocess.STDOUT)
################################################################################
# main loop
for i in range(0, total_pulls + 1):
# checkout procedure
currhash = rev.readline()
#checkout_command = 'git pull --no-edit -q -s recursive -X theirs ' + source + ' ' + currhash.strip()
checkout_command = 'git pull --no-edit -s recursive -X theirs ' + source + ' ' + currhash.strip()
print(tcolors.pullnumber + str(i).zfill(6) + tcolors.end + ' ' + checkout_command)
subprocess.check_call(shlex.split(checkout_command), cwd=aged.mntpnt + '/linux', stdout=FNULL,
stderr=subprocess.STDOUT)
print("sync")
subprocess.call("sync", shell=True, stderr=subprocess.STDOUT)
# grep test
if i % pulls_per_grep == 0:
resultfile.write(str(i) + " ")
print(tcolors.bold + '\nrunning aged grep test: ' + str(i / pulls_per_grep) + '\n' + tcolors.end)
        fssize = subprocess.check_output(shlex.split("du -s"), cwd=aged.mntpnt).decode().split()[0]
resultfile.write(str(fssize) + " ")
agedresult = False
# while agedresult == False:
agedgrep = greptest(i / pulls_per_grep, aged)
try:
aged_layout_score = process_layout(test_name + "/agedblktrace" + str(i / pulls_per_grep).zfill(4))
# agedresult = True
        except Exception as e:
aged_layout_score = 1
resultfile.write(str(agedgrep) + " " + str(aged_layout_score) + " ")
# clean grep test
if clean.name != "":
print(tcolors.bold + '\nrunning clean grep test: ' + str(i / pulls_per_grep) + '\n' + tcolors.end)
subprocess.check_call(shlex.split("bash format.sh clean"))
print("cp -a " + aged.mntpnt + "/linux " + clean.mntpnt)
subprocess.check_output(shlex.split("cp -a " + aged.mntpnt + "/linux " + clean.mntpnt))
cleanresult = False
# while cleanresult == False:
cleangrep = greptest(i / pulls_per_grep, clean)
try:
clean_layout_score = process_layout(test_name + "/cleanblktrace" + str(i / pulls_per_grep).zfill(4))
            except Exception as e:
clean_layout_score = 1
resultfile.write(str(cleangrep) + " " + str(clean_layout_score) + " ")
# cleaner grep test
if cleaner.name != "":
print(tcolors.bold + '\nrunning cleaner grep test: ' + str(i / pulls_per_grep) + '\n' + tcolors.end)
subprocess.check_call(shlex.split("bash unmount.sh clean"))
subprocess.check_call(shlex.split("bash unmount.sh cleaner"))
print("dd if=" + clean.partition + " of=" + cleaner.partition + " bs=4M")
subprocess.check_call(shlex.split("dd if=" + clean.partition + " of=" + cleaner.partition + " bs=4M"),
stdout=FNULL, stderr=subprocess.STDOUT)
subprocess.check_call(shlex.split("bash mount.sh clean"))
subprocess.check_call(shlex.split("bash mount.sh cleaner"))
cleanergrep = greptest(i / pulls_per_grep, cleaner)
cleaner_layout_score = process_layout(test_name + "/cleanerblktrace" + str(i / pulls_per_grep).zfill(4))
resultfile.write(str(cleanergrep) + " " + str(cleaner_layout_score) + " ")
print(tcolors.bold + '\nresults of grep test ' + str(i / pulls_per_grep) + ':')
print('grep test completed in ' + str(agedgrep) + ' seconds')
if clean.name != "":
print('clean test completed in ' + str(cleangrep) + ' seconds')
if cleaner.name != "":
print('cleaner test completed in ' + str(cleanergrep) + ' seconds')
print('aged layout score: ' + str(aged_layout_score))
if clean.name != "":
print('clean layout score: ' + str(clean_layout_score))
if cleaner.name != "":
print('cleaner layout score: ' + str(cleaner_layout_score))
print(tcolors.end)
if keep_traces == False:
print("deleting traces")
print("rm " + test_name + "/*blktrace*")
subprocess.call("rm " + test_name + "/*blktrace*", shell=True)
resultfile.write("\n")
resultfile.flush()
# end of main loop
try:
    subprocess.call(['kill', '-15', subprocess.check_output(["pidof", "-s", "blktrace"]).decode().strip()])
except Exception as e:
pass
print("[HZY2] iostat -d /dev/sdb1 | awk '$1 ~ /sdb1/ {print $6}'")
subprocess.check_call("iostat -d /dev/sdb1 | awk '$1 ~ /sdb1/ {print $6}'", shell=True, stderr=subprocess.STDOUT)
|
from flask import Flask
from flask_restful import Api
from flask_jwt import JWT
from security import authenticate, identity
from resources.user import UserRegister
from resources.items import Item, ItemList
from resources.store import Store, StoreList
from db import db
# HTTP status codes
# 200 - Success
# 201 - Created
# 202 - Accepted
# 400 - Bad request
# 401 - Unauthorized
# 404 - Not found
# 500 - Internal server error
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db'
# disable Flask-SQLAlchemy event-based modification tracking to save overhead
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.secret_key = 'loki'
api = Api(app)
jwt = JWT(app, authenticate, identity) # /auth
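# flask_jwt serves POST /auth for login; an illustrative request (credentials
# assumed) would be:
#   curl -X POST http://localhost:4000/auth -H "Content-Type: application/json" \
#        -d '{"username": "bob", "password": "1234"}'
# and the returned access_token is then sent as "Authorization: JWT <token>".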
@app.before_first_request
def create_tables():
db.create_all()
api.add_resource(Item, "/item/<string:name>")
api.add_resource(ItemList, "/items")
api.add_resource(UserRegister, "/register")
api.add_resource(Store, "/store/<string:name>")
api.add_resource(StoreList, "/stores")
# To avoid executing the below line during imports
if __name__ == '__main__':
db.init_app(app)
app.run(port=4000, debug=True)
|
import numpy as np  # explicit import; np is used in the assert below
from sympy import symbols, sin, cos
from shenfun import *
# Use sympy to compute manufactured solution
x, y, z = symbols("x,y,z")
ue = (cos(4*x) + sin(2*y) + sin(4*z))*(1-x**2)
fe = ue.diff(x, 2) + ue.diff(y, 2) + ue.diff(z, 2)
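# Method of manufactured solutions: pick an exact ue satisfying the boundary
# conditions, set fe to its Laplacian, then solving div(grad(u)) = fe below
# should reproduce ue (verified by the assert at the end).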
C0 = FunctionSpace(32, 'Chebyshev', bc=(0, 0))
F1 = FunctionSpace(32, 'Fourier', dtype='D')
F2 = FunctionSpace(32, 'Fourier', dtype='d')
T = TensorProductSpace(comm, (C0, F1, F2))
u = TrialFunction(T)
v = TestFunction(T)
# Assemble left and right hand
f_hat = inner(v, Array(T, buffer=fe))
A = inner(v, div(grad(u)))
# Solve
solver = chebyshev.la.Helmholtz(*A)
u_hat = Function(T)
u_hat = solver(f_hat, u_hat)
uj = u_hat.backward()
assert np.linalg.norm(uj - Array(T, buffer=ue)) < 1e-12
print(u_hat.shape)
|
import numpy as np
import matplotlib.pyplot as plt
import math
N = 129
n_iter = 64
image = np.zeros([n_iter, N], dtype='int')
print(image.shape)
image[0, math.ceil(N/2)] = 1
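# Elementary cellular automaton Rule 90: a cell becomes 1 exactly when its two
# neighbours differ (the sum == 1 test below is XOR for binary cells), which
# grows a Sierpinski triangle from the single centre seed. The two assignments
# at the end of each iteration wrap the boundary columns periodically.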
for iterate in range(1, n_iter):
for j in range(1, N-1):
if image[iterate-1, j+1]+image[iterate-1, j-1] == 1:
image[iterate, j] = 1
image[iterate, 0] = image[iterate, N-2]
image[iterate, N-1] = image[iterate, 1]
plt.imshow(image, interpolation="nearest")
plt.show()
|
import pytest
from debutizer.commands.config_file import (
CredentialsYAMLError,
DebutizerYAMLError,
S3UploadTargetConfiguration,
)
def test_s3_configuration_validity():
config = S3UploadTargetConfiguration(
endpoint="my_endpoint",
bucket="my_bucket",
access_key="my_access_key",
secret_key="my_secret_key",
sign=True,
gpg_key_id="my_gpg_key_id",
gpg_signing_key="my_gpg_signing_key",
)
config.check_validity()
config.access_key = None
with pytest.raises(CredentialsYAMLError):
config.check_validity()
config.access_key = "my_access_key"
config.gpg_key_id = None
config.gpg_signing_key = None
with pytest.raises(DebutizerYAMLError):
config.check_validity()
|
# chat/routing.py
from django.urls import re_path
from . import consumers
websocket_urlpatterns = [
re_path(r'game', consumers.GameConsumer.as_asgi()),
re_path(r'chat', consumers.ChatConsumer.as_asgi())
]
|
""""
Copyright © twilsonco 2020
Description:
This is a discord bot to manage torrent transfers through the Transmission transmissionrpc python library.
Version: 1.2
"""
import discord
import asyncio
import aiohttp
import json
from json import dumps, load
import subprocess
from discord.ext.commands import Bot
from discord.ext import commands
from platform import python_version
import os
import sys
from os.path import expanduser, join, exists, isdir, isfile
import shutil
import re
import datetime
import pytz
import platform
import secrets
import transmissionrpc
import logging
from logging import handlers
import base64
import random
from enum import Enum
# BEGIN USER CONFIGURATION
CONFIG_DIR = os.path.dirname(os.path.realpath(__file__))
"""
Bot configuration is done with a config.json file.
"""
CONFIG = None
TSCLIENT_CONFIG = None
# logging.basicConfig(format='%(asctime)s %(message)s',filename=join(expanduser("~"),'ts_scripts.log'))
logName = join(CONFIG_DIR,'transmissionbot.log')
logging.basicConfig(format='%(asctime)s %(message)s',filename=join(CONFIG_DIR,'transmissionbot.log'))
logger = logging.getLogger('transmission_bot')
logger.setLevel(logging.DEBUG) # set according to table below. Events with values LESS than the set value will not be logged
"""
Level Numeric value
__________________________
CRITICAL 50
ERROR 40
WARNING 30
INFO 20
DEBUG 10
NOTSET 0
"""
fh = logging.handlers.RotatingFileHandler(logName, backupCount=5)
if os.path.isfile(logName): # log already exists, roll over!
fh.doRollover()
fmt = logging.Formatter('%(asctime)s [%(threadName)14s:%(filename)8s:%(lineno)5s - %(funcName)20s()] %(levelname)8s: %(message)s')
fh.setFormatter(fmt)
logger.addHandler(fh)
# END USER CONFIGURATION
# for storing config and transfer list
CONFIG_JSON = join(CONFIG_DIR, "config.json")
LOCK_FILE = join(CONFIG_DIR, "lock")
DEFAULT_REASON="TransmissionBot"
def lock(lockfile=LOCK_FILE):
""" Wait for LOCK_FILE to not exist, then create it to lock """
from time import sleep
from random import random
from pathlib import Path
lock_file = Path(lockfile)
logger.debug("Creating lock file '{}'".format(lockfile))
while lock_file.is_file():
logger.debug("Config file locked, waiting...")
sleep(0.5)
logger.debug("Lock file created '{}'".format(lockfile))
lock_file.touch()
def unlock(lockfile=LOCK_FILE):
""" Delete LOCK_FILE """
from pathlib import Path
lock_file = Path(lockfile)
logger.debug("Removing lock file '{}'".format(lockfile))
if lock_file.is_file():
lock_file.unlink()
logger.debug("Lock file removed '{}'".format(lockfile))
else:
logger.debug("Lock file didn't exist '{}'".format(lockfile))
def mkdir_p(path):
"""mimics the standard mkdir -p functionality when creating directories
:param path:
:return:
"""
    try:
        os.makedirs(path)
    except OSError:  # Python >2.5
        if not isdir(path):
            raise
def generate_json(json_data=None, path=None, overwrite=False):
"""Generate a new config file based on the value of the CONFIG global variable.
    This function will cause a fatal error if trying to overwrite an existing file
without setting overwrite to True.
:param overwrite: Overwrite existing config file
:type overwrite: bool
:return: Create status
:rtype: bool
"""
if not path or not json_data:
return False
if exists(path) and not overwrite:
logger.fatal("JSON file exists already! (Set overwite option to overwrite)")
return False
if not exists(os.path.dirname(path)):
mkdir_p(os.path.dirname(path))
try:
lock()
if exists(path):
# first backup the existing file
shutil.copy2(path,"{}.bak".format(path))
try:
with open(path, 'w') as cf:
cf.write(dumps(json_data, sort_keys=True, indent=4, separators=(',', ': ')))
except Exception as e:
logger.error("Exception when writing JSON file {}, reverting to backup: {}".format(path,e))
shutil.move("{}.bak".format(path), path)
else:
with open(path, 'w') as cf:
cf.write(dumps(json_data, sort_keys=True, indent=4, separators=(',', ': ')))
except Exception as e:
logger.fatal("Exception when writing JSON file: {}".format(e))
finally:
unlock()
return True
def load_json(path=None):
"""Load a config file from disk using the default location if it exists. If path is defined
it will be used instead of the default path.
:param path: Optional path to config file
:type path: str
:return: Load status
:rtype: bool
"""
if not path:
return False
    if exists(path):
        with open(path) as f:
            jsonContents = load(f)
        logger.debug("Loaded JSON file: {}".format(path))
        return jsonContents
return False
CONFIG = load_json(CONFIG_JSON) if exists(CONFIG_JSON) else None # will be read from CONFIG_JSON
class OutputMode(Enum):
AUTO = 1
DESKTOP = 2
MOBILE = 3
OUTPUT_MODE = OutputMode.AUTO
REPEAT_MSG_IS_PINNED = False
REPEAT_MSGS = {}
# REPEAT_MSGS[msg_key] = {
# 'msgs':msg_list,
# 'command':command,
# 'context':context,
# 'content':content,
# 'pin_to_bottom':False,
# 'reprint': False,
# 'freq':CONFIG['repeat_freq'],
# 'timeout':CONFIG['repeat_timeout'],
# 'timeout_verbose':REPEAT_TIMEOUT,
# 'cancel_verbose':CONFIG['repeat_cancel_verbose'],
# 'start_time':datetime.datetime.now(),
# 'do_repeat':True
# }
TORRENT_JSON = join(CONFIG_DIR, "transfers.json")
# list of transfer information to be stored in a separate file, used for
# checking for transfer state changes for the notification system
# here's the structure, a dict with a dict for each transfer with select information.
# this will be a local var, since it's only needed in the function that checks for changes.
# TORRENT_LIST = {
# 'hashString':{
# 'name':t.name,
# 'error':t.error,
# 'errorString':t.errorString,
# 'status':t.status,
# 'isStalled':t.isStalled,
# 'progress':t.progress
# }
# }
TORRENT_ADDED_USERS = {}
TORRENT_NOTIFIED_USERS = {}
TORRENT_OPTOUT_USERS = {}
async def determine_prefix(bot, message):
return CONFIG['bot_prefix']
client = Bot(command_prefix=determine_prefix)
TSCLIENT = None
MAKE_CLIENT_FAILED = False
# Begin transmissionrpc functions, lovingly taken from https://github.com/leighmacdonald/transmission_scripts
filter_names = ( # these are the filters accepted by transmissionrpc
"all",
"active",
"downloading",
"seeding",
"stopped",
"finished"
)
filter_names_extra = ( # these are extra filters I've added
"stalled",
"private",
"public",
"error",
'err_none',
'err_tracker_warn',
'err_tracker_error',
'err_local',
'verifying',
'queued',
"running" # running means a non-zero transfer rate, not to be confused with "active"
)
filter_names_full = filter_names + filter_names_extra
sort_names = (
"id",
"progress",
"name",
"size",
"ratio",
"speed",
"speed_up",
"speed_down",
"status",
"queue",
"age",
"activity"
)
class TSClient(transmissionrpc.Client):
""" Basic subclass of the standard transmissionrpc client which provides some simple
helper functionality.
"""
def get_torrents_by(self, sort_by=None, filter_by=None, reverse=False, filter_regex=None, tracker_regex=None, id_list=None, num_results=None):
"""This method will call get_torrents and then perform any sorting or filtering
actions requested on the returned torrent set.
:param sort_by: Sort key which must exist in `Sort.names` to be valid;
:type sort_by: str
:param filter_by:
:type filter_by: str
:param reverse:
:return: Sorted and filter torrent list
:rtype: transmissionrpc.Torrent[]
"""
if id_list:
torrents = self.get_torrents(ids=id_list)
else:
torrents = self.get_torrents()
if filter_regex:
regex = re.compile(filter_regex, re.IGNORECASE)
torrents = [tor for tor in torrents if regex.search(tor.name)]
if tracker_regex:
regex = re.compile(tracker_regex, re.IGNORECASE)
torrents = [tor for tor in torrents if regex.search(str([t['announce'] for t in tor.trackers]))]
if filter_by:
for f in filter_by.split():
if f == "active":
torrents = [t for t in torrents if not t.isStalled and t.rateDownload + t.rateUpload == 0]
elif f in filter_names:
                    torrents = filter_torrents_by(torrents, key=getattr(Filter, f))
elif f == "verifying":
torrents = [t for t in torrents if "check" in t.status]
elif f == "queued":
torrents = [t for t in torrents if "load pending" in t.status]
elif f == "stalled":
torrents = [t for t in torrents if t.isStalled]
elif f == "private":
torrents = [t for t in torrents if t.isPrivate]
elif f == "public":
torrents = [t for t in torrents if not t.isPrivate]
elif f == "error":
torrents = [t for t in torrents if t.error != 0]
elif f == "err_none":
torrents = [t for t in torrents if t.error == 0]
elif f == "err_tracker_warn":
torrents = [t for t in torrents if t.error == 1]
elif f == "err_tracker_error":
torrents = [t for t in torrents if t.error == 2]
elif f == "err_local":
torrents = [t for t in torrents if t.error == 3]
elif f == "running":
torrents = [t for t in torrents if t.rateDownload + t.rateUpload > 0]
else:
continue
        if sort_by is None and filter_by:
            if "downloading" in filter_by or "seeding" in filter_by or "running" in filter_by:
                sort_by = "speed"
            elif "stopped" in filter_by or "finished" in filter_by:
                sort_by = "ratio"
if sort_by:
torrents = sort_torrents_by(torrents, key=getattr(Sort, sort_by), reverse=reverse)
if num_results and num_results < len(torrents):
torrents = torrents[-num_results:]
return torrents
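# Illustrative use of the combined filtering above (argument values assumed):
#   client.get_torrents_by(filter_by="downloading public", sort_by="age",
#                          filter_regex="ubuntu", num_results=10)
# filter_by is whitespace-separated, so several filters can be applied at once.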
def make_client():
    """ Create a new Transmission RPC client from the settings in TSCLIENT_CONFIG.
    :return: transmissionrpc client, or None if creation failed
    """
logger.debug("Making new TSClient")
global MAKE_CLIENT_FAILED
tsclient = None
try:
lock()
tsclient = TSClient(
TSCLIENT_CONFIG['host'],
port=TSCLIENT_CONFIG['port'],
user=TSCLIENT_CONFIG['user'],
password=TSCLIENT_CONFIG['password']
)
MAKE_CLIENT_FAILED = False
logger.debug("Made new TSClient")
except Exception as e:
logger.error("Failed to make TS client: {}".format(e))
MAKE_CLIENT_FAILED = True
finally:
unlock()
return tsclient
def reload_client():
global TSCLIENT
TSCLIENT = make_client()
class Filter(object):
"""A set of filtering operations that can be used against a list of torrent objects"""
# names = (
# "all",
# "active",
# "downloading",
# "seeding",
# "stopped",
# "finished"
# )
names = filter_names
@staticmethod
def all(t):
return t
@staticmethod
def active(t):
return t.rateUpload > 0 or t.rateDownload > 0
@staticmethod
def downloading(t):
return t.status == 'downloading'
@staticmethod
def seeding(t):
return t.status == 'seeding'
@staticmethod
def stopped(t):
return t.status == 'stopped'
@staticmethod
def finished(t):
return t.status == 'finished'
@staticmethod
def lifetime(t):
return t.date_added
def filter_torrents_by(torrents, key=Filter.all):
"""
:param key:
:param torrents:
:return: []transmissionrpc.Torrent
"""
filtered_torrents = []
for torrent in torrents:
if key(torrent):
filtered_torrents.append(torrent)
return filtered_torrents
class Sort(object):
""" Defines methods for sorting torrent sequences """
# names = (
# "id",
# "progress",
# "name",
# "size",
# "ratio",
# "speed",
# "speed_up",
# "speed_down",
# "status",
# "queue",
# "age",
# "activity"
# )
names = sort_names
@staticmethod
def activity(t):
return t.date_active
@staticmethod
def age(t):
return t.date_added
@staticmethod
def queue(t):
return t.queue_position
@staticmethod
def status(t):
return t.status
@staticmethod
def progress(t):
return t.progress
@staticmethod
def name(t):
return t.name.lower()
@staticmethod
def size(t):
return -t.totalSize
@staticmethod
def id(t):
return t.id
@staticmethod
def ratio(t):
return t.ratio
@staticmethod
def speed(t):
return t.rateUpload + t.rateDownload
@staticmethod
def speed_up(t):
return t.rateUpload
@staticmethod
def speed_down(t):
return t.rateDownload
def sort_torrents_by(torrents, key=Sort.name, reverse=False):
return sorted(torrents, key=key, reverse=reverse)
# def print_torrent_line(torrent, colourize=True):
# name = torrent.name
# progress = torrent.progress / 100.0
# print("[{}] [{}] {} {}[{}/{}]{} ra: {} up: {} dn: {} [{}]".format(
# white_on_blk(torrent.id),
# find_tracker(torrent),
# print_pct(torrent) if colourize else name.decode("latin-1"),
# white_on_blk(""),
# red_on_blk("{:.0%}".format(progress)) if progress < 1 else green_on_blk("{:.0%}".format(progress)),
# magenta_on_blk(natural_size(torrent.totalSize)),
# white_on_blk(""),
# red_on_blk(torrent.ratio) if torrent.ratio < 1.0 else green_on_blk(torrent.ratio),
# green_on_blk(natural_size(float(torrent.rateUpload)) + "/s") if torrent.rateUpload else "0.0 kB/s",
# green_on_blk(natural_size(float(torrent.rateDownload)) + "/s") if torrent.rateDownload else "0.0 kB/s",
# yellow_on_blk(torrent.status)
# ))
def remove_torrent(torrent, reason=DEFAULT_REASON, delete_files=False):
""" Remove a torrent from the client stopping it first if its in a started state.
:param client: Transmission RPC Client
:type client: transmissionrpc.Client
:param torrent: Torrent instance to remove
:type torrent: transmissionrpc.Torrent
:param reason: Reason for removal
:type reason: str
:param dry_run: Do a dry run without actually running any commands
:type dry_run: bool
:return:
"""
if torrent.status != "stopped":
if not CONFIG['dryrun']:
TSCLIENT.stop_torrent(torrent.hashString)
if not CONFIG['dryrun']:
TSCLIENT.remove_torrent(torrent.hashString, delete_data=delete_files)
logger.info("Removed: {} {}\n\tReason: {}\n\tDry run: {}, Delete files: {}".format(torrent.name, torrent.hashString, reason, CONFIG['dryrun'],delete_files))
def remove_torrents(torrents, reason=DEFAULT_REASON, delete_files=False):
""" Remove a torrent from the client stopping it first if its in a started state.
:param client: Transmission RPC Client
:type client: transmissionrpc.Client
:param torrent: Torrent instance to remove
:type torrent: transmissionrpc.Torrent
:param reason: Reason for removal
:type reason: str
:param dry_run: Do a dry run without actually running any commands
:type dry_run: bool
:return:
"""
for torrent in torrents:
remove_torrent(torrent, reason=reason, delete_files=delete_files)
def stop_torrents(torrents=[], reason=DEFAULT_REASON):
""" Stop (pause) a list of torrents from the client.
:param client: Transmission RPC Client
:type client: transmissionrpc.Client
:param torrent: Torrent instance to remove
:type torrent: transmissionrpc.Torrent
:param reason: Reason for removal
:type reason: str
:param dry_run: Do a dry run without actually running any commands
:type dry_run: bool
:return:
"""
for torrent in (torrents if len(torrents) > 0 else TSCLIENT.get_torrents()):
if torrent.status not in ["stopped","finished"]:
if not CONFIG['dryrun']:
TSCLIENT.stop_torrent(torrent.hashString)
logger.info("Paused: {} {}\n\tReason: {}\n\tDry run: {}".format(torrent.name, torrent.hashString, reason, CONFIG['dryrun']))
def resume_torrents(torrents=[], reason=DEFAULT_REASON, start_all=False):
""" Stop (pause) a list of torrents from the client.
:param client: Transmission RPC Client
:type client: transmissionrpc.Client
:param torrent: Torrent instance to remove
:type torrent: transmissionrpc.Torrent
:param reason: Reason for removal
:type reason: str
:param dry_run: Do a dry run without actually running any commands
:type dry_run: bool
:return:
"""
if start_all:
if not CONFIG['dryrun']:
TSCLIENT.start_all()
logger.info("Resumed: all transfers\n\tReason: {}\n\tDry run: {}".format(reason, CONFIG['dryrun']))
else:
for torrent in (torrents if len(torrents) > 0 else TSCLIENT.get_torrents()):
if torrent.status == "stopped":
if not CONFIG['dryrun']:
TSCLIENT.start_torrent(torrent.hashString)
logger.info("Resumed: {} {}\n\tReason: {}\n\tDry run: {}".format(torrent.name, torrent.hashString, reason, CONFIG['dryrun']))
def verify_torrents(torrents=[]):
""" Verify a list of torrents from the client.
:param client: Transmission RPC Client
:type client: transmissionrpc.Client
:param torrent: Torrent instance to remove
:type torrent: transmissionrpc.Torrent
:type reason: str
:param dry_run: Do a dry run without actually running any commands
:type dry_run: bool
:return:
"""
for torrent in (torrents if len(torrents) > 0 else TSCLIENT.get_torrents()):
if not CONFIG['dryrun']:
TSCLIENT.verify_torrent(torrent.hashString)
logger.info("Verified: {} {}\n\tDry run: {}".format(torrent.name, torrent.hashString, CONFIG['dryrun']))
def add_torrent(torStr):
torrent = None
if not CONFIG['dryrun']:
if torStr != "":
torrent = TSCLIENT.add_torrent(torStr)
logger.info("Added: {} {}\n\tDry run: {}".format(torrent.name, torrent.hashString, CONFIG['dryrun']))
else:
logger.info("Added: {} \n\tDry run: {}".format(torStr if len(torStr) < 300 else torStr[:200], CONFIG['dryrun']))
return torrent
# Begin discord bot functions, adapted from https://github.com/kkrypt0nn/Python-Discord-Bot-Template
# async def status_task():
# while True:
# await client.change_presence(activity=discord.Game("{}help".format(CONFIG['bot_prefix'])))
# await asyncio.sleep(86400)
# check current transfers against those in TORRENT_JSON and print notifications to channel for certain changes
def check_for_transfer_changes():
global TORRENT_NOTIFIED_USERS, TORRENT_ADDED_USERS, TORRENT_OPTOUT_USERS
# get current transfer information
reload_client()
torrents = TSCLIENT.get_torrents()
# TORRENT_LIST = {
# 'hashString':{
# 'name':t.name,
# 'error':t.error,
# 'errorString':t.errorString,
# 'status':t.status,
# 'isStalled':t.isStalled,
# 'progress':t.progress
# }
# }
try:
lock()
curTorrents = {t.hashString:{
'name':t.name,
'error':t.error,
'errorString':t.errorString,
'status':t.status,
'isStalled':t.isStalled,
'progress':t.progress,
'added_user':None if t.hashString not in TORRENT_ADDED_USERS else TORRENT_ADDED_USERS[t.hashString],
'notified_users':[] if t.hashString not in TORRENT_NOTIFIED_USERS else TORRENT_NOTIFIED_USERS[t.hashString],
'optout_users':[] if t.hashString not in TORRENT_OPTOUT_USERS else TORRENT_OPTOUT_USERS[t.hashString]
} for t in torrents}
finally:
unlock()
if exists(TORRENT_JSON):
oldTorrents = load_json(path=TORRENT_JSON)
if len(curTorrents) > 0 and len(oldTorrents) > 0 and len(next(iter(curTorrents.values()))) != len(next(iter(oldTorrents.values()))):
logger.info("old transfer json {} is using an old format, replacing with current transfers and not checking for changes.".format(TORRENT_JSON))
generate_json(json_data=curTorrents, path=TORRENT_JSON, overwrite=True)
return None
# get added_user and notified_users from oldTorrents and copy to newTorrents
for h,t in oldTorrents.items():
if h in curTorrents:
if t['added_user']:
# this would overwrite a torrent that somehow had two added_users, but that should never happen
curTorrents[h]['added_user'] = t['added_user']
if len(t['notified_users']) > 0:
curTorrents[h]['notified_users'] += [u for u in t['notified_users'] if u not in curTorrents[h]['notified_users']]
if len(t['optout_users']) > 0:
curTorrents[h]['optout_users'] += [u for u in t['optout_users'] if u not in curTorrents[h]['optout_users'] and (h not in TORRENT_NOTIFIED_USERS or u not in TORRENT_NOTIFIED_USERS[h])]
# logger.debug("'optout_users' for {} ({}): {}".format(t['name'], h, str(t['optout_users'])))
# for u in t['optout_users']:
# if h in TORRENT_NOTIFIED_USERS and u in TORRENT_NOTIFIED_USERS[h]:
# user = client.get_user(u)
# logger.debug("Removing {} ({}) from 'optout_users' for {} ({})".format(user.name, u, t['name'], h))
# curTorrents[h]['optout_users'].remove(u)
# logger.debug("new 'optout_users' for {} ({}): {}".format(t['name'], h, str(curTorrents[h]['optout_users'])))
try:
lock()
TORRENT_NOTIFIED_USERS = {}
TORRENT_ADDED_USERS = {}
TORRENT_OPTOUT_USERS = {}
finally:
unlock()
generate_json(json_data=curTorrents, path=TORRENT_JSON, overwrite=True)
else:
try:
lock()
TORRENT_NOTIFIED_USERS = {}
TORRENT_ADDED_USERS = {}
TORRENT_OPTOUT_USERS = {}
finally:
unlock()
generate_json(json_data=curTorrents, path=TORRENT_JSON, overwrite=True)
return None
# print("before checking")
# get lists of different transfer changes
removedTransfers = {h:t for h,t in oldTorrents.items() if h not in curTorrents}
errorTransfers = {h:t for h,t in curTorrents.items() if t['error'] != 0 and ((h in oldTorrents and oldTorrents[h]['error'] == 0) or h not in oldTorrents)}
downloadedTransfers = {h:t for h,t in curTorrents.items() if t['progress'] == 100.0 and ((h in oldTorrents and oldTorrents[h]['progress'] < 100.0) or h not in oldTorrents)}
stalledTransfers = {h:t for h,t in curTorrents.items() if t['isStalled'] and ((h in oldTorrents and not oldTorrents[h]['isStalled']) or h not in oldTorrents)}
unstalledTransfers = {h:t for h,t in curTorrents.items() if not t['isStalled'] and h in oldTorrents and oldTorrents[h]['isStalled']}
finishedTransfers = {h:t for h,t in curTorrents.items() if t['status'] == 'finished' and ((h in oldTorrents and oldTorrents[h]['status'] != 'finished') or h not in oldTorrents)}
stoppedTransfers = {h:t for h,t in curTorrents.items() if t['status'] == 'stopped' and ((h in oldTorrents and oldTorrents[h]['status'] != 'stopped') or h not in oldTorrents)}
startedTransfers = {h:t for h,t in curTorrents.items() if t['status'] in ['downloading','seeding'] and h in oldTorrents and oldTorrents[h]['status'] not in ['downloading','seeding']}
# only report transfers as "new" if they haven't already been put in one of the dicts above
checkTransfers = {**errorTransfers, **downloadedTransfers, **stalledTransfers, **unstalledTransfers, **finishedTransfers, **stoppedTransfers, **startedTransfers, **oldTorrents}
newTransfers = {h:t for h,t in curTorrents.items() if h not in checkTransfers}
# print("done checking for changes")
# DEBUG grab a few random transfers for each type, vary the number to see if multiple embeds works
# print(str(oldTorrents))
# numTransfers = 3
# removedTransfers = {h:t for h,t in random.sample(oldTorrents.items(),numTransfers)}
# errorTransfers = {h:t for h,t in random.sample(curTorrents.items(),numTransfers)}
# downloadedTransfers = {h:t for h,t in random.sample(curTorrents.items(),numTransfers)}
# stalledTransfers = {h:t for h,t in random.sample(curTorrents.items(),numTransfers)}
# unstalledTransfers = {h:t for h,t in random.sample(curTorrents.items(),numTransfers)}
# finishedTransfers = {h:t for h,t in random.sample(curTorrents.items(),numTransfers)}
# newTransfers = {h:t for h,t in random.sample(curTorrents.items(),numTransfers)}
# print(str(errorTransfers))
# print("done applying debug changes")
return {
'new':{'name':"🟢 {0} new transfer{1}", 'data':newTransfers},
'removed':{'name':"❌ {0} removed transfer{1}", 'data':removedTransfers},
'error':{'name':"‼️ {0} transfer{1} with error{1}", 'data':errorTransfers},
'downloaded':{'name':"⬇️ {0} transfer{1} downloaded", 'data':downloadedTransfers},
'stalled':{'name':"🐢 {0} transfer{1} stalled", 'data':stalledTransfers},
'unstalled':{'name':"🐇 {0} stalled transfer{1} active", 'data':unstalledTransfers},
'finished':{'name':"🏁 {0} transfer{1} finished", 'data':finishedTransfers},
'stopped':{'name':"⏹ {0} transfer{1} paused", 'data':stoppedTransfers},
'started':{'name':"▶️ {0} transfer{1} resumed", 'data':startedTransfers}
}
def prepare_notifications(changedTransfers, states=["removed", "error", "downloaded", "stalled", "unstalled", "finished", "stopped", "started"]):
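    # Discord caps an embed at 6000 characters overall and 1024 characters per
    # field value, hence the rolling size checks against 6000 and the
    # conservative 1000-character field budget below.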
nTotal = sum([len(d['data']) for s,d in changedTransfers.items() if s in states]) if changedTransfers is not None else 0
torrents = {}
if nTotal > 0:
embeds = [discord.Embed(title="")]
ts = datetime.datetime.now(tz=pytz.timezone('America/Denver'))
embeds[-1].timestamp = ts
for s,d in changedTransfers.items():
if s in states:
n = len(d['data'])
if n > 0:
for h,t in d['data'].items():
torrents[h] = t
nameStr = d['name'].format(n, '' if n == 1 else 's')
vals = ["{}{}".format("{}.".format(i+1) if n > 1 else '', t['name'], "\n (error: *{}*)".format(t['errorString']) if t['errorString'] != "" else "") for i,t in enumerate(d['data'].values())]
valStr = ',\n'.join(vals)
if len(embeds[-1]) + len(nameStr) + len(valStr) >= 6000:
embeds.append(discord.Embed(title=""))
embeds[-1].timestamp = ts
if len(nameStr) + len(valStr) > 1000:
valStr = ""
for i,v in enumerate(vals):
if len(embeds[-1]) + len(nameStr) + len(valStr) + len(v) >= 6000:
embeds.append(discord.Embed(title=""))
embeds[-1].timestamp = ts
if len(nameStr) + len(valStr) + len(v) > 1000:
embeds[-1].add_field(name=nameStr, value=valStr, inline=False)
nameStr = ""
valStr = ""
else:
valStr += v
if i < len(vals) - 1:
valStr += ",\n"
pass
embeds[-1].add_field(name=nameStr, value=valStr, inline=False)
return embeds, nTotal, torrents
return None, nTotal, torrents
async def check_notification_reactions(message, is_text_channel, torrents, starttime=datetime.datetime.now()):
if (datetime.datetime.now() - starttime).total_seconds() >= CONFIG['reaction_wait_timeout']:
if is_text_channel:
await message.clear_reactions()
return
def check(reaction, user):
return user.id in CONFIG['whitelist_user_ids'] and reaction.message.id == message.id and (str(reaction.emoji) == '🔕' or (str(reaction.emoji) == '🔔' and is_text_channel))
try:
reaction, user = await client.wait_for('reaction_add', timeout=CONFIG['reaction_wait_timeout'], check=check)
except asyncio.TimeoutError:
return await check_notification_reactions(message, is_text_channel, torrents, starttime=starttime)
else:
if str(reaction.emoji) == '🔔':
if len(torrents) > 0:
for h,t in torrents.items():
if h in TORRENT_NOTIFIED_USERS:
TORRENT_NOTIFIED_USERS[h].append(user.id)
else:
TORRENT_NOTIFIED_USERS[h] = [user.id]
embed = discord.Embed(title="🔔 Notifications enabled for:", description=",\n".join(["{}{}".format("" if len(torrents) == 1 else "**{}.**".format(i+1),j) for i,j in enumerate([t['name'] for t in torrents.values()])]))
await user.send(embed=embed)
if str(reaction.emoji) == '🔕':
if len(torrents) > 0:
for h,t in torrents.items():
if h in TORRENT_OPTOUT_USERS:
TORRENT_OPTOUT_USERS[h].append(user.id)
else:
TORRENT_OPTOUT_USERS[h] = [user.id]
embed = discord.Embed(title="🔕 Notifications disabled for:", description=",\n".join(["{}{}".format("" if len(torrents) == 1 else "**{}.**".format(i+1),j) for i,j in enumerate([t['name'] for t in torrents.values()])]))
await user.send(embed=embed)
return await check_notification_reactions(message, is_text_channel, torrents, starttime=starttime)
async def run_notifications():
if CONFIG['notification_enabled']:
# get all changes
logger.debug("Running notification check")
changedTransfers = check_for_transfer_changes()
nTotal = sum([len(d['data']) for d in changedTransfers.values()]) if changedTransfers is not None else 0
if nTotal > 0:
addReactions = (sum([len(d['data']) for k,d in changedTransfers.items() if k != "removed"]) > 0)
# first in_channel notifications
if CONFIG['notification_enabled_in_channel'] and CONFIG['notification_channel_id'] > 0 and len(str(CONFIG['notification_channel_id'])) == 18:
embeds, n, torrents = prepare_notifications(changedTransfers, CONFIG['notification_states']['in_channel'])
logger.debug("in_channel notifications: {}".format(n))
# now post notifications
if n > 0:
ch = client.get_channel(CONFIG['notification_channel_id'])
msgs = [await ch.send(embed=e) for e in embeds]
if addReactions:
[await msgs[-1].add_reaction(s) for s in ['🔔','🔕']]
asyncio.create_task(check_notification_reactions(msgs[-1], True, torrents, datetime.datetime.now()))
# Now notify the users
# First get only the changedTransfers that require user notification.
            # These will be stored separately because users *should* be reminded whether a notification
# is for a torrent they added versus one they elected to receive notifications for.
logger.debug("preparing list of transfers for user DM notifications")
addedUserChangedTransfers = {}
notifiedUserChangedTransfers = {}
for s,d in changedTransfers.items():
logger.debug("state: {} ({} transfers)".format(s, len(d['data'])))
if s in CONFIG['notification_states']['added_user']:
for h,t in d['data'].items():
logger.debug("Checking transfer: {} ({})".format(str(t), h))
if t['added_user'] is not None and t['added_user'] not in t['optout_users'] and t['added_user'] not in CONFIG['notification_DM_opt_out_user_ids']:
u = t['added_user']
if u in addedUserChangedTransfers:
if s in addedUserChangedTransfers[u]:
addedUserChangedTransfers[u][s]['data'][h] = t
else:
addedUserChangedTransfers[u][s] = {'name':d['name'],'data':{h:t}}
else:
addedUserChangedTransfers[u] = {s:{'name':d['name'],'data':{h:t}}}
if s in CONFIG['notification_states']['notified_users']:
for h,t in d['data'].items():
logger.debug("Checking transfer: {} ({})".format(str(t), h))
for u in t['notified_users']:
if u not in t['optout_users'] and (u not in addedUserChangedTransfers or s not in addedUserChangedTransfers[u] or h not in addedUserChangedTransfers[u][s]['data']):
if u in notifiedUserChangedTransfers:
if s in notifiedUserChangedTransfers[u]:
notifiedUserChangedTransfers[u][s]['data'][h] = t
else:
notifiedUserChangedTransfers[u][s] = {'name':d['name'],'data':{h:t}}
else:
notifiedUserChangedTransfers[u] = {s:{'name':d['name'],'data':{h:t}}}
logger.debug("DM notifications for notified_users: {}".format(str(notifiedUserChangedTransfers)))
logger.debug("DM notifications for added_user: {}".format(str(addedUserChangedTransfers)))
logger.debug("done preparing list of user DM notifications, now send notifications")
# now send notifications as DMs
for u,transfers in addedUserChangedTransfers.items():
logger.debug("Sending added_user notificaions for user {}".format(u))
embeds, n, torrents = prepare_notifications(transfers, CONFIG['notification_states']['added_user'])
if n > 0:
embeds[-1].set_author(name="Activity for transfer{} you added".format('' if n == 1 else 's'))
user = client.get_user(u)
msgs = [await user.send(embed=e) for e in embeds]
if addReactions:
await msgs[-1].add_reaction('🔕')
asyncio.create_task(check_notification_reactions(msgs[-1], False, torrents, datetime.datetime.now()))
for u,transfers in notifiedUserChangedTransfers.items():
logger.debug("Sending notified_user notificaions for user {}".format(u))
embeds, n, torrents = prepare_notifications(transfers, CONFIG['notification_states']['notified_users'])
if n > 0:
user = client.get_user(u)
msgs = [await user.send(embed=e) for e in embeds]
if addReactions:
await msgs[-1].add_reaction('🔕')
asyncio.create_task(check_notification_reactions(msgs[-1], False, torrents, datetime.datetime.now()))
else:
logger.debug("No changed transfers...")
return
async def loop_notifications():
while CONFIG['notification_enabled']:
# print("looping notifications")
try:
await run_notifications()
except Exception as e:
logger.error("Exception thrown in run_notifications: {}".format(e))
await asyncio.sleep(CONFIG['notification_freq'])
return
@client.event
async def on_ready():
global TSCLIENT_CONFIG, CONFIG
unlock()
if not CONFIG: # load from config file
CONFIG = load_json(path=CONFIG_JSON)
if not CONFIG:
logger.critical("Failed to load config from {}".format(CONFIG_JSON))
await client.change_presence(activity=discord.Game("config load error!"))
return
else: # config specified in this file, so try to write config file
if exists(CONFIG_JSON):
if load_json(CONFIG_JSON) != CONFIG:
# check current config against config file, throw error if different
logger.critical("Conflict: Config file exists and config specified in bot.py!")
await client.change_presence(activity=discord.Game("config load error!"))
return
elif not generate_json(json_data=CONFIG, path=CONFIG_JSON, overwrite=True):
logger.critical("Failed to write config file on startup!")
await client.change_presence(activity=discord.Game("config load error!"))
return
TSCLIENT_CONFIG = CONFIG['tsclient']
reload_client()
if TSCLIENT is None:
logger.critical("Failed to create transmissionrpc client")
await client.change_presence(activity=discord.Game("client load error!"))
else:
# client.loop.create_task(status_task())
await client.change_presence(activity=discord.Game("Listening {}help".format(CONFIG['bot_prefix'])))
print('Logged in as ' + client.user.name)
print("Discord.py API version:", discord.__version__)
print("Python version:", platform.python_version())
print("Running on:", platform.system(), platform.release(), "(" + os.name + ")")
print('-------------------')
# ch = client.get_channel(CONFIG['notification_channel_id'])
# await ch.send("test message")
# user = client.get_user(CONFIG['owner_user_ids'][0])
# await user.send("test message")
if CONFIG['notification_enabled']:
task = asyncio.create_task(loop_notifications())
def humantime(S, compact_output=(OUTPUT_MODE == OutputMode.MOBILE)):
    """Return a human-readable duration for S seconds. If the time is more than
    about 36 hours, return only the largest rounded time unit (e.g. 2 days or
    3 months)."""
S = int(S)
if S == -2:
return '?' if compact_output else 'Unknown'
elif S == -1:
return 'N/A'
elif S < 0:
return 'N/A'
if compact_output:
sStr = "sec"
mStr = "min"
hStr = "hr"
dStr = "dy"
wStr = "wk"
moStr = "mth"
yStr = "yr"
else:
sStr = "second"
mStr = "minute"
hStr = "hour"
dStr = "day"
wStr = "week"
moStr = "month"
yStr = "year"
M = 60
H = M * 60
D = H * 24
W = D * 7
MO = D * 30
Y = MO * 12
y = S / (MO*11.5) # round 11 months to 1 year
mo = S / (W*3.5)
w = S / (D*6.5)
d = S / (D*1.5)
h = S / (M*55)
m = S / (55)
for t,td,tStr in zip([y,mo,w,d,h,m],[Y,MO,W,D,H,M],[yStr,moStr,wStr,dStr,hStr,mStr]):
if t >= 1:
t = round(S/td)
out = "{} {}{}".format(t, tStr, '' if t == 1 else 's')
return out
out = "{} {}{}".format(S, sStr, '' if S == 1 else 's')
return out
def humancount(B,d = 2):
    'Return the given count as a human friendly thousand, million, billion, or trillion string'
B = float(B)
KB = float(1000) # thousand
MB = float(KB ** 2) # million
GB = float(KB ** 3) # billion
TB = float(KB ** 4) # trillion
if B < KB:
return '{0} B'.format(B)
elif KB <= B < MB:
return '{0:.{nd}f} thousand'.format(B/KB, nd = d)
elif MB <= B < GB:
return '{0:.{nd}f} million'.format(B/MB, nd = d)
elif GB <= B < TB:
return '{0:.{nd}f} billion'.format(B/GB, nd = d)
elif TB <= B:
return '{0:.{nd}f} trillion'.format(B/TB, nd = d)
def timeofday(S, ampm=True):
H,M = divmod(S,60)
if ampm:
if H == 0:
timestr = '12:{:02d} AM'.format(M)
elif H < 12:
timestr = '{}:{:02d} AM'.format(H,M)
else:
timestr = '{}:{:02d} PM'.format(H - 12,M)
else:
timestr = '{}:{:02d}'.format(H,M)
return timestr
def humanbytes(B,d = 2):
'Return the given bytes as a human friendly KB, MB, GB, or TB string'
B = float(B)
KB = float(1024)
MB = float(KB ** 2) # 1,048,576
GB = float(KB ** 3) # 1,073,741,824
TB = float(KB ** 4) # 1,099,511,627,776
if d <= 0:
if B < KB:
return '{0}B'.format(int(B))
elif KB <= B < MB:
return '{0:d}kB'.format(int(B/KB))
elif MB <= B < GB:
return '{0:d}MB'.format(int(B/MB))
elif GB <= B < TB:
return '{0:d}GB'.format(int(B/GB))
elif TB <= B:
return '{0:d}TB'.format(int(B/TB))
else:
if B < KB:
return '{0} B'.format(B)
elif KB <= B < MB:
return '{0:.{nd}f} kB'.format(B/KB, nd = d)
elif MB <= B < GB:
return '{0:.{nd}f} MB'.format(B/MB, nd = d)
elif GB <= B < TB:
return '{0:.{nd}f} GB'.format(B/GB, nd = d)
elif TB <= B:
return '{0:.{nd}f} TB'.format(B/TB, nd = d)
def tobytes(B):
'Return the number of bytes given by a string (a float followed by a space and the unit of prefix-bytes eg. "21.34 GB")'
numstr = B.lower().split(' ')
KB = (('kilo','kb','kb/s'),float(1024))
MB = (('mega','mb','mb/s'),float(KB[1] ** 2)) # 1,048,576
GB = (('giga','gb','gb/s'),float(KB[1] ** 3)) # 1,073,741,824
TB = (('tera','tb','tb/s'),float(KB[1] ** 4)) # 1,099,511,627,776
for prefix in (KB,MB,GB,TB):
if numstr[1] in prefix[0]:
return float(float(numstr[0]) * prefix[1])
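# e.g. tobytes("21.34 GB") -> 21.34 * 1024**3; strings with an unrecognised
# unit fall through and return None.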
def IsCompactOutput(message):
if isDM(message):
if message.author.id in CONFIG['DM_compact_output_user_ids']:
return True
else:
return False
elif OUTPUT_MODE == OutputMode.AUTO:
user = message.author
if user.is_on_mobile():
return True
else:
return False
else:
return False
# check that message author is allowed and message was sent in allowed channel
async def CommandPrecheck(message, whitelist=CONFIG['whitelist_user_ids']):
if not isDM(message) and not CONFIG['listen_all_channels'] and message.channel.id not in CONFIG['listen_channel_ids']:
await message.channel.send("I don't respond to commands in this channel...")
await asyncio.sleep(2)
await message.delete()
return False
if isDM(message) and not CONFIG['listen_DMs']:
await message.channel.send("I don't respond to DMs...")
await asyncio.sleep(2)
await message.delete()
return False
if message.author.id in CONFIG['blacklist_user_ids'] or (len(whitelist) > 0 and message.author.id not in whitelist):
await message.channel.send("You're not allowed to use this...")
await asyncio.sleep(2)
await message.delete()
return False
return True
def isDM(message):
return (message.author.dm_channel is not None and message.channel.id == message.author.dm_channel.id)
async def message_clear_reactions(message, parent_message, reactions=[]):
if not isDM(parent_message):
if reactions == []:
await message.clear_reactions()
else:
for s in reactions:
await message.clear_reaction(s)
def message_has_torrent_file(message):
for f in message.attachments:
if len(f.filename) > 8 and f.filename[-8:].lower() == ".torrent":
return True
return False
def commaListToParagraphForm(l):
outStr = ''
if len(l) > 0:
        outStr += ('' if len(l) <= 2 else ', ').join(l[:-1])
outStr += ('{} and '.format('' if len(l) <= 2 else ',') if len(l) > 1 else '') + str(l[-1])
return outStr
async def add(message, content = ""):
if await CommandPrecheck(message):
async with message.channel.typing():
torFileList = []
for f in message.attachments:
if len(f.filename) > 8 and f.filename[-8:].lower() == ".torrent":
encodedBytes = base64.b64encode(await f.read())
encodedStr = str(encodedBytes, "utf-8")
torFileList.append({"name":f.filename,"content":encodedStr})
continue
if content == "" and len(torFileList) == 0:
await message.channel.send("🚫 Invalid string")
if CONFIG['delete_command_messages'] and not isDM(message):
try:
await message.delete()
except:
pass
torStr = []
torIDs = []
for i,t in enumerate(torFileList):
# await message.channel.send('Adding torrent from file: {}\n Please wait...'.format(t["name"]))
try:
tor = add_torrent(t["content"])
if tor:
try:
lock()
TORRENT_ADDED_USERS[tor.hashString] = message.author.id
except Exception as e:
logger.fatal("Error adding user to 'TORRENT_ADDED_USERS' for new transfer: {}".format(e))
finally:
unlock()
logger.info("User {} ({}) added torrent from file {}: {} ({})".format(message.author.name, message.author.id, t["name"], tor.name, tor.hashString))
# if tor.isPrivate:
# privateTransfers.append(len(privateTransfers))
logger.debug("Added to TORRENT_ADDED_USERS")
torStr.append("💽 {}".format(tor.name))
torIDs.append(tor.id)
elif CONFIG['dryrun']:
torStr.append("💽 added file dryrun: {}".format(t["name"]))
except Exception as e:
logger.warning("Exception when adding torrent from file: {}".format(e))
for t in content.strip().split(" "):
if len(t) > 5:
# await message.channel.send('Adding torrent from link: {}\n Please wait...'.format(t))
try:
tor = add_torrent(t)
if tor:
try:
lock()
TORRENT_ADDED_USERS[tor.hashString] = message.author.id
except Exception as e:
logger.fatal("Error adding user to 'TORRENT_ADDED_USERS' for new transfer: {}".format(e))
finally:
unlock()
logger.info("User {} ({}) added torrent from URL: {} ({})".format(message.author.name, message.author.id, tor.name, tor.hashString))
# if tor.isPrivate:
# privateTransfers.append(len(privateTransfers))
logger.debug("Added to TORRENT_ADDED_USERS")
torStr.append("🧲 {}".format(tor.name))
torIDs.append(tor.id)
except Exception as e:
logger.warning("Exception when adding torrent from URL: {}".format(e))
if len(torStr) > 0:
embeds = []
if len('\n'.join(torStr)) > 2000:
embeds.append(discord.Embed(title='🟢 Added torrents'))
descStr = torStr[0]
for t in torStr[1:]:
if len(descStr) + len(t) < 2000:
descStr += '\n{}'.format(t)
else:
embeds[-1].description = descStr
embeds.append(discord.Embed(title='🟢 Added torrents'))
						descStr = t
			embeds[-1].description = descStr # don't drop the final page's description
else:
embeds = [discord.Embed(title='🟢 Added torrent{}'.format("s" if len(torStr) > 1 else ""), description='\n'.join(torStr), color=0xb51a00)]
privateTransfers = []
if not CONFIG['dryrun']:
logger.debug("Checking for private transfers amidst the {} new torrents".format(len(torStr)))
privateCheckSuccess = False
for i in range(5):
try:
newTorrents = TSCLIENT.get_torrents_by(id_list=torIDs)
logger.debug("Fetched {} transfers from transmission corresponding to the {} transfer IDs recorded".format(len(newTorrents),len(torIDs)))
for tor in newTorrents:
logger.debug("Checking private status of added transfer {}: {}".format(i+1, tor.name))
if tor.isPrivate:
privateTransfers.append(torIDs.index(tor.id))
logger.debug("Transfer is private")
privateCheckSuccess = True
logger.debug("Successfully checked for private tranfers: {} found".format(len(privateTransfers)))
break
except AttributeError as e:
logger.debug("Attribute error when checking for private status of added torrent(s): {}".format(e))
except Exception as e:
logger.warning("Exception when checking for private status of added torrent(s): {}".format(e))
						await asyncio.sleep(0.2)
if len(privateTransfers) > 0 or CONFIG['dryrun']:
footerStr = "🔐 One or more added torrents are using a private tracker, which may prohibit running the same transfer from multiple locations. Ensure that you're not breaking any private tracker rules."
if len(privateTransfers) > 0 and CONFIG['delete_command_message_private_torrent']:
if not isDM(message):
try:
await message.delete()
footerStr += "\n(I erased the command message to prevent any unintentional sharing of torrent files)"
except Exception as e:
logger.warning("Exception when removing command message used to add private torrent(s): {}".format(e))
embeds[-1].set_footer(text=footerStr)
for e in embeds:
await message.channel.send(embed=e)
else:
await message.channel.send('🚫 No torrents added!')
@client.command(name='add', aliases=['a'], pass_context=True)
async def add_cmd(context, *, content = ""):
try:
await add(context.message, content=content)
except Exception as e:
logger.warning("Exception when adding torrent(s): {}".format(e))
# def torInfo(t):
# states = ('downloading', 'seeding', 'stopped', 'finished','all')
# stateEmoji = {i:j for i,j in zip(states,['🔻','🌱','⏸','🏁','↕️'])}
#
# downStr = humanbytes(t.progress * 0.01 * t.totalSize)
# upStr = "{} (Ratio: {:.2f})".format(humanbytes(t.uploadedEver), t.uploadRatio)
# runTime =
#
# if t.progress < 100.0:
# have = "{} of {} ({:.1f}){}{}".format(downStr,humanbytes(t.totalSize), t.progress, '' if t.haveUnchecked == 0 else ', {} Unverified'.format(humanbytes(t.haveUnchecked)), '' if t.corruptEver == 0 else ', {} Corrupt'.format(humanbytes(t.corruptEver)))
# avail = "{:.1f}%".format(t.desiredAvailable/t.leftUntilDone)
# else:
# have = "{} ({:d}){}{}".format(humanbytes(t.totalSize), t.progress, '' if t.haveUnchecked == 0 else ', {} Unverified'.format(humanbytes(t.haveUnchecked)), '' if t.corruptEver == 0 else ', {} Corrupt'.format(humanbytes(t.corruptEver)))
# avail = "100%"
#
# embed=discord.Embed(title=t.name,color=0xb51a00)
#
# return embed
torStates = ('downloading', 'seeding', 'stopped', 'verifying', 'queued', 'finished', #0-5
'stalled', 'active', 'running', #6-8
'private', 'public', #9-10
'error', 'err_none', 'err_tracker_warn', 'err_tracker_error', 'err_local', # 11-
)
torStateEmoji = ('🔻','🌱','⏸','🔬','🚧','🏁',
'🐢','🐇','🚀',
'🔐','🔓',
'‼️','✅','⚠️','🌐','🖥'
)
torStateFilters = {i:"--filter {}".format(j) for i,j in zip(torStateEmoji,torStates)}
torStateFilters['↕️']=''
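# numTorInState maps one of the labels in torStates to a count of matching
# transfers. Labels that are plain transmission RPC statuses are compared
# directly; the rest are derived from transfer attributes (isStalled,
# rateDownload/rateUpload, isPrivate, error). Unknown labels count as 0.
# Illustrative call (a sketch, assuming `torrents` is a list of transmission
# Torrent objects):
#   numTorInState(torrents, 'downloading')  -> number of downloading transfers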
def numTorInState(torrents, state):
rpc_states = ('downloading', 'seeding', 'stopped', 'finished')
if state in rpc_states:
return len([True for t in torrents if t.status == state])
	elif state == 'verifying': # these are also rpc statuses, but I want to combine them.
return len([True for t in torrents if 'check' in t.status])
elif state == 'queued':
return len([True for t in torrents if 'load pending' in t.status])
elif state == 'stalled':
return len([True for t in torrents if t.isStalled])
elif state == 'active':
return len([True for t in torrents if not t.isStalled]) - len([True for t in torrents if t.rateDownload + t.rateUpload > 0])
elif state == 'running':
return len([True for t in torrents if t.rateDownload + t.rateUpload > 0])
elif state == 'private':
return len([True for t in torrents if t.isPrivate])
elif state == 'public':
return len([True for t in torrents if not t.isPrivate])
elif state == 'error':
return len([True for t in torrents if t.error != 0])
elif state == 'err_none':
return len([True for t in torrents if t.error == 0])
	elif state == 'err_tracker_warn': # labels must match torStates, or these counts are always 0
		return len([True for t in torrents if t.error == 1])
	elif state == 'err_tracker_error':
		return len([True for t in torrents if t.error == 2])
elif state == 'err_local':
return len([True for t in torrents if t.error == 3])
else:
return 0
def torSummary(torrents, repeat_msg_key=None, show_repeat=True, compact_output=(OUTPUT_MODE == OutputMode.MOBILE)):
numInState = [numTorInState(torrents,s) for s in torStates]
numTot = len(torrents)
sumTot = sum([t.totalSize for t in torrents])
totSize = humanbytes(sumTot)
totUpRate = humanbytes(sum([t.rateUpload for t in torrents]))
totDownRate = humanbytes(sum([t.rateDownload for t in torrents]))
downList = [t.progress*0.01*t.totalSize for t in torrents]
upList = [t.ratio * j for t,j in zip(torrents,downList)]
sumDown = sum(downList)
sumUp = sum(upList)
totDown = humanbytes(sumDown)
totUp = humanbytes(sumUp)
totRatio = '{:.2f}'.format((sumUp / sumDown) if sumDown > 0 else 0)
totDownRatio = '{:.2f}'.format((sumDown / sumTot * 100.0) if sumTot > 0 else 0)
numTopRatios = min([len(torrents),CONFIG['summary_num_top_ratio']])
topRatios = "• Top {} ratio{}:".format(numTopRatios,"s" if numTopRatios != 1 else "")
sortByRatio = sorted(torrents,key=lambda t:float(t.ratio),reverse=True)
for i in range(numTopRatios):
topRatios += "\n {:.1f} {:.35}{}".format(float(sortByRatio[i].ratio),sortByRatio[i].name,"..." if len(sortByRatio[i].name) > 35 else "")
embed=discord.Embed(description="*React to see list of corresponding transfers*", color=0xb51a00)
embed.set_author(name="Torrent Summary 🌊", icon_url=CONFIG['logo_url'])
embed.add_field(name="⬇️ {}/s".format(totDownRate), value="⬆️ {}/s".format(totUpRate), inline=False)
embed.add_field(name="⏬ {} of {}".format(totDown,totSize), value="⏫ {} ⚖️ {}".format(totUp,totRatio), inline=False)
embed.add_field(name="↕️ {} transfer{}".format(numTot, 's' if numTot != 1 else ''), value=' '.join(['{} {}'.format(i,j) for i,j in zip(torStateEmoji[:6], numInState[:6])]), inline=False)
if compact_output:
embed.add_field(name=' '.join(['{} {}'.format(i,j) for i,j in zip(torStateEmoji[11:], numInState[11:])]), value=' '.join(['{} {}'.format(i,j) for i,j in zip(torStateEmoji[6:9], numInState[6:9])]) + "—" + ' '.join(['{} {}'.format(i,j) for i,j in zip(torStateEmoji[9:11], numInState[9:11])]), inline=False)
else:
embed.add_field(name="{} Error{}{}".format(numInState[11], 's' if numInState[11] != 1 else '', ' ‼️' if numInState[11] > 0 else ''), value='\n'.join(['{} {}'.format(i,"**{}**".format(j) if i != '✅' and j > 0 else j) for i,j in zip(torStateEmoji[12:], numInState[12:])]), inline=not compact_output)
embed.add_field(name="Activity", value='\n'.join(['{} {}'.format(i,j) for i,j in zip(torStateEmoji[6:9], numInState[6:9])]), inline=not compact_output)
embed.add_field(name="Tracker", value='\n'.join(['{} {}'.format(i,j) for i,j in zip(torStateEmoji[9:11], numInState[9:11])]), inline=not compact_output)
freq = humantime(REPEAT_MSGS[repeat_msg_key]['freq'],compact_output=False) if repeat_msg_key else None
if show_repeat:
embed.set_footer(text="{}📜 Legend, 🖨 Reprint{}".format((topRatios + '\n') if numTopRatios > 0 else '', '\nUpdating every {}—❎ to stop'.format(freq) if repeat_msg_key else ', 🔄 Auto-update'))
else:
embed.set_footer(text="{}📜 Legend, 🖨 Reprint".format((topRatios + '\n') if numTopRatios > 0 else ''))
return embed,numInState
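# summary() renders the torSummary embed and then drives a reaction-based UI:
# 📜 legend, 🖨 reprint, 🔄 start auto-update (or ❎ to stop one), and one
# reaction per transfer state that re-lists the matching transfers. When
# called with repeat_msg_key it edits (or reprints) its existing message
# instead of sending a new one.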
async def summary(message, content="", repeat_msg_key=None, msg=None):
global REPEAT_MSGS
content=content.strip()
if await CommandPrecheck(message):
async with message.channel.typing():
if not repeat_msg_key:
if len(REPEAT_MSGS) == 0:
reload_client()
if CONFIG['delete_command_messages'] and not isDM(message):
try:
await message.delete()
except:
pass
torrents, errStr = get_torrent_list_from_command_str(content)
if errStr != "":
await message.channel.send(errStr)
return
summaryData=torSummary(torrents, repeat_msg_key=repeat_msg_key, show_repeat=repeat_msg_key, compact_output=IsCompactOutput(message))
if content != "":
summaryData[0].description = "Summary of transfers matching '`{}`'\n".format(content) + summaryData[0].description
stateEmojiFilterStartNum = 4 # the first emoji in stateEmoji that corresponds to a list filter
			ignoreEmoji = ('✅',) # 1-tuple; a bare ('✅') is just a string
formatEmoji = '💻' if IsCompactOutput(message) else '📱'
if repeat_msg_key or msg:
if isDM(message):
if repeat_msg_key:
stateEmoji = ('📜',formatEmoji,'🖨','❎','↕️') + torStateEmoji
summaryData[0].timestamp = datetime.datetime.now(tz=pytz.timezone('America/Denver'))
else:
stateEmoji = ('📜',formatEmoji,'🖨','🔄','↕️') + torStateEmoji
msg = await message.channel.send(embed=summaryData[0])
stateEmojiFilterStartNum += 1
else:
if msg:
stateEmoji = ('📜','🖨','🔄','↕️') + torStateEmoji
if message.channel.last_message_id != msg.id:
await msg.delete()
msg = await message.channel.send(embed=summaryData[0])
else:
await msg.edit(embed=summaryData[0])
else:
stateEmoji = ('📜','🖨','❎','↕️') + torStateEmoji
summaryData[0].timestamp = datetime.datetime.now(tz=pytz.timezone('America/Denver'))
msg = REPEAT_MSGS[repeat_msg_key]['msgs'][0]
if message.channel.last_message_id != msg.id and (REPEAT_MSGS[repeat_msg_key]['reprint'] or REPEAT_MSGS[repeat_msg_key]['pin_to_bottom']):
await msg.delete()
msg = await message.channel.send(embed=summaryData[0])
REPEAT_MSGS[repeat_msg_key]['msgs'] = [msg]
REPEAT_MSGS[repeat_msg_key]['reprint'] = False
else:
await msg.edit(embed=summaryData[0])
else:
if isDM(message):
stateEmoji = ('📜',formatEmoji,'🖨','🔄','↕️') + torStateEmoji
stateEmojiFilterStartNum += 1
else:
stateEmoji = ('📜','🖨','🔄','↕️') + torStateEmoji
msg = await message.channel.send(embed=summaryData[0])
# to get actual list of reactions, need to re-fetch the message from the server
cache_msg = await message.channel.fetch_message(msg.id)
msgRxns = [str(r.emoji) for r in cache_msg.reactions]
for i in stateEmoji[:stateEmojiFilterStartNum]:
if i not in msgRxns:
await msg.add_reaction(i)
for i in range(len(summaryData[1])):
if summaryData[1][i] > 0 and stateEmoji[i+stateEmojiFilterStartNum] not in ignoreEmoji and stateEmoji[i+stateEmojiFilterStartNum] not in msgRxns:
await msg.add_reaction(stateEmoji[i+stateEmojiFilterStartNum])
elif summaryData[1][i] == 0 and stateEmoji[i+stateEmojiFilterStartNum] in msgRxns:
await message_clear_reactions(msg, message, reactions=[stateEmoji[i+stateEmojiFilterStartNum]])
# if not repeat_msg_key:
# cache_msg = await message.channel.fetch_message(msg.id)
# for r in cache_msg.reactions:
# if r.count > 1:
# async for user in r.users():
# if user.id in CONFIG['whitelist_user_ids']:
# if str(r.emoji) == '📜':
# await message_clear_reactions(msg, message)
# await legend(context)
# return
# elif str(r.emoji) == '🔄':
# await message_clear_reactions(msg, message, reactions=['🔄'])
# await repeat_command(summary, message=message, content=content, msg_list=[msg])
# return
# elif str(r.emoji) in stateEmoji[stateEmojiFilterStartNum-1:] and user.id == message.author.id:
# await message_clear_reactions(msg, message)
# await list_transfers(message, content=torStateFilters[str(r.emoji)])
# return
cache_msg = await message.channel.fetch_message(msg.id)
for r in cache_msg.reactions:
if r.count > 1:
async for user in r.users():
if user.id in CONFIG['whitelist_user_ids']:
if str(r.emoji) == '📜':
if repeat_msg_key:
await message_clear_reactions(msg, message, reactions=['📜'])
else:
await message_clear_reactions(msg, message)
await legend(message)
return
elif str(r.emoji) == formatEmoji:
await toggle_compact_out(message=message)
asyncio.create_task(summary(message=message, content=content, msg=msg))
return
elif str(r.emoji) == '❎':
await message_clear_reactions(msg, message)
REPEAT_MSGS[repeat_msg_key]['do_repeat'] = False
return
elif str(r.emoji) == '🔄':
await message_clear_reactions(msg, message, reactions=['🔄'])
asyncio.create_task(repeat_command(summary, message=message, content=content, msg_list=[msg]))
return
elif str(r.emoji) in stateEmoji[stateEmojiFilterStartNum-1:] and user.id == message.author.id:
if repeat_msg_key:
await message_clear_reactions(msg, message, reactions=[str(r.emoji)])
asyncio.create_task(list_transfers(message, content=torStateFilters[str(r.emoji)]+" "+content))
else:
await message_clear_reactions(msg, message)
await list_transfers(message, content=torStateFilters[str(r.emoji)]+" "+content)
return
def check(reaction, user):
return user == message.author and reaction.message.id == msg.id and str(reaction.emoji) in stateEmoji
try:
reaction, user = await client.wait_for('reaction_add', timeout=CONFIG['reaction_wait_timeout'] if not repeat_msg_key else REPEAT_MSGS[repeat_msg_key]['freq'], check=check)
except asyncio.TimeoutError:
if not repeat_msg_key:
await message_clear_reactions(msg, message)
return
pass
else:
if str(reaction.emoji) in stateEmoji[stateEmojiFilterStartNum-1:] and str(reaction.emoji) not in ignoreEmoji:
if repeat_msg_key:
await message_clear_reactions(msg, message, reactions=[str(reaction.emoji)])
asyncio.create_task(list_transfers(message, content=torStateFilters[str(reaction.emoji)]+" "+content))
else:
await message_clear_reactions(msg, message)
await list_transfers(message, content=torStateFilters[str(reaction.emoji)]+" "+content)
return
elif str(reaction.emoji) == '📜':
if repeat_msg_key:
await message_clear_reactions(msg, message, reactions=['📜'])
else:
await message_clear_reactions(msg, message)
await legend(message)
return
elif str(reaction.emoji) == formatEmoji:
await toggle_compact_out(message=message)
asyncio.create_task(summary(message=message, content=content, msg=msg))
return
elif str(reaction.emoji) == '❎':
await message_clear_reactions(msg, message)
REPEAT_MSGS[repeat_msg_key]['do_repeat'] = False
return
elif str(reaction.emoji) == '🔄':
await message_clear_reactions(msg, message, reactions=['🔄'])
asyncio.create_task(repeat_command(summary, message=message, content=content, msg_list=[msg]))
return
elif str(reaction.emoji) == '🖨':
await message_clear_reactions(msg, message, reactions=['🖨'])
if repeat_msg_key:
REPEAT_MSGS[repeat_msg_key]['reprint'] = True
return
else:
# if not isDM(message):
# try:
# await msg.delete()
# except:
# pass
asyncio.create_task(summary(message=message, content=content, msg=msg))
if repeat_msg_key: # a final check to see if the user has cancelled the repeat by checking the count of the cancel reaction
cache_msg = await message.channel.fetch_message(msg.id)
for r in cache_msg.reactions:
if r.count > 1:
async for user in r.users():
if user.id in CONFIG['whitelist_user_ids']:
								if str(r.emoji) == '📜':
await message_clear_reactions(msg, message, reactions=['📜'])
await legend(message)
return
elif str(r.emoji) == '❎':
REPEAT_MSGS[repeat_msg_key]['do_repeat'] = False
await message_clear_reactions(msg, message)
return
elif str(r.emoji) == '🖨':
# await message_clear_reactions(msg, message, reactions=['🖨'])
REPEAT_MSGS[repeat_msg_key]['reprint'] = True
return
elif str(r.emoji) in stateEmoji[stateEmojiFilterStartNum-1:]:
await message_clear_reactions(msg, message, reactions=[str(r.emoji)])
									asyncio.create_task(list_transfers(message, content=torStateFilters[str(r.emoji)]+" "+content))
return
@client.command(name='summary',aliases=['s'], pass_context=True)
async def summary_cmd(context, *, content="", repeat_msg_key=None):
try:
await summary(context.message, content, repeat_msg_key=repeat_msg_key)
except Exception as e:
logger.warning("Exception in t/summary: {}".format(e))
def strListToList(strList):
	if not re.match(r'^[0-9,\-]+$', strList):
return False
outList = []
for seg in strList.strip().split(","):
subseg = seg.split("-")
if len(subseg) == 1 and int(subseg[0]) not in outList:
outList.append(int(subseg[0]))
elif len(subseg) == 2:
subseg = sorted([int(i) for i in subseg])
outList += range(subseg[0],subseg[1]+1)
if len(outList) == 0:
return False
return outList
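# torList renders transfers into one or more embeds, one field per transfer,
# capped at 25 fields per embed (Discord's per-embed field limit). Each line
# is a single-line compact form for mobile output or a multi-line form for
# desktop output.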
def torList(torrents, author_name="Torrent Transfers",title=None,description=None, footer="📜 Legend", compact_output=(OUTPUT_MODE == OutputMode.MOBILE)):
states = ('downloading', 'seeding', 'stopped', 'finished','checking','check pending','download pending','upload pending')
stateEmoji = {i:j for i,j in zip(states,['🔻','🌱','⏸','🏁','🔬','🔬','🚧','🚧'])}
errorStrs = ['✅','⚠️','🌐','🖥']
def torListLine(t):
try:
eta = int(t.eta.total_seconds())
except:
try:
eta = int(t.eta)
except:
eta = 0
if compact_output:
down = humanbytes(t.progress * 0.01 * t.totalSize, d=0)
out = "{}{}—".format(stateEmoji[t.status],errorStrs[t.error] if t.error != 0 else '')
if t.status == 'downloading':
out += "{}% {} {}{}/s{}".format(int(t.progress), down, '' if eta <= 0 else '{}@'.format(humantime(eta, compact_output=compact_output)), humanbytes(t.rateDownload, d=0), ' *{}/s* {:.1f}'.format(humanbytes(t.rateUpload, d=0), t.uploadRatio) if t.isStalled else '')
elif t.status == 'seeding':
out += "{} *{}/s*:{:.1f}".format(down, humanbytes(t.rateUpload, d=0), t.uploadRatio)
elif t.status == 'stopped':
out += "{}%{} {:.1f}".format(int(t.progress), down, t.uploadRatio)
elif t.status == 'finished':
out += "{} {:.1f}".format(down, t.uploadRatio)
elif t.status == "checking":
out += "{:.1f}%".format(t.recheckProgress*100.0)
else:
down = humanbytes(t.progress * 0.01 * t.totalSize)
out = "{} {} {} {}—".format(stateEmoji[t.status],errorStrs[t.error],'🚀' if t.rateDownload + t.rateUpload > 0 else '🐢' if t.isStalled else '🐇', '🔐' if t.isPrivate else '🔓')
if t.status == 'downloading':
out += "⏬ {:.1f}% of {}, ⬇️ {} {}/s, ⬆️ *{}/s*, ⚖️ *{:.2f}*".format(t.progress, humanbytes(t.totalSize, d=1), '' if eta <= 0 else '\n⏳ {} @ '.format(humantime(eta, compact_output=compact_output)), humanbytes(t.rateDownload), humanbytes(t.rateUpload), t.uploadRatio)
elif t.status == 'seeding':
out += "⏬ {}, ⬆️ *{}/s*, ⚖️ *{:.2f}*".format(humanbytes(t.totalSize, d=1), humanbytes(t.rateUpload), t.uploadRatio)
elif t.status == 'stopped':
out += "⏬ {:.1f}% of {}, ⚖️ *{:.2f}*".format(t.progress, humanbytes(t.totalSize, d=1), t.uploadRatio)
elif t.status == 'finished':
out += "⏬ {}, ⚖️ {:.2f}".format(humanbytes(t.totalSize, d=1), t.uploadRatio)
elif t.status == "checking":
out += "{:.2f}%".format(t.recheckProgress*100.0)
if t.error != 0:
out += "\n***Error:*** *{}*".format(t.errorString)
return out
if compact_output:
nameList = ["{}){:.26}{}".format(t.id,t.name,"..." if len(t.name) > 26 else "") for t in torrents]
else:
nameList = ["{}) {:.245}{}".format(t.id,t.name,"..." if len(t.name) > 245 else "") for t in torrents]
valList = [torListLine(t) for t in torrents]
n = 0
i = 0
eNum = 1
	eNumTotal = max(1, (len(torrents) + 24) // 25) # ceiling division, so '(x of y)' titles match the embeds actually built
embeds = []
if len(torrents) > 0:
while i < len(torrents):
embed=discord.Embed(title=title + ('' if eNumTotal == 1 else ' ({} of {})'.format(eNum, eNumTotal)),description=description,color=0xb51a00)
for j in range(25):
embed.add_field(name=nameList[i],value=valList[i],inline=False)
i += 1
n += 1
if n >= 25:
n = 0
eNum += 1
break
if i >= len(torrents):
break
embeds.append(embed)
else:
embeds.append(discord.Embed(title=title, description="No matching transfers found!", color=0xb51a00))
embeds[-1].set_author(name=author_name, icon_url=CONFIG['logo_url'])
embeds[-1].set_footer(text=footer)
return embeds
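# torGetListOpsFromStr extracts --filter/-f, --sort/-s, --tracker/-t and -N
# options from a command string; whatever remains is treated as a name regex.
# Returns (filter_by, sort_by, filter_regex, tracker_regex, num_results),
# putting -1 in the corresponding slot for an invalid value. Illustrative
# parse (a sketch, assuming 'seeding' is in filter_names_full and 'ratio' is
# in sort_names):
#   torGetListOpsFromStr("--filter seeding --sort ratio -N 5 ubuntu")
#     -> ('seeding', 'ratio', 'ubuntu', None, 5)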
def torGetListOpsFromStr(listOpStr):
filter_by = None
sort_by = None
num_results = None
tracker_regex = None
splitcontent = listOpStr.split(" ")
if "--filter" in splitcontent:
ind = splitcontent.index("--filter")
if len(splitcontent) > ind + 1:
filter_by = splitcontent[ind+1]
del splitcontent[ind+1]
del splitcontent[ind]
elif "-f" in splitcontent:
ind = splitcontent.index("-f")
if len(splitcontent) > ind + 1:
filter_by = splitcontent[ind+1]
del splitcontent[ind+1]
del splitcontent[ind]
if "--sort" in splitcontent:
ind = splitcontent.index("--sort")
if len(splitcontent) > ind + 1:
sort_by = splitcontent[ind+1]
del splitcontent[ind+1]
del splitcontent[ind]
elif "-s" in splitcontent:
ind = splitcontent.index("-s")
if len(splitcontent) > ind + 1:
sort_by = splitcontent[ind+1]
del splitcontent[ind+1]
del splitcontent[ind]
if "--tracker" in splitcontent:
ind = splitcontent.index("--tracker")
if len(splitcontent) > ind + 1:
tracker_regex = splitcontent[ind+1]
del splitcontent[ind+1]
del splitcontent[ind]
elif "-t" in splitcontent:
ind = splitcontent.index("-t")
if len(splitcontent) > ind + 1:
tracker_regex = splitcontent[ind+1]
del splitcontent[ind+1]
del splitcontent[ind]
if "-N" in splitcontent:
ind = splitcontent.index("-N")
if len(splitcontent) > ind + 1:
try:
num_results = int(splitcontent[ind+1])
except:
num_results = -1
del splitcontent[ind+1]
del splitcontent[ind]
filter_regex = " ".join(splitcontent).strip()
if filter_regex == "":
filter_regex = None
if filter_by is not None and filter_by not in filter_names_full:
return -1, None, None, None, None
if sort_by is not None and sort_by not in sort_names:
return None, -1, None, None, None
if num_results is not None and num_results <= 0:
return None, None, None, None, -1
return filter_by, sort_by, filter_regex, tracker_regex, num_results
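# repeat_command registers an auto-updating message in REPEAT_MSGS under a
# random token, then re-invokes the wrapped command every 'freq' seconds until
# the per-user timeout elapses or the user cancels with ❎. The wrapped
# command receives repeat_msg_key so it can edit its own messages in place.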
async def repeat_command(command, message, content="", msg_list=None): # msg_list=None avoids a shared mutable default
	global REPEAT_MSGS
	msg_key = secrets.token_hex()
	REPEAT_MSGS[msg_key] = {
		'msgs':msg_list if msg_list is not None else [],
'command':command,
'message':message,
'content':content,
'pin_to_bottom':False,
'reprint': False,
'freq':CONFIG['repeat_freq'] if message.author.id not in CONFIG['repeat_freq_DM_by_user_ids'] else CONFIG['repeat_freq_DM_by_user_ids'][message.author.id],
'timeout':CONFIG['repeat_timeout'] if message.author.id not in CONFIG['repeat_timeout_DM_by_user_ids'] else CONFIG['repeat_timeout_DM_by_user_ids'][message.author.id],
'timeout_verbose':CONFIG['repeat_timeout_verbose'],
'cancel_verbose':CONFIG['repeat_cancel_verbose'],
'start_time':datetime.datetime.now(),
'do_repeat':True
}
while msg_key in REPEAT_MSGS:
msg = REPEAT_MSGS[msg_key]
if msg['do_repeat']:
delta = datetime.datetime.now() - msg['start_time']
			if msg['timeout'] > 0 and delta.total_seconds() >= msg['timeout']: # .seconds alone wraps at one day
if msg['timeout_verbose']:
await message.channel.send("❎ Auto-update timed out...")
break
else:
try:
await msg['command'](message=msg['message'], content=msg['content'], repeat_msg_key=msg_key)
except Exception as e:
logger.warning("Failed to execute repeat command {}(content={}): {}".format(msg['command'], msg['content'], e))
await asyncio.sleep(msg['freq'])
else:
if msg['cancel_verbose']:
await message.channel.send("❎ Auto-update canceled...")
break
	REPEAT_MSGS.pop(msg_key, None) # the key may already have been removed elsewhere
return
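# get_torrent_list_from_command_str resolves a command string into transfers:
# an explicit ID list like "1,3-5" wins; otherwise the string is parsed for
# filter/sort options. Returns (torrents, "") on success or ([], error_text)
# on bad input, so callers can simply relay error_text to the channel.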
def get_torrent_list_from_command_str(command_str=""):
id_list = strListToList(command_str)
filter_by, sort_by, filter_regex, tracker_regex, num_results = None, None, None, None, None
if not id_list:
filter_by, sort_by, filter_regex, tracker_regex, num_results = torGetListOpsFromStr(command_str)
if filter_by is not None and filter_by == -1:
return [], "Invalid filter specified. Choose one of {}".format(str(filter_names_full))
if sort_by is not None and sort_by == -1:
return [], "Invalid sort specified. Choose one of {}".format(str(sort_names))
if num_results is not None and num_results <= 0:
return [], "Must specify integer greater than 0 for `-N`!"
if TSCLIENT is None:
reload_client()
torrents = TSCLIENT.get_torrents_by(sort_by=sort_by, filter_by=filter_by, filter_regex=filter_regex, tracker_regex=tracker_regex, id_list=id_list, num_results=num_results)
return torrents, ""
async def list_transfers(message, content="", repeat_msg_key=None, msgs=None):
global REPEAT_MSGS
content=content.strip()
if await CommandPrecheck(message):
async with message.channel.typing():
if not repeat_msg_key:
if len(REPEAT_MSGS) == 0:
reload_client()
if CONFIG['delete_command_messages'] and not isDM(message):
try:
await message.delete()
except:
pass
torrents, errStr = get_torrent_list_from_command_str(content)
if errStr != "":
await message.channel.send(errStr)
return
embeds = torList(torrents, title="{} transfer{} matching '`{}`'".format(len(torrents),'' if len(torrents)==1 else 's',content), compact_output=IsCompactOutput(message))
embeds[-1].set_footer(text="📜 Legend, 🧾 Summarize, 🧰 Modify, 🖨 Reprint{}".format('\nUpdating every {}—❎ to stop'.format(humantime(REPEAT_MSGS[repeat_msg_key]['freq'],compact_output=False)) if repeat_msg_key else ', 🔄 Auto-update'))
formatEmoji = '💻' if IsCompactOutput(message) else '📱'
if repeat_msg_key or msgs:
if isDM(message):
if repeat_msg_key:
rxnEmoji = ['📜','🧾','🧰',formatEmoji,'🖨','❎','🔔','🔕']
embeds[-1].timestamp = datetime.datetime.now(tz=pytz.timezone('America/Denver'))
else:
rxnEmoji = ['📜','🧾','🧰',formatEmoji,'🖨','🔄','🔔','🔕']
msgs = [await message.channel.send(embed=e) for e in embeds]
else:
if msgs:
rxnEmoji = ['📜','🧾','🧰','🖨','🔄','🔔','🔕']
if message.channel.last_message_id != msgs[-1].id:
for m in msgs:
await m.delete()
msgs = []
for i,e in enumerate(embeds):
if i < len(msgs):
await msgs[i].edit(embed=e)
cache_msg = await message.channel.fetch_message(msgs[i].id)
if i < len(embeds) - 1 and len(cache_msg.reactions) > 0:
await message_clear_reactions(cache_msg, message)
else:
msgs.append(await message.channel.send(embed=e))
if len(msgs) > len(embeds):
for i in range(len(msgs) - len(embeds)):
await msgs[-1].delete()
del msgs[-1]
else:
rxnEmoji = ['📜','🧾','🧰','🖨','❎','🔔','🔕']
embeds[-1].timestamp = datetime.datetime.now(tz=pytz.timezone('America/Denver'))
msgs = REPEAT_MSGS[repeat_msg_key]['msgs']
if (REPEAT_MSGS[repeat_msg_key]['reprint'] or REPEAT_MSGS[repeat_msg_key]['pin_to_bottom']) and message.channel.last_message_id != msgs[-1].id:
for m in msgs:
await m.delete()
msgs = []
REPEAT_MSGS[repeat_msg_key]['reprint'] = False
for i,e in enumerate(embeds):
if i < len(msgs):
await msgs[i].edit(embed=e)
cache_msg = await message.channel.fetch_message(msgs[i].id)
if i < len(embeds) - 1 and len(cache_msg.reactions) > 0:
await message_clear_reactions(cache_msg, message)
else:
msgs.append(await message.channel.send(embed=e))
if len(msgs) > len(embeds):
for i in range(len(msgs) - len(embeds)):
await msgs[-1].delete()
del msgs[-1]
REPEAT_MSGS[repeat_msg_key]['msgs'] = msgs
else:
msgs = [await message.channel.send(embed=e) for e in embeds]
if isDM(message):
rxnEmoji = ['📜','🧾','🧰',formatEmoji,'🖨','🔄','🔔','🔕']
else:
rxnEmoji = ['📜','🧾','🧰','🖨','🔄','🔔','🔕']
msg = msgs[-1]
# to get actual list of reactions, need to re-fetch the message from the server
cache_msg = await message.channel.fetch_message(msg.id)
msgRxns = [str(r.emoji) for r in cache_msg.reactions]
for e in msgRxns:
if e not in rxnEmoji:
await message_clear_reactions(msg, message, reactions=[e])
for e in rxnEmoji:
if e not in msgRxns:
await msg.add_reaction(e)
cache_msg = await message.channel.fetch_message(msg.id)
for reaction in cache_msg.reactions:
if reaction.count > 1:
async for user in reaction.users():
if user.id in CONFIG['whitelist_user_ids']:
if str(reaction.emoji) == '📜':
if repeat_msg_key:
await message_clear_reactions(msg, message, reactions=['📜'])
else:
await message_clear_reactions(msg, message)
await legend(message)
return
elif str(reaction.emoji) == '🧾':
await message_clear_reactions(msg, message)
asyncio.create_task(summary(message=message, content=content))
return
elif str(reaction.emoji) == '🧰':
if len(torrents) > 0:
if not isDM(message) and CONFIG['delete_command_messages']:
									for m in msgs:
										try:
											await m.delete()
										except:
											pass
else:
await message_clear_reactions(msg, message)
asyncio.create_task(modify(message=message, content=','.join([str(t.id) for t in torrents])))
return
elif str(reaction.emoji) == formatEmoji:
await toggle_compact_out(message=message)
return await list_transfers(message=message, content=content, msgs=msgs)
elif str(reaction.emoji) == '🖨':
await message_clear_reactions(msg, message, reactions=['🖨'])
if repeat_msg_key:
REPEAT_MSGS[repeat_msg_key]['reprint'] = True
return
else:
# if not isDM(message):
# try:
# await msg.delete()
# except:
# pass
return await list_transfers(message=message, content=content, msgs=msgs)
elif str(reaction.emoji) == '❎':
await message_clear_reactions(msg, message)
REPEAT_MSGS[repeat_msg_key]['do_repeat'] = False
return
elif str(reaction.emoji) == '🔄':
await message_clear_reactions(msg, message, reactions=['🔄'])
asyncio.create_task(repeat_command(list_transfers, message=message, content=content, msg_list=msgs))
return
elif str(reaction.emoji) == '🔔':
if len(torrents) > 0:
for t in torrents:
if t.hashString in TORRENT_NOTIFIED_USERS:
TORRENT_NOTIFIED_USERS[t.hashString].append(message.author.id)
else:
TORRENT_NOTIFIED_USERS[t.hashString] = [message.author.id]
embed = discord.Embed(title="🔔 Notifications enabled for:", description=",\n".join(["{}{}".format("" if len(torrents) == 1 else "**{}.**".format(i+1),j) for i,j in enumerate([t.name for t in torrents])]))
await user.send(embed=embed)
elif str(reaction.emoji) == '🔕':
if len(torrents) > 0:
for t in torrents:
if t.hashString in TORRENT_OPTOUT_USERS:
TORRENT_OPTOUT_USERS[t.hashString].append(message.author.id)
else:
TORRENT_OPTOUT_USERS[t.hashString] = [message.author.id]
embed = discord.Embed(title="🔕 Notifications disabled for:", description=",\n".join(["{}{}".format("" if len(torrents) == 1 else "**{}.**".format(i+1),j) for i,j in enumerate([t.name for t in torrents])]))
await user.send(embed=embed)
def check(reaction, user):
return user.id in CONFIG['whitelist_user_ids'] and reaction.message.id == msg.id and str(reaction.emoji) in rxnEmoji
try:
reaction, user = await client.wait_for('reaction_add', timeout=CONFIG['reaction_wait_timeout'] if not repeat_msg_key else REPEAT_MSGS[repeat_msg_key]['freq'], check=check)
except asyncio.TimeoutError:
if not repeat_msg_key:
await message_clear_reactions(msg, message)
return
pass
else:
if str(reaction.emoji) == '📜':
if repeat_msg_key:
await message_clear_reactions(msg, message, reactions=['📜'])
else:
await message_clear_reactions(msg, message)
await legend(message)
return
elif str(reaction.emoji) == '🧾':
await message_clear_reactions(msg, message)
asyncio.create_task(summary(message=message, content=content))
return
elif str(reaction.emoji) == '🧰':
if len(torrents) > 0:
if not isDM(message) and CONFIG['delete_command_messages']:
						for m in msgs:
							try:
								await m.delete()
							except:
								pass
else:
await message_clear_reactions(msg, message)
asyncio.create_task(modify(message=message, content=','.join([str(t.id) for t in torrents])))
return
elif str(reaction.emoji) == formatEmoji:
await toggle_compact_out(message=message)
return await list_transfers(message=message, content=content, msgs=msgs)
elif str(reaction.emoji) == '🖨':
await message_clear_reactions(msg, message, reactions=['🖨'])
if repeat_msg_key:
REPEAT_MSGS[repeat_msg_key]['reprint'] = True
return
else:
# if not isDM(message):
# try:
# await msg.delete()
# except:
# pass
return await list_transfers(message=message, content=content, msgs=msgs)
elif str(reaction.emoji) == '❎':
await message_clear_reactions(msg, message)
REPEAT_MSGS[repeat_msg_key]['do_repeat'] = False
return
elif str(reaction.emoji) == '🔄':
await message_clear_reactions(msg, message, reactions=['🔄'])
asyncio.create_task(repeat_command(list_transfers, message=message, content=content, msg_list=msgs))
return
elif str(reaction.emoji) == '🔔':
if len(torrents) > 0:
for t in torrents:
if t.hashString in TORRENT_NOTIFIED_USERS:
TORRENT_NOTIFIED_USERS[t.hashString].append(message.author.id)
else:
TORRENT_NOTIFIED_USERS[t.hashString] = [message.author.id]
embed = discord.Embed(title="🔔 Notifications enabled for:", description=",\n".join(["{}{}".format("" if len(torrents) == 1 else "**{}.**".format(i+1),j) for i,j in enumerate([t.name for t in torrents])]))
await user.send(embed=embed)
elif str(reaction.emoji) == '🔕':
if len(torrents) > 0:
for t in torrents:
if t.hashString in TORRENT_OPTOUT_USERS:
TORRENT_OPTOUT_USERS[t.hashString].append(message.author.id)
else:
TORRENT_OPTOUT_USERS[t.hashString] = [message.author.id]
embed = discord.Embed(title="🔕 Notifications disabled for:", description=",\n".join(["{}{}".format("" if len(torrents) == 1 else "**{}.**".format(i+1),j) for i,j in enumerate([t.name for t in torrents])]))
await user.send(embed=embed)
if repeat_msg_key: # a final check to see if the user has cancelled the repeat by checking the count of the cancel reaction
cache_msg = await message.channel.fetch_message(msg.id)
for r in cache_msg.reactions:
if r.count > 1:
async for user in r.users():
if user.id in CONFIG['whitelist_user_ids']:
if str(r.emoji) == '🖨':
REPEAT_MSGS[repeat_msg_key]['reprint'] = True
await message_clear_reactions(msg, message, reactions=['🖨'])
elif str(r.emoji) == formatEmoji:
await toggle_compact_out(message=message)
return await list_transfers(message=message, content=content, msgs=msgs)
elif str(r.emoji) == '🧰':
if len(torrents) > 0:
if not isDM(message) and CONFIG['delete_command_messages']:
										for m in msgs:
											try:
												await m.delete()
											except:
												pass
else:
await message_clear_reactions(msg, message)
									asyncio.create_task(modify(message=message, content=','.join([str(t.id) for t in torrents])))
return
elif str(r.emoji) == '📜':
await message_clear_reactions(msg, message, reactions=['📜'])
await legend(message)
return
elif str(r.emoji) == '❎':
await message_clear_reactions(msg, message)
REPEAT_MSGS[repeat_msg_key]['do_repeat'] = False
return
else: # not a repeat message, so no need to keep the reactions
await message_clear_reactions(msg, message)
@client.command(name='list', aliases=['l'], pass_context=True)
async def list_transfers_cmd(context, *, content="", repeat_msg_key=None):
try:
await list_transfers(context.message, content=content, repeat_msg_key=repeat_msg_key)
except Exception as e:
logger.warning("Exception in t/list: {}".format(e))
async def modify(message, content=""):
content=content.strip()
if await CommandPrecheck(message):
async with message.channel.typing():
allOnly = content.strip() == ""
torrents = []
if not allOnly:
if CONFIG['delete_command_messages'] and not isDM(message):
try:
await message.delete()
except:
pass
torrents, errStr = get_torrent_list_from_command_str(content)
if errStr != "":
await message.channel.send(errStr)
return
if len(torrents) > 0:
ops = ["pause","resume","remove","removedelete","verify"]
opNames = ["pause","resume","remove","remove and delete","verify"]
opEmoji = ['⏸','▶️','❌','🗑','🔬']
opStr = "⏸pause ▶️resume ❌remove 🗑remove and delete 🔬verify"
embeds = torList(torrents,author_name="Click a reaction to choose modification".format(len(torrents), '' if len(torrents)==1 else 's'),title="{} transfer{} matching '`{}`' will be modified".format(len(torrents), '' if len(torrents)==1 else 's', content), footer=opStr + "\n📜 Legend, 🚫 Cancel", compact_output=IsCompactOutput(message))
else:
embed=discord.Embed(title="Modify transfers",color=0xb51a00)
embed.set_author(name="No matching transfers found!", icon_url=CONFIG['logo_url'])
embeds = [embed]
else:
ops = ["pauseall","resumeall"]
opNames = ["pause all","resume all"]
opEmoji = ['⏸','▶️']
opStr = "⏸ pause or ▶️ resume all"
embed=discord.Embed(title="React to choose modification",color=0xb51a00)
embed.set_author(name="All transfers will be affected!", icon_url=CONFIG['logo_url'])
embed.set_footer(text=opStr + "\n📜 Legend, 🚫 Cancel")
embeds = [embed]
msgs = [await message.channel.send(embed=e) for e in embeds]
if not allOnly and len(torrents) == 0:
return
formatEmoji = '💻' if IsCompactOutput(message) else '📱'
opEmoji += ['🚫','📜']
if isDM(message):
opEmoji += [formatEmoji]
msg = msgs[-1]
for i in opEmoji:
await msgs[-1].add_reaction(i)
cache_msg = await message.channel.fetch_message(msg.id)
for reaction in cache_msg.reactions:
if reaction.count > 1:
async for user in reaction.users():
if user.id == message.author.id:
if str(reaction.emoji) == '📜':
await message_clear_reactions(msg, message)
await legend(message)
elif str(reaction.emoji) == formatEmoji:
await toggle_compact_out(message=message)
return await modify(message=message, content=content)
elif str(reaction.emoji) == '🚫':
await message_clear_reactions(msg, message)
await message.channel.send("❌ Cancelled!")
return
elif str(reaction.emoji) in opEmoji[:-1]:
cmds = {i:j for i,j in zip(opEmoji,ops)}
cmdNames = {i:j for i,j in zip(opEmoji,opNames)}
cmd = cmds[str(reaction.emoji)]
cmdName = cmdNames[str(reaction.emoji)]
doContinue = True
msg2 = None
if "remove" in cmds[str(reaction.emoji)]:
footerPrepend = ""
if CONFIG['private_transfers_protected'] and (not CONFIG['private_transfer_protection_bot_owner_override'] or message.author.id not in CONFIG['owner_user_ids']):
removeTorrents = [t for t in torrents if not t.isPrivate]
if len(removeTorrents) != len(torrents):
if CONFIG['private_transfer_protection_added_user_override']:
oldTorrents = load_json(path=TORRENT_JSON)
removeTorrents = [t for t in torrents if not t.isPrivate or ((t.hashString in oldTorrents and oldTorrents[t.hashString]['added_user'] == message.author.id) or (t.hashString in TORRENT_ADDED_USERS and TORRENT_ADDED_USERS[t.hashString] == message.author.id))]
if len(removeTorrents) != len(torrents):
if len(removeTorrents) == 0:
await message.channel.send("🚫 I'm not allowed to remove private transfers unless they were added by you. If this isn't right, talk to an admin.")
await message_clear_reactions(msg, message)
return
else:
torrents = removeTorrents
footerPrepend = "(I'm not allowed to remove private transfers unless they were added by you, so this will only apply to those you added and the public ones)\n"
else:
if len(removeTorrents) == 0:
await message.channel.send("🚫 I'm not allowed to remove private transfers. If this isn't right, talk to an admin.")
await message_clear_reactions(msg, message)
return
else:
torrents = removeTorrents
if CONFIG['private_transfer_protection_bot_owner_override']:
footerPrepend = "(Only bot owners can remove private transfers, but I'll do the public ones)\n"
else:
footerPrepend = "(I'm not allowed to remove private transfers, but I'll do the public ones)\n"
if "delete" in cmds[str(reaction.emoji)] and not CONFIG['whitelist_user_can_delete'] and message.author.id not in CONFIG['owner_user_ids']:
# user may not be allowed to perform this operation. Check if they added any transfers, and whether the added_user_override is enabled.
if CONFIG['whitelist_added_user_remove_delete_override']:
# override is enabled, so reduce the list of torrents to be modified to those added by the user.
# first get transfers from TORRENT_JSON
oldTorrents = load_json(path=TORRENT_JSON)
removeTorrents = [t for t in torrents if (t.hashString in oldTorrents and oldTorrents[t.hashString]['added_user'] == message.author.id) or (t.hashString in TORRENT_ADDED_USERS and TORRENT_ADDED_USERS[t.hashString] == message.author.id)]
if len(removeTorrents) != len(torrents):
if len(removeTorrents) > 0:
torrents = removeTorrents
footerPrepend = "(You can only remove and delete transfers added by you. Other transfers won't be affected.)\n"
else:
await message.channel.send("🚫 You can only remove and delete transfers added by you. If this isn't right, ask an admin to add you to the bot owner list.")
await message_clear_reactions(msg, message)
return
else:
# override not enabled, so user can't perform this operation
await message.channel.send("🚫 You're not allowed to remove and delete transfers. If this isn't right, ask an admin to add you to the bot owner list or to enable the override for transfers added by you.")
await message_clear_reactions(msg, message)
return
elif not CONFIG['whitelist_user_can_remove'] and message.author.id not in CONFIG['owner_user_ids']:
# user may not be allowed to perform this operation. Check if they added any transfers, and whether the added_user_override is enabled.
if CONFIG['whitelist_added_user_remove_delete_override']:
# override is enabled, so reduce the list of torrents to be modified to those added by the user.
# first get transfers from TORRENT_JSON
oldTorrents = load_json(path=TORRENT_JSON)
removeTorrents = [t for t in torrents if (t.hashString in oldTorrents and oldTorrents[t.hashString]['added_user'] == message.author.id) or (t.hashString in TORRENT_ADDED_USERS and TORRENT_ADDED_USERS[t.hashString] == message.author.id)]
if len(removeTorrents) != len(torrents):
if len(removeTorrents) > 0:
torrents = removeTorrents
footerPrepend = "(You can only remove transfers added by you. Other transfers won't be affected.)\n"
else:
await message.channel.send("🚫 You can only remove transfers added by you. If this isn't right, ask an admin to add you to the bot owner list.")
await message_clear_reactions(msg, message)
return
else:
# override not enabled, so user can't perform this operation
await message.channel.send("🚫 You're not allowed to remove transfers. If this isn't right, ask an admin to add you to the bot owner list or to enable the override for transfers added by you.")
await message_clear_reactions(msg, message)
return
embed=discord.Embed(title="Are you sure you wish to remove{} {} transfer{}?".format(' and DELETE' if 'delete' in cmds[str(reaction.emoji)] else '', len(torrents), '' if len(torrents)==1 else 's'),description="**This action is irreversible!**",color=0xb51a00)
embed.set_footer(text=footerPrepend + "React ✅ to continue or ❌ to cancel")
msg2 = await message.channel.send(embed=embed)
for i in ['✅','❌']:
await msg2.add_reaction(i)
def check1(reaction, user):
return user == message.author and reaction.message.id == msg2.id and str(reaction.emoji) in ['✅','❌']
try:
reaction, user = await client.wait_for('reaction_add', timeout=60, check=check1)
except asyncio.TimeoutError:
await message_clear_reactions(msg, message)
await message_clear_reactions(msg2, message)
doContinue = False
else:
doContinue = str(reaction.emoji) == '✅'
if doContinue:
async with message.channel.typing():
await message.channel.send("{} Trying to {} transfer{}, please wait...".format(str(reaction.emoji), cmdName, 's' if allOnly or len(torrents) > 1 else ''))
try:
if "pause" in cmd:
stop_torrents(torrents)
elif "resume" in cmd:
resume_torrents(torrents, start_all=("all" in cmd))
elif "verify" in cmd:
verify_torrents(torrents)
else:
remove_torrents(torrents,delete_files="delete" in cmd)
ops = ["pause","resume","remove","removedelete","pauseall","resumeall","verify"]
opNames = ["paused","resumed","removed","removed and deleted","paused","resumed","queued for verification"]
opEmoji = ["⏸","▶️","❌","🗑","⏸","▶️","🔬"]
ops = {i:j for i,j in zip(ops,opNames)}
opEmoji = {i:j for i,j in zip(ops,opEmoji)}
await message.channel.send("{} Transfer{} {}".format(str(reaction.emoji),'s' if allOnly or len(torrents) > 1 else '', ops[cmd]))
await message_clear_reactions(msg, message)
if msg2 is not None:
await message_clear_reactions(msg2, message)
return
except Exception as e:
await message.channel.send("⚠️ A problem occurred trying to modify transfer(s). You may need to try again... Sorry!".format(str(reaction.emoji), cmdName, 's' if allOnly or len(torrents) > 1 else ''))
logger.warning("Exception in t/modify running command '{}': {}".format(cmd,e))
else:
await message.channel.send("❌ Cancelled!")
await message_clear_reactions(msg, message)
if msg2 is not None:
await message_clear_reactions(msg2, message)
return
def check(reaction, user):
return user == message.author and reaction.message.id == msg.id and str(reaction.emoji) in opEmoji
try:
reaction, user = await client.wait_for('reaction_add', timeout=60, check=check)
except asyncio.TimeoutError:
await message_clear_reactions(msg, message)
return
else:
if str(reaction.emoji) == '📜':
await message_clear_reactions(msg, message)
await legend(message)
elif str(reaction.emoji) == formatEmoji:
await toggle_compact_out(message=message)
return await modify(message=message, content=content)
elif str(reaction.emoji) == '🚫':
await message_clear_reactions(msg, message)
await message.channel.send("❌ Cancelled!")
return
elif str(reaction.emoji) in opEmoji[:-1]:
cmds = {i:j for i,j in zip(opEmoji,ops)}
cmdNames = {i:j for i,j in zip(opEmoji,opNames)}
cmd = cmds[str(reaction.emoji)]
cmdName = cmdNames[str(reaction.emoji)]
msg2 = None
doContinue = True
if "remove" in cmds[str(reaction.emoji)]:
footerPrepend = ""
if CONFIG['private_transfers_protected'] and (not CONFIG['private_transfer_protection_bot_owner_override'] or message.author.id not in CONFIG['owner_user_ids']):
removeTorrents = [t for t in torrents if not t.isPrivate]
if len(removeTorrents) != len(torrents):
if CONFIG['private_transfer_protection_added_user_override']:
oldTorrents = load_json(path=TORRENT_JSON)
removeTorrents = [t for t in torrents if not t.isPrivate or ((t.hashString in oldTorrents and oldTorrents[t.hashString]['added_user'] == message.author.id) or (t.hashString in TORRENT_ADDED_USERS and TORRENT_ADDED_USERS[t.hashString] == message.author.id))]
if len(removeTorrents) != len(torrents):
if len(removeTorrents) == 0:
await message.channel.send("🚫 I'm not allowed to remove private transfers unless they were added by you. If this isn't right, talk to an admin.")
await message_clear_reactions(msg, message)
return
else:
torrents = removeTorrents
footerPrepend = "(I'm not allowed to remove private transfers unless they were added by you, so this will only apply to those you added and the public ones)\n"
else:
if len(removeTorrents) == 0:
await message.channel.send("🚫 I'm not allowed to remove private transfers. If this isn't right, talk to an admin.")
await message_clear_reactions(msg, message)
return
else:
torrents = removeTorrents
if CONFIG['private_transfer_protection_bot_owner_override']:
footerPrepend = "(Only bot owners can remove private transfers, but I'll do the public ones)\n"
else:
footerPrepend = "(I'm not allowed to remove private transfers, but I'll do the public ones)\n"
if "delete" in cmds[str(reaction.emoji)] and not CONFIG['whitelist_user_can_delete'] and message.author.id not in CONFIG['owner_user_ids']:
# user may not be allowed to perform this operation. Check if they added any transfers, and whether the added_user_override is enabled.
if CONFIG['whitelist_added_user_remove_delete_override']:
# override is enabled, so reduce the list of torrents to be modified to those added by the user.
# first get transfers from TORRENT_JSON
oldTorrents = load_json(path=TORRENT_JSON)
removeTorrents = [t for t in torrents if (t.hashString in oldTorrents and oldTorrents[t.hashString]['added_user'] == message.author.id) or (t.hashString in TORRENT_ADDED_USERS and TORRENT_ADDED_USERS[t.hashString] == message.author.id)]
if len(removeTorrents) != len(torrents):
if len(removeTorrents) > 0:
torrents = removeTorrents
footerPrepend = "(You can only remove and delete transfers added by you. Other transfers won't be affected.)\n"
else:
await message.channel.send("🚫 You can only remove and delete transfers added by you. If this isn't right, ask an admin to add you to the bot owner list.")
await message_clear_reactions(msg, message)
return
else:
# override not enabled, so user can't perform this operation
await message.channel.send("🚫 You're not allowed to remove and delete transfers. If this isn't right, ask an admin to add you to the bot owner list or to enable the override for transfers added by you.")
await message_clear_reactions(msg, message)
return
elif not CONFIG['whitelist_user_can_remove'] and message.author.id not in CONFIG['owner_user_ids']:
# user may not be allowed to perform this operation. Check if they added any transfers, and whether the added_user_override is enabled.
if CONFIG['whitelist_added_user_remove_delete_override']:
# override is enabled, so reduce the list of torrents to be modified to those added by the user.
# first get transfers from TORRENT_JSON
oldTorrents = load_json(path=TORRENT_JSON)
removeTorrents = [t for t in torrents if (t.hashString in oldTorrents and oldTorrents[t.hashString]['added_user'] == message.author.id) or (t.hashString in TORRENT_ADDED_USERS and TORRENT_ADDED_USERS[t.hashString] == message.author.id)]
if len(removeTorrents) != len(torrents):
if len(removeTorrents) > 0:
torrents = removeTorrents
footerPrepend = "(You can only remove transfers added by you. Other transfers won't be affected.)\n"
else:
await message.channel.send("🚫 You can only remove transfers added by you. If this isn't right, ask an admin to add you to the bot owner list.")
await message_clear_reactions(msg, message)
return
else:
# override not enabled, so user can't perform this operation
await message.channel.send("🚫 You're not allowed to remove transfers. If this isn't right, ask an admin to add you to the bot owner list or to enable the override for transfers added by you.")
await message_clear_reactions(msg, message)
return
embed=discord.Embed(title="Are you sure you wish to remove{} {} transfer{}?".format(' and DELETE' if 'delete' in cmds[str(reaction.emoji)] else '', len(torrents), '' if len(torrents)==1 else 's'),description="**This action is irreversible!**",color=0xb51a00)
embed.set_footer(text="react ✅ to continue or ❌ to cancel")
msg2 = await message.channel.send(embed=embed)
for i in ['✅','❌']:
await msg2.add_reaction(i)
def check1(reaction, user):
return user == message.author and reaction.message.id == msg2.id and str(reaction.emoji) in ['✅','❌']
try:
reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check1)
except asyncio.TimeoutError:
await message_clear_reactions(msg, message)
await message_clear_reactions(msg2, message)
doContinue = False
else:
doContinue = str(reaction.emoji) == '✅'
if doContinue:
async with message.channel.typing():
await message.channel.send("{} Trying to {} transfer{}, please wait...".format(str(reaction.emoji), cmdName, 's' if allOnly or len(torrents) > 1 else ''))
try:
if "pause" in cmd:
stop_torrents(torrents)
elif "resume" in cmd:
resume_torrents(torrents, start_all=("all" in cmd))
elif "verify" in cmd:
verify_torrents(torrents)
else:
remove_torrents(torrents,delete_files="delete" in cmd)
ops = ["pause","resume","remove","removedelete","pauseall","resumeall","verify"]
opNames = ["paused","resumed","removed","removed and deleted","paused","resumed","queued for verification"]
opEmoji = ["⏸","▶️","❌","🗑","⏸","▶️","🔬"]
ops = {i:j for i,j in zip(ops,opNames)}
opEmoji = {i:j for i,j in zip(ops,opEmoji)}
await message.channel.send("{} Transfer{} {}".format(str(reaction.emoji),'s' if allOnly or len(torrents) > 1 else '', ops[cmd]))
await message_clear_reactions(msg, message)
if msg2 is not None:
await message_clear_reactions(msg2, message)
return
except Exception as e:
await message.channel.send("⚠️ A problem occurred trying to modify transfer(s). You may need to try again... Sorry!".format(str(reaction.emoji), cmdName, 's' if allOnly or len(torrents) > 1 else ''))
logger.warning("Exception in t/modify running command '{}': {}".format(cmd,e))
else:
await message.channel.send("❌ Cancelled!")
await message_clear_reactions(msg, message)
if msg2 is not None:
await message_clear_reactions(msg2, message)
return
await message_clear_reactions(msg, message)
@client.command(name='modify', aliases=['m'], pass_context=True)
async def modify_cmd(context, *, content=""):
try:
await modify(context.message, content=content)
except Exception as e:
logger.warning("Exception in t/modify: {}".format(e))
async def toggle_compact_out(message, content=""):
global OUTPUT_MODE, CONFIG
if isDM(message):
if message.author.id in CONFIG['DM_compact_output_user_ids']:
del CONFIG['DM_compact_output_user_ids'][CONFIG['DM_compact_output_user_ids'].index(message.author.id)]
await message.channel.send('🖥 DMs switched to desktop output')
else:
CONFIG['DM_compact_output_user_ids'].append(message.author.id)
await message.channel.send('📱 DMs switched to mobile output')
generate_json(json_data=CONFIG, path=CONFIG_JSON, overwrite=True)
elif OUTPUT_MODE == OutputMode.AUTO:
if message.author.is_on_mobile():
OUTPUT_MODE = OutputMode.DESKTOP
await message.channel.send('🖥 Switched to desktop output')
else:
OUTPUT_MODE = OutputMode.MOBILE
await message.channel.send('📱 Switched to mobile output')
else:
OUTPUT_MODE = OutputMode.AUTO
await message.channel.send("🧠 Switched to smart selection of output (for you, {})".format('📱 mobile' if message.author.is_on_mobile() else '🖥 desktop'))
return
@client.command(name='compact', aliases=['c'], pass_context=True)
async def toggle_compact_out_cmd(context):
if await CommandPrecheck(context.message):
await toggle_compact_out(context.message)
async def LegendGetEmbed(embed_data=None):
isCompact = False #compact_output
joinChar = ',' if isCompact else '\n'
if embed_data:
embed = discord.Embed.from_dict(embed_data)
		embed.add_field(name='Legend', value='\u200b', inline=False) # Discord rejects empty field values, so use a zero-width space
else:
embed = discord.Embed(title='Legend', color=0xb51a00)
embed.add_field(name="Status 🔍", value=joinChar.join(["🔻—downloading","🌱—seeding","⏸—paused","🔬—verifying","🚧—queued","🏁—finished","↕️—any"]), inline=not isCompact)
embed.add_field(name="Metrics 📊", value=joinChar.join(["⬇️—download rate","⬆️—upload rate","⏬—total downloaded","⏫—total uploaded","⚖️—seed ratio","⏳—ETA"]), inline=not isCompact)
embed.add_field(name="Modifications 🧰", value=joinChar.join(["⏸—pause","▶️—resume","❌—remove","🗑—remove and delete","🔬—verify"]), inline=not isCompact)
embed.add_field(name="Error ‼️", value=joinChar.join(["✅—none","⚠️—tracker warning","🌐—tracker error","🖥—local error"]), inline=not isCompact)
embed.add_field(name="Activity 📈", value=joinChar.join(["🐢—stalled","🐇—active","🚀—running (rate>0)"]), inline=not isCompact)
embed.add_field(name="Tracker 📡", value=joinChar.join(["🔐—private","🔓—public"]), inline=not isCompact)
embed.add_field(name="Messages 💬", value=joinChar.join(["🔄—auto-update message","❎—cancel auto-update","🖨—reprint at bottom", "📱 *or* 💻—switch output format to mobile/desktop", "🧾—summarize listed transfers"]), inline=not isCompact)
embed.add_field(name="Notifications 📣", value=joinChar.join(["🔔—enable","🔕—disable"]), inline=not isCompact)
return embed
async def legend(message, content=""):
if await CommandPrecheck(message):
await message.channel.send(embed=await LegendGetEmbed())
return
@client.command(name='legend', pass_context=True)
async def legend_cmd(context):
await legend(context.message)
# @client.command(name='test', pass_context=True)
# async def test(context):
# if context.message.author.is_on_mobile():
# await context.channel.send('on mobile')
# else:
# await context.channel.send('on desktop')
# return
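# purge deletes up to 100 recent non-pinned messages from the channel. Note
# that bulk deletion needs the Manage Messages permission and, per the Discord
# API, only works on messages newer than 14 days.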
async def purge(message):
	def not_pinned(m):
		return not m.pinned
	deleted = await message.channel.purge(limit=100, check=not_pinned)
await message.channel.send('Deleted {} message(s)'.format(len(deleted)))
return
@client.command(name='purge', aliases=['p'], pass_context=True)
async def purge_cmd(context):
await purge(context.message)
async def set_repeat_freq(message, content=CONFIG['repeat_freq']):
global CONFIG
if isDM(message) and await CommandPrecheck(message):
try:
if content == "":
s = CONFIG['repeat_freq']
else:
s = int(content)
if s <= 0:
raise Exception("Integer <= 0 provided for repeat frequency")
CONFIG['repeat_freq_DM_by_user_ids'][message.author.id] = s
await message.channel.send('🔄 DM repeat frequency set to {}'.format(humantime(s,compact_output=False)))
generate_json(json_data=CONFIG, path=CONFIG_JSON, overwrite=True)
except:
await message.channel.send('‼️ Error setting DM repeat frequency. Must be integer greater than zero (you provided {})'.format(content))
elif await CommandPrecheck(message, whitelist=CONFIG['owner_user_ids']):
try:
if content == "":
s = CONFIG['repeat_freq']
else:
s = int(content)
if s <= 0:
raise Exception("Integer <= 0 provided for repeat frequency")
CONFIG['repeat_freq'] = s
await message.channel.send('🔄 In-channel repeat frequency set to {}'.format(humantime(s,compact_output=False)))
generate_json(json_data=CONFIG, path=CONFIG_JSON, overwrite=True)
except:
await message.channel.send('‼️ Error setting in-channel repeat frequency. Must be integer greater than zero (you provided {})'.format(content))
return
@client.command(name='set-repeat-freq', pass_context=True)
async def set_repeat_freq_cmd(context, content=""):
await set_repeat_freq(context.message, content.strip())
async def set_repeat_timeout(message, content=CONFIG['repeat_timeout']):
global CONFIG
if isDM(message) and await CommandPrecheck(message):
try:
if content == "":
s = CONFIG['repeat_timeout']
else:
s = int(content)
if s < 0:
raise Exception("Integer < 0 provided for repeat timeout")
CONFIG['repeat_timeout_DM_by_user_ids'][message.author.id] = s
await message.channel.send('🔄 DM repeat timeout set to {}'.format(humantime(s,compact_output=False) if s > 0 else 'unlimited'))
generate_json(json_data=CONFIG, path=CONFIG_JSON, overwrite=True)
except:
await message.channel.send('‼️ Error setting DM repeat timeout. Must be integer greater than or equal to zero (you provided {})'.format(content))
elif await CommandPrecheck(message, whitelist=CONFIG['owner_user_ids']):
try:
if content == "":
s = CONFIG['repeat_timeout']
else:
s = int(content)
if s < 0:
raise Exception("Integer < 0 provided for repeat timeout")
CONFIG['repeat_timeout'] = s
await message.channel.send('🔄 In-channel repeat timeout set to {}'.format(humantime(s,compact_output=False) if s > 0 else 'unlimited'))
generate_json(json_data=CONFIG, path=CONFIG_JSON, overwrite=True)
except:
				await message.channel.send('‼️ Error setting in-channel repeat timeout. Must be integer greater than or equal to zero (you provided {})'.format(content))
return
@client.command(name='set-repeat-timeout', pass_context=True)
async def set_repeat_timeout_cmd(context, content=""):
await set_repeat_timeout(context.message, content.strip())
async def toggle_notifications(message, content=""):
global CONFIG
if isDM(message) and await CommandPrecheck(message):
if message.author.id in CONFIG['notification_DM_opt_out_user_ids']:
CONFIG['notification_DM_opt_out_user_ids'].remove(message.author.id)
await message.channel.send('🔔 DM notifications enabled')
else:
CONFIG['notification_DM_opt_out_user_ids'].append(message.author.id)
await message.channel.send('🔕 DM notifications disabled')
generate_json(json_data=CONFIG, path=CONFIG_JSON, overwrite=True)
elif await CommandPrecheck(message, whitelist=CONFIG['owner_user_ids']):
if CONFIG['notification_enabled_in_channel']:
CONFIG['notification_enabled_in_channel'] = False
await message.channel.send('🔕 In-channel notifications disabled')
else:
CONFIG['notification_enabled_in_channel'] = True
await message.channel.send('🔔 In-channel notifications enabled')
return
@client.command(name='notifications', aliases=['n'], pass_context=True)
async def toggle_notifications_cmd(context):
await toggle_notifications(context.message)
async def toggle_dryrun(message, content=""):
global CONFIG
CONFIG['dryrun'] = not CONFIG['dryrun']
await message.channel.send("Toggled dryrun to {}".format(CONFIG['dryrun']))
return
@client.command(name='dryrun', pass_context=True)
async def toggle_dryrun_cmd(context):
if await CommandPrecheck(context.message, whitelist=CONFIG['owner_user_ids']):
await toggle_dryrun(context.message)
@client.event
async def on_message(message):
if message.author.id == client.user.id:
return
if message_has_torrent_file(message):
await add(message, content=message.content)
if isDM(message): # dm only
contentLower = message.content.lower()
c = message.content
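		# Match the message against each DM command name and alias by prefix.
		# Single-character aliases get a trailing space appended (and a bare
		# single-character message is padded) so that e.g. 'l foo' matches the
		# alias 'l' while 'legend' does not.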
for k,v in dmCommands.items():
for ai in [k] + v['alias']:
a = ai
cl = contentLower
if len(ai) == 1:
a += ' '
if len(c) == 1:
cl += ' '
c += ' '
if len(cl) >= len(a) and a == cl[:len(a)]:
await v['cmd'](message, content=c[len(a):].strip())
return
await client.process_commands(message)
elif not message.guild: # group dm only
# do stuff here #
pass
else: # server text channel
await client.process_commands(message)
client.remove_command('help')
async def print_help(message, content="", compact_output=(OUTPUT_MODE == OutputMode.MOBILE)):
if await CommandPrecheck(message):
if content != "":
if content in ["l","list"]:
embed = discord.Embed(title='List transfers', color=0xb51a00)
embed.set_author(name="List current transfers with sorting, filtering, and search options", icon_url=CONFIG['logo_url'])
embed.add_field(name="Usage", value='`{0}list [--filter FILTER] [--sort SORT] [--tracker TRACKER] [-N NUM_RESULTS] [TORRENT_ID_SPECIFIER] [NAME]`'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="Filtering", value='`--filter FILTER` or `-f FILTER`\n`FILTER` is one of `{}`'.format(str(filter_names_full)), inline=False)
embed.add_field(name="Sorting", value='`--sort SORT` or `-s SORT`\n`SORT` is one of `{}`'.format(str(sort_names)), inline=False)
embed.add_field(name="Tracker", value='`--tracker TRACKER` or `-t TRACKER`\n`TRACKER` is a regular expression used to search transfer names (no enclosing quotes; may NOT contain spaces)', inline=False)
embed.add_field(name="Specify number of results to show", value='`-N NUM_RESULTS`\n`NUM_RESULTS` is an integer greater than 0', inline=False)
embed.add_field(name="By ID specifier", value='`TORRENT_ID_SPECIFIER` is a valid transfer ID specifier—*e.g.* `1,3-5,9` to specify transfers 1, 3, 4, 5, and 9\n*Transfer IDs are the left-most number in the list of transfers (use* `{0}list` *to print full list)*\n*Either TORRENT_ID_SPECIFIER or NAME can be specified, but not both*'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="Searching by name", value='`NAME` is a regular expression used to search transfer names (no enclosing quotes; may contain spaces)', inline=False)
embed.add_field(name="Examples", value="*List all transfers:* `{0}list`\n*Search using phrase 'ubuntu':* `{0}l ubuntu`\n*List downloading transfers:* `{0}l -f downloading`\n*List 10 most recently added transfers (sort transfers by age and specify number):* `{0}list --sort age -N 10`".format(CONFIG['bot_prefix']), inline=False)
# await message.channel.send(embed=embed)
elif content in ["a","add"]:
embed = discord.Embed(title='Add transfer', description="If multiple torrents are added, separate them by spaces", color=0xb51a00)
embed.set_author(name="Add one or more specified torrents by magnet link, url to torrent file, or by attaching a torrent file", icon_url=CONFIG['logo_url'])
embed.add_field(name="Usage", value='`{0}add TORRENT_FILE_URL_OR_MAGNET_LINK ...`\n`{0}a TORRENT_FILE_URL_OR_MAGNET_LINK ...`'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="Notes", value='*You can add transfers by uploading a torrent file without having to type anything, i.e. no command necessary, just upload it to TransmissionBot\'s channel or via DM*', inline=False)
embed.add_field(name="Examples", value="*Add download of Linux Ubuntu using link to torrent file:* `{0}add https://releases.ubuntu.com/20.04/ubuntu-20.04.1-desktop-amd64.iso.torrent`\n*Add download of ubuntu using the actual `.torrent` file:* Select the `.torrent` file as an attachmend in Discord and send, no `{0}add` needed!".format(CONFIG['bot_prefix']), inline=False)
# await message.channel.send(embed=embed)
elif content in ["m","modify"]:
embed = discord.Embed(title='Modify existing transfer(s)', color=0xb51a00)
embed.set_author(name="Pause, resume, remove, or remove and delete specified transfer(s)", icon_url=CONFIG['logo_url'])
embed.add_field(name="Usage", value='`{0}modify [LIST_OPTIONS]`'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="Pause or resume ALL transfers", value="Simply run `{0}modify` to pause or resume all existing transfers".format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="By list options", value='`LIST_OPTIONS` is a valid set of options to the `{0}list` command (see `{0}help list` for details)'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="Examples", value="`{0}modify`\n`{0}m ubuntu`\n`{0}m 23,34,36-42`\n`{0}m --filter downloading ubuntu`".format(CONFIG['bot_prefix']), inline=False)
# await message.channel.send(embed=embed)
elif content in ["s","summary"]:
embed = discord.Embed(title="Print summary of transfers", color=0xb51a00)
embed.set_author(name="Print summary of active transfer information", icon_url=CONFIG['logo_url'])
embed.add_field(name="Usage", value='`{0}summary [LIST_OPTIONS]`'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="By list options", value='`LIST_OPTIONS` is a valid set of options to the `{0}list` command (see `{0}help list` for details)'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="Examples", value="`{0}summary`\n`{0}s --filter private`\n`{0}s 23,34,36-42`\n`{0}s --filter downloading ubuntu`".format(CONFIG['bot_prefix']), inline=False)
# await message.channel.send(embed=embed)
elif content in ["config"]:
embed = discord.Embed(title="Configuration", color=0xb51a00)
embed.set_author(name="Configure bot options", icon_url=CONFIG['logo_url'])
embed.add_field(name='Toggle output style', value='*toggle between desktop (default), mobile (narrow), or smart selection of output style*\n*ex.* `{0}compact` or `{0}c`'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name='Toggle notifications', value='*toggle notifications regarding transfer state changes to be checked every {1} (can be changed in config file)*\n*ex.* `{0}notifications` or `{0}n`'.format(CONFIG['bot_prefix'], humantime(CONFIG['notification_freq'],compact_output=False)), inline=False)
			embed.add_field(name='Set auto-update message frequency and timeout', value='**Frequency:** *Use* `{0}set-repeat-freq NUM_SECONDS` *or* `{0}freq NUM_SECONDS` *to set the repeat frequency of auto-update messages (*`NUM_SECONDS` *must be greater than 0, leave blank to revert to default of {1})*\n**Timeout:** *Use* `{0}set-repeat-timeout NUM_SECONDS` *or* `{0}timeout NUM_SECONDS` *to set the amount of time an auto-repeat message will repeat until it quits automatically (times out) (*`NUM_SECONDS` *must be greater than or equal to 0. Set to 0 for no timeout. Leave blank to revert to default of {2})*'.format(CONFIG['bot_prefix'], humantime(CONFIG['repeat_freq'],compact_output=False),humantime(CONFIG['repeat_timeout'],compact_output=False)), inline=False)
# await message.channel.send(embed=embed)
else:
embed = discord.Embed(title='List of commands:', description='Send commands in-channel or directly to me via DM.', color=0xb51a00)
embed.set_author(name='Transmission Bot: Manage torrent file transfers', icon_url=CONFIG['logo_url'])
embed.add_field(name="Add new torrent transfers `{0}add`".format(CONFIG['bot_prefix']), value="*add one or more specified torrents by magnet link, url to torrent file (in which case you don't need to use a command), or by attaching a torrent file*\n*ex.* `{0}add TORRENT ...` or `{0}a TORRENT ...`".format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="Modify existing transfers `{0}modify`".format(CONFIG['bot_prefix']), value="*pause, resume, remove, or remove and delete specified transfers*\n*ex.* `{0}modify [LIST_OPTIONS]` or `{0}m [LIST_OPTIONS]`".format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="List torrent transfers `{0}list`".format(CONFIG['bot_prefix']), value="*list current transfers with sorting, filtering, and search options*\n*ex.* `{0}list [LIST_OPTIONS]` or `{0}l [LIST_OPTIONS]`".format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name="Print summary of transfers `{0}summary`".format(CONFIG['bot_prefix']), value="*print summary for specified transfers, with followup options to list subsets of those transfers*\n*ex.* `{0}summary [LIST_OPTIONS]` or `{0}s [LIST_OPTIONS]`".format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name='Show legend `{0}legend`'.format(CONFIG['bot_prefix']), value='*prints legend showing the meaning of symbols used in the output of other commands*\n*ex.* `{0}legend`'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name='Help - Gives this menu `{0}help`'.format(CONFIG['bot_prefix']), value='*with optional details of specified command*\n*ex.* `{0}help` or `{0}help COMMAND`'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name='Configuration `{0}help config`'.format(CONFIG['bot_prefix']), value='*set frequency and timeout of auto-update messages, toggle notifications, and toggle output display style*\n*See* `{0}help config` *for more information*'.format(CONFIG['bot_prefix']), inline=False)
embed.add_field(name='Bot information `{0}info`'.format(CONFIG['bot_prefix']), value='*prints information pertaining to the physical server running the bot*', inline=False)
# if not compact_output:
# legendEmbed=await LegendGetEmbed()
# embed.add_field(name=legendEmbed.title, value='', inline=False)
# for f in legendEmbed.fields:
# embed.add_field(name=f.name, value=f.value, inline=f.inline)
if not isDM(message):
try:
await message.author.send(embed=embed)
await message.channel.send('Hi {}, I sent you a DM with the help information'.format(message.author.display_name))
except:
await message.channel.send(embed=embed)
else:
await message.channel.send(embed=embed)
@client.command(name='help', description='Help HUD.', brief='HELPOOOO!!!', pass_context=True)
async def help_cmd(context, *, content=""):
await print_help(context.message, content)
async def print_info(message, content=""):
import requests as req
from netifaces import interfaces, ifaddresses, AF_INET
async with message.channel.typing():
# get public IP address
# modified from MattMoony's gist: https://gist.github.com/MattMoony/80b05a48b1bcdc64df32f95ed269a393
try:
publicIP = req.get("https://wtfismyip.com/text").text.strip()
publicIP = "Public: " + publicIP
except Exception as e:
logger.error("Failed to get public IP address (from https://wtfismyip.com/text): {}".format(e))
publicIP = "Failed to resolve public IP (check logs)"
# get local addresses
# from DzinX's answer: https://stackoverflow.com/a/166591/2620767
try:
addresses = ['{}: {}'.format(ifaceName, i['addr']) for ifaceName in interfaces() for i in ifaddresses(ifaceName).setdefault(AF_INET, [{'addr':'No IP addr'}] )]
# addresses = ['{}: {}'.format(ifaceName, i['addr']) for ifaceName in interfaces() for i in ifaddresses(ifaceName).setdefault(AF_INET, [{'addr':'No IP addr'}] ) if i['addr'] != "No IP addr"]
except Exception as e:
logger.error("Failed to get local IP address: {}".format(e))
addresses = ["Failed to resolve local IPs (check logs)"]
addresses = '\n'.join([publicIP] + addresses)
# get Transmission client and session info
try:
session = TSCLIENT.session_stats()
trpcinfo = {
'alt_speed_limit_down': humanbytes(session.alt_speed_down*1024,d=1)+'/s',
'alt_speed_limit_enabled': session.alt_speed_enabled,
'alt_speed_limit_up': humanbytes(session.alt_speed_up*1024,d=1)+'/s',
'alt_speed_time_begin': timeofday(session.alt_speed_time_begin),
'alt_speed_time_day': session.alt_speed_time_day,
'alt_speed_time_enabled': session.alt_speed_time_enabled,
'alt_speed_time_end': timeofday(session.alt_speed_time_end),
'alt_speed_up': session.alt_speed_up,
'blocklist_enabled': session.blocklist_enabled,
'blocklist_size': session.blocklist_size,
'blocklist_url': session.blocklist_url,
'cache_size_mb': session.cache_size_mb,
'config_dir': session.config_dir,
'dht_enabled': session.dht_enabled,
'download_dir': session.download_dir,
				'download_dir_free_space': humanbytes(session.download_dir_free_space,d=1),
'download_queue_enabled': session.download_queue_enabled,
'download_queue_size': session.download_queue_size,
'encryption': session.encryption,
'idle_seeding_limit': session.idle_seeding_limit,
'idle_seeding_limit_enabled': session.idle_seeding_limit_enabled,
'incomplete_dir': session.incomplete_dir,
'incomplete_dir_enabled': session.incomplete_dir_enabled,
'lpd_enabled': session.lpd_enabled,
'peer_limit_global': session.peer_limit_global,
'peer_limit_per_torrent': session.peer_limit_per_torrent,
'peer_port': session.peer_port,
'peer_port_random_on_start': session.peer_port_random_on_start,
'pex_enabled': session.pex_enabled,
'port_forwarding_enabled': session.port_forwarding_enabled,
'queue_stalled_enabled': session.queue_stalled_enabled,
'queue_stalled_minutes': session.queue_stalled_minutes,
'rename_partial_files': session.rename_partial_files,
'rpc_version': session.rpc_version,
'rpc_version_minimum': session.rpc_version_minimum,
'script_torrent_done_enabled': session.script_torrent_done_enabled,
'script_torrent_done_filename': session.script_torrent_done_filename,
'seedRatioLimit': session.seedRatioLimit,
'seedRatioLimited': session.seedRatioLimited,
'seed_queue_enabled': session.seed_queue_enabled,
'seed_queue_size': session.seed_queue_size,
'session_id': session.session_id if hasattr(session, 'session_id') else "N/A",
'speed_limit_down_enabled': session.speed_limit_down_enabled,
'speed_limit_down': humanbytes(session.speed_limit_down*1024,d=1)+'/s',
'speed_limit_up_enabled': session.speed_limit_up_enabled,
'speed_limit_up': humanbytes(session.speed_limit_up*1024,d=1)+'/s',
'start_added_torrents': session.start_added_torrents,
'trash_original_torrent_files': session.trash_original_torrent_files,
'utp_enabled': session.utp_enabled,
'version': session.version,
}
trpcStr = '\n'.join(["{}: {}{}{}".format(k,"'" if isinstance(v,str) else '', v, "'" if isinstance(v,str) else '') for k,v in trpcinfo.items()])
# get session statistics
try:
stats = ['\n'.join(["{}: {}{}{}".format(k,"'" if isinstance(v,str) else '', v, "'" if isinstance(v,str) else '') for k,v in {'downloaded': humanbytes(stat['downloadedBytes'],d=1), 'uploaded': humanbytes(stat['uploadedBytes'],d=1), 'files added': humancount(stat['filesAdded'],d=1), 'session count': stat['sessionCount'], 'uptime': humantime(stat['secondsActive'], compact_output=False)}.items()]) for stat in [session.current_stats,session.cumulative_stats]]
except Exception as e:
logger.error("Failed to get transmission session statistics: {}".format(e))
stats = ['Failed to get', 'Failed to get']
except Exception as e:
logger.error("Failed to get transmission (rpc) info: {}".format(e))
trpcStr = "Failed to get transmission (rpc) info (check logs)"
stats = ['Failed to get', 'Failed to get']
# TODO get discord.py info
# prepare output embed
embed = discord.Embed(title='TransmissionBot info', description="*All information pertains to the machine on which the bot is running...*\n\n" + "```python\n" + trpcStr + "\n```", color=0xb51a00)
embed.add_field(name="IP Addresses", value="```python\n" + addresses + "\n```", inline=True)
# embed.add_field(name="Transmission (rpc) info", value="```" + trpcStr + "```", inline=False)
embed.add_field(name="Current session stats", value="```python\n" + stats[0] + "\n```", inline=True)
embed.add_field(name="Cumulative session stats", value="```python\n" + stats[1] + "\n```", inline=True)
await message.channel.send(embed=embed)
return
@client.command(name='info', pass_context=True)
async def info_cmd(context, *, content=""):
if await CommandPrecheck(context.message, whitelist=CONFIG['owner_user_ids']):
await print_info(context.message)
@client.command(name='test', pass_context=True)
async def test(context, *, content=""):
if await CommandPrecheck(context.message, whitelist=CONFIG['owner_user_ids']):
user = context.message.author
await user.send("test message")
await context.message.channel.send("Hey {}, I sent you a message!".format(user.display_name))
pass
return
@client.event
async def on_command_error(context, error):
# if command has local error handler, return
if hasattr(context.command, 'on_error'):
return
# get the original exception
error = getattr(error, 'original', error)
if isinstance(error, commands.CommandNotFound):
return
if isinstance(error, commands.BotMissingPermissions):
missing = [perm.replace('_', ' ').replace('guild', 'server').title() for perm in error.missing_perms]
if len(missing) > 2:
fmt = '{}, and {}'.format("**, **".join(missing[:-1]), missing[-1])
else:
fmt = ' and '.join(missing)
_message = 'I need the **{}** permission(s) to run this command.'.format(fmt)
await context.send(_message)
return
if isinstance(error, commands.DisabledCommand):
await context.send('This command has been disabled.')
return
if isinstance(error, commands.CommandOnCooldown):
await context.send("This command is on cooldown, please retry in {}s.".format(math.ceil(error.retry_after)))
return
	if isinstance(error, commands.MissingPermissions):
		missing = [perm.replace('_', ' ').replace('guild', 'server').title() for perm in error.missing_perms]
		if len(missing) > 2:
			fmt = '{}, and {}'.format("**, **".join(missing[:-1]), missing[-1])
		else:
			fmt = ' and '.join(missing)
		_message = 'You need the **{}** permission(s) to use this command.'.format(fmt)
		await context.send(_message)
		return
if isinstance(error, commands.UserInputError):
await context.send("Invalid input.")
		await print_help(context.message)
return
if isinstance(error, commands.NoPrivateMessage):
try:
await context.author.send('This command cannot be used in direct messages.')
except discord.Forbidden:
pass
return
if isinstance(error, commands.CheckFailure):
await context.send("You do not have permission to use this command.")
return
# ignore all other exception types, but print them to stderr
print('Ignoring exception in command {}:'.format(context.command), file=sys.stderr)
# traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
if isinstance(error, commands.CommandOnCooldown):
try:
await context.message.delete()
except:
pass
embed = discord.Embed(title="Error!", description='This command is on a {:.2f}s cooldown'.format(error.retry_after), color=0xb51a00)
message = await context.message.channel.send(embed=embed)
await asyncio.sleep(5)
await message.delete()
elif isinstance(error, commands.CommandNotFound):
try:
await context.message.delete()
except:
pass
embed = discord.Embed(title="Error!", description="I don't know that command!", color=0xb51a00)
message = await context.message.channel.send(embed=embed)
await asyncio.sleep(2)
await help_cmd(context)
raise error
dmCommands = {
'summary': {'alias':['sum','s'], 'cmd':summary},
'list': {'alias':['ls','l'], 'cmd':list_transfers},
'legend': {'alias':[], 'cmd':legend},
'add': {'alias':['a'], 'cmd':add},
'modify': {'alias':['mod','m'], 'cmd':modify},
'help': {'alias':[], 'cmd':print_help},
'compact': {'alias':['c'], 'cmd':toggle_compact_out},
'notifications': {'alias':['n'], 'cmd':toggle_notifications},
'set-repeat-timeout': {'alias':['timeout'], 'cmd':set_repeat_timeout},
'set-repeat-freq': {'alias':['freq'], 'cmd':set_repeat_freq},
'info': {'alias':[], 'cmd':print_info}
}
client.run(CONFIG['bot_token'])
|
# Author: BigRabbit
# 3:10 PM
from django.contrib.auth import get_user_model, authenticate
from django.contrib.auth.forms import PasswordResetForm, SetPasswordForm
from django.contrib.auth.tokens import default_token_generator
from django.conf import settings
from django.utils.http import urlsafe_base64_decode
from django.utils.encoding import force_text
from rest_framework.authtoken.models import Token
from rest_framework import serializers, exceptions
from allauth.account import app_settings
from allauth.utils import get_username_max_length, email_address_exists
from allauth.account.adapter import get_adapter
from allauth.account.utils import setup_user_email
UserModel = get_user_model()
TokenModel = Token
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = UserModel
fields = ('pk', 'username', 'email', 'first_name', 'last_name')
read_only_fields = ('email',)
class TokenSerializer(serializers.ModelSerializer):
class Meta:
model = TokenModel
fields = ('key',)
class RegisterSerializer(serializers.Serializer):
"""
    Validate the user-submitted data and create a new user.
"""
username = serializers.CharField(
max_length=get_username_max_length(),
min_length=app_settings.USERNAME_MIN_LENGTH,
required=app_settings.USERNAME_REQUIRED
)
email = serializers.EmailField(required=app_settings.EMAIL_REQUIRED)
password1 = serializers.CharField(write_only=True)
password2 = serializers.CharField(write_only=True)
cleaned_data = {}
def validate_username(self, username):
username = get_adapter().clean_username(username)
return username
def validate_email(self, email):
email = get_adapter().clean_email(email)
if app_settings.UNIQUE_EMAIL:
if email and email_address_exists(email):
msg = {"email_error": "This email address has been registered!"}
raise serializers.ValidationError(msg)
return email
def validate_password1(self, password):
password = get_adapter().clean_password(password)
return password
    def validate(self, data):
        # DRF only invokes validate_<fieldname>() hooks and validate(); a
        # method named validate_password_equal would never be called, so the
        # cross-field check must live in validate().
        if data['password1'] != data['password2']:
            msg = {"password_error": "The password entered twice is inconsistent!"}
            raise serializers.ValidationError(msg)
        return data
def get_cleaned_data(self):
result = {
'username': self.validated_data.get('username', ''),
'email': self.validated_data.get('email', ''),
'password1': self.validated_data.get('password1', '')
}
return result
def save(self, request):
adapter = get_adapter()
user = adapter.new_user(request)
self.cleaned_data = self.get_cleaned_data()
adapter.save_user(request, user, self)
setup_user_email(request, user, [])
return user
class VerifyEmailSerializer(serializers.Serializer):
key = serializers.CharField()
class LoginSerializer(serializers.Serializer):
username = serializers.CharField(required=True, allow_blank=False)
email = serializers.EmailField(required=True, allow_blank=False)
password = serializers.CharField(style={'input_type': 'password'})
def _validate_username(self, username, password):
"""
        Authenticate using username and password.
        :param username: account name
        :param password: account password
:return: User object
"""
if username and password:
user = authenticate(username=username, password=password)
else:
msg = {"identity_error": "Must have 'username' and 'password'."}
raise exceptions.ValidationError(msg)
return user
def _validate_email(self, email, password):
"""
        Authenticate using email and password.
        :param email: account email
        :param password: account password
:return: User object
"""
if email and password:
user = authenticate(email=email, password=password)
else:
msg = {"identity_error": "Must have 'email' and 'password'."}
raise exceptions.ValidationError(msg)
return user
def _validate_email_username(self, username, email, password):
"""
        Authenticate using either of the two methods above.
        :param username: account name
        :param email: account email
        :param password: account password
:return: User object
"""
if username and password:
user = authenticate(username=username, password=password)
elif email and password:
user = authenticate(email=email, password=password)
else:
msg = {"identity_error": "Must have 'username' and 'password' or 'email' and 'password'."}
raise exceptions.ValidationError(msg)
return user
def validate(self, attrs):
username = attrs.get('username')
email = attrs.get('email')
password = attrs.get('password')
        # Authenticate with username and password
if app_settings.AUTHENTICATION_METHOD == \
app_settings.AuthenticationMethod.USERNAME:
user = self._validate_username(username=username, password=password)
        # Authenticate with email and password
elif app_settings.AUTHENTICATION_METHOD == \
app_settings.AuthenticationMethod.EMAIL:
user = self._validate_email(email=email, password=password)
        # Authenticate with either of the two methods above
else:
user = self._validate_email_username(username=username, email=email, password=password)
        # Check whether the user account is active
if user:
if not user.is_active:
msg = {"account_error": "This account is not available."}
raise exceptions.ValidationError(msg)
else:
msg = {"identity_error": "This identity information cannot be logged in."}
raise exceptions.ValidationError(msg)
        # Check whether the email address has been verified
if app_settings.EMAIL_VERIFICATION == app_settings.EmailVerificationMethod.MANDATORY:
email_address = user.emailaddress_set.get(email=user.email)
if not email_address.verified:
msg = {"email_error": "This email address is not verified."}
raise serializers.ValidationError(msg)
attrs['user'] = user
return attrs
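# Minimal LoginSerializer usage sketch (hypothetical credentials; which
# fields are actually required depends on allauth's AUTHENTICATION_METHOD):
#   s = LoginSerializer(data={'username': 'alice', 'email': 'alice@example.com', 'password': 'secret'})
#   s.is_valid(raise_exception=True)
#   user = s.validated_data['user']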
class PasswordResetSerializer(serializers.Serializer):
"""
    Password reset via email.
    The user requests a password-reset email; after the data is validated, the reset email is sent.
"""
email = serializers.EmailField()
password_reset_form_class = PasswordResetForm
def set_email_options(self):
"""
        Options for the password-reset email (optional).
        If left empty, PasswordResetForm's defaults are used; see the .save()
        method of PasswordResetForm in django/contrib/auth/forms.py for details.
"""
options = {}
return options
def validate_email(self, value):
"""
        Build the password-reset form.
"""
self.reset_form = self.password_reset_form_class(data=self.initial_data)
if not self.reset_form.is_valid():
raise serializers.ValidationError(self.reset_form.errors)
return value
def save(self):
"""
        Configure the email options and send the password-reset email.
"""
request = self.context.get('request')
options = {
'use_https': request.is_secure(),
'from_email': getattr(settings, 'DEFAULT_EMAIL_FROM'),
'request': request
}
options.update(self.set_email_options())
self.reset_form.save(**options)
class PasswordResetConfirmSerializer(serializers.Serializer):
"""
    Password reset via email.
    Verify the user's identity and save the new password.
"""
new_password1 = serializers.CharField(max_length=128)
new_password2 = serializers.CharField(max_length=128)
uid = serializers.CharField()
token = serializers.CharField()
set_password_form_class = SetPasswordForm
_errors = {}
def validate(self, attrs):
try:
uid = force_text(urlsafe_base64_decode(attrs['uid']))
self.user = UserModel._default_manager.get(pk=uid)
except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):
raise exceptions.ValidationError({"value_error": "Uid is invalid"})
        self.set_password_form = self.set_password_form_class(user=self.user, data=attrs)
if not self.set_password_form.is_valid():
raise serializers.ValidationError(self.set_password_form.errors)
if not default_token_generator.check_token(self.user, attrs['token']):
raise exceptions.ValidationError({"value_error": "Token is invalid"})
return attrs
def save(self):
return self.set_password_form.save()
class PasswordChangeSerializer(serializers.Serializer):
"""
    Password change from the account page.
    Verify the old password to confirm identity, then save the new password.
"""
old_password = serializers.CharField(max_length=128)
new_password1 = serializers.CharField(max_length=128)
new_password2 = serializers.CharField(max_length=128)
set_password_form_class = SetPasswordForm
def __init__(self, *args, **kwargs):
self.logout_on_password_change = getattr(settings, 'LOGOUT_ON_PASSWORD_CHANGE', False)
super(PasswordChangeSerializer, self).__init__(*args, **kwargs)
        # Unconditionally popping 'old_password' would make validate_old_password
        # dead code; only drop the field when the old-password check is disabled
        # (assumed OLD_PASSWORD_FIELD_ENABLED setting, as in django-rest-auth).
        if not getattr(settings, 'OLD_PASSWORD_FIELD_ENABLED', False):
            self.fields.pop('old_password')
self.request = self.context.get('request')
self.user = getattr(self.request, 'user', None)
def validate_old_password(self, value):
"""
        Verify the old password to confirm the user's identity.
"""
        if not self.user.check_password(value):
raise serializers.ValidationError(
{"password_error": "Your old password was entered incorrectly"}
)
return value
def validate(self, attrs):
self.set_password_form = self.set_password_form_class(user=self.user, data=attrs)
if not self.set_password_form.is_valid():
raise serializers.ValidationError(self.set_password_form.errors)
return attrs
def save(self):
self.set_password_form.save()
        # Whether the user must log in again after changing the password:
        # set LOGOUT_ON_PASSWORD_CHANGE to True if so, False otherwise.
if not self.logout_on_password_change:
            # Keep the current session authenticated
from django.contrib.auth import update_session_auth_hash
update_session_auth_hash(self.request, self.user)
|
# coding=utf-8
import pytest
from pytorch_transformers import AutoTokenizer
from neural_wsd.text.transformers import asciify
from neural_wsd.text.transformers import BasicTextTransformer
from neural_wsd.text.transformers import PaddingTransformer
from neural_wsd.text.transformers import WordPieceListTransformer
def test_lowercase_op():
t = BasicTextTransformer(name="text-prepocess", to_lowercase=True)
assert t.transform(["Here some Text", "And More"])[0] == ["here some text", "and more"]
def test_padding_op_correct():
t = PaddingTransformer(name="padding-op", max_seq_len=5)
truth = [[1, 2, 3, 0, 0], [1, 0, 5, 4, 0]]
output, _ = t.transform([[1, 2, 3], [1, 0, 5, 4]])
assert truth == output.tolist()
assert output.shape == (2, 5)
def test_asciify_correct():
    # TODO (kerem, osman): add more test cases, especially for English.
assert asciify("Ślusàrski") == "Slusarski"
assert asciify("kierowców") == "kierowcow"
assert (
asciify("Sıfır noktasındayız. Olayın şerefine bir konuşma yapacak mısın?") == "Sifir "
"noktasindayiz. Olayin serefine bir konusma yapacak misin?"
)
assert (
asciify("Here is some text that shouldn't be changed.") == "Here is some text that "
"shouldn't be changed."
)
@pytest.mark.parametrize("base_model", [("roberta-base")])
def test_wordpiece_to_token_correct(base_model):
t = WordPieceListTransformer(name="wordpiece-to-token", base_model=base_model)
tokenizer = AutoTokenizer.from_pretrained(base_model)
# Long text
sentences = [
"Some strange text sssasd sdafds dfv vc a more strange",
"Short sentence",
"OneToken",
"",
]
encoded_ids = [tokenizer.encode(sentence) for sentence in sentences]
_, context = t.transform(encoded_ids)
t = context["wordpiece_to_token_list"]
assert [
(1,),
(2,),
(3,),
(4, 5, 6, 7),
(8, 9, 10),
(11, 12),
(13, 14),
(15,),
(16,),
(17,),
] == t[0]
assert [(1,), (2,)] == t[1]
assert [(1, 2)] == t[2]
assert [] == t[3]
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Batctl(MakefilePackage):
"""B.A.T.M.A.N. advanced control and management tool"""
homepage = "https://github.com/open-mesh-mirror/batctl"
url = "https://github.com/open-mesh-mirror/batctl/archive/v2019.5.tar.gz"
version('2019.5', sha256='ffe5857a33068ec174140c154610d76d833524d840a2fc2d1a15e16686213cad')
version('2019.4', sha256='a3564eb9727335352dc0cfa2f2b29474c2c837384689ac5fcb387784a56e7685')
version('2019.3', sha256='2bd93fa14925a8dc63a67e64266c8ccd2fa3ac44b10253d93e6f8a630350070c')
version('2019.2', sha256='fb656208ff7d4cd8b1b422f60c9e6d8747302a347cbf6c199d7afa9b80f80ea3')
depends_on('libnl')
def install(self, spec, prefix):
mkdirp(prefix.bin)
install('batctl', prefix.bin)
|
import json
import time
import random
import numpy as np
from umap.umap_ import UMAP
from typing import Any, List
from sklearn.utils import shuffle
from matplotlib import pyplot as plt
from matplotlib.colors import ListedColormap
from wsl.locations import wsl_model_dir, known_extensions
from wsl.loaders.img_loaders import Loader
import torch
from torch.utils.data import DataLoader
colour_list = ['lightsalmon', 'orangered', 'indianred', 'brown', 'palegreen', 'darkseagreen',
'greenyellow', 'darkolivegreen', 'lightskyblue', 'deepskyblue', 'cyan', 'dodgerblue']
def plot(features, labels, classes, path):
print(features.shape, labels.shape)
features, labels = shuffle(features, labels)
print('Plotting UMAP...', end='')
features = features.reshape(features.shape[0], -1)
embedding = UMAP(n_neighbors=20, min_dist=1, metric='correlation', random_state=1, transform_seed=1).fit_transform(features)
colours = ListedColormap(colour_list[:len(classes)])
    scatter = plt.scatter(embedding[:, 0], embedding[:, 1], c=labels, s=3, alpha=1, cmap=colours)  # use the per-class colormap built above
plt.legend(handles=scatter.legend_elements()[0], labels=classes, loc='best',ncol=1, fontsize=6)
plt.savefig(str(path / 'umap.png'), dpi=300)
plt.close()
print('done.')
def main(debug: bool = True, model: str = 'rsna_pneumonia_lr0.0001_bs32_adam_densenet121_wildcat_maps1_alpha0.05_flaneur',
datasets: Any = ['cancer_mgh', 'cancer_dmist2', 'cancer_dmist3', 'cancer_dmist4']):
path = wsl_model_dir / model
print(f'Model: {path}')
assert path.exists()
    if (path / 'configs.json').exists():  # configs.json is only written once training completed
with open(path / 'configs.json') as f:
configs = json.load(f)
# print(configs)
else:
print('Incomplete model')
return
checkpoint = torch.load(path / 'best.pt', map_location='cuda:0' if torch.cuda.is_available() else 'cpu')
checkpoint['model'].eval()
features = {}
print(checkpoint['model'].module._modules.keys())
if configs['wildcat']:
layer_name = 'classifier'
else:
layer_name = 'pool'
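    # Forward hook: flatten the chosen layer's activations and accumulate them
    # across batches into the `features` dict, keyed by layer name.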
def hook(layer, inp, out):
if layer_name not in features:
features[layer_name] = out.detach().data.view(out.size(0), -1)
else:
features[layer_name] = torch.cat((features[layer_name], out.detach().data.view(out.size(0), -1)), dim=0)
handles = checkpoint['model'].module._modules[layer_name].register_forward_hook(hook)
dataset_classes = []
for dataset_id, dataset in enumerate(datasets):
loader = Loader(data=dataset,
split='valid',
extension=known_extensions[dataset],
length=500)
dataloader = DataLoader( # type: ignore
loader, batch_size=configs['batchsize'], num_workers=4,
pin_memory=True, shuffle=True)
print(f'Length of {dataset}: {len(loader.names)}')
dataset_classes += [dataset_id] * len(loader)
start = time.time()
with torch.set_grad_enabled(False):
for idx, data in enumerate(dataloader):
imgs = data[1].cuda().float()
_ = checkpoint['model'](imgs)
speed = configs['batchsize'] * idx // (time.time() - start)
print('Iter:', idx, 'Speed:', int(speed), 'img/s', end='\r', flush=True)
print('Total time:', time.time() - start, 'secs')
plot(features[layer_name].cpu().detach().numpy(), np.asarray(dataset_classes), datasets, path)
if __name__ == '__main__':
main()
|
from model.contact import Contact
test_data = [
Contact(firstname="Nikolay", middlename="Vassilievich", lastname="Gogol", nickname="writer", title="Title",
company="Writer Union", address="Dikanka, 8 - 13", homephone="+31278963215", mobilephone="8(921)4567893", workphone="84958963214",
faxphone="89994445566", email="gogol@pochta.com", email2="gogol2@pochta.com", email3="gogol3@pochta.com",
homepage="http://www.gogol.com", bday="1", bmonth="April", byear="1809",
aday="21", amonth="July", ayear="1829", address2="Moscow, Night st., 8 - 77", secondaryphone="89996132578"),
Contact(firstname="Alexander", middlename="Sergeeevich", lastname="Pushkin", nickname="poet", title="Title1",
company="Poet Union", address="Boldino, Central st., 1 -15", homephone="+31278963215", mobilephone="8(921)4567893", workphone="84958963214",
faxphone="89994445566", email="pushkin@pochta.com", email2="pushkin2@pochta.com", email3="pushkin3@pochta.com",
homepage="http://www.pushkin.com", bday="6", bmonth="June", byear="1799",
aday="21", amonth="July", ayear="1820", address2="Moscow, Goncharova st., 8 - 77", secondaryphone="89996132578")
]
|
from qiskit import *
# from qiskit.visualization import plot_histogram
# import matplotlib
class Simple:
def __init__(self):
self.backend = None
def auth(self):
# self.backend = Aer.get_backend('statevector_simulator')
self.backend = Aer.get_backend('aer_simulator')
    def run(self, circuit: str) -> dict:
qc = QuantumCircuit.from_qasm_str(circuit)
print(qc) # TODO only on debug
job = self.backend.run(qc, shots=1024, memory=True)
output = job.result().get_counts(0)
# TODO plot
# plot_histogram(output)
# matplotlib.pyplot.show(block=True) # To show the graph
return output
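# Minimal usage sketch (assumes a local qiskit-aer install; the OPENQASM
# program below is only illustrative):
#   s = Simple()
#   s.auth()
#   counts = s.run('OPENQASM 2.0; include "qelib1.inc"; qreg q[1]; creg c[1]; h q[0]; measure q[0] -> c[0];')
#   print(counts)  # e.g. {'0': 512, '1': 512}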
|
#!/usr/bin/env python3
import os
import numpy as np
import astropy.io.fits as fits
from stella.catalog.base import _str_to_float
inputfile = os.path.join(os.getenv('ASTRO_DATA'), 'catalog/I/239/hip_main.dat')
types = [
('HIP', np.int32),
('RAdeg', np.float64),
('DEdeg', np.float64),
('Vmag', np.float32),
('Plx', np.float32),
('e_Plx', np.float32),
('pmRA', np.float32),
('pmDE', np.float32),
('e_pmRA', np.float32),
('e_pmDE', np.float32),
('BTmag', np.float32),
('e_BTmag',np.float32),
('VTmag', np.float32),
('e_VTmag',np.float32),
('B-V', np.float32),
('e_B-V', np.float32),
('r_B-V', 'S1'),
('V-I', np.float32),
('e_V-I', np.float32),
('r_V-I', 'S1'),
('Hpmag', np.float32),
('e_Hpmag',np.float32),
('Hpscat', np.float32),
('o_Hpmag',np.int16),
#('CCDM', 'S10'),
#('HD', np.int32),
#('BD', 'S10'),
#('CoD', 'S10'),
('SpType', 'S12'),
('r_SpType','S1'),
]
tmp = list(zip(*types))
record = np.dtype({'names':tmp[0],'formats':tmp[1]})
fill_item = np.array((0, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN,
np.NaN, np.NaN, np.NaN, np.NaN, np.NaN,
np.NaN, np.NaN, np.NaN, np.NaN, np.NaN,
'', np.NaN, np.NaN, '', np.NaN, np.NaN, np.NaN,
-32768, '',''),dtype=record)
data = {}
infile = open(inputfile)
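# Parse the fixed-width Hipparcos main catalogue (VizieR I/239): each field
# below is sliced out of the row by the byte columns given in the ReadMe.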
for row in infile:
hip = int(row[8:14])
rah = int(row[17:19])
ram = int(row[20:22])
ras = float(row[23:28])
radeg = (rah + ram/60. + ras/3600.)*15.
ded = abs(int(row[30:32]))
dem = int(row[33:35])
des = float(row[36:40])
dedeg = ded + dem/60. + des/3600.
if row[29]=='-':
dedeg = -dedeg
vmag = _str_to_float(row[41:46], np.NaN)
plx = _str_to_float(row[79:86], np.NaN)
e_plx = _str_to_float(row[119:125], np.NaN)
pmRA = _str_to_float(row[87:95], np.NaN)
pmDE = _str_to_float(row[96:104], np.NaN)
e_pmRA = _str_to_float(row[126:132], np.NaN)
e_pmDE = _str_to_float(row[133:139], np.NaN)
BTmag = _str_to_float(row[217:223], np.NaN)
VTmag = _str_to_float(row[230:236], np.NaN)
e_BTmag = _str_to_float(row[224:229], np.NaN)
e_VTmag = _str_to_float(row[237:242], np.NaN)
BV = _str_to_float(row[245:251], np.NaN)
e_BV = _str_to_float(row[252:257], np.NaN)
r_BV = row[258].strip()
VI = _str_to_float(row[260:264], np.NaN)
e_VI = _str_to_float(row[265:269], np.NaN)
r_VI = row[270].strip()
Hpmag = _str_to_float(row[274:281], np.NaN)
e_Hpmag = _str_to_float(row[282:288], np.NaN)
Hpscat = _str_to_float(row[289:294], np.NaN)
if row[295:298].strip()=='':
o_Hpmag = 0
else:
o_Hpmag = int(row[295:298])
if not np.isnan(pmRA):
pm_ra = pmRA*1e-3/3600. # convert pm_RA from mas/yr to degree/yr
#radeg += (2000.0-1991.25)*pm_ra/math.cos(dedeg/180.*math.pi)
if not np.isnan(pmDE):
pm_de = pmDE*1e-3/3600. # convert pm_Dec from mas/yr to degree/yr
#dedeg += (2000.0-1991.25)*pm_de
SpType = row[435:447].strip()
r_SpType = row[448].strip()
item = np.array((hip, radeg, dedeg, vmag, plx, e_plx,
pmRA, pmDE, e_pmRA, e_pmDE,
BTmag, e_BTmag, VTmag, e_VTmag,
BV, e_BV, r_BV, VI, e_VI, r_VI,
Hpmag, e_Hpmag, Hpscat, o_Hpmag,
#CCDM, HD, BD, CoD,
SpType, r_SpType,
),dtype=record)
if hip in data:
print('Error: Duplicate Records for HIP', hip)
data[hip] = item
infile.close()
newdata = []
for hip in range(1, max(data.keys())+1):
if hip in data:
newdata.append(data[hip])
else:
newdata.append(fill_item)
newdata = np.array(newdata, dtype=record)
pri_hdu = fits.PrimaryHDU()
tbl_hdu = fits.BinTableHDU(newdata)
hdu_lst = fits.HDUList([pri_hdu,tbl_hdu])
outputfile='HIP.fits'
if os.path.exists(outputfile):
os.remove(outputfile)
hdu_lst.writeto(outputfile)
|
__author__ = 'raymond301'
import itertools, glob, os
from config import DATA_ARCHIVE_ROOT, logger as logging
from .kgenomes_parser import load_data
import biothings.hub.dataload.uploader as uploader
from hub.dataload.uploader import SnpeffPostUpdateUploader
class KgenomesBaseUploader(uploader.IgnoreDuplicatedSourceUploader,
uploader.ParallelizedSourceUploader,
SnpeffPostUpdateUploader):
def get_pinfo(self):
pinfo = super(KgenomesBaseUploader,self).get_pinfo()
        # the kgenomes parser has significant memory requirements; reserve 12 GB
logging.debug( pinfo )
pinfo.setdefault("__reqs__",{})["mem"] = 12 * (1024**3)
#logging.debug( pinfo.setdefault("__reqs__",{})["mem"] )
return pinfo
def jobs(self):
files = glob.glob(os.path.join(self.data_folder,self.__class__.GLOB_PATTERN))
logging.debug(os.path.join(self.data_folder,self.__class__.GLOB_PATTERN))
        if len(files) != 1:
            raise uploader.ResourceError("Expected 1 file, got: %s" % files)
chrom_list = [str(i) for i in range(1, 23)] + ['X', 'Y', 'MT']
return list(itertools.product(files,chrom_list))
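        # Each (file, chromosome) pair becomes one parallel job; the
        # ParallelizedSourceUploader base class feeds them to load_data().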
def load_data(self,input_file,chrom):
self.logger.info("Load data from '%s' for chr %s" % (input_file,chrom))
return load_data(self.__class__.__metadata__["assembly"],input_file,chrom)
def post_update_data(self, *args, **kwargs):
super(KgenomesBaseUploader,self).post_update_data(*args,**kwargs)
#self.logger.info("Indexing 'rsid'")
# background=true or it'll lock the whole database...
#self.collection.create_index("dbsnp.rsid",background=True)
@classmethod
def get_mapping(klass):
mapping = {
"kgenomes" : {
"properties": {
"chrom": {
"type": "text",
"analyzer": "string_lowercase"
},
"pos": {
"type": "long"
},
"ref": {
"type": "text",
"analyzer": "string_lowercase"
},
"filter": {
"type": "text",
"analyzer": "string_lowercase"
},
"alt": {
"type": "text",
"analyzer": "string_lowercase"
},
"multi-allelic": {
"type": "text",
"analyzer": "string_lowercase"
},
"alleles": {
"type": "text",
"analyzer": "string_lowercase"
},
"type": {
"type": "text",
"analyzer": "string_lowercase"
},
"qual": {
"type": "float"
},
"filter": {
"type": "text",
"analyzer": "string_lowercase"
},
"ac": {
"type": "integer"
},
"an": {
"type": "integer"
},
"af": {
"properties": {
"af": {
"type": "float"
},
"af_afr": {
"type": "float"
},
"af_amr": {
"type": "float"
},
"af_eas": {
"type": "float"
},
"af_eur": {
"type": "float"
},
"af_sas": {
"type": "float"
}
}
},
"dp": {
"type": "integer"
},
"vt": {
"type": "text",
"analyzer": "string_uppercase"
},
"num_samples": {
"type": "integer"
},
"dp": {
"type": "long"
},
"exon_flag": {
"type": "boolean"
},
"aa": {
"properties": {
"ancestral_allele": {
"type": "text",
"analyzer": "string_lowercase"
},
"ancestral_indel_type": {
"type": "text",
"analyzer": "string_lowercase"
}
}
}
}
},
}
return mapping
class KgenomesUploader(KgenomesBaseUploader):
name = "kgenomes"
main_source= "kgenomes"
__metadata__ = {"mapper" : 'observed',
"assembly" : "hg19",
"src_meta" : {
"url" : "http://www.internationalgenome.org/about",
"license" : "Creative Commons",
"license_url" : "http://www.internationalgenome.org/announcements/data-management-and-community-access-paper-published-2012-04-29/",
"license_url_short": "nnn"
}
}
GLOB_PATTERN = "*.genotypes.vcf"
|
from django.views.decorators.csrf import csrf_exempt
from django.template import RequestContext, loader
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from django.shortcuts import render_to_response, get_object_or_404
from ldap_auth.toolbox import get_user
from django.core.exceptions import PermissionDenied
from django.views.decorators.http import require_POST, require_GET
import json
from models import *
from utils import ajax_request
@csrf_exempt
@require_POST
@ajax_request
def push_log_entries(request, server):
data = request.POST.get("data", "")
success_count = 0
for item in json.loads(data):
item = Log(server_name=server, **item)
try:
item.save()
success_count += 1
except:
pass
return {"status": "ok", "success": True, "count": success_count}
@require_GET
@ajax_request
def get_last_timestamp(request, server):
last_item = Log.objects.latest("timestamp")
return {"server": server, "timestamp": str(last_item)}
|
class Solution:
    def reverseBits(self, n):
        '''
        input: n, an integer
        return: an integer whose 32 bits are those of n reversed
        '''
        # Format n as a fixed-width 32-bit binary string, reverse it, and
        # parse the result back as base 2. (The original round-trip through a
        # decimal int only worked because binary strings contain no digit > 1.)
        return int(format(n, '032b')[::-1], 2)
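    # Worked example (LeetCode 190): reverseBits(43261596) == 964176192,
    # i.e. 0b00000010100101000001111010011100 reversed bitwise gives
    # 0b00111001011110000010100101000000.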
|
"""
Support for paramiko remote objects.
"""
from . import run
from .opsys import OS
import connection
from teuthology import misc
import time
import re
import logging
from cStringIO import StringIO
from teuthology import lockstatus as ls
import os
import pwd
import tempfile
import netaddr
import console
log = logging.getLogger(__name__)
class Remote(object):
"""
A connection to a remote host.
This is a higher-level wrapper around Paramiko's `SSHClient`.
"""
# for unit tests to hook into
_runner = staticmethod(run.run)
def __init__(self, name, ssh=None, shortname=None, console=None,
host_key=None, keep_alive=True):
self.name = name
if '@' in name:
(self.user, hostname) = name.split('@')
# Temporary workaround for 'hostname --fqdn' not working on some
# machines
self._hostname = hostname
else:
# os.getlogin() doesn't work on non-login shells. The following
# should work on any unix system
self.user = pwd.getpwuid(os.getuid()).pw_name
hostname = name
self._shortname = shortname or hostname.split('.')[0]
self._host_key = host_key
self.keep_alive = keep_alive
self._console = console
self.ssh = ssh
def connect(self, timeout=None):
args = dict(user_at_host=self.name, host_key=self._host_key,
keep_alive=self.keep_alive)
if timeout:
args['timeout'] = timeout
self.ssh = connection.connect(**args)
return self.ssh
def reconnect(self, timeout=None):
"""
Attempts to re-establish connection. Returns True for success; False
for failure.
"""
if self.ssh is not None:
self.ssh.close()
if not timeout:
return self._reconnect(timeout=timeout)
start_time = time.time()
elapsed_time = lambda: time.time() - start_time
while elapsed_time() < timeout:
success = self._reconnect()
if success:
break
default_sleep_val = 30
# Don't let time_remaining be < 0
time_remaining = max(0, timeout - elapsed_time())
sleep_val = min(time_remaining, default_sleep_val)
time.sleep(sleep_val)
return success
def _reconnect(self, timeout=None):
try:
self.connect(timeout=timeout)
return self.is_online
except Exception as e:
log.debug(e)
return False
@property
def ip_address(self):
return self.ssh.get_transport().getpeername()[0]
@property
def interface(self):
"""
The interface used by the current SSH connection
"""
if not hasattr(self, '_interface'):
self._set_iface_and_cidr()
return self._interface
@property
def cidr(self):
"""
The network (in CIDR notation) used by the remote's SSH connection
"""
if not hasattr(self, '_cidr'):
self._set_iface_and_cidr()
return self._cidr
def _set_iface_and_cidr(self):
proc = self.run(
args=['PATH=/sbin:/usr/sbin', 'ip', 'addr', 'show'],
stdout=StringIO(),
)
proc.wait()
        regexp = 'inet.? %s' % re.escape(self.ip_address)  # escape dots so they match literally
proc.stdout.seek(0)
for line in proc.stdout.readlines():
line = line.strip()
if re.match(regexp, line):
items = line.split()
self._interface = items[-1]
self._cidr = str(netaddr.IPNetwork(items[1]).cidr)
return
raise RuntimeError("Could not determine interface/CIDR!")
@property
def hostname(self):
if not hasattr(self, '_hostname'):
proc = self.run(args=['hostname', '--fqdn'], stdout=StringIO())
proc.wait()
self._hostname = proc.stdout.getvalue().strip()
return self._hostname
@property
def machine_type(self):
if not getattr(self, '_machine_type', None):
remote_info = ls.get_status(self.hostname)
if not remote_info:
return None
self._machine_type = remote_info.get("machine_type", None)
return self._machine_type
@property
def shortname(self):
if self._shortname is None:
self._shortname = self.hostname.split('.')[0]
return self._shortname
@property
def is_online(self):
if self.ssh is None:
return False
try:
self.run(args="true")
except Exception:
return False
return self.ssh.get_transport().is_active()
def ensure_online(self):
if not self.is_online:
return self.connect()
@property
def system_type(self):
"""
System type decorator
"""
return misc.get_system_type(self)
def __str__(self):
return self.name
def __repr__(self):
return '{classname}(name={name!r})'.format(
classname=self.__class__.__name__,
name=self.name,
)
def run(self, **kwargs):
"""
This calls `orchestra.run.run` with our SSH client.
TODO refactor to move run.run here?
"""
if self.ssh is None:
self.reconnect()
r = self._runner(client=self.ssh, name=self.shortname, **kwargs)
r.remote = self
return r
def mktemp(self):
"""
Make a remote temporary file
Returns: the name of the temp file created using
tempfile.mkstemp
"""
py_cmd = "import os; import tempfile; import sys;" + \
"(fd,fname) = tempfile.mkstemp();" + \
"os.close(fd);" + \
"sys.stdout.write(fname.rstrip());" + \
"sys.stdout.flush()"
args = [
'python',
'-c',
py_cmd,
]
proc = self.run(
args=args,
stdout=StringIO(),
)
data = proc.stdout.getvalue()
return data
def chmod(self, file_path, permissions):
"""
As super-user, set permissions on the remote file specified.
"""
args = [
'sudo',
'chmod',
permissions,
file_path,
]
self.run(
args=args,
)
def chcon(self, file_path, context):
"""
Set the SELinux context of a given file.
VMs and non-RPM-based hosts will skip this operation because ours
currently have SELinux disabled.
:param file_path: The path to the file
:param context: The SELinux context to be used
"""
if self.os.package_type != 'rpm':
return
if misc.is_vm(self.shortname):
return
self.run(args="sudo chcon {con} {path}".format(
con=context, path=file_path))
def _sftp_put_file(self, local_path, remote_path):
"""
Use the paramiko.SFTPClient to put a file. Returns the remote filename.
"""
sftp = self.ssh.open_sftp()
sftp.put(local_path, remote_path)
return
def _sftp_get_file(self, remote_path, local_path):
"""
Use the paramiko.SFTPClient to get a file. Returns the local filename.
"""
file_size = self._format_size(
self._sftp_get_size(remote_path)
).strip()
log.debug("{}:{} is {}".format(self.shortname, remote_path, file_size))
sftp = self.ssh.open_sftp()
sftp.get(remote_path, local_path)
return local_path
def _sftp_open_file(self, remote_path):
"""
Use the paramiko.SFTPClient to open a file. Returns a
paramiko.SFTPFile object.
"""
sftp = self.ssh.open_sftp()
return sftp.open(remote_path)
def _sftp_get_size(self, remote_path):
"""
Via _sftp_open_file, return the filesize in bytes
"""
with self._sftp_open_file(remote_path) as f:
return f.stat().st_size
@staticmethod
def _format_size(file_size):
"""
Given a file_size in bytes, returns a human-readable representation.
"""
for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
if abs(file_size) < 1024.0:
break
file_size = file_size / 1024.0
return "{:3.0f}{}".format(file_size, unit)
def remove(self, path):
self.run(args=['rm', '-fr', path])
def put_file(self, path, dest_path, sudo=False):
"""
Copy a local filename to a remote file
"""
if sudo:
raise NotImplementedError("sudo not supported")
self._sftp_put_file(path, dest_path)
return
def get_file(self, path, sudo=False, dest_dir='/tmp'):
"""
Fetch a remote file, and return its local filename.
:param sudo: Use sudo on the remote end to read a file that
requires it. Defaults to False.
:param dest_dir: Store the file in this directory. If it is /tmp,
generate a unique filename; if not, use the original
filename.
:returns: The path to the local file
"""
if not os.path.isdir(dest_dir):
raise IOError("{dir} is not a directory".format(dir=dest_dir))
if sudo:
orig_path = path
path = self.mktemp()
args = [
'sudo',
'cp',
orig_path,
path,
]
self.run(args=args)
self.chmod(path, '0666')
if dest_dir == '/tmp':
# If we're storing in /tmp, generate a unique filename
(fd, local_path) = tempfile.mkstemp(dir=dest_dir)
os.close(fd)
else:
# If we are storing somewhere other than /tmp, use the original
# filename
local_path = os.path.join(dest_dir, path.split(os.path.sep)[-1])
self._sftp_get_file(path, local_path)
if sudo:
self.remove(path)
return local_path
def get_tar(self, path, to_path, sudo=False):
"""
Tar a remote directory and copy it locally
"""
remote_temp_path = self.mktemp()
args = []
if sudo:
args.append('sudo')
args.extend([
'tar',
'cz',
'-f', remote_temp_path,
'-C', path,
'--',
'.',
])
self.run(args=args)
if sudo:
self.chmod(remote_temp_path, '0666')
self._sftp_get_file(remote_temp_path, to_path)
self.remove(remote_temp_path)
def get_tar_stream(self, path, sudo=False):
"""
Tar-compress a remote directory and return the RemoteProcess
for streaming
"""
args = []
if sudo:
args.append('sudo')
args.extend([
'tar',
'cz',
'-f', '-',
'-C', path,
'--',
'.',
])
return self.run(args=args, wait=False, stdout=run.PIPE)
@property
def os(self):
if not hasattr(self, '_os'):
proc = self.run(
args=[
'python', '-c',
'import platform; print platform.linux_distribution()'],
stdout=StringIO(), stderr=StringIO(), check_status=False)
if proc.exitstatus == 0:
self._os = OS.from_python(proc.stdout.getvalue().strip())
return self._os
proc = self.run(args=['cat', '/etc/os-release'], stdout=StringIO(),
stderr=StringIO(), check_status=False)
if proc.exitstatus == 0:
self._os = OS.from_os_release(proc.stdout.getvalue().strip())
return self._os
proc = self.run(args=['lsb_release', '-a'], stdout=StringIO(),
stderr=StringIO())
self._os = OS.from_lsb_release(proc.stdout.getvalue().strip())
return self._os
@property
def arch(self):
if not hasattr(self, '_arch'):
proc = self.run(args=['uname', '-m'], stdout=StringIO())
proc.wait()
self._arch = proc.stdout.getvalue().strip()
return self._arch
@property
def host_key(self):
if not self._host_key:
trans = self.ssh.get_transport()
key = trans.get_remote_server_key()
self._host_key = ' '.join((key.get_name(), key.get_base64()))
return self._host_key
@property
def inventory_info(self):
node = dict()
node['name'] = self.hostname
node['user'] = self.user
node['arch'] = self.arch
node['os_type'] = self.os.name
node['os_version'] = '.'.join(self.os.version.split('.')[:2])
node['ssh_pub_key'] = self.host_key
node['up'] = True
return node
@property
def console(self):
if not self._console:
self._console = getRemoteConsole(self.name)
return self._console
def __del__(self):
if self.ssh is not None:
self.ssh.close()
def getShortName(name):
"""
Extract the name portion from remote name strings.
"""
hn = name.split('@')[-1]
    p = re.compile(r'([^.]+)\.?.*')
return p.match(hn).groups()[0]
def getRemoteConsole(name, ipmiuser=None, ipmipass=None, ipmidomain=None,
logfile=None, timeout=20):
"""
Return either VirtualConsole or PhysicalConsole depending on name.
"""
if misc.is_vm(name):
return console.VirtualConsole(name)
return console.PhysicalConsole(
name, ipmiuser, ipmipass, ipmidomain, logfile, timeout)
|
import os
import logging
import twink
import twink.ovs
import twink.ofp4 as ofp4
import twink.ofp4.parse as ofp4parse
import twink.ofp4.build as b
import twink.ofp4.oxm as oxm
class TestChannel(
twink.PortMonitorChannel,
twink.ovs.OvsChannel,
twink.JackinChannel,
twink.LoggingChannel,
):
accept_versions=[4,]
init = False
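    # This channel implements a simple learning switch: on PACKET_IN the
    # source MAC is learned on its ingress port (table 0) and a unicast
    # output rule is installed for it (table 1); unmatched traffic floods
    # through group 1, which is set up in handle() on HELLO.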
def handle_async(self, message, channel):
if not self.init:
return
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_PACKET_IN:
print(msg)
in_port = [o for o in oxm.parse_list(msg.match.oxm_fields) if o.oxm_field==oxm.OXM_OF_IN_PORT][0].oxm_value
src_mac = ":".join(["%02x" % ord(a) for a in msg.data[6:12]])
channel.add_flow("table=0,priority=2,idle_timeout=300, dl_src=%s,in_port=%d, actions=goto_table:1" % (src_mac, in_port))
channel.add_flow("table=1,priority=2,idle_timeout=300, dl_dst=%s, actions=output:%d" % (src_mac, in_port))
channel.send(b.ofp_packet_out(None, msg.buffer_id, in_port, None, [], None))
print(self.ofctl("dump-flows"))
def handle(self, message, channel):
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_HELLO:
self.ofctl("add-group", "group_id=1,type=all,"+",".join(["bucket=output:%d" % port.port_no for port in self.ports]))
self.add_flow("table=0,priority=1, actions=controller,goto_table:1")
self.add_flow("table=1,priority=3, dl_dst=01:00:00:00:00:00/01:00:00:00:00:00, actions=group:1")
self.add_flow("table=1,priority=1, actions=group:1")
self.init = True
if __name__=="__main__":
if os.environ.get("USE_GEVENT"):
twink.use_gevent()
logging.basicConfig(level=logging.DEBUG)
tcpserv = twink.StreamServer(("0.0.0.0", 6653))
tcpserv.channel_cls = TestChannel
twink.sched.serve_forever(tcpserv)
|
# The Fibonacci series is formed by the sequence 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, ...
# Write a program that generates the series until the value exceeds 500.
penultimo = 0
ultimo = 1
print(penultimo)
print(ultimo)
while ultimo < 500:
    termo = ultimo + penultimo
    penultimo = ultimo
    ultimo = termo
    print(termo)
|
from django.shortcuts import render
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateAPIView
from .models import DoctorClinic
from .serializers import DoctorClinicSerializer
# Create your views here.
class DoctorClinicList(ListCreateAPIView):
    queryset = DoctorClinic.objects.all()
    serializer_class = DoctorClinicSerializer


class DoctorClinicDetail(RetrieveUpdateAPIView):
    queryset = DoctorClinic.objects.all()
    serializer_class = DoctorClinicSerializer
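
# A minimal urls.py sketch wiring these views (route names are illustrative,
# not taken from this project):
#
#   from django.urls import path
#   from .views import DoctorClinicList, DoctorClinicDetail
#
#   urlpatterns = [
#       path('doctorclinics/', DoctorClinicList.as_view()),
#       path('doctorclinics/<int:pk>/', DoctorClinicDetail.as_view()),
#   ]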
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
import string
import webbrowser
from datetime import datetime
from flask import Flask, render_template, request, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
DEBUG = True
PORT = 5000
dir_path = os.path.dirname(os.path.realpath(__file__))
app = Flask(__name__)
app.secret_key = 'admin'
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///" + dir_path.replace("\\", "/") + "/todo.db"
db = SQLAlchemy(app)
class Todo(db.Model):
"""
    Defines the model for the records stored in the database.
"""
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(80))
content = db.Column(db.Text)
complete = db.Column(db.Boolean)
date = db.Column(db.DateTime, nullable=False)
finished_date = db.Column(db.DateTime, nullable=True)
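
# Create the tables on first use if the SQLite file does not exist yet
# (a minimal sketch; some Flask-SQLAlchemy versions also accept a plain
# db.create_all() without an application context).
with app.app_context():
    db.create_all()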
@app.route("/")
def home():
"""
    Loads home.html for the home page route and passes along
    all records fetched from the database.
"""
title = 'Hızlı ve Kolay Yapılacaklar Listesi'
todos = Todo.query.all()
return render_template("home.html", todos=todos, title=title)
@app.route("/add", methods=["GET", "POST"])
def add_todo():
"""
    Adds a new todo. If the form was submitted via POST, it processes the
    form and redirects to the home page; otherwise it redirects straight back.
"""
if request.method == "POST":
        title = string.capwords(request.form.get("todo_name").capitalize())
        content = request.form.get("todo_content")
        if content:
            content = content[0].upper() + content[1:]
now = datetime.now()
new_todo = Todo(title=title, content=content, complete=False, date=now, finished_date=None)
db.session.add(new_todo)
db.session.commit()
return redirect(url_for("home"))
else:
return redirect(url_for("home"))
@app.route("/complete/<string:id>")
def complete_todo(id):
"""
    Toggles a todo's completed flag.
"""
    todo = Todo.query.filter_by(id=id).first()
    if todo is None:
        return redirect(url_for("home"))
    else:
        if not todo.complete:
            todo.complete = True
            todo.finished_date = datetime.now()
        else:
            todo.complete = False
            todo.finished_date = None
        db.session.commit()
return redirect(url_for("home"))
@app.route("/delete/<string:id>")
def delete_todo(id):
"""
    Deletes the selected todo from the database.
"""
todo = Todo.query.filter_by(id=id).first()
    if todo is None:
return redirect(url_for("home"))
else:
db.session.delete(todo)
db.session.commit()
return redirect(url_for("home"))
@app.route("/delete/all")
def delete_all_todos():
"""
    If there are ten or more todos in total, an extra button is shown.
    Pressing it asks for confirmation: all todos will be deleted, are you sure?
"""
title = "Tüm Todo'ları Sil"
todos = Todo.query.all()
if todos == []:
return redirect(url_for("home"))
return render_template("delete_all_todos.html", todos=todos, title=title)
@app.route("/delete/all/sure")
def delete_all_todos_sure():
"""
    Deletes every todo once the user confirms the bulk deletion.
"""
todos = Todo.query.all()
    for todo in todos:
        db.session.delete(todo)
db.session.commit()
return redirect(url_for("home"))
@app.route("/detail/<string:id>")
def detail_todo(id):
"""
    Loads the detail page for the selected todo.
    Redirects to the home page if the todo does not exist.
"""
todo = Todo.query.filter_by(id=id).first()
    if todo is None:
return redirect(url_for("home"))
else:
return render_template("detail.html", todo=todo, title=todo.title)
@app.route("/edit/<string:id>")
def edit_todo(id):
"""
    Loads the edit page for the selected todo.
    Redirects to the home page if the todo does not exist.
"""
todo = Todo.query.filter_by(id=id).first()
    if todo is None:
return redirect(url_for("home"))
else:
return render_template("edit.html", todo=todo, title=todo.title)
@app.route("/change/<string:id>", methods=["GET", "POST"])
def change_todo(id):
"""
    Applies the submitted edits to the selected todo.
    Redirects to the home page if the todo does not exist.
"""
    todo = Todo.query.filter_by(id=id).first()
    if todo is None:
        return redirect(url_for("home"))
    else:
        new_title = request.form.get("new_todo_name").capitalize()
        new_content = request.form.get("new_todo_content")
        if new_content:
            new_content = new_content[0].upper() + new_content[1:]
        keep_current_date = request.form.get("keep_date")
        todo.title = new_title
        todo.content = new_content
        if not keep_current_date:
            todo.date = datetime.now()
        db.session.commit()
        return redirect(url_for("home"))
@app.errorhandler(404)
def todo_not_found(e):
"""
    Renders 404.html when a page cannot be found.
"""
return render_template("404.html")
# Start the server!
if __name__ == "__main__":
if not DEBUG:
webbrowser.open_new(f'http://localhost:{PORT}')
app.run(debug=DEBUG, port=PORT)
|
name = "Manish"
age = 30
print("This is hello world!!!")
print(name)
print(float(age))
age = "31"
print(age)
print("this", "is", "cool", "and", "awesome!")
|
#!/usr/bin/python
from subprocess import call, check_output

VERSION = check_output(["python", "version.py", "--in", "../apps/Tasks/src/version.h", "-b", "PROGRAM_VERSION_BUILD", "!SEMANTIC"]).strip().decode()
TAG = check_output(["python", "version.py", "--in", "../apps/Tasks/src/version.h", "!NIGHTLY"]).strip().decode()
call(["git","add","../apps/Tasks/src/version.h"])
call(["git","commit","-m","Build for version %s" % VERSION])
call(["git","tag",TAG])
|
# -*- coding: utf-8 -*-
list_sample = [666, 555, -111, 999, -222]
print(sorted(list_sample))
print(sorted(list_sample, key=abs))  # sort by absolute value
print(list_sample)
list_sample2 = ['bbb', 'aaa', 'ccc', 'ZZZ', 'XXX', 'YYY']
print(sorted(list_sample2))
print(sorted(list_sample2, key=str.lower))  # case-insensitive sort
print(sorted(list_sample2, key=str.lower, reverse=True))  # case-insensitive sort, descending
list_sample3 = [('BBB', 77), ('AAA', 99), ('RRR', 66), ('LLL', 88)]
s = 'asdf234GDSdsf23'  # sort a string by custom rules: lowercase < uppercase < odd digits < even digits
print("".join(sorted(s, key=lambda x: (x.isdigit(), x.isdigit() and int(x) % 2 == 0, x.isupper(), x))))
# Booleans sort with False before True, so in the key tuple:
# x.isdigit() -- letters (False) come before digits (True);
# x.isdigit() and int(x) % 2 == 0 -- odd digits before even digits;
# x.isupper() -- lowercase letters before uppercase;
# x -- finally, characters within each group sort by their own value.
def by_name(t):
return t[0].lower()
def by_score(t):
return t[1]
print('Sorted by name: ', sorted(list_sample3, key=by_name))
print('Sorted by score: ', sorted(list_sample3, key=by_score, reverse=False))  # reverse=False keeps ascending order
# ### sorted()
# https://docs.python.org/3/library/functions.html#sorted
# The built-in sorted() sorts any iterable (e.g. a list) and returns a new list;
# - it can take a key function for custom ordering: key is applied to every
#   element and the results determine the sort order;
# - strings compare by their code-point (ASCII) values by default, but a key
#   function can impose a custom order;
# - the crux of using sorted() is writing a suitable mapping (key) function;
#
# ### Compared with sort()
# - for an unordered list a, a.sort() sorts a in place and returns None;
# - sorted(a) returns a new sorted list and leaves a unchanged;
# - when the elements are lists (or tuples), both sort and sorted compare them
#   lexicographically by default, so list[0] (or tuple[0]) acts as the primary key;
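# A short demonstration of the sort()/sorted() contrast described above:
a = [3, 1, 2]
print(sorted(a), a)  # [1, 2, 3] [3, 1, 2] -- sorted() leaves the original intact
print(a.sort(), a)   # None [1, 2, 3]      -- sort() works in place and returns None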
|
import numpy as np
def multi_gen(generators, shuffle=False):
"""
Generator that combines multiple other generators to return samples. Will
either return a value from each generator in succession or randomly
depending on the value of the shuffle parameter.
"""
i = -1
while 1:
        i = np.random.randint(0, len(generators)) if shuffle else (i + 1) % len(generators)
gen = generators[i]
sample = next(gen)
yield sample
def ts_generator(
data,
lookback,
target_col=0,
n_outputs=1,
step=1,
min_index=0,
max_index=None,
delay=0,
shuffle=False,
batch_size=16,
):
"""
Generator that creates 3d time series shaped data for use in RNN layers
or similar
Args:
data (array): an indexable matrix of timeseries data
        lookback (int): how many timesteps back the input data should go
        target_col (int): column of `data` holding the target variable
        n_outputs (int): how many consecutive future steps to return as targets
        delay (int): how many steps into the future the target should be
min_index (int): point in data at which to start
max_index (int): point in data at which to finish
shuffle (boolean): whether to shuffle the samples
batch_size (int): the number of samples per batch
step (int): the period in timesteps at which to sample the data
"""
if max_index is None:
max_index = len(data) - delay - n_outputs
i = min_index + lookback
# if shuffle:
# np.random.shuffle(data)
while 1:
if shuffle:
rows = np.random.randint(min_index + lookback, max_index, size=batch_size)
else:
if i + batch_size >= max_index:
i = min_index + lookback
rows = np.arange(i, min(i + batch_size, max_index))
i += len(rows)
samples = np.zeros((len(rows), lookback // step, data.shape[-1]))
targets = np.zeros((len(rows), n_outputs))
for j, _ in enumerate(rows):
indices = range(rows[j] - lookback, rows[j], step)
samples[j] = data[indices]
target_start = rows[j] + delay
target_end = target_start + n_outputs
targets[j] = data[target_start:target_end, target_col]
if n_outputs == 1:
targets = targets.reshape(targets.shape[:-1])
yield samples, targets, [None]
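
# A minimal usage sketch over synthetic data (all values illustrative):
#
#   data = np.random.rand(1000, 4)
#   gen = ts_generator(data, lookback=48, step=2, batch_size=8)
#   samples, targets, _ = next(gen)
#   # samples.shape == (8, 24, 4); targets.shape == (8,)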
def ts_seasonal_generator(
data,
target_col=0,
block_size=24,
n_outputs=12,
step=1,
min_index=0,
max_index=None,
delay=0,
shuffle=False,
batch_size=16,
freq=5,
):
"""
Generator that creates 3d time series shaped data for use in RNN layers
or similar
Args:
        data (array): an indexable matrix of timeseries data
        target_col (int): column of `data` holding the target variable
        block_size (int): number of timesteps in each sampled block
        n_outputs (int): how many consecutive future steps to return as targets
        delay (int): how many steps into the future the target should be
        min_index (int): point in data at which to start
        max_index (int): point in data at which to finish
        shuffle (boolean): whether to shuffle the samples
        batch_size (int): the number of samples per batch
        step (int): the period in timesteps at which to sample the data
        freq (int): sample period of the data in minutes (used to derive the
            weekly and daily lookback offsets)
"""
half_sample = block_size // 2
lookback = (60 // freq) * 24 * 7 + half_sample
lookback_d = (60 // freq) * 24 + half_sample
if max_index is None:
max_index = len(data) - delay - n_outputs
i = min_index + lookback
while 1:
if shuffle:
rows = np.random.randint(min_index + lookback, max_index, size=batch_size)
else:
if i + batch_size >= max_index:
i = min_index + lookback
rows = np.arange(i, min(i + batch_size, max_index))
i += len(rows)
samples = np.zeros((len(rows), block_size // step, data.shape[-1] * 3))
targets = np.zeros((len(rows), n_outputs))
for j, _ in enumerate(rows):
indices1 = range(rows[j] - block_size, rows[j], step)
indices2 = range(rows[j] - lookback, rows[j] - lookback + block_size, step)
indices3 = range(rows[j] - lookback_d, rows[j] - lookback_d + block_size, step)
data1 = data[indices1]
data2 = data[indices2]
data3 = data[indices3]
all_data = np.hstack((data1, data2, data3))
samples[j] = all_data
target_start = rows[j] + delay
target_end = target_start + n_outputs
targets[j] = data[target_start:target_end, target_col]
if n_outputs == 1:
targets = targets.reshape(targets.shape[:-1])
yield samples, targets, [None]
|
import pandas as pd
import os
import re
import pprint
import shutil
# Clean all the obvious typos
corrections = {'BAUGHWJV':'BAUGHMAN',
'BOHNE':'BOEHNE',
'EISEMENGER':'EISENMENGER',
'GEITHER':'GEITHNER',
'KIMBREL':'KIMEREL',
'MATTINGLY': 'MATTLINGLY',
'FORESTALL':'FORRESTAL',
'GRENSPAN':'GREENSPAN',
'GREESPAN':'GREENSPAN',
'GREENPSAN':'GREENSPAN',
'GREENSPAN,':'GREENSPAN',
'GREENPAN':'GREENSPAN',
'McANDREWS':'MCANDREWS',
'MCDONUGH':'MCDONOUGH',
'MOSCOW':'MOSKOW',
'MONHOLLAN':'MONHOLLON',
'MILIER':'MILLER',
'MILER':'MILLER',
'SCWLTZ':'SCHULTZ',
'SCHELD':'SCHIELD',
'WILLZAMS':'WILLIAMS',
'WALLJCH':'WALLICH',
'VOLCKFR':'VOLCKER',
                'VOLCRER':'VOLCKER',
'ALLISON for':'ALLISON',
'ALTMA"':'ALTMANN',
'B A U G W':'BAUGW',
'BIES (as read by Ms':'BIES',
'BLACK &':'BLACK',
'MAYO/MR':'MAYO',
'Greene':"GREENE",
'CROSS,':'CROSS',
'HOSKINS,':'HOSKINS',
'MACCLAURY':'MACLAURY',
'MORRRIS':'MORRIS',
"O'CONNELL":'O’CONNELL',
'SOLOMON]':'SOLOMON',
'TRUMAN-':'TRUMAN',
'VOLCKER,':'VOLCKER',
'VOLKER,':'VOLCKER',
'WALLlCH':'WALLICH',
'[BALLES]':'BALLES',
'[GARDNER]':'GARDNER',
'[KICHLINE]?':'KICHLINE',
'[PARDEE]':'PARDEE',
'[ROOS]':'ROOS',
'[STERN':'STERN',
'[WILLES]':'WILLES',
'ŞAHIN':'SAHIN',
'[STERN(?)':'STERN',
'[STERN]':'STERN',
'GRALEY':'GRAMLEY',
'ALTMA”':'ALTMANN'}
def name_corr(val):
sentence=""
dictkeys=[key for key, value in corrections.items()]
if val in dictkeys:
val = corrections[val]
else:
if re.match(".*\(\?\)",val):
val = re.search("(.*)(\(\?\))",val)[1]
if val in dictkeys:
val = corrections[val]
if len(val.split(" "))>1:
#print(val.split(" ")[0])
#print(val.split(" ")[1:])
sentencehelp = " ".join(val.split(" ")[1:])
if not len(re.findall("Yes",sentencehelp))>7:
if len(sentencehelp)>10:
sentence = sentencehelp
#print(sentence)
val = val.split(" ")[0]
        if val in corrections:
            val = corrections[val]
#print(val)
return val,sentence
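
# Illustrative behaviour of name_corr():
#   name_corr("GRENSPAN")           -> ("GREENSPAN", "")
#   name_corr("VOLCKER, Thank you") -> ("VOLCKER", "")
# When a long enough sentence trails the name, it is returned as the second
# element so the caller can prepend it back onto the interjection content.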
def get_interjections():
    base_directory = "../../../collection/python/data/transcript_raw_text"
raw_doc = os.listdir(base_directory)
filelist = sorted(raw_doc)
documents = []
if os.path.exists("../output/speaker_data"):
shutil.rmtree("../output/speaker_data")
os.mkdir("../output/speaker_data")
for doc_path in filelist:
with open("{}/{}".format(base_directory,doc_path),'r') as f:
documents.append(f.read().replace("\n"," ").replace(":",".").replace(r"\s\s+"," "))
date = pd.Series(data=filelist).apply(lambda x: x[0:10])
#print(date)
parsed_text = pd.DataFrame()
for doc_index in range(len(documents)):
if doc_index%10 == 0:
print("Working on producing interjections for doc #{} of ~{}".format(doc_index,len(documents)))
        # This heuristic fails for 59 out of 4857 occurrences
        interjections = re.split(r' MR\. | MS\. | CHAIRMAN | VICE CHAIRMAN ', documents[doc_index])[1:]
temp_df = pd.DataFrame(columns=['Date','Speaker','content'],index=range(len(interjections)))
#Temporary data frame
for j in range(len(interjections)):
interjection = interjections[j]
            temp_df.loc[j, 'Date'] = date[doc_index]
#speaker = "".join([char for char in if char.isalnum()])
speakercontent = interjection.split('.')[0].strip()
name,sentence = name_corr(speakercontent)
content = ''.join(interjection.split('.')[1:])
if not sentence=="":
content = sentence +" "+content
#print(content)
            temp_df.loc[j, 'Speaker'] = name
            temp_df.loc[j, 'content'] = content
parsed_text = pd.concat([parsed_text,temp_df],ignore_index=True)
parsed_text.to_pickle("parsed_text.pkl")
parsed_text = pd.read_pickle("parsed_text.pkl")
#speakerlist = sorted(parsed_text["Speaker"].unique().tolist())
# Get names of indexes for which we have an unidentified speaker and drop those
    indexNames = parsed_text[parsed_text['Speaker'].isin(['mY0', 'WL”', 'W', 'AL"N'])].index
    parsed_text.drop(indexNames, inplace=True)
parsed_text["content"] = parsed_text["content"].apply(lambda x: " ".join(str(x).split()[1:]) if len(str(x).split())>1 and str(x).split()[0]=="LINDSEY" else x)
parsed_text["Speaker"] = parsed_text["Speaker"].apply(lambda x: "LINDSEY" if x=="D" else x)
# Delete content with a check for presence of members.
#parsed_text['check']=parsed_text['content'].apply(lambda x: len(re.findall("Yes",x)))
#parsed_text['d_presence']=parsed_text['check']>7
parsed_text.to_csv("../output/interjections.csv",index=False)
return parsed_text
'''
The FOMC Transcript is split into 2 sections:
1) Economic Discussion, 2) Policy Discussion
This function tags each interjection by an FOMC member with its associated FOMC discussion section.
'''
def tag_interjections_with_section(interjection_df):
separation_df = pd.read_excel("../data/Separation.xlsx")
meeting_df = pd.read_csv("../../../derivation/python/output/meeting_derived_file.csv")
separation_df = separation_df.rename(columns={separation_df.columns[0]:"date_string"})
separation_df.date_string = separation_df.date_string.apply(str)
separation_df['Date'] = pd.to_datetime(separation_df.date_string,format="%Y%m")
interjection_df['Date'] = pd.to_datetime(interjection_df['Date'])
interjection_df = interjection_df[(interjection_df.Date>pd.to_datetime("1987-07-31"))&
(interjection_df.Date<pd.to_datetime("2006-02-01"))]
    cc_df = meeting_df[meeting_df.event_type == "Meeting"].copy()
    cc_df['Date'] = pd.to_datetime(cc_df['start_date'])
cc_df['end_date'] = pd.to_datetime(cc_df['end_date'])
interjection_df = interjection_df[interjection_df['Date'].isin(cc_df['Date'])]
interjection_df = pd.merge(interjection_df,cc_df[['Date','end_date']],on="Date",how="left")
interjection_df['date_string'] = interjection_df.end_date.\
apply(lambda x: x.strftime("%Y%m")).apply(str)
separation_df['date_ind'] = separation_df.date_string.astype(int)
separation_df = separation_df.set_index('date_ind')
meeting_groups = interjection_df.groupby("Date")
tagged_interjections = pd.DataFrame(columns=interjection_df.columns)
for meeting_number,date_ind in enumerate(interjection_df['date_string'].drop_duplicates().astype(int)):
meeting_date = interjection_df[interjection_df.date_string.astype(int)==date_ind].reset_index(drop=True)
meeting_date['FOMC_Section'] = 0
if date_ind not in list(separation_df.index):
tagged_interjections = pd.concat([tagged_interjections, meeting_date], ignore_index = True)
continue
try:
meeting_date.loc[separation_df['FOMC1_start'][date_ind]:
separation_df['FOMC1_end'][date_ind],"FOMC_Section"] = 1
#print(FOMC1)
if separation_df['FOMC2_end'][date_ind] == 'end':
meeting_date.loc[separation_df['FOMC2_start'][date_ind]:
,"FOMC_Section"] = 2
else:
meeting_date.loc[separation_df['FOMC2_start'][date_ind]:
separation_df['FOMC2_end'][date_ind],"FOMC_Section"]=2
#FOMC2 = meeting_date.iloc[separation['FOMC2_start'][date]:]
tagged_interjections = pd.concat([tagged_interjections, meeting_date], ignore_index = True)
        except Exception:
tagged_interjections = pd.concat([tagged_interjections, meeting_date], ignore_index = True)
tagged_interjections.to_csv("tagged_interjections.csv",index=False)
return tagged_interjections
def generate_speaker_corpus(tagged_interjections):
tagged_interjections['content'] = tagged_interjections['content'].fillna("")
tagged_interjections['Date'] = pd.to_datetime(tagged_interjections['Date'])
speaker_statements = tagged_interjections.groupby(['Date','Speaker','FOMC_Section'])['content'].apply(lambda x: "%s" % " ".join(x))
speaker_statements = speaker_statements.reset_index()
dates_df = pd.read_csv("../../../collection/python/output/derived_data.csv")
dates_df['start_date'] = pd.to_datetime(dates_df['start_date'])
dates_df['end_date'] = pd.to_datetime(dates_df['end_date'])
speaker_statements = speaker_statements.merge(dates_df[["start_date","end_date"]].drop_duplicates(),left_on="Date",right_on="start_date",how="left")
speaker_statements.to_pickle("../output/speaker_data/speaker_corpus.pkl")
speaker_statements.to_csv("../output/speaker_data/speaker_corpus.csv")
print("Completed generating speaker statements!")
return speaker_statements
def generate_speaker_files(speaker_statements):
speakers = [speaker for speaker in set(speaker_statements["Speaker"])]
print("Number of speakers:{}".format(len(speakers)))
count = 0
for speaker in speakers:
print("Currently working on statements for speaker {} of {}. Name:{}".format(count,len(speakers),speaker))
speaker_df = speaker_statements[speaker_statements["Speaker"]==speaker]
speaker_path = "{}/{}".format("../output/speaker_data",speaker)
if not os.path.exists(speaker_path):
os.mkdir(speaker_path)
speaker_df[['Date','content']].to_csv("{}/{}_{}".format(speaker_path,speaker,"statements_by_meeting.csv"))
speaker_list = list(speaker_df["content"])
with open("{}/{}_{}".format(speaker_path,speaker,"corpus.txt"),"w+") as f:
f.write(" ".join(speaker_list))
count+=1
def main():
interjection_df = get_interjections()
tagged_interjections = tag_interjections_with_section(interjection_df)
speaker_statements = generate_speaker_corpus(tagged_interjections)
generate_speaker_files(speaker_statements)
if __name__ == "__main__":
main()
# =============================================================================
# ## Do some checks:
# with open('../../output/data.json', 'r') as speakerids:
# speakerid = json.load(speakerids)
#
# speakerlist = [ x.lower() for x in speaker_statements["Speaker"].unique().tolist()]
#
# for key,value in speakerid.items():
# if key.lower() not in speakerlist:
# print(key)
# else:
# print('in list')
#
# =============================================================================
|
from typing import Text, Final
CLI_USAGE_TEXT: Final[Text] = """[quote]Write any thought you have without quitting from the command line[/quote]
[header]USAGE[/header]
codenotes <command> <annotation> <text> <flags>
[header]CORE COMMANDS[/header]
add Create new note or task with the content typed
search Search for notes or tasks with the parameters specified
[header]ANNOTATION[/header]
note/task Type of annotations
[header]FLAGS[/header]
--version, -v Show codenotes version
[header]EXAMPLES[/header]
$ codenotes add task Finish coding the tests --new-category Reminders
$ codenotes add task Create documentation for the codenotes project; Release the project -p
$ codenotes search note --today
[header]FEEDBACK[/header]
Open an issue in [u]github.com/EGAMAGZ/codenotes[/u]"""
ADD_NOTE_USAGE_TEXT: Final[Text] = """[quote]Write any thought you have without quitting from the command line[/quote]
[header]USAGE[/header]
codenotes add note <text> <flags>
[header]FLAGS[/header]
--title,-t <title> Sets a title for the note, limited to 30 characters. When no title is specified, it takes
\t\tthe first 30 characters of the note
--category,-c <category> Creates the category if it does not exist and stores the note in it
--preview, -p Shows a preview of the note that will be saved
[header]EXAMPLES[/header]
$ codenotes add note I got an idea for UI --title UI Idea --category Codenotes"""
ADD_TASK_USAGE_TEXT: Final[Text] = """[quote]Write any thought you have without quitting from the command line[/quote]
[header]USAGE[/header]
codenotes add task <text> <flags>
[header]FLAGS[/header]
--category,-c <category> Creates the category if it does not exist and stores the task in it
--preview, -p Shows a preview of the task that will be saved
[header]TEXT[/header]
To save two or more tasks, use the symbol ; to mark the end of each task.
[header]EXAMPLES[/header]
$ codenotes add task Finish coding the tests --new-category Reminders
$ codenotes add task Create documentation for the codenotes project; Release the project -p"""
SEARCH_USAGE_TEXT: Final[Text] = """[quote]Write any thought you have without quitting from the command line[/quote]
[header]USAGE[/header]
codenotes search <annotation> <text> <flags>
[header]ANNOTATION[/header]
note/task Type of annotations
[header]TEXT[/header]
Text to search for within the annotations.
[header]FLAGS[/header]
--today, -t Search annotations created today
--yesterday, -y Search annotations created yesterday
--week, -w Search annotations created in the week
--month, -m Search annotations created in the month
[header]EXAMPLES[/header]
$ codenotes search note --today
$ codenotes search task Finish my project --month"""
|
class KeyNotFoundException(Exception):
def __init__(self, search_key, message='No information could be found for key {key}'):
# Call the base class constructor with the parameters it needs
message = message.format(key=search_key)
super(KeyNotFoundException, self).__init__(message)
# Custom values
self.search_key = search_key
class ValueForExistingKeyNotFoundException(Exception):
# the name is terrible, BUT it is descriptive
def __init__(self, search_key, search_value, found_key,
message='No value information could be found for key {key} and value {value}'):
message = message.format(key=search_key, value=search_value)
# Call the base class constructor with the parameters it needs
super(ValueForExistingKeyNotFoundException, self).__init__(message)
# Custom values
self.search_key = search_key
self.search_value = search_value
self.found_key = found_key
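
# Illustrative use:
#
#   try:
#       raise KeyNotFoundException('missing-key')
#   except KeyNotFoundException as exc:
#       print(exc)             # No information could be found for key missing-key
#       print(exc.search_key)  # missing-key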
|
"""Test views train page"""
from django.test import RequestFactory, TestCase
from django.contrib.auth.models import AnonymousUser
import sys
import os
from core.models import AI_Tiles as AITilesTable
from core.models import AI_Characteristics as AICharsTable
from core.models import AI_Objects as AIObjTable
sys.path.append(os.path.join(os.path.dirname("src"), '..'))
# pylint: disable=all
from src.core.views import *
# Create your tests here.
class TestViews(TestCase):
def setUp(self):
# Every test needs access to the request factory.
self.factory = RequestFactory()
tile_ai = AITilesTable()
tile_ai.x_coord = 2
tile_ai.y_coord = 2
tile_ai.year = 2010
tile_ai.save()
tile_obj = AIObjTable()
tile_obj.tiles_id = tile_ai
tile_obj.x_coord = 0
tile_obj.y_coord = 0
tile_obj.prediction = 100
tile_obj.type = 'oiltank'
tile_obj.save()
tile_chars = AICharsTable()
tile_chars.tiles_id = tile_ai
tile_chars.land_prediction = 1
tile_chars.water_prediction = 0
tile_chars.buildings_prediction = 0
tile_chars.save()
def test_get_accuracy(self):
# Create an instance of a GET request.
request = self.factory.get('get_accuracy')
# an AnonymousUser instance.
request.user = AnonymousUser()
response = get_accuracy(request)
self.assertEqual(response.status_code, 200)
def test_train(self):
# Create an instance of a GET request.
request = self.factory.get('train')
# an AnonymousUser instance.
request.user = AnonymousUser()
response = train(request)
self.assertEqual(response.status_code, 200)
|
from gerenciador import *
def test_organizar_dados():
assert True == organizar_dados(12,0)
def test_salvar_dados_bd():
assert False == salvar_dados_bd(1,12,12,12,0,0,0)
def test_deletar_dados():
assert True == deletar_dados()
|
from django.core.management import call_command
from django.test import TestCase
class TestImportCommand(TestCase):
def test_load_commands(self):
"""
A very dumb test that just confirms that the import succeeds without
unhandled exceptions raised.
TODO: Don't be so stupid.
"""
call_command('import_usda')
|
from __future__ import annotations
from typing import Dict, Text, Any
from rasa.engine.graph import GraphComponent, ExecutionContext
from rasa.engine.storage.resource import Resource
from rasa.engine.storage.storage import ModelStorage
from rasa.shared.core.training_data.structures import StoryGraph
from rasa.shared.importers.importer import TrainingDataImporter
class StoryGraphProvider(GraphComponent):
"""Provides the training data from stories."""
def __init__(self, config: Dict[Text, Any]) -> None:
"""Creates provider from config."""
self._config = config
@staticmethod
def get_default_config() -> Dict[Text, Any]:
"""Returns default configuration (see parent class for full docstring)."""
return {"exclusion_percentage": None}
@classmethod
def create(
cls,
config: Dict[Text, Any],
model_storage: ModelStorage,
resource: Resource,
execution_context: ExecutionContext,
) -> StoryGraphProvider:
"""Creates component (see parent class for full docstring)."""
return cls(config)
def provide(self, importer: TrainingDataImporter) -> StoryGraph:
"""Provides the story graph from the training data.
Args:
importer: instance of TrainingDataImporter.
Returns:
The story graph containing stories and rules used for training.
"""
return importer.get_stories(**self._config)
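
# Illustrative wiring (normally performed by the Rasa graph runner; the
# model_storage, resource, context and importer objects are placeholders):
#
#   provider = StoryGraphProvider.create(
#       StoryGraphProvider.get_default_config(), model_storage, resource, context)
#   story_graph = provider.provide(importer)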
|
from setuptools import setup, find_packages
VERSION = '0.1.0'
DESCRIPTION = 'Connect to any FANTM device'
LONG_DESCRIPTION = 'Frontend for connecting to devlprd and processing data from a FANTM DEVLPR'
# Setting up
setup(
name="pydevlpr-fantm",
version=VERSION,
author="Ezra Boley",
author_email="hello@getfantm.com",
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url='https://github.com/fantm/libdevlpr-plugin',
packages=find_packages(where="src"),
    install_requires=['websockets'],  # any additional packages that need
                                      # to be installed along with this package
keywords=['python', 'FANTM', 'DEVLPR'],
classifiers= [
"Development Status :: 3 - Alpha",
"Intended Audience :: Education",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
]
)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-28 08:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('titles', '0015_auto_20171226_1052'),
]
operations = [
migrations.AlterModelOptions(
name='title',
options={'ordering': ('-release_date', '-update_date')},
),
migrations.RenameField(
model_name='title',
old_name='being_updated',
new_name='getting_details',
),
migrations.RenameField(
model_name='title',
old_name='updated',
new_name='has_details',
),
migrations.AlterField(
model_name='title',
name='type',
field=models.IntegerField(blank=True, choices=[(None, ''), (0, 'Movie'), (1, 'TV Show')], null=True),
),
]
|
"""
tkinter HTML text widgets
"""
import sys
import tk_html_widgets as tk_html
from ..Events import Bindings
from ..Widgets.Widgets import ScrolledText, ViewState, tkEvent
__all__ = [
'HTMLScrolledText', 'HTMLText', 'HTMLLabel'
]
class HTMLScrolledText(ScrolledText):
_bindIDs = set()
__doc__ = tk_html.HTMLScrolledText.__doc__
def __init__(self, *args, html=None, **kwargs):
super().__init__(*args, **kwargs)
self._w_init(kwargs)
self.html_parser = tk_html.html_parser.HTMLTextParser()
if isinstance(html, str):
self.set_html(html)
def _w_init(self, kwargs):
        if 'wrap' not in kwargs:
            self.tb.config(wrap='word')
        if 'background' not in kwargs:
            if sys.platform.startswith('win'):
                self.tb.config(background='SystemWindow')
            else:
                self.tb.config(background='white')
def fit_height(self):
""" Fit widget height to wrapped lines """
for h in range(1, 4):
self.tb.config(height=h)
self.root.update()
if self.tb.yview()[1] >= 1:
break
else:
self.tb.config(height=0.5 + 3 / self.tb.yview()[1])
return self
def set_html(self, html, strip=True):
# ------------------------------------------------------------------------------------------
"""
Set HTML widget text. If strip is enabled (default) it ignores spaces and new lines.
"""
for ID in self._bindIDs: self.unbind(ID)
prev_state = ViewState(self.tb['state'])
self.tb.Enable()
self.tb.Clear()
        self.tb.tag_delete(*self.tb.tag_names())
self.html_parser.w_set_html(self.tb, html, strip=strip)
self._bindIDs.clear()
self._setupBindings()
return self.tb.Enable(state=prev_state)
def _setupBindings(self):
self._bindIDs.add(self.tb.Bind(Bindings.ButtonPress, func=self.HandlePress, add=True))
self._bindIDs.add(self.tb.Bind(Bindings.ButtonRelease, func=self.HandleRelease, add=True))
self._bindIDs.add(self.tb.Bind(Bindings.FocusIn, func=self.HandleFocusIn, add=True))
self._bindIDs.add(self.tb.Bind(Bindings.FocusOut, func=self.HandleFocusOut, add=True))
def HandlePress(self, event: tkEvent): pass
def HandleRelease(self, event: tkEvent): pass
def HandleFocusIn(self, event: tkEvent): pass
def HandleFocusOut(self, event: tkEvent): pass
@property
def txt(self) -> str: return self.tb.txt
@txt.setter
def txt(self, value: str): self.set_html(value)
class HTMLText(HTMLScrolledText):
    __doc__ = tk_html.HTMLText.__doc__  # HTML text widget
def _w_init(self, kwargs):
# ------------------------------------------------------------------------------------------
super()._w_init(kwargs)
self.vbar.hide()
def fit_height(self):
# ------------------------------------------------------------------------------------------
super().fit_height()
# self.master.update()
self.vbar.hide()
class HTMLLabel(HTMLText):
__doc__ = tk_html.HTMLLabel.__doc__
def _w_init(self, kwargs):
# ------------------------------------------------------------------------------------------
super()._w_init(kwargs)
        if 'background' not in kwargs:
            if sys.platform.startswith('win'):
                self.tb.config(background='SystemButtonFace')
            else:
                self.tb.config(background='#d9d9d9')
        if 'borderwidth' not in kwargs:
            self.tb.config(borderwidth=0)
        if 'padx' not in kwargs:
            self.tb.config(padx=3)
def set_html(self, *args, **kwargs): return super().set_html(*args, **kwargs).Disable()
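
# A minimal usage sketch (the constructor signature comes from this package's
# ScrolledText wrapper, so the exact arguments are an assumption):
#
#   import tkinter as tk
#   root = tk.Tk()
#   label = HTMLLabel(root, html="<b>Hello</b> <i>world</i>")
#   label.pack(fill='both', expand=True)
#   root.mainloop()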
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
import environ
from django.conf import global_settings
from django.contrib import messages
from vaas.configuration.loader import YamlConfigLoader
env = environ.Env()
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
current_dir = os.path.abspath(os.path.dirname(__file__))
config_loader = YamlConfigLoader()
if not config_loader.determine_config_file('db_config.yml'):
raise EnvironmentError('Cannot find db_config file')
DATABASES = config_loader.get_config_tree('db_config.yml')
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'pwm_&@a%yd8+7mqf9=*l56+y!@sb7ab==g942j7++gnr9l2%*d'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
# Application definition
INSTALLED_APPS = (
'django_nose',
'vaas.adminext',
'django_admin_bootstrapped',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'social_django',
'tastypie',
'vaas.manager',
'vaas.cluster',
'vaas.router',
'vaas.monitor',
'vaas.account',
'vaas.purger',
'taggit',
'django_ace',
'simple_history',
)
# Plugins definition
INSTALLED_PLUGINS = ()
MIDDLEWARE_PLUGINS = ()
MIDDLEWARE = [
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'log_request_id.middleware.RequestIDMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'vaas.manager.middleware.VclRefreshMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'simple_history.middleware.HistoryRequestMiddleware',
]
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/admin/'
SECURE_CONTENT_TYPE_NOSNIFF = True
ROOT_URLCONF = 'vaas.urls'
WSGI_APPLICATION = 'vaas.external.wsgi.application'
SIGNALS = 'on'
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Warsaw'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static/")
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.debug',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.request',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'social_django.context_processors.backends',
'social_django.context_processors.login_redirect',
],
},
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': '/tmp/debug.log',
'formatter': 'verbose'
},
},
'loggers': {
'django': {
'handlers': ['file'],
'propagate': False,
'level': 'ERROR',
},
'vaas': {
'handlers': ['file'],
'propagate': False,
'level': 'DEBUG',
},
'': {
'handlers': ['file'],
'level': 'INFO',
}
}
}
VAAS_LOADER_MAX_WORKERS = 30
VAAS_RENDERER_MAX_WORKERS = 30
REFRESH_TRIGGERS_CLASS = (
'Probe', 'Backend', 'Director', 'VarnishServer', 'VclTemplate', 'VclTemplateBlock', 'TimeProfile', 'VclVariable',
'Route'
)
# CELERY
BROKER_URL = env.str('BROKER_URL', default='redis://localhost:6379/1')
CELERY_RESULT_BACKEND = env.str('CELERY_RESULT_BACKEND', default='redis://localhost:6379/2')
CELERY_TASK_RESULT_EXPIRES = env.int('CELERY_TASK_RESULT_EXPIRES', default=600)
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_IGNORE_RESULT = env.bool('CELERY_IGNORE_RESULT', False)
CELERY_TASK_PUBLISH_RETRY = env.bool('CELERY_TASK_PUBLISH_RETRY', True)
# wait up to 5 minutes before a task is killed
CELERY_TASK_SOFT_TIME_LIMIT_SECONDS = 300
CELERY_ROUTES = {
'vaas.router.report.fetch_urls_async': {'queue': 'routes_test_queue'},
'vaas.*': {'queue': 'worker_queue'},
}
VARNISH_COMMAND_TIMEOUT = 5
VARNISH_VCL_INLINE_COMMAND_TIMEOUT = 60
# UWSGI CONTEXT SWITCH (UGREEN)
ENABLE_UWSGI_SWITCH_CONTEXT = env.bool('ENABLE_UWSGI_SWITCH_CONTEXT', False)
VCL_TEMPLATE_COMMENT_REGEX = env.str('VCL_TEMPLATE_COMMENT_REGEX', default=None)
VCL_TEMPLATE_COMMENT_VALIDATION_MESSAGE = env.str('VCL_TEMPLATE_COMMENT_VALIDATION_MESSAGE', default=None)
ROUTES_LEFT_CONDITIONS = env.dict('ROUTES_LEFT_CONDITIONS', default={
'req.url': 'URL',
'req.http.Host': 'Domain',
'req.http.X-Example': 'X-Example',
})
PURGER_HTTP_CLIENT_TIMEOUT = env.int('PURGER_HTTP_CLIENT_TIMEOUT', default=10)
PURGER_MAX_HTTP_WORKERS = env.int('PURGER_MAX_HTTP_WORKERS', default=100)
# VALIDATION_HEADER
VALIDATION_HEADER = env.str('VALIDATION_HEADER', default='x-validation')
FETCHER_HTTP_CLIENT_TIMEOUT = env.int('FETCHER_HTTP_CLIENT_TIMEOUT', default=10)
FETCHER_MAX_HTTP_WORKERS = env.int('FETCHER_MAX_HTTP_WORKERS', default=100)
# ENABLE RUN TEST BUTTON
ROUTE_TESTS_ENABLED = env.bool('ROUTE_TESTS_ENABLED', default=True)
# STATSD ENV
STATSD_ENABLE = env.bool('STATSD_ENABLE', default=False)
STATSD_HOST = env.str('STATSD_HOST', default='localhost')
STATSD_PORT = env.int('STATSD_PORT', default=8125)
STATSD_PREFIX = env.str('STATSD_PREFIX', default='example.statsd.path')
# HEADER THAT PERMITS ACCESS TO THE /vaas/ ENDPOINT
ALLOW_METRICS_HEADER = env.str('ALLOW_METRICS_HEADER', default='x-allow-metric-header')
CLUSTER_IN_SYNC_ENABLED = env.bool('CLUSTER_IN_SYNC_ENABLED', default=False)
MESH_X_ORIGINAL_HOST = env.str('MESH_X_ORIGINAL_HOST', default='x-original-host')
SERVICE_TAG_HEADER = env.str('SERVICE_TAG_HEADER', default='x-service-tag')
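
# Example of overriding the environment-driven settings above at process
# start (illustrative values):
#
#   BROKER_URL=redis://redis.local:6379/1 STATSD_ENABLE=true \
#       CELERY_TASK_RESULT_EXPIRES=1200 python manage.py runserver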
|
import time
import warnings
from datetime import datetime
from cybox.common import Hash
from cybox.objects.address_object import Address
from cybox.objects.uri_object import URI
# suppress PyMISP warning about Python 2
warnings.filterwarnings('ignore', 'You\'re using python 2, it is strongly '
'recommended to use python >=3.4')
from pymisp import PyMISP
from certau.util.stix.helpers import package_time
from .base import StixTransform
class StixMispTransform(StixTransform):
"""Insert data from a STIX package into a MISP event.
This class inserts data from a STIX package into MISP (the Malware
Information Sharing Platform - see http://www.misp-project.org/).
A PyMISP (https://github.com/CIRCL/PyMISP) object is passed to
the constructor and used for communicating with the MISP host.
The helper function :py:func:`get_misp_object` can be used to
instantiate a PyMISP object.
Args:
package: the STIX package to process
misp: the PyMISP object used to communicate with the MISP host
        distribution: the distribution setting for the MISP event (0-3)
        threat_level: the threat level setting for the MISP event (1-4)
        analysis: the analysis level setting for the MISP event (0-2)
information: info field value (string) for the MISP event
published: a boolean indicating whether the event has been
published
"""
OBJECT_FIELDS = {
'Address': ['address_value'],
'DomainName': ['value'],
'EmailMessage': [
'header.from_.address_value',
'header.subject',
],
'File': ['hashes.type_', 'hashes.simple_hash_value'],
'HTTPSession': ['http_request_response.http_client_request.' +
'http_request_header.parsed_header.user_agent'],
'Mutex': ['name'],
'SocketAddress': ['ip_address.address_value'],
'URI': ['value'],
'WinRegistryKey': ['hive', 'key', 'values.name', 'values.data'],
}
OBJECT_CONSTRAINTS = {
'Address': {
'category': [Address.CAT_IPV4, Address.CAT_IPV6],
},
'File': {
'hashes.type_': [Hash.TYPE_MD5, Hash.TYPE_SHA1, Hash.TYPE_SHA256],
},
'URI': {
'type_': [URI.TYPE_URL],
},
}
STRING_CONDITION_CONSTRAINT = ['None', 'Equals']
MISP_FUNCTION_MAPPING = {
'Address': 'add_ipdst',
'DomainName': 'add_domain',
'EmailMessage': ['add_email_src', 'add_email_subject'],
'File': 'add_hashes',
'HTTPSession': 'add_useragent',
'Mutex': 'add_mutex',
'SocketAddress': 'add_ipdst', # Consider update to PyMISP API for port
'URI': 'add_url',
'WinRegistryKey': 'add_regkey',
}
def __init__(self, package, default_title=None, default_description=None,
default_tlp='AMBER',
misp=None, # PyMISP object must be provided
distribution=0, # this organisation only
                 threat_level=1, # high
                 analysis=2, # completed
information=None,
published=False):
super(StixMispTransform, self).__init__(
package, default_title, default_description, default_tlp,
)
self.misp = misp
self.distribution = distribution
self.threat_level = threat_level
self.analysis = analysis
self.information = information
self.published = published
# ##### Properties
@property
def misp(self):
return self._misp
@misp.setter
def misp(self, misp):
if not isinstance(misp, PyMISP):
raise TypeError('expected PyMISP object')
self._misp = misp
@property
def distribution(self):
return self._distribution
@distribution.setter
def distribution(self, distribution):
self._distribution = int(distribution)
@property
def threat_level(self):
return self._threat_level
@threat_level.setter
def threat_level(self, threat_level):
self._threat_level = int(threat_level)
@property
def analysis(self):
return self._analysis
@analysis.setter
def analysis(self, analysis):
self._analysis = int(analysis)
@property
def information(self):
return self._information
@information.setter
def information(self, information):
self._information = '' if information is None else str(information)
@property
def published(self):
return self._published
@published.setter
def published(self, published):
self._published = bool(published)
@property
def event(self):
return self._event
@event.setter
def event(self, event):
self._event = event
# ##### Class helper methods
@staticmethod
def get_misp_object(misp_url, misp_key, misp_ssl=False, misp_cert=None):
"""Returns a PyMISP object for communicating with a MISP host.
Args:
misp_url: URL for MISP API end-point
misp_key: API key for accessing MISP API
misp_ssl: a boolean value indicating whether the server's SSL
certificate will be verified
misp_cert: a tuple containing a certificate and key for SSL
client authentication
"""
return PyMISP(misp_url, misp_key, ssl=misp_ssl, cert=misp_cert)
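
    # Illustrative end-to-end use (the URL and key are placeholders):
    #
    #   misp = StixMispTransform.get_misp_object('https://misp.example.org', 'API_KEY')
    #   transform = StixMispTransform(package, misp=misp, information='Example event')
    #   transform.publish()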
def init_misp_event(self):
if not self.information:
# Try the package header for some 'info'
title = self.package_title()
description = self.package_description()
if title or description:
self.information = title
if title and description:
self.information += ' | '
if description:
self.information += description
timestamp = package_time(self.package) or datetime.now()
self.event = self.misp.new_event(
distribution=self.distribution,
threat_level_id=self.threat_level,
analysis=self.analysis,
info=self.information,
date=timestamp.strftime('%Y-%m-%d'),
)
# Add TLP tag to the event
package_tlp = self.package_tlp().lower()
tlp_tag_id = None
misp_tags = self.misp.get_all_tags()
if 'Tag' in misp_tags:
for tag in misp_tags['Tag']:
if tag['name'] == 'tlp:{}'.format(package_tlp):
tlp_tag_id = tag['id']
break
if tlp_tag_id is not None:
self.misp.tag(self.event['Event']['uuid'], tlp_tag_id)
# ##### Overridden class methods
def publish_fields(self, fields, object_type):
if isinstance(self.MISP_FUNCTION_MAPPING[object_type], list):
for field, function in zip(
self.OBJECT_FIELDS[object_type],
self.MISP_FUNCTION_MAPPING[object_type]):
if field in fields:
add_method = getattr(self.misp, function)
add_method(self.event, fields[field])
else:
add_method = getattr(self.misp,
self.MISP_FUNCTION_MAPPING[object_type])
if object_type == 'File':
# Convert the hash type and value to kwargs
hash_type = fields['hashes.type_'].lower()
kwargs = {hash_type: fields['hashes.simple_hash_value']}
add_method(self.event, **kwargs)
elif object_type == 'WinRegistryKey':
# Combine hive and key into regkey
regkey = ''
regkey += fields.get('hive', '')
regkey += fields.get('key', '')
# Merge the name and values
regvalue = ''
regvalue += fields.get('values.name', '')
data = fields.get('values.data', '')
if data:
regvalue += '\\' if regvalue else ''
regvalue += data
if regkey or regvalue:
add_method(self.event, regkey, regvalue)
else:
self._logger.debug('skipping WinRegistryKey with no data')
else:
# A single value
field = self.OBJECT_FIELDS[object_type][0]
if field in fields:
add_method(self.event, fields[field])
def publish_observable(self, observable, object_type):
if 'fields' in observable:
for fields in observable['fields']:
self.publish_fields(fields, object_type)
def publish(self):
if self.observables:
self._logger.info("Publishing results to MISP")
self.init_misp_event()
for object_type in sorted(self.OBJECT_FIELDS.keys()):
if object_type in self.observables:
for observable in self.observables[object_type]:
self.publish_observable(observable, object_type)
if self.published:
self.misp.publish(self.event)
else:
self._logger.info("Package has no observables - skipping")
|
# 1567. SMS spam
# solved
text_input = input()
abc = ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z')
symbols = ('.', ',', '!')
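# Multi-tap keypad model: each key carries three letters, so a letter costs
# (its alphabet index) % 3 + 1 presses; '.', ',' and '!' share one key and
# cost 1-3 presses; a space costs a single press.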
result = 0
for current_letter in text_input:
    if current_letter in abc:
        result += abc.index(current_letter) % 3 + 1
    elif current_letter in symbols:
        result += symbols.index(current_letter) % 3 + 1
    elif current_letter == ' ':
        result += 1
print(result)
|
import sys

from setuptools import setup, find_packages
from ypconfig import __version__
try:
from distutils.command.build_py import build_py_2to3 as build_py
except ImportError:
from distutils.command.build_py import build_py
major, minor = sys.version_info[:2] # Python version
if major < 3:
print("We need at least python 3")
sys.exit(1)
elif major == 3:
PYTHON3 = True
try:
import lib2to3 # Just a check--the module is not actually used
except ImportError:
print("Python 3.X support requires the 2to3 tool.")
sys.exit(1)
reqs = ['schema', 'pyroute2', 'PyYAML', 'docopt']
setup(
name='ypconfig',
version=__version__,
description='Tools required for ypconfig',
author='Mark Schouten',
author_email='mark@tuxis.nl',
url='https://github.com/ypconfig/ypconfig',
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3.5',
],
license='BSD 2-Clause',
setup_requires=reqs,
install_requires=reqs,
packages=find_packages(exclude=['tests', 'tests.*']),
platforms=['linux'],
data_files=[
],
entry_points={'console_scripts': ['ypconfig = ypconfig.cli:main']}
)
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html
import enum
import sphinx_theme_pd
project = "AtCoder"
author = "Hiroshi Tsuyuki <kagemeka1@gmail.com>"
copyright = f"2022, {author}"
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
"sphinx.ext.todo",
"sphinx.ext.napoleon", # enable numpy/google documentation styles.
"sphinx_rtd_theme",
]
templates_path = ["_templates"]
# relative to conf.py
language = "en"
# https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-language
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# relative to source directory.
# https://sphinx-themes.org/#themes
class _HtmlTheme(enum.Enum):
ALABASTER = "alabaster"
FURO = "furo"
SPHINX_RTD_THEME = "sphinx_rtd_theme"
PYTHON_DOCS_THEME = "python_docs_theme"
SPHINX_THEME_PD = "sphinx_theme_pd"
SPHINX_BOOK_THEME = "sphinx_book_theme"
PYDATA_SPHINX_THEME = "pydata_sphinx_theme"
html_theme = _HtmlTheme.FURO.value
html_theme_path: list[str] = [
sphinx_theme_pd.get_html_theme_path(),
]
# relative to conf.py
html_static_path = ["_static"]
# relative to conf.py
todo_include_todos = True
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import msgprint
def get_context(context):
context.no_cache = True
# context.sub = frappe.db.sql("""select name, status as date from `tabSupplier` """,as_dict=1)
context.so_list = frappe.db.sql(""" select name,customer,rounded_total as Total,delivery_date from `tabSales Order` order by delivery_date asc""",as_dict=1)
context.sub = frappe.db.sql("""select s.name as name,t.exp_end_date as date from `tabSupplier` s left join `tabTask` t on s.name=t.supplier and t.status !='Closed' order by exp_end_date asc""",as_dict=1)
    print(context.sub)
@frappe.whitelist(allow_guest=False)
def get_events(start,end):
no_cache = True
    event_list = frappe.db.sql("""select description as title, starts_on as start from `tabEvent` where starts_on=%s and ends_on=%s""", (start, end), as_dict=1)
return event_list
|
lista=[1, 2, 3, 4, 6, 5]
lista.sort(reverse=True)
print(lista)
lista=['a', 'b', 'c', 'd', 'e']
print(sorted(lista, reverse=True))
liczba=0xf
liczba2=0xa
print(liczba)
print(liczba2)
print(liczba+liczba2)
|
from utils.network.headless import HeadlessBrowser
from utils.network.socket import Socket
from utils.logging.log import Log
from utils.type.dynamic import DynamicObject
from database.session import Session
from database.engine import Engine
from database.models import Domain
from pipeline.elastic import Elastic
from pipeline.elastic.documents import Webpage, Service, Port
from datetime import datetime
from urllib.parse import urlparse
from io import BytesIO
import pipeline.source as pipelines
import boto3
import os
class Crawler:
"""
DarkLight onion domain crawler.
"""
def __init__(self, ini):
Log.i("Starting crawler")
self.ini = ini
def scan(self, url):
"""Scan and crawl url which user requested."""
Log.i("Trying to crawl {} url".format(url))
domain = urlparse(url).netloc
obj = DynamicObject()
# Step 1. Visit website using headless tor browser
Log.d("Step 1. Visiting {} website using headless browser".format(url))
browser = HeadlessBrowser(
ini=self.ini,
tor_network=True
)
report = browser.run(url)
del browser
        # if the browser raised an exception, return the empty object here
if not report:
return obj
obj.webpage = report
# Step 2. Scan common service port
Log.d("Step 2. Scanning {} domain's common service port".format(domain))
obj.port = self._portscan(domain)
# Step 3. TO-DO
return obj
def _portscan(self, domain):
"""Scan and check opened port."""
socket = Socket(
tor_network=True,
ini=self.ini,
)
# common service port list
services = [
{'number': 20, 'status': False},
{'number': 21, 'status': False},
{'number': 22, 'status': False},
{'number': 23, 'status': False},
{'number': 25, 'status': False},
{'number': 80, 'status': False},
{'number': 110, 'status': False},
{'number': 123, 'status': False}, # NTP
{'number': 143, 'status': False},
{'number': 194, 'status': False}, # IRC
{'number': 389, 'status': False},
{'number': 443, 'status': False},
{'number': 993, 'status': False}, # IMAPS
{'number': 3306, 'status': False},
{'number': 3389, 'status': False},
{'number': 5222, 'status': False}, # XMPP
{'number': 6667, 'status': False}, # Public IRC
{'number': 8060, 'status': False}, # OnionCat
{'number': 8333, 'status': False}, # Bitcoin
]
for i in range(len(services)):
opened = socket.ping_check(domain, services[i]['number'])
services[i]['status'] = opened
Log.d("{} port is {}".format(
services[i]['number'], 'opened' if opened else 'closed'
))
del socket
return services
def save(self, id, obj):
"""Save crawled data into database."""
Log.i("Saving crawled data")
meta = {
'id': id,
}
engine = Engine.create(ini=self.ini)
with Session(engine=engine) as session:
domain = session.query(Domain).filter_by(uuid=id).first()
engine.dispose()
# pass the pipeline before saving data (for preprocessing)
for pipeline in pipelines.__all__:
_class = pipeline(domain, data=obj, ini=self.ini)
if _class.active:
Log.d(f"handling the {_class.name} pipeline")
try:
_class.handle()
                except Exception:
Log.e(f"Error while handling {_class.name} pipeline")
else:
Log.d(f"{_class.name} pipeline isn't active")
del _class
with Elastic(ini=self.ini):
# upload screenshot at Amazon S3
screenshot = self.upload_screenshot(obj.webpage.screenshot, id)
Webpage(
meta=meta,
url=obj.webpage.url,
domain=obj.webpage.domain,
title=obj.webpage.title,
time=datetime.now(),
source=obj.webpage.source,
screenshot=screenshot,
language=obj.webpage.language,
headers=obj.webpage.headers,
tree=obj.webpage.tree,
).save()
Port(
meta=meta,
services=[
Service(number=port['number'], status=port['status']) for port in obj.port]
).save()
def upload_screenshot(self, screenshot, id):
"""Upload screenshot into S3 storage or local storage."""
bucket = self.ini.read('STORAGE', 'BUCKET_NAME')
key = f'screenshot/{id}.jpg'
# if user want to upload screenshot into s3 storage
if bucket:
client = boto3.client(service_name='s3',
region_name=self.ini.read('STORAGE', 'REGION_NAME'),
aws_access_key_id=self.ini.read('STORAGE', 'AWS_ACCESS_KEY_ID'),
aws_secret_access_key=self.ini.read('STORAGE', 'AWS_SECRET_ACCESS_KEY'))
client.upload_fileobj(BytesIO(screenshot),
Bucket=bucket,
Key=key,
ExtraArgs={'ACL': 'public-read'})
return f"{client.meta.endpoint_url}/{bucket}/{key}"
else:
if not os.path.exists('screenshot'):
os.mkdir('screenshot')
with open(key, 'wb') as f:
f.write(screenshot)
return key
    def __del__(self):
        Log.i("Ending crawler")
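
# Illustrative driver for the crawler (the `ini` config object and the UUID
# are placeholders):
#
#   crawler = Crawler(ini)
#   result = crawler.scan('http://example.onion')
#   crawler.save('0f8fad5b-d9cb-469f-a165-70867728950e', result)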
|
#!/usr/bin/env python
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2016-09-14 15:19:35 +0200 (Wed, 14 Sep 2016)
#
# https://github.com/harisekhon/devops-python-tools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn
# and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/harisekhon
#
"""
Tool to generate some random data in HBase for test purposes
Uses the HBase Thrift server. For versions older than HBase 0.96+ or using modified protocols, the connection
protocol / compat / transport settings will need to be adjusted.
Prints a dot for every 100 rows sent to let you know it's still working.
Tested on Hortonworks HDP 2.3 (HBase 1.1.2) and Apache HBase 1.0.3, 1.1.6, 1.2.1, 1.2.2, 1.3.1
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
#from __future__ import unicode_literals
#import logging
import os
import sys
import time
import traceback
import socket
try:
# pylint: disable=wrong-import-position
import happybase # pylint: disable=unused-import
# happybase.hbase.ttypes.IOError no longer there in Happybase 1.0
try:
# this is only importable after happybase module
# pylint: disable=import-error
from Hbase_thrift import IOError as HBaseIOError
except ImportError:
# probably Happybase <= 0.9
# pylint: disable=import-error,no-name-in-module,ungrouped-imports
from happybase.hbase.ttypes import IOError as HBaseIOError
from thriftpy.thrift import TException as ThriftException
import humanize
except ImportError as _:
print('module import error - did you forget to build this project?\n\n'
+ traceback.format_exc(), end='')
sys.exit(4)
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
# pylint: disable=wrong-import-position
from harisekhon.utils import log, die, random_alnum, autoflush, log_option
from harisekhon.utils import validate_host, validate_port, validate_database_tablename, validate_int
from harisekhon import CLI
except ImportError as _:
print(traceback.format_exc(), end='')
sys.exit(4)
__author__ = 'Hari Sekhon'
__version__ = '0.5.2'
class HBaseGenerateData(CLI):
def __init__(self):
# Python 2.x
super(HBaseGenerateData, self).__init__()
# Python 3.x
# super().__init__()
self.conn = None
self.host = None
self.port = 9090
self.verbose_default = 2
self.default_table_name = 'HS_test_data'
self.default_num_rows = 10000
self.default_key_length = 20
self.default_value_length = 40
self.default_skew_pc = 90
self.table = self.default_table_name
self.num_rows = self.default_num_rows
self.key_length = self.default_key_length
self.value_length = self.default_value_length
self.skew = False
self.skew_pc = self.default_skew_pc
self.drop_table = False
self.use_existing_table = False
self.column_family = 'cf1'
self.timeout_default = 6 * 3600
autoflush()
def add_options(self):
self.add_hostoption(name='HBase Thrift Server', default_host='localhost', default_port=self.port)
self.add_opt('-T', '--table', default=self.default_table_name, help='Table to create with the generated data.' +
' Will refuse to send data to any already existing table for safety reasons')
self.add_opt('-n', '--num', default=self.default_num_rows,
help='Number of rows to generate (default {0})'.format(self.default_num_rows))
self.add_opt('-K', '--key-length', default=self.default_key_length,
help='Key length (default: {0})'.format(self.default_key_length))
self.add_opt('-L', '--value-length', default=self.default_value_length,
help='Value length (default: {0})'.format(self.default_value_length))
self.add_opt('-s', '--skew', action='store_true', default=False,
help='Skew the data row keys intentionally for testing (default: False). This will use a key of ' +
'all \'A\'s of length --key-length, plus a numerically incrementing padded suffix')
self.add_opt('--skew-percentage', '--pc', default=self.default_skew_pc,
help='Skew percentage (default: {0})'.format(self.default_skew_pc))
self.add_opt('-d', '--drop-table', action='store_true', default=False,
help='Drop test data table (only allowed if keeping the default table name for safety)')
self.add_opt('-X', '--use-existing-table', action='store_true',
help='Allows sending data to an existing table. ' +
'Dangerous but useful to test pre-splitting schemes on test tables')
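    # Example invocation (a sketch; the script name and the -H / -P host and port
    # switches come from add_hostoption() and are assumptions, the rest are defined above):
    #   ./hbase_generate_data.py -H localhost -P 9090 -n 100000 -K 20 -L 40 -s --skew-percentage 90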
def process_args(self):
# this resets DEBUG env var
#log.setLevel(logging.INFO)
self.no_args()
self.host = self.get_opt('host')
self.port = self.get_opt('port')
validate_host(self.host)
validate_port(self.port)
# happybase socket requires an integer
self.port = int(self.port)
self.table = self.get_opt('table')
self.num_rows = self.get_opt('num')
self.key_length = self.get_opt('key_length')
self.value_length = self.get_opt('value_length')
validate_database_tablename(self.table)
validate_int(self.num_rows, 'num rows', 1, 1000000000)
validate_int(self.key_length, 'key length', 10, 1000)
validate_int(self.value_length, 'value length', 1, 1000000)
self.num_rows = int(self.num_rows)
self.skew = self.get_opt('skew')
log_option('skew data', self.skew)
self.skew_pc = self.get_opt('skew_percentage')
validate_int(self.skew_pc, 'skew percentage', 0, 100)
self.skew_pc = int(self.skew_pc)
self.drop_table = self.get_opt('drop_table')
self.use_existing_table = self.get_opt('use_existing_table')
if self.drop_table and self.table != self.default_table_name:
die("not allowed to use --drop-table if using a table name other than the default table '{0}'"\
.format(self.default_table_name))
def get_tables(self):
try:
log.info('getting table list')
return self.conn.tables()
except (socket.timeout, ThriftException, HBaseIOError) as _:
die('ERROR while trying to get table list: {0}'.format(_))
def run(self):
# might have to use compat / transport / protocol args for older versions of HBase or if protocol has been
# configured to be non-default, see:
# http://happybase.readthedocs.io/en/stable/api.html#connection
try:
log.info('connecting to HBase Thrift Server at {0}:{1}'.format(self.host, self.port))
self.conn = happybase.Connection(host=self.host, port=self.port, timeout=10 * 1000) # ms
tables = self.get_tables()
# of course there is a minor race condition here between getting the table list, checking and creating
# not sure if it's solvable, if you have any idea of how to solve it please let me know, even locking
            # would only protect against multiple runs of this script on the same machine...
if self.table in tables:
if self.drop_table:
log.info("table '%s' already existed but -d / --drop-table was specified, removing table first",
self.table)
self.conn.delete_table(self.table, disable=True)
# wait up to 30 secs for table to be deleted
#for _ in range(30):
# if self.table not in self.get_tables():
# break
# log.debug('waiting for table to be deleted before creating new one')
# time.sleep(1)
elif self.use_existing_table:
pass
else:
die("WARNING: table '{0}' already exists, will not send data to a pre-existing table for safety"\
.format(self.table) +
". You can choose to either --drop-table or --use-existing-table")
if not self.use_existing_table:
self.create_table()
self.populate_table()
log.info('finished, closing connection')
self.conn.close()
except (socket.timeout, ThriftException, HBaseIOError) as _:
die('ERROR: {0}'.format(_))
def create_table(self):
log.info('creating table %s', self.table)
self.conn.create_table(self.table, {self.column_family: dict(max_versions=1)})
def populate_table(self):
table = self.table
key_length = self.key_length
value_length = self.value_length
table_conn = None
# does not actually connect until sending data
#log.info("connecting to test table '%s'", table)
try:
table_conn = self.conn.table(table)
except (socket.timeout, ThriftException, HBaseIOError) as _:
die('ERROR while trying to connect to table \'{0}\': {1}'.format(table, _))
log.info("populating test table '%s' with random data", table)
if self.use_existing_table:
self.column_family = sorted(table_conn.families().keys())[0]
cf_col = self.column_family + ':col1'
try:
skew_prefix = 'A' * key_length
            # guard against ZeroDivisionError: --skew-percentage may validly be 0
            skew_mod = max(1, 100.0 / self.skew_pc) if self.skew_pc else float('inf')
#log.info('skew mod is %s', skew_mod)
width = len('{0}'.format(self.num_rows))
start = time.time()
for _ in range(self.num_rows):
if self.skew and int(_ % skew_mod) == 0:
table_conn.put(bytes(skew_prefix + '{number:0{width}d}'.format(width=width, number=_)), \
{bytes(cf_col): bytes(random_alnum(value_length))})
else:
table_conn.put(bytes(random_alnum(key_length)), {bytes(cf_col): bytes(random_alnum(value_length))})
if _ % 100 == 0:
print('.', file=sys.stderr, end='')
print(file=sys.stderr)
time_taken = time.time() - start
log.info('sent %s rows of generated data to HBase in %.2f seconds (%d rows/sec, %s/sec)',
self.num_rows,
time_taken,
self.num_rows / time_taken,
humanize.naturalsize(self.num_rows * (key_length + value_length) / time_taken)
)
except (socket.timeout, ThriftException, HBaseIOError) as _:
exp = str(_)
exp = exp.replace('\\n', '\n')
exp = exp.replace('\\t', '\t')
die('ERROR while trying to populate table \'{0}\': {1}'.format(table, exp))
if __name__ == '__main__':
HBaseGenerateData().main()
|
# coding: utf-8
# In[17]:
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# In[18]:
driver = webdriver.Firefox()
# In[19]:
driver.get("http://pythonforengineers.com/articles/")
# In[20]:
elem = driver.find_element_by_name("s")
# In[21]:
elem.send_keys("reddit")
# In[22]:
elem.send_keys(Keys.RETURN)
# In[23]:
link = driver.find_element_by_link_text("Build a Reddit Bot Part 1")
# In[24]:
link.click()
# In[25]:
driver.close()
# In[ ]:
|
from __future__ import print_function
from vivo2notld.definitions import definitions, list_definitions
from vivo2notld.utility import execute, execute_list
import argparse
import codecs
if __name__ == "__main__":
parser = argparse.ArgumentParser()
all_definitions = []
all_definitions.extend(definitions.keys())
all_definitions.extend(list_definitions.keys())
parser.add_argument("definition", choices=all_definitions)
parser.add_argument("subject_namespace", help="For example, http://vivo.gwu.edu/individual/")
parser.add_argument("subject_identifier", help="For example, n115")
parser.add_argument("endpoint",
help="Endpoint for SPARQL Query of VIVO instance,e.g., http://localhost/vivo/api/sparqlQuery.")
parser.add_argument("username", help="Username for VIVO root.")
parser.add_argument("password", help="Password for VIVO root.")
parser.add_argument("--format", choices=["json", "yaml", "xml", "nt", "pretty-xml", "trix"],
help="The format for serializing. Default is json.", default="json")
parser.add_argument("--indent", default="4", type=int, help="Number of spaces to use for indents.")
parser.add_argument("--file", help="Filepath to which to serialize.")
parser.add_argument("--debug", action="store_true", help="Also output the query, result graph, and python object.")
parser.add_argument("--offset", type=int, help="Offset for lists.")
parser.add_argument("--limit", type=int, help="Limit for lists.")
    # parse the command line arguments
args = parser.parse_args()
main_select_q = None
main_count_q = None
if args.definition in definitions:
main_o, main_s, main_g, main_q = execute(definitions[args.definition], args.subject_namespace,
args.subject_identifier, args.endpoint, args.username, args.password,
serialization_format=args.format, indent_size=args.indent)
else:
(main_o, main_s, main_g,
main_q, main_select_q, main_count_q) = execute_list(list_definitions[args.definition], args.subject_namespace,
args.subject_identifier, args.endpoint, args.username,
args.password, serialization_format=args.format,
indent_size=args.indent, offset=args.offset, limit=args.limit)
if args.file:
        with codecs.open(args.file, "w", encoding="utf-8") as out:
out.write(main_o)
else:
print(main_o)
if args.debug:
print("""
PYTHON OBJECT:
{s}
RESULT GRAPH:
{g}
QUERY:
{q}
""".format(s=main_s, g=main_g.serialize(format="turtle"), q=main_q))
if args.definition not in definitions:
print("""
SELECT QUERY:
{select_q}
COUNT QUERY:
{count_q}
""".format(select_q=main_select_q,
count_q=main_count_q))
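# Example invocation (a sketch; the script name, definition key, and credentials
# below are hypothetical placeholders):
#   python vivo2notld.py person http://vivo.gwu.edu/individual/ n115 \
#       http://localhost/vivo/api/sparqlQuery vivo_root secret --format json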
|
from typing import List


class Solution:
def longestWord(self, words: List[str]) -> str:
word_set = set(words)
ans = ""
for word in word_set:
flag = True
for i in range(1, len(word)):
if word[:i] not in word_set:
flag = False
break
if flag:
if len(word) > len(ans):
ans = word
elif len(word) == len(ans) and word < ans:
ans = word
return ans
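# Behavior sketch (a classic example input, not taken from this file):
#   Solution().longestWord(["w", "wo", "wor", "worl", "world"]) -> "world"
#   Ties on length resolve to the lexicographically smallest word.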
|
#!/usr/bin/env python
from nanpy import ArduinoApi
from time import sleep
from nanpy.sockconnection import SocketManager
# import logging
# logging.basicConfig(level=logging.DEBUG)
PIN=2
connection = SocketManager()
a = ArduinoApi(connection=connection)
a.pinMode(PIN, a.OUTPUT)
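# blink: (i + 1) % 2 alternates between 1 and 0, toggling pin 2 every 0.2 s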
for i in range(10000):
a.digitalWrite(PIN, (i + 1) % 2)
sleep(0.2)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-12 19:24
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pretixbase', '0014_invoice_additional_text'),
]
operations = [
migrations.RemoveField(
model_name='invoice',
name='is_cancelled',
),
migrations.AddField(
model_name='invoice',
name='is_cancellation',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='invoice',
name='refers',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='refered', to='pretixbase.Invoice'),
),
]
|
import sys
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
from .client import *
from .base_asset import BaseAsset
from .cache_decorator import memoized
from .special_class_methods import special_classes
from .none_deref import NoneDeref
from .string_utils import split_attribute
class V1Meta(object):
def __init__(self, *args, **kw):
self.server = V1Server(*args, **kw)
self.global_cache = {}
self.dirtylist = []
self._memoized_data = {}
    def __getattr__(self, attr):
        """Dynamically build asset type classes when someone tries to get
        attributes that we don't have."""
        return self.asset_class(attr)
def __enter__(self):
return self
def __exit__(self, *args, **kw):
self.clear_memoized_cache()
self.commit()
def clear_memoized_cache(self):
"""Clears the memoization cache produced by the @memoized decorator"""
self._memoized_data={}
@memoized # from .cache_decorator
def asset_class(self, asset_type_name):
xmldata = self.server.get_meta_xml(asset_type_name)
class_members = {
'_v1_v1meta': self,
'_v1_asset_type_name': asset_type_name,
}
for operation in xmldata.findall('Operation'):
opname = operation.get('name')
def operation_func(myself, opname2=opname):
myself._v1_execute_operation(opname2)
class_members[opname] = operation_func
for attribute in xmldata.findall('AttributeDefinition'):
attr = attribute.get("name")
if attribute.get('attributetype') == 'Relation':
if attribute.get('ismultivalue') == 'True':
def getter(self, attr=attr):
return self._v1_getattr(attr)
def setter(self, value, attr=attr):
return self._v1_setattr(attr, list(value))
def deleter(self, attr=attr):
raise NotImplementedError
else:
def getter(self, attr=attr):
v = self._v1_getattr(attr)
if v:
return self._v1_getattr(attr)[0]
else:
return NoneDeref()
def setter(self, value, attr=attr):
return self._v1_setattr(attr, value)
def deleter(self, attr=attr):
raise NotImplementedError
else:
def getter(self, attr=attr):
return self._v1_getattr(attr)
def setter(self, value, attr=attr):
return self._v1_setattr(attr, value)
def deleter(self, attr=attr):
raise NotImplementedError
class_members[attr] = property(getter, setter, deleter)
bases = [BaseAsset,]
# mix in any special methods
if asset_type_name in special_classes:
mixin = special_classes[asset_type_name]
bases.append(mixin)
new_asset_class = type(asset_type_name, tuple(bases), class_members)
return new_asset_class
def add_to_dirty_list(self, asset_instance):
# at some point we're going to flush the items in the dirty list. Since there
# are a few triggers to do this, it's best to clear our memoization cache for
# query responses as soon as we have something that can get flushed rather than
# waiting for it to actually be flushed
self.clear_memoized_cache()
self.dirtylist.append(asset_instance)
def commit(self):
errors = []
# we're flushing changes, make sure our memoization cache is cleared so the updates
# are re-queried
if self.dirtylist:
self.clear_memoized_cache()
for asset in self.dirtylist:
try:
asset._v1_commit()
except V1Error as e:
errors.append(e)
self.dirtylist = []
return errors
def generate_update_doc(self, newdata):
update_doc = Element('Asset')
for attrname, newvalue in newdata.items():
if newvalue is None: # single relation was removed
node = Element('Relation')
node.set('name', attrname)
node.set('act', 'set')
elif isinstance(newvalue, BaseAsset): # single relation was changed
node = Element('Relation')
node.set('name', attrname)
node.set('act', 'set')
ra = Element('Asset')
ra.set('idref', newvalue.idref)
node.append(ra)
elif isinstance(newvalue, list): # multi relation was changed
node = Element('Relation')
node.set('name', attrname)
for item in newvalue:
child = Element('Asset')
child.set('idref', item.idref)
child.set('act', 'add')
node.append(child)
else: # Not a relation
node = Element('Attribute')
node.set('name', attrname)
node.set('act', 'set')
node.text = str(newvalue)
update_doc.append(node)
return update_doc
def create_asset(self, asset_type_name, newdata):
update_doc = self.generate_update_doc(newdata)
new_asset_xml = self.server.create_asset(asset_type_name, update_doc)
asset_type, asset_oid, asset_moment = new_asset_xml.get('id').split(':')
return self.asset_class(asset_type)(asset_oid)
def update_asset(self, asset_type_name, asset_oid, newdata):
update_doc = self.generate_update_doc(newdata)
return self.server.update_asset(asset_type_name, asset_oid, update_doc)
def execute_operation(self, asset_type_name, oid, opname):
return self.server.execute_operation(asset_type_name, oid, opname)
def get_attr(self, asset_type_name, oid, attrname):
xml = self.server.get_attr(asset_type_name, oid, attrname)
dummy_asset = ElementTree.Element('Asset')
dummy_asset.append(xml)
return self.unpack_asset(dummy_asset)[attrname]
def query(self, asset_type_name, wherestring, selstring):
return self.server.get_query_xml(asset_type_name, wherestring, selstring)
def read_asset(self, asset_type_name, asset_oid):
xml = self.server.get_asset_xml(asset_type_name, asset_oid)
return self.unpack_asset(xml)
def unpack_asset(self, xml):
output = {}
self.unpack_asset_relations(output, xml)
self.unpack_asset_attributes(output, xml)
return output
def unpack_asset_attributes(self, output, xml):
for attribute in xml.findall('Attribute'):
#key = attribute.get('name').replace('.','_')
key = attribute.get('name')
values = [v.text for v in attribute.findall('Value')]
if len(values) == 0:
values = [attribute.text]
self.add_attribute_to_output(output, key, values)
def unpack_asset_relations(self, output, xml):
# we sort relations in order to insert the shortest ones first, so that
# containing relations are added before leaf ones.
for relation in sorted(xml.findall('Relation'), key=lambda x: x.get('name')):
key = relation.get('name')
related_asset_elements = relation.findall('Asset')
rellist = []
for value_element in related_asset_elements:
relation_idref = value_element.get('idref')
value = self.asset_from_oid(relation_idref)
rellist.append(value)
self.add_relation_to_output(output, key, rellist)
def add_relation_to_output(self, output, relation, assets):
if self.is_attribute_qualified(relation):
(container, leaf) = self.split_relation_to_container_and_leaf(relation)
asset = self.get_related_asset(output, container)
# asset may be unset because the reference is broken
if asset:
asset.with_data({leaf: assets})
else:
output[relation] = assets
def add_attribute_to_output(self, output, relation, values):
if self.is_attribute_qualified(relation):
(container, leaf) = self.split_relation_to_container_and_leaf(relation)
for (asset, value) in zip(self.get_related_assets(output, container), values):
# for calculated values it is not an asset so take the value directly
if hasattr(asset, 'with_data'):
asset.with_data({leaf: value})
else:
output[relation] = value
else:
output[relation] = values[0]
def is_attribute_qualified(self, relation):
parts = split_attribute(relation)
return len(parts) > 1
def split_relation_to_container_and_leaf(self, relation):
parts = split_attribute(relation)
return ('.'.join(parts[:-1]), parts[-1])
def get_related_assets(self, output, relation):
if self.is_attribute_qualified(relation):
parts = split_attribute(relation)
assets = output[parts[0]]
for part in parts[1:]:
try:
asset = assets[0]
except IndexError:
return []
assets = asset._v1_getattr(part)
return assets
else:
return output[relation]
def get_related_asset(self, output, relation):
assets = self.get_related_assets(output, relation)
try:
return assets[0]
except IndexError:
return None
def asset_from_oid(self, oidtoken):
asset_type, asset_id = oidtoken.split(':')[:2]
AssetClass = self.asset_class(asset_type)
instance = AssetClass(asset_id)
return instance
def set_attachment_blob(self, attachment, data=None):
intid = attachment.intid if isinstance(attachment, BaseAsset) else attachment
return self.server.set_attachment_blob(intid, data)
get_attachment_blob = set_attachment_blob
# This will eventually require iso8601 module
#type_converters = dict(
#    Boolean = bool,
# Numeric = float,
# Date = iso8601.parse_date,
# Duration = str,
# Text = str,
# LongText = str,
# Relation = str,
# Rank = str,
# AssetType = str,
# Opaque = str,
# State = int,
# Password = str,
# Blob = str,
#)
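# A minimal usage sketch (constructor kwargs are passed straight through to
# V1Server and are assumptions here):
#   with V1Meta(username='admin', password='admin') as v1:
#       story = v1.create_asset('Story', {'Name': 'Example story'})
#   # pending changes on dirty assets are committed on context exit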
|
"""
Usage:
project_update.py [ --get_registered_shell_command ]
this module exposes no other useful functions to the commandline
"""
# stdlib
import pathlib
import shutil
from typing import Dict, List, Union
# EXT
from docopt import docopt # type: ignore
# OWN
import project_conf
def format_commandline_help_file() -> None:
source_file = pathlib.Path(__file__).parent / '.docs/commandline_help.txt'
if source_file.is_file():
with open(source_file, 'r') as f_sourcefile:
commandline_help_txt_lines = f_sourcefile.readlines()
with open(source_file, 'w') as f_targetfile:
target_lines = list()
target_lines.append('.. code-block:: bash\n\n')
target_lines.append('')
for commandline_help_txt_line in commandline_help_txt_lines:
target_lines.append(' ' + commandline_help_txt_line)
f_targetfile.writelines(target_lines)
else:
with open(str(source_file), 'w') as f_targetfile:
f_targetfile.write('.. code-block:: bash\n\n there are no commandline options\n')
def create_commandline_help_file() -> None:
"""
>>> create_commandline_help_file()
"""
import subprocess
import sys
module_path = pathlib.Path('./{src_dir}/{module_name}.py'.format(src_dir=project_conf.src_dir, module_name=project_conf.module_name))
if module_path.is_file():
module_path = module_path.resolve()
command = '{sys_executable} {module_path} -h > ./.docs/commandline_help.txt'.format(sys_executable=sys.executable, module_path=module_path)
subprocess.run(command, shell=True)
format_commandline_help_file()
def create_init_config_file() -> None:
path_source_dir = get_path_template_dir_local() / 'templates'
path_target_dir = pathlib.Path(__file__).parent.resolve() / project_conf.src_dir
path_target_dir.mkdir(parents=True, exist_ok=True)
# overwrite __init__conf__py from template
path_targetfile = path_target_dir / '__init__conf__.py'
path_sourcefile = path_source_dir / '__init__conf__.py'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# replace the markers
with open(path_targetfile, 'r') as f_targetfile:
text = f_targetfile.read()
text = text.replace('{version}', project_conf.version)
text = text.replace('{title}', project_conf.init_config_title)
text = text.replace('{name}', project_conf.init_config_name)
text = text.replace('{url}', project_conf.url)
text = text.replace('{author}', project_conf.author)
text = text.replace('{author_email}', project_conf.author_email)
text = text.replace('{shell_command}', project_conf.shell_command)
with open(path_targetfile, 'w') as f_targetfile:
f_targetfile.write(text)
# copy __init__.py if not there from template
path_targetfile = path_target_dir / '__init__.py'
if not path_targetfile.is_file():
path_sourcefile = path_source_dir / '__init__.py'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# copy main.py if not there from template
path_targetfile = path_target_dir / (project_conf.module_name + '.py')
if not path_targetfile.is_file():
path_sourcefile = path_source_dir / 'main.py'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# copy __doc__.py if not there from template
path_targetfile = path_target_dir / '__doc__.py'
if not path_targetfile.is_file():
path_sourcefile = path_source_dir / '__doc__.py'
shutil.copy(str(path_sourcefile), str(path_targetfile))
def is_in_own_project_folder() -> bool:
if pathlib.Path(__file__).parts[-2] == 'lib_travis_template':
return True
else:
return False
def get_path_template_dir_local() -> pathlib.Path:
    path_current_dir = pathlib.Path(__file__).parent.resolve()
    while True:
        if path_current_dir == path_current_dir.parent:
            # reached the filesystem root without finding the template directory
            raise FileNotFoundError("template directory 'lib_travis_template' not found")
        path_current_dir = path_current_dir.parent
        for subdir in path_current_dir.glob('**/'):
            if subdir.parts[-1] == 'lib_travis_template':
                return subdir
def is_ok_to_copy(path_source_file: pathlib.Path) -> bool:
""" its ok when a file and not in the list """
files_not_to_copy = ['requirements.txt', 'project_conf.py', '.travis.yml', 'README.rst',
'CHANGES.rst', 'description.rst', 'usage.rst', 'installation.rst', 'acknowledgment.rst',
'badges_project.rst', 'badges_with_jupyter.rst', 'badges_without_jupyter.rst', '__doc__.py',
'index.rst', 'index_jupyter.rst', 'try_in_jupyter.rst']
if path_source_file.is_file():
if path_source_file.name in files_not_to_copy:
return False
else:
return True
else:
return False
def get_paths_to_copy(path_source_dir: pathlib.Path) -> List[pathlib.Path]:
paths_source = list(path_source_dir.glob('*'))
paths_source = paths_source + list(path_source_dir.glob('**/.docs/*'))
paths_source = paths_source + list(path_source_dir.glob('**/tests/*'))
paths_source = sorted(paths_source)
return paths_source
def copy_project_files() -> None:
"""
copy the template files to the current project on the local development machine
we dont overwrite some files, see code
"""
path_source_dir = get_path_template_dir_local()
path_target_dir = pathlib.Path(__file__).parent.resolve()
s_path_source_dir = str(path_source_dir)
s_path_target_dir = str(path_target_dir)
l_path_sourcefiles = get_paths_to_copy(path_source_dir)
for path_sourcefile in l_path_sourcefiles:
if is_ok_to_copy(path_sourcefile):
s_path_sourcefile = str(path_sourcefile)
s_path_targetfile = s_path_sourcefile.replace(s_path_source_dir, s_path_target_dir, 1)
path_targetfile = pathlib.Path(s_path_targetfile)
if not path_targetfile.parent.is_dir():
path_targetfile.parent.mkdir(exist_ok=True)
shutil.copy(s_path_sourcefile, s_path_targetfile)
def copy_template_files() -> None:
path_source_dir = get_path_template_dir_local()
path_target_dir = pathlib.Path(__file__).parent.resolve()
# copy CHANGES.rst template if not there
path_targetfile = path_target_dir / 'CHANGES.rst'
if not path_targetfile.is_file():
path_sourcefile = path_source_dir / 'templates/CHANGES.rst'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# copy usage.rst template if not there
path_targetfile = path_target_dir / '.docs/usage.rst'
if not path_targetfile.is_file():
path_sourcefile = path_source_dir / 'templates/usage.rst'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# copy description.rst template if not there
path_targetfile = path_target_dir / '.docs/description.rst'
if not path_targetfile.is_file():
path_sourcefile = path_source_dir / 'templates/description.rst'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# copy acknowledgment.rst template if not there
path_targetfile = path_target_dir / '.docs/acknowledgment.rst'
if not path_targetfile.is_file():
path_sourcefile = path_source_dir / 'templates/acknowledgment.rst'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# copy index.rst template if not there
path_targetfile = path_target_dir / '.docs/index.rst'
if not path_targetfile.is_file():
if project_conf.badges_with_jupiter:
path_sourcefile = path_source_dir / 'templates/index_jupyter.rst'
else:
path_sourcefile = path_source_dir / 'templates/index.rst'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# copy try_in_jupyter.rst template if not there
path_targetfile = path_target_dir / '.docs/try_in_jupyter.rst'
if project_conf.badges_with_jupiter:
path_sourcefile = path_source_dir / 'templates/try_in_jupyter.rst'
shutil.copy(str(path_sourcefile), str(path_targetfile))
else:
path_targetfile.unlink(missing_ok=True)
# overwrite badges template
if project_conf.badges_with_jupiter:
path_sourcefile = path_source_dir / '.docs/badges_with_jupyter.rst'
else:
path_sourcefile = path_source_dir / '.docs/badges_without_jupyter.rst'
path_targetfile = path_target_dir / '.docs/badges_project.rst'
shutil.copy(str(path_sourcefile), str(path_targetfile))
# overwrite installation.rst template
path_targetfile = path_target_dir / '.docs/installation.rst'
path_sourcefile = path_source_dir / 'templates/installation.rst'
shutil.copy(str(path_sourcefile), str(path_targetfile))
def replace_marker(text: str, marker: str, src_filename: str, replace_marker_with_src_file: bool = True) -> str:
""" replace a marker in the text with the content of a file, or with '' """
if replace_marker_with_src_file:
path_base_dir = pathlib.Path(__file__).parent
path_src_filename = path_base_dir / src_filename
with open(str(path_src_filename), 'r') as f_src_filename:
s_src = f_src_filename.read()
text = text.replace(marker, s_src)
else:
text = text.replace(marker, '')
return text
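# e.g. replace_marker('x {m} y', '{m}', 'snippet.txt') splices the contents of
# snippet.txt in place of '{m}'; with replace_marker_with_src_file=False the
# marker is simply removed ('snippet.txt' is a hypothetical file)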
def create_travis_file() -> None:
if not project_conf.travis_pypi_secure_code:
travis_pypi_secure_code = '# - secure: "none"'
else:
travis_pypi_secure_code = '- secure: "{code}"'.format(code=project_conf.travis_pypi_secure_code)
path_base_dir = pathlib.Path(__file__).parent
text = '{travis_template}\n'
text = replace_marker(text=text, marker='{travis_template}', src_filename='.travis_template.yml')
text = replace_marker(text=text, marker='{travis_template_linux_addon}',
src_filename='.travis_template_linux_addon.yml', replace_marker_with_src_file=project_conf.linux_tests)
text = replace_marker(text=text, marker='{travis_template_osx_addon}',
src_filename='.travis_template_osx_addon.yml', replace_marker_with_src_file=project_conf.osx_tests)
text = replace_marker(text=text, marker='{travis_template_pypy_addon}',
src_filename='.travis_template_pypy_addon.yml', replace_marker_with_src_file=project_conf.pypy_tests)
text = replace_marker(text=text, marker='{travis_template_windows_addon}',
src_filename='.travis_template_windows_addon.yml', replace_marker_with_src_file=project_conf.windows_tests)
text = replace_marker(text=text, marker='{travis_template_wine_addon}',
src_filename='.travis_template_wine_addon.yml', replace_marker_with_src_file=project_conf.wine_tests)
text = text.replace('{package_name}', project_conf.package_name)
text = text.replace('{cc_test_reporter_id}', project_conf.cc_test_reporter_id)
text = text.replace('{travis_pypi_secure_code}', travis_pypi_secure_code)
text = text.replace('{travis_repo_slug}', project_conf.travis_repo_slug)
text = text.replace('{github_master}', project_conf.github_master)
target_file = path_base_dir / '.travis.yml'
with open(target_file, 'w') as f_target_file:
f_target_file.write(text)
if not is_in_own_project_folder():
(path_base_dir / '.travis_template.yml').unlink()
(path_base_dir / '.travis_template_linux_addon.yml').unlink()
(path_base_dir / '.travis_template_osx_addon.yml').unlink()
(path_base_dir / '.travis_template_pypy_addon.yml').unlink()
(path_base_dir / '.travis_template_windows_addon.yml').unlink()
(path_base_dir / '.travis_template_wine_addon.yml').unlink()
def main(docopt_args: Dict[str, Union[bool, str]]) -> None:
if docopt_args['--get_registered_shell_command']:
print(project_conf.shell_command)
else:
create_init_config_file()
# copy files from template folder to current project
        if not is_in_own_project_folder():  # we don't want to copy if we run this in the template project itself
copy_project_files()
copy_template_files()
# create travis file
create_travis_file()
# create readme.rst
create_commandline_help_file()
import build_docs
build_docs_args = dict()
build_docs_args['<TRAVIS_REPO_SLUG>'] = '{}/{}'.format(project_conf.github_account, project_conf.package_name)
build_docs.main(build_docs_args)
# entry point via commandline
def main_commandline() -> None:
"""
>>> main_commandline() # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
docopt.DocoptExit: ...
"""
docopt_args = docopt(__doc__)
main(docopt_args) # pragma: no cover
# entry point if main
if __name__ == '__main__':
main_commandline()
|
#!/usr/bin/env python
"""Simple draft client with websockets for Vainglory, but more or less usable for whatever draft you want..."""
import json
import logging
import os.path
import secrets
import threading
import time
from datetime import datetime
import tornado.escape
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
from cryptography.fernet import Fernet, InvalidToken
from tornado import gen
from tornado.options import define, options
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="enable or disable debug mode", type=bool)
define("cookie_key", default=secrets.token_urlsafe(32), help="cookie secret key", type=str)
# TODO move to class
key = Fernet.generate_key()
f = Fernet(key)
# TODO move to Redis
draft_states = {}
# TODO consistency with role / side / team
# TODO consistency with blue / red / 1 / 2
# TODO consistency with message / event / chat
# TODO move options / teams to separate classes
# TODO timers optional
class DraftState():
def __init__(self, room, style, heroes, team_blue, team_red, seconds_per_turn, bonus_time, background, background_url):
self.room = room
self.style = style
self.heroes = heroes
self.team_blue = team_blue
self.team_red = team_red
self.turn = 0
self.seconds_per_turn = seconds_per_turn
self.initial_bonus_time = bonus_time
self.bonus_time = {
'1': bonus_time,
'2': bonus_time,
}
self.started = False
self.join_status = {
'0': False,
'1': False,
'2': False,
}
self.history = []
self.counter = SecondCounter(self.room, self.seconds_per_turn, self.bonus_time[self.get_current_team()], self.get_current_team())
self.background = background
self.background_url = background_url
def get_team_blue(self):
return self.team_blue
def get_team_red(self):
return self.team_red
def is_joined(self, team):
return self.join_status[team]
def has_joined(self, team):
self.join_status[team] = True
if team == '0' and self.is_ready():
self.started = True
def get_join_status(self):
return self.join_status
def get_heroes(self):
return self.heroes
def get_style(self):
# TODO Remove need of including index
return self.style
def get_history(self):
return self.history
def get_turn(self):
return self.turn
def get_current_team(self):
return self.style[self.turn]['side']
def start_counter(self):
tornado.ioloop.IOLoop.current().spawn_callback(lambda: self.counter.loop())
def stop_counter(self):
v = self.counter.finish()
if v['type'] == 'bonus':
self.bonus_time[v['team']] = v['value']
return v
def reset_counter(self):
self.counter = SecondCounter(self.room, self.seconds_per_turn, self.bonus_time[self.get_current_team()], self.get_current_team())
def next_turn(self):
self.turn += 1
def update_draft(self, event):
self.history.append(event)
self.next_turn()
if not self.is_ended():
self.reset_counter()
self.start_counter()
def is_ready(self):
return self.is_joined('1') and self.is_joined('2')
def is_started(self):
return self.started
def is_turn(self, team):
return self.get_current_team() == team
def is_ended(self):
return self.turn >= len(self.style)
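# Lifecycle sketch (all argument values here are illustrative):
#   state = DraftState('room1', style=[{'side': '1'}, {'side': '2'}], heroes=[],
#                      team_blue='Blue', team_red='Red', seconds_per_turn=30,
#                      bonus_time=60, background='off', background_url='')
#   state.has_joined('1'); state.has_joined('2')   # both teams joined -> is_ready()
#   state.has_joined('0')                          # admin join flips started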
# TODO change chatsocket to draft socket...
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", MainHandler),
(r"/theme", CookieHandler),
(r"/draft/([a-zA-Z0-9-_=]*)$", DraftHandler),
(r"/draftstatus/([a-zA-Z0-9-_=]*)$", DraftStatusHandler),
(r"/chatsocket/([a-zA-Z0-9-_=]*)$", ChatSocketHandler),
]
settings = dict(
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
cookie_secret=options.cookie_key,
xsrf_cookies=True,
debug=options.debug,
)
super(Application, self).__init__(handlers, **settings)
class CustomHandler(tornado.web.RequestHandler):
def get_theme(self):
        cookie = self.get_secure_cookie("theme_cookie")
        return bool(cookie) and cookie.decode() == "dark"
class MainHandler(CustomHandler):
"""
Main request handler for the root path and for draft creation post request.
"""
def get(self):
self.render("index.html", dark=self.get_theme())
def post(self):
# TODO provide defaults
team_blue = self.get_argument('teamBlue')
team_red = self.get_argument('teamRed')
seconds_per_turn = self.get_argument('secondsPerTurn')
bonus_time = self.get_argument('bonusTime')
draft = self.get_argument('draftField')
heroesField = self.get_argument('heroesField')
background = self.get_argument('customBackground', 'off')
background_url = self.get_argument('customBackgroundField')
logging.info(background)
logging.info(background_url)
        # TODO make this foolproof / validate beforehand
style = json.loads(draft)
heroes = json.loads(heroesField)
room = secrets.token_urlsafe(16)
# TODO DRY it
string_admin = "{room}|{role}".format(room=room, role='0')
hash_admin = f.encrypt(str.encode(string_admin))
string_blue = "{room}|{role}".format(room=room, role='1')
hash_blue = f.encrypt(str.encode(string_blue))
string_red = "{room}|{role}".format(room=room, role='2')
hash_red = f.encrypt(str.encode(string_red))
string_spec = "{room}|{role}".format(room=room, role='spec')
hash_spec = f.encrypt(str.encode(string_spec))
url = self.request.protocol + "://" + self.request.host + "/draft/{}"
if room not in draft_states:
draft_states[room] = DraftState(room, style, heroes, team_blue, team_red, int(seconds_per_turn), int(bonus_time), background, background_url)
self.render('invite.html', dark=self.get_theme(), room=room, admin=url.format(hash_admin.decode()), spectators=url.format(hash_spec.decode()), team_blue=url.format(hash_blue.decode()), team_red=url.format(hash_red.decode()))
class CookieHandler(tornado.web.RequestHandler):
"""
Endpoint to change the theme color.
"""
def get(self):
theme = self.read()
if theme == "dark":
self.set("light")
else:
self.set("dark")
def read(self):
if self.get_secure_cookie("theme_cookie"):
return self.get_secure_cookie("theme_cookie").decode()
def set(self, theme):
logging.info('Set theme cookie to %s', theme)
self.set_secure_cookie("theme_cookie", theme, path="/")
class DraftStatusHandler(tornado.web.RequestHandler):
"""
Endpoint to request status of a draft.
"""
def get(self, room=None):
if not room:
self.redirect('/')
return
if room not in draft_states:
self.redirect('/')
return
draft_state = draft_states[room]
response = draft_state.get_join_status()
response['ready'] = draft_state.is_ready()
self.write(response)
class DraftHandler(CustomHandler):
"""
Handler to generate the draft page.
"""
def get(self, hash=None):
if not hash:
self.redirect('/')
return
# TODO DRY it
try:
decrypted = f.decrypt(str.encode(hash)).decode()
        except InvalidToken as e:
logging.error(e)
self.redirect('/')
return
room, role = decrypted.split("|")
draft_state = draft_states[room]
self.render("draft.html", dark=self.get_theme(), hash=hash, role=role,
team_blue=draft_state.get_team_blue(),
team_red=draft_state.get_team_red(),
draft_order=draft_state.get_style(),
heroes=draft_state.get_heroes(),
seconds_per_turn = draft_state.seconds_per_turn,
bonus_time = draft_state.initial_bonus_time,
background = draft_state.background,
background_url = draft_state.background_url
)
class ChatSocketHandler(tornado.websocket.WebSocketHandler):
"""
Handler for dealing with websockets. It receives, stores and distributes new messages.
"""
waiters = {}
def open(self, hash=None):
"""
Called when socket is opened.
"""
if not hash:
self.send_update(self, self.create_message("error", "No room specified"))
self.close()
return
try:
decrypted = f.decrypt(str.encode(hash)).decode()
        except InvalidToken as e:
            logging.error(e)
            # a websocket handshake cannot be redirected; report the error and close instead
            self.send_update(self, self.create_message("error", "Invalid room token"))
            self.close()
            return
room, role = decrypted.split("|")
if not room:
self.send_update(self, self.create_message("error", "No room specified"))
self.close()
return
if not role:
self.send_update(self, self.create_message("error", "No role specified"))
self.close()
return
self.room = room
self.role = role
draft_state = draft_states[room]
draft_state.has_joined(self.role)
if room in ChatSocketHandler.waiters:
# TODO fix for multiple spectators
if (role == '1' or role == '2') and role in [client['role'] for client in ChatSocketHandler.waiters[room]]:
logging.info('Error: Role already specified')
self.send_update(self, self.create_message("error", "Role already specified"))
self.room = None
self.close()
else:
ChatSocketHandler.waiters[room].append({'waiter': self, 'role': self.role})
message = draft_state.get_history()
self.send_update(self, self.create_message("history", message))
if draft_state.is_started():
self.send_updates(self.room, self.create_message("start", "Draft has started"))
draft_state.start_counter()
else:
ChatSocketHandler.waiters[room] = [{'waiter': self, 'role': self.role}]
@classmethod
def send_updates(cls, room, message):
logging.info("sending message to %d waiters in room %s", len(cls.waiters[room]), room)
logging.info(message)
for client in cls.waiters[room]:
try:
client['waiter'].write_message(message)
            except Exception:
logging.error("Error sending message", exc_info=True)
@classmethod
def send_update(cls, waiter, message):
logging.info("sending message to waiter %s", waiter)
logging.info(message)
try:
waiter.write_message(message)
        except Exception:
logging.error("Error sending message", exc_info=True)
def on_message(self, message):
"""
Callback when new message received via the socket.
"""
logging.info('Received new message %r', message)
# TODO validate message
# TODO fix this
if self.role != '1' and self.role != '2':
return
draft_state = draft_states[self.room]
logging.info(draft_state.stop_counter())
if not draft_state.is_started():
logging.info('Draft is not yet started')
self.send_update(self, self.create_message("message", "Draft is not yet started"))
return
if not draft_state.is_turn(self.role):
logging.info('Not your turn')
self.send_update(self, self.create_message("message", "Not your turn"))
return
event = self.create_message("update", message)
draft_state.update_draft(event)
event['index'] = draft_state.get_turn()
self.send_updates(self.room, event)
if draft_state.is_ended():
logging.info('Draft has ended')
self.send_updates(self.room, self.create_message("message", "Draft has ended"))
self.close()
return
draft_states[self.room] = draft_state
def on_close(self):
"""
Callback when the socket is closed. Frees up resource related to this socket.
"""
        # open() may have bailed out before self.room was assigned
        if not getattr(self, 'room', None):
            return
remove_clients = [client for client in self.waiters[self.room] if client['role'] == self.role]
for client in remove_clients:
self.waiters[self.room].remove(client)
if not self.waiters[self.room]:
del self.waiters[self.room]
    def create_message(self, msg_type, message):
        event = {
            'time': str(datetime.now()),
            'type': msg_type,
            'message': message,
        }
        return event
class SecondCounter():
"""
Background thread for counter (called with ioloop.spawn_callback).
"""
def __init__(self, room, value, bonus, team):
self.alive = True
self.room = room
self.value = value
self.bonus = bonus
self.team = team
@gen.coroutine
def loop(self):
while self.alive and self.value > 0:
nxt = gen.sleep(1)
self.value -= 1
            # send_updates is a plain function; yielding its None return value
            # would trip up the coroutine runner
            ChatSocketHandler.send_updates(self.room, {'type': 'time', 'message': self.value})
yield nxt
while self.alive and self.bonus > 0:
nxt = gen.sleep(1)
self.bonus -= 1
            ChatSocketHandler.send_updates(self.room, {'type': 'bonustime', 'message': self.bonus, 'team': self.team})
yield nxt
if self.alive:
ChatSocketHandler.send_updates(self.room, {'type': 'message', 'message': "Time is up!"})
def finish(self):
self.alive = False
if self.value > 0:
return {'type': 'time', 'value': self.value, 'team': self.team}
else:
return {'type': 'bonus', 'value': self.bonus, 'team': self.team}
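    # finish() feeds DraftState.stop_counter(): a 'bonus' result carries the
    # remaining bonus seconds back so they persist across turns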
def main():
tornado.options.parse_command_line()
app = Application()
app.listen(options.port)
tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
from options import Options
class New_contact(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.implicitly_wait(30)
self.base_url = "https://www.google.com/"
self.verificationErrors = []
self.accept_next_alert = True
def test_new_contact(self):
driver = self.driver
self.login(driver, username="admin", password="secret")
self.create_new_contact(driver, Options(f_name="name1", m_name="middle1", l_name="last1", n_name="nick1", user_title="title1", user_company="comp1", user_address="address1", user_home="01", user_mobile="02",
user_work="03", user_fax="04", user_email="email1@gmail.com", user_email2="email2@gmail.com", user_email3="email3@gmail.com",
user_homepage="email4@gmail.com", user_addres2="address_sec", user_phone2="home_my", user_notes="notes"))
self.logout(driver)
def test_new_empty_contact(self):
driver = self.driver
self.login(driver, username="admin", password="secret")
self.create_new_contact(driver, Options(f_name="", m_name="", l_name="", n_name="", user_title="", user_company="", user_address="", user_home="", user_mobile="",
user_work="", user_fax="", user_email="", user_email2="", user_email3="",
user_homepage="", user_addres2="", user_phone2="", user_notes=""))
self.logout(driver)
def logout(self, driver):
driver.find_element_by_link_text("Logout").click()
def create_new_contact(self, driver, options):
# init contact creation
driver.find_element_by_link_text("add new").click()
# fill contact form
driver.find_element_by_name("firstname").click()
driver.find_element_by_name("firstname").clear()
driver.find_element_by_name("firstname").send_keys(options.f_name)
driver.find_element_by_name("middlename").click()
driver.find_element_by_name("middlename").clear()
driver.find_element_by_name("middlename").send_keys(options.m_name)
driver.find_element_by_name("lastname").click()
driver.find_element_by_name("lastname").clear()
driver.find_element_by_name("lastname").send_keys(options.l_name)
driver.find_element_by_name("nickname").click()
driver.find_element_by_name("nickname").clear()
driver.find_element_by_name("nickname").send_keys(options.n_name)
driver.find_element_by_name("theform").click()
driver.find_element_by_name("title").click()
driver.find_element_by_name("title").clear()
driver.find_element_by_name("title").send_keys(options.user_title)
driver.find_element_by_name("company").click()
driver.find_element_by_name("company").clear()
driver.find_element_by_name("company").send_keys(options.user_company)
driver.find_element_by_name("address").click()
driver.find_element_by_name("address").clear()
driver.find_element_by_name("address").send_keys(options.user_address)
driver.find_element_by_name("home").click()
driver.find_element_by_name("home").clear()
driver.find_element_by_name("home").send_keys(options.user_home)
driver.find_element_by_name("mobile").click()
driver.find_element_by_name("mobile").clear()
driver.find_element_by_name("mobile").send_keys(options.user_mobile)
driver.find_element_by_name("work").click()
driver.find_element_by_name("work").clear()
driver.find_element_by_name("work").send_keys(options.user_work)
driver.find_element_by_name("fax").click()
driver.find_element_by_name("fax").clear()
driver.find_element_by_name("fax").send_keys(options.user_fax)
driver.find_element_by_name("email").click()
driver.find_element_by_name("email").clear()
driver.find_element_by_name("email").send_keys(options.user_email)
driver.find_element_by_name("email2").click()
driver.find_element_by_name("email2").clear()
driver.find_element_by_name("email2").send_keys(options.user_email2)
driver.find_element_by_name("email3").click()
driver.find_element_by_name("email3").clear()
driver.find_element_by_name("email3").send_keys(options.user_email3)
driver.find_element_by_name("theform").click()
driver.find_element_by_name("homepage").click()
driver.find_element_by_name("homepage").clear()
driver.find_element_by_name("homepage").send_keys(options.user_homepage)
driver.find_element_by_name("address2").click()
driver.find_element_by_name("address2").clear()
driver.find_element_by_name("address2").send_keys(options.user_addres2)
driver.find_element_by_name("phone2").click()
driver.find_element_by_name("phone2").clear()
driver.find_element_by_name("phone2").send_keys(options.user_phone2)
driver.find_element_by_name("notes").click()
driver.find_element_by_name("notes").clear()
driver.find_element_by_name("notes").send_keys(options.user_notes)
driver.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
# return to home page
driver.find_element_by_link_text("home page").click()
def login(self, driver, username, password):
driver.get("http://localhost/addressbook/")
driver.find_element_by_name("user").click()
driver.find_element_by_name("user").clear()
driver.find_element_by_name("user").send_keys(username)
driver.find_element_by_id("content").click()
driver.find_element_by_name("pass").click()
driver.find_element_by_name("pass").clear()
driver.find_element_by_name("pass").send_keys(password)
driver.find_element_by_xpath("//input[@value='Login']").click()
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException as e: return False
return True
def is_alert_present(self):
        try: self.driver.switch_to.alert
except NoAlertPresentException as e: return False
return True
def close_alert_and_get_its_text(self):
try:
            alert = self.driver.switch_to.alert
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
|
import tensorflow as tf
from tensorflow.keras.models import Sequential, Model
# Input, Conv2DTranspose and concatenate are needed by unet() below
from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, UpSampling2D, Conv2DTranspose, BatchNormalization, Reshape, Permute, Activation, concatenate
from tensorflow.keras.optimizers import Adam
from cfg import *
img_w = 256
img_h = 256
lr_init = 0.01
lr_decay = 0.00001
def SegNet():
model = Sequential()
#encoder
model.add(Conv2D(64,(3,3),strides=(1,1),input_shape=(img_w,img_h,3),padding='same',activation='relu',data_format='channels_last'))
model.add(BatchNormalization())
model.add(Conv2D(64,(3,3),strides=(1,1),padding='same',activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2,2)))
#(128,128)
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2,2)))
#(64,64)
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
#(32,32)
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
#(16,16)
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
#(8,8)
#decoder
model.add(UpSampling2D(size=(2,2)))
#(16,16)
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(UpSampling2D(size=(2, 2)))
#(32,32)
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(UpSampling2D(size=(2, 2)))
#(64,64)
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(UpSampling2D(size=(2, 2)))
#(128,128)
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(UpSampling2D(size=(2, 2)))
#(256,256)
    # input_shape is only meaningful on the first layer, so it is dropped here
    model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(n_label, (1, 1), strides=(1, 1), padding='same'))
model.add(Activation('softmax'))
model.compile(optimizer=Adam(lr=lr_init, decay=lr_decay),
loss='categorical_crossentropy',
metrics=['categorical_accuracy'])
# model.summary()
return model
def unet(num_classes, input_shape, lr_init, lr_decay):
img_input = Input(input_shape)
# Block 1
x = Conv2D(64, (3, 3), padding='same', name='block1_conv1')(img_input)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(64, (3, 3), padding='same', name='block1_conv2')(x)
x = BatchNormalization()(x)
block_1_out = Activation('relu')(x)
x = MaxPooling2D()(block_1_out)
# Block 2
x = Conv2D(128, (3, 3), padding='same', name='block2_conv1')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(128, (3, 3), padding='same', name='block2_conv2')(x)
x = BatchNormalization()(x)
block_2_out = Activation('relu')(x)
x = MaxPooling2D()(block_2_out)
# Block 3
x = Conv2D(256, (3, 3), padding='same', name='block3_conv1')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(256, (3, 3), padding='same', name='block3_conv2')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(256, (3, 3), padding='same', name='block3_conv3')(x)
x = BatchNormalization()(x)
block_3_out = Activation('relu')(x)
x = MaxPooling2D()(block_3_out)
# Block 4
x = Conv2D(512, (3, 3), padding='same', name='block4_conv1')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(512, (3, 3), padding='same', name='block4_conv2')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(512, (3, 3), padding='same', name='block4_conv3')(x)
x = BatchNormalization()(x)
block_4_out = Activation('relu')(x)
x = MaxPooling2D()(block_4_out)
# Block 5
x = Conv2D(512, (3, 3), padding='same', name='block5_conv1')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(512, (3, 3), padding='same', name='block5_conv2')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(512, (3, 3), padding='same', name='block5_conv3')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
# UP 1
x = Conv2DTranspose(512, (2, 2), strides=(2, 2), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = concatenate([x, block_4_out])
x = Conv2D(512, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(512, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
# UP 2
x = Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = concatenate([x, block_3_out])
x = Conv2D(256, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(256, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
# UP 3
x = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = concatenate([x, block_2_out])
x = Conv2D(128, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(128, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
# UP 4
x = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = concatenate([x, block_1_out])
x = Conv2D(64, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(64, (3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
# last conv
x = Conv2D(num_classes, (3, 3), activation='softmax', padding='same')(x)
model = Model(img_input, x)
model.compile(optimizer=Adam(lr=lr_init, decay=lr_decay),
loss='categorical_crossentropy',
metrics=['categorical_accuracy'])
model.summary()
return model
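# Usage sketch (shape and class count are illustrative):
#   model = unet(num_classes=2, input_shape=(256, 256, 3), lr_init=0.01, lr_decay=1e-5)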
if __name__ == "__main__":
img_w = 256
img_h = 256
lr_init = 0.01
lr_decay = 0.00001
model = SegNet()
    model.summary()
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('profile/<int:user_id>/', views.profile, name='profile'),
path('profile/devices/', views.devices, name='devices'),
path('profile/new_device/', views.new_device, name='new_device'),
path('profile/delete_device/<int:device_id>/', views.delete_device, name='delete_device'),
path('profile/edit_device/<int:device_id>/', views.edit_device, name='edit_device'),
path('profile/detail_device/<int:device_id>/', views.detail_device, name='detail_device'),
]
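# Hypothetical companion view sketch (illustrative; not from the original app).
# Each <int:device_id> converter above is passed to its view as an int keyword
# argument, e.g.:
#
# from django.shortcuts import get_object_or_404, render
#
# def detail_device(request, device_id):
#     device = get_object_or_404(Device, pk=device_id)  # assumes a Device model
#     return render(request, 'devices/detail_device.html', {'device': device})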
|
# Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from typing import Any, Dict, Iterable, List, Optional, Tuple, TypeVar, Union
import tensorflow as tf
import torch
from fastestimator.backend.binary_crossentropy import binary_crossentropy
from fastestimator.backend.categorical_crossentropy import categorical_crossentropy
from fastestimator.backend.sparse_categorical_crossentropy import sparse_categorical_crossentropy
from fastestimator.op.tensorop.loss.loss import LossOp
from fastestimator.util.traceability_util import traceable
Tensor = TypeVar('Tensor', tf.Tensor, torch.Tensor)
@traceable()
class CrossEntropy(LossOp):
"""Calculate Element-Wise CrossEntropy (binary, categorical or sparse categorical).
Args:
inputs: A tuple or list like: [<y_pred>, <y_true>].
outputs: String key under which to store the computed loss value.
mode: What mode(s) to execute this Op in. For example, "train", "eval", "test", or "infer". To execute
regardless of mode, pass None. To execute in all modes except for a particular one, you can pass an argument
like "!infer" or "!train".
ds_id: What dataset id(s) to execute this Op in. To execute regardless of ds_id, pass None. To execute in all
ds_ids except for a particular one, you can pass an argument like "!ds1".
from_logits: Whether y_pred is logits (without softmax).
average_loss: Whether to average the element-wise loss after the Loss Op.
form: What form of cross entropy should be performed ('binary', 'categorical', 'sparse', or None). None will
automatically infer the correct form based on tensor shape.
class_weights: Dictionary mapping class indices to a weight for weighting the loss function. Useful when you
need to pay more attention to samples from an under-represented class.
Raises:
        AssertionError: If `class_weights` or its keys and values are of unacceptable data types.
"""
def __init__(self,
inputs: Union[Tuple[str, str], List[str]],
outputs: str,
mode: Union[None, str, Iterable[str]] = "!infer",
ds_id: Union[None, str, Iterable[str]] = None,
from_logits: bool = False,
average_loss: bool = True,
form: Optional[str] = None,
class_weights: Optional[Dict[int, float]] = None):
super().__init__(inputs=inputs, outputs=outputs, mode=mode, ds_id=ds_id, average_loss=average_loss)
self.from_logits = from_logits
self.form = form
self.cross_entropy_fn = {
"binary": binary_crossentropy,
"categorical": categorical_crossentropy,
"sparse": sparse_categorical_crossentropy
}
if class_weights:
            assert isinstance(class_weights, dict), \
                "class_weights should be a dictionary or None, got {}".format(type(class_weights))
assert all(isinstance(key, int) for key in class_weights.keys()), \
"Please ensure that the keys of the class_weight dictionary are of type: int"
assert all(isinstance(value, float) for value in class_weights.values()), \
"Please ensure that the values of the class_weight dictionary are of type: float"
self.class_weights = class_weights
self.class_dict = None
def build(self, framework: str, device: Optional[torch.device] = None) -> None:
if self.class_weights:
if framework == 'tf':
keys_tensor = tf.constant(list(self.class_weights.keys()))
vals_tensor = tf.constant(list(self.class_weights.values()))
self.class_dict = tf.lookup.StaticHashTable(
tf.lookup.KeyValueTensorInitializer(keys_tensor, vals_tensor), default_value=1.0)
elif framework == 'torch':
self.class_dict = self.class_weights
else:
raise ValueError("unrecognized framework: {}".format(framework))
def forward(self, data: List[Tensor], state: Dict[str, Any]) -> Tensor:
y_pred, y_true = data
form = self.form
if form is None:
if len(y_pred.shape) == 2 and y_pred.shape[-1] > 1:
if len(y_true.shape) == 2 and y_true.shape[-1] > 1:
form = "categorical"
else:
form = "sparse"
else:
form = "binary"
loss = self.cross_entropy_fn[form](y_pred,
y_true,
from_logits=self.from_logits,
average_loss=self.average_loss,
class_weights=self.class_dict)
return loss
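# Hypothetical usage sketch (not part of the original file): wiring CrossEntropy
# into a fe.Network next to a ModelOp/UpdateOp pair. The keys "x", "y" and the
# `model` object are placeholders.
#
# import fastestimator as fe
# from fastestimator.op.tensorop.model import ModelOp, UpdateOp
#
# network = fe.Network(ops=[
#     ModelOp(model=model, inputs="x", outputs="y_pred"),
#     CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
#     UpdateOp(model=model, loss_name="ce"),
# ])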
|
from __future__ import print_function
from astropy.convolution import Gaussian2DKernel, convolve, convolve_fft
from astropy.io import fits
import numpy as np
# FWHM/2 = equivalent radius = (a*b)**0.5 -> the radius corresponding to the same
# area as an ellipse with semi-major axis a and semi-minor axis b.
# FWHM -> standard deviation: FWHM = 2*sqrt(2*log(2))*stddev = 2.35482*stddev
a_beam = 0.19 / 2 #in arcsecs
b_beam = 0.19 / 2
sizeau = 500. #Size along each (x,y) direction
npix = 256. #Number of pixels along each (x,y) direction
dpc = 1000. #Distance in parsecs
Resolucion = (sizeau / npix) / dpc #in arcsecs
a_pix = a_beam / Resolucion #in pxls
b_pix = b_beam / Resolucion
R_gauss = np.sqrt(a_pix * b_pix)
print('Rgauss:', R_gauss)
HWHM = R_gauss
FWHM = 2 * HWHM
stddev = FWHM / 2.35482
gaussian_2D_kernel = Gaussian2DKernel(stddev, stddev)  # astropy requires stddev, not FWHM
areaBeamPix = 1.442 * np.pi * HWHM**2
# 2*np.log(2) = 1.386 for a Gaussian function
# 1.442 for a Bessel function
#----------------
#READING PROCESS
#----------------
data_cont, header_cont = fits.getdata('img_cont.fits', header = True)
ch = 0
print ("Convolving continuum image...")
result_cont = areaBeamPix*convolve(data_cont.squeeze(),gaussian_2D_kernel)
semiaxis_deg = 2*HWHM*Resolucion/3600
header_cont['BUNIT'] = 'Jy/beam'
header_cont['BPA'] = 0
header_cont['BMIN'] = semiaxis_deg  # Minor axis in degrees
header_cont['BMAJ'] = semiaxis_deg  # Major axis in degrees
fits.writeto('img_cont.fits-CONV.fits',result_cont,header_cont,overwrite=True)
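# Illustrative sanity check (not in the original script): the 2.35482 factor
# used above is 2*sqrt(2*ln(2)), the standard FWHM-to-stddev conversion.
# assert np.isclose(2 * np.sqrt(2 * np.log(2)), 2.35482, atol=1e-5)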
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from onmt.modules.relative_attention import RelPartialLearnableMultiHeadAttn
from onmt.models.transformer_layers import PositionalEncoding, PrePostProcessing
from onmt.models.transformer_layers import EncoderLayer, DecoderLayer
from onmt.models.transformers import TransformerEncoder, TransformerDecoder, TransformerDecodingState
import onmt
from onmt.modules.bottle import Bottle
from onmt.modules.dropout import embedded_dropout
from onmt.models.transformer_layers import XavierLinear, MultiHeadAttention, FeedForward
from onmt.models.relative_transformer_layers import RelativeTransformerEncoderLayer, RelativeTransformerDecoderLayer
from onmt.models.unified_transformer import UnifiedTransformer
from onmt.models.relative_transformer import SinusoidalPositionalEmbedding, StreamState, \
StreamDecodingState, RelativeTransformerDecoder
from onmt.utils import flip, expected_length
from collections import defaultdict
import math
class TransformerXLDecoderLayer(nn.Module):
def __init__(self, h, d_model, p, d_ff, attn_p=0.1, version=1.0, ignore_source=False,
variational=False, death_rate=0.0):
super(TransformerXLDecoderLayer, self).__init__()
self.version = version
self.ignore_source = ignore_source
self.variational = variational
self.death_rate = death_rate
self.preprocess_attn = PrePostProcessing(d_model, p, sequence='n')
self.postprocess_attn = PrePostProcessing(d_model, p, sequence='da', variational=self.variational)
self.preprocess_ffn = PrePostProcessing(d_model, p, sequence='n')
self.postprocess_ffn = PrePostProcessing(d_model, p, sequence='da', variational=self.variational)
d_head = d_model // h
self.multihead_tgt = RelPartialLearnableMultiHeadAttn(h, d_model, d_head, dropatt=attn_p)
if onmt.constants.activation_layer == 'linear_relu_linear':
ff_p = p
feedforward = FeedForward(d_model, d_ff, ff_p, variational=self.variational)
        elif onmt.constants.activation_layer == 'maxout':
            k = int(math.ceil(d_ff / d_model))
            # NOTE: MaxOut is not imported in this file; import it before using this branch
            feedforward = MaxOut(d_model, d_model, k)
        elif onmt.constants.activation_layer == 'linear_swish_linear':
            ff_p = p
            # NOTE: FeedForwardSwish is likewise not imported in this file
            feedforward = FeedForwardSwish(d_model, d_ff, ff_p)
else:
raise NotImplementedError
self.feedforward = Bottle(feedforward)
def forward(self, input_, context, pos_emb, mask_tgt, mask_src, mems=None,
incremental=False, incremental_cache=None):
""" Self attention layer with memory
layernorm > attn > dropout > residual
"""
        assert context is None, "This model does not have a context encoder"
coin = True
if self.training and self.death_rate > 0:
coin = (torch.rand(1)[0].item() >= self.death_rate)
if coin:
# input and context should be time first ?
query = self.preprocess_attn(input_)
if mems is not None and mems.size(0) > 0:
mems = self.preprocess_attn(mems)
else:
mems = None
# out, _ = self.multihead_tgt(query, pos_emb, r_w_bias, r_r_bias, attn_mask=mask_tgt)
out, _, incremental_cache = self.multihead_tgt(query, pos_emb, attn_mask=mask_tgt, mems=mems,
incremental=incremental, incremental_cache=incremental_cache)
# rescaling before residual
if self.training and self.death_rate > 0:
out = out / (1 - self.death_rate)
input_ = self.postprocess_attn(out, input_)
""" Context Attention layer
layernorm > attn > dropout > residual
"""
coverage = None
""" Feed forward layer
layernorm > ffn > dropout > residual
"""
out = self.feedforward(self.preprocess_ffn(input_))
# rescaling before residual
if self.training and self.death_rate > 0:
out = out / (1 - self.death_rate)
input_ = self.postprocess_ffn(out, input_)
else:
coverage = None
if incremental:
return input_, coverage, incremental_cache
return input_, coverage
class TransformerXL(RelativeTransformerDecoder):
    """
    A decoder-only Transformer-XL language model with relative attention and
    segment-level memory. There is no source encoder: ignore_source is always True.
    """
def __init__(self, opt, tgt_embedding, generator,
language_embeddings=None, **kwargs):
# self.tgt_embedding = tgt_embedding
self.model_size = opt.model_size
# build_modules will be called from the inherited constructor
super().__init__(opt, tgt_embedding,
None,
language_embeddings=language_embeddings,
ignore_source=True)
self.tgt_embedding = tgt_embedding
self.generator = generator
self.ignore_source = True
self.same_length = False
self.clamp_len = 0
self.d_head = self.model_size // self.n_heads
def build_modules(self):
e_length = expected_length(self.layers, self.death_rate)
print("* Transformer LM Decoder with Relative Attention with %.2f expected layers" % e_length)
self.layer_modules = nn.ModuleList()
for l in range(self.layers):
# linearly decay the death rate
death_r = (l + 1.0) / self.layers * self.death_rate
block = TransformerXLDecoderLayer(self.n_heads, self.model_size,
self.dropout, self.inner_size, self.attn_dropout,
ignore_source=True,
variational=self.variational_dropout, death_rate=death_r)
self.layer_modules.append(block)
def reset_states(self):
return
def tie_weights(self):
self.generator[0].linear.weight = self.tgt_embedding.weight
def forward(self, batch, target_mask=None, streaming=False, **kwargs):
tgt = batch.get('target_input')
tgt_lang = batch.get('target_lang')
if streaming:
streaming_state = kwargs.get('streaming_state', None)
mems = streaming_state.tgt_mems
else:
mems = None
qlen = tgt.size(0)
word_emb = embedded_dropout(self.tgt_embedding, tgt, dropout=self.word_dropout if self.training else 0)
word_emb.mul_(self.model_size ** 0.5)
if self.use_language_embedding:
lang_emb = self.language_embeddings(tgt_lang) # B x H
if self.language_embedding_type in ['sum', 'all_sum']:
word_emb = word_emb + lang_emb
else:
raise NotImplementedError
mlen = mems[0].size(0) if mems is not None else 0
# total length: memory + current input
klen = mlen + qlen
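        # The masks below let position i of the current segment attend to all
        # `mlen` cached memory slots plus positions <= i of the current input.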
# all units having the same attention range
if self.same_length:
all_ones = word_emb.new_ones(qlen, klen)
mask_len = klen - self.mem_len
if mask_len > 0:
mask_shift_len = qlen - mask_len
else:
mask_shift_len = qlen
dec_attn_mask = (torch.triu(all_ones, 1 + mlen)
+ torch.tril(all_ones, -mask_shift_len)).byte()[:, :, None] # -1
else:
dec_attn_mask = torch.triu(
word_emb.new_ones(qlen, klen), diagonal=1 + mlen).byte()[:, :, None]
dec_attn_mask = dec_attn_mask.bool()
pos = torch.arange(klen - 1, -1, -1.0, device=word_emb.device,
dtype=word_emb.dtype)
if self.clamp_len > 0:
            pos.clamp_(max=self.clamp_len)
pos_emb = self.positional_encoder(pos)
# Applying dropout
output = self.preprocess_layer(word_emb)
if streaming:
hids = [output]
pos_emb = self.preprocess_layer(pos_emb)
# FORWARD PASS
coverage = None
for i, layer in enumerate(self.layer_modules):
mems_i = None if mems is None else mems[i]
output, coverage = layer(output, None, pos_emb, dec_attn_mask, None,
mems=mems_i) # context and context_mask are None
if streaming:
hids.append(output)
# Final normalization
output = self.postprocess_layer(output)
output_dict = {'hidden': output, 'coverage': coverage, 'context': None, 'src': None,
'target_mask': target_mask}
output_dict = defaultdict(lambda: None, output_dict)
# final layer: computing log probabilities
logprobs = self.generator[0](output_dict)
output_dict['logprobs'] = logprobs
if streaming:
streaming_state.update_tgt_mems(hids, qlen)
output_dict['streaming_state'] = streaming_state
return output_dict
def init_stream(self):
param = next(self.parameters())
layers = self.layers
streaming_state = StreamState(layers, self.max_memory_size, param.device, param.dtype)
return streaming_state
    # make a simple sampling sequence from some input
    def sample(self, input):
        # placeholder: sampling is not implemented for this model
        return
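# Hypothetical streaming-decoding sketch (illustrative; follows the batch
# interface used in forward() above; the tensors are placeholders):
#
# batch = {'target_input': tgt_tokens, 'target_lang': tgt_lang}
# state = model.init_stream()                      # fresh Transformer-XL memory
# out = model.forward(batch, streaming=True, streaming_state=state)
# logprobs, state = out['logprobs'], out['streaming_state']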
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class IotHubProperties(Model):
"""The properties of an IoT hub.
Variables are only populated by the server, and will be ignored when
sending a request.
:param authorization_policies: The shared access policies you can use to
secure a connection to the IoT hub.
:type authorization_policies:
list[~azure.mgmt.iothub.models.SharedAccessSignatureAuthorizationRule]
:param ip_filter_rules: The IP filter rules.
:type ip_filter_rules: list[~azure.mgmt.iothub.models.IpFilterRule]
:ivar provisioning_state: The provisioning state.
:vartype provisioning_state: str
    :ivar state: The IoT hub state.
:vartype state: str
:ivar host_name: The name of the host.
:vartype host_name: str
:param event_hub_endpoints: The Event Hub-compatible endpoint properties.
The possible keys to this dictionary are events and
operationsMonitoringEvents. Both of these keys have to be present in the
dictionary while making create or update calls for the IoT hub.
:type event_hub_endpoints: dict[str,
~azure.mgmt.iothub.models.EventHubProperties]
:param routing:
:type routing: ~azure.mgmt.iothub.models.RoutingProperties
:param storage_endpoints: The list of Azure Storage endpoints where you
can upload files. Currently you can configure only one Azure Storage
account and that MUST have its key as $default. Specifying more than one
storage account causes an error to be thrown. Not specifying a value for
this property when the enableFileUploadNotifications property is set to
True, causes an error to be thrown.
:type storage_endpoints: dict[str,
~azure.mgmt.iothub.models.StorageEndpointProperties]
:param messaging_endpoints: The messaging endpoint properties for the file
upload notification queue.
:type messaging_endpoints: dict[str,
~azure.mgmt.iothub.models.MessagingEndpointProperties]
:param enable_file_upload_notifications: If True, file upload
notifications are enabled.
:type enable_file_upload_notifications: bool
:param cloud_to_device:
:type cloud_to_device: ~azure.mgmt.iothub.models.CloudToDeviceProperties
:param comments: IoT hub comments.
:type comments: str
:param operations_monitoring_properties:
:type operations_monitoring_properties:
~azure.mgmt.iothub.models.OperationsMonitoringProperties
:param features: The capabilities and features enabled for the IoT hub.
Possible values include: 'None', 'DeviceManagement'
:type features: str or ~azure.mgmt.iothub.models.Capabilities
"""
_validation = {
'provisioning_state': {'readonly': True},
'state': {'readonly': True},
'host_name': {'readonly': True},
}
_attribute_map = {
'authorization_policies': {'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'},
'ip_filter_rules': {'key': 'ipFilterRules', 'type': '[IpFilterRule]'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'host_name': {'key': 'hostName', 'type': 'str'},
'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'},
'routing': {'key': 'routing', 'type': 'RoutingProperties'},
'storage_endpoints': {'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'},
'messaging_endpoints': {'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'},
'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type': 'bool'},
'cloud_to_device': {'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'},
'comments': {'key': 'comments', 'type': 'str'},
'operations_monitoring_properties': {'key': 'operationsMonitoringProperties', 'type': 'OperationsMonitoringProperties'},
'features': {'key': 'features', 'type': 'str'},
}
def __init__(self, *, authorization_policies=None, ip_filter_rules=None, event_hub_endpoints=None, routing=None, storage_endpoints=None, messaging_endpoints=None, enable_file_upload_notifications: bool=None, cloud_to_device=None, comments: str=None, operations_monitoring_properties=None, features=None, **kwargs) -> None:
super(IotHubProperties, self).__init__(**kwargs)
self.authorization_policies = authorization_policies
self.ip_filter_rules = ip_filter_rules
self.provisioning_state = None
self.state = None
self.host_name = None
self.event_hub_endpoints = event_hub_endpoints
self.routing = routing
self.storage_endpoints = storage_endpoints
self.messaging_endpoints = messaging_endpoints
self.enable_file_upload_notifications = enable_file_upload_notifications
self.cloud_to_device = cloud_to_device
self.comments = comments
self.operations_monitoring_properties = operations_monitoring_properties
self.features = features
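# Hypothetical instantiation sketch (illustrative only). Read-only attributes
# (provisioning_state, state, host_name) are populated by the service and stay
# None on the client:
#
# props = IotHubProperties(
#     enable_file_upload_notifications=True,
#     comments='example hub',
# )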
|