# ===== fxdemolisher/frano :: frano/main/models.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
import random
import string
from datetime import datetime
from django.db import models
#-------------\
# CONSTANTS |
#-------------/
TOKEN_LETTERS = string.digits + string.uppercase + string.lowercase
#----------\
# MODELS |
#----------/
class User(models.Model):
open_id = models.CharField(max_length = 255, unique = True)
email = models.CharField(max_length = 255, unique = True, null = True)
create_date = models.DateTimeField()
class Meta:
db_table = 'user'
def __unicode__(self):
return "%s - %s" % (self.email, self.open_id)
class Portfolio(models.Model):
user = models.ForeignKey(User)
name = models.CharField(max_length = 30)
read_only_token = models.CharField(max_length = 20, unique = True)
create_date = models.DateTimeField()
class Meta:
db_table = 'portfolio'
def __unicode__(self):
return "%s" % (self.name)
#------------\
# SERVICES |
#------------/
def create_user(open_id, email):
"""Create and save a new user with the given open ID and email address."""
user = User()
user.open_id = open_id
user.email = email
user.create_date = datetime.now()
user.save()
return user
def create_portfolio(user, name):
"""Create and save a new portfolio for the given user and with the given name."""
read_only_token = ''
for i in range(10):
read_only_token += random.choice(TOKEN_LETTERS)
portfolio = Portfolio()
portfolio.user = user
portfolio.name = name
portfolio.read_only_token = read_only_token
portfolio.create_date = datetime.now()
portfolio.save()
return portfolio
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
# ===== fxdemolisher/frano :: frano/main/decorators.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from django.shortcuts import redirect
from models import Portfolio
from models import User
#-------------\
# CONSTANTS |
#-------------/
#---------------------\
# EXPOSED FUNCTIONS |
#---------------------/
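# Guards views that manipulate a specific portfolio: loads the portfolio from the
# URL's portfolio_id and only invokes the view if it is the session's sample
# portfolio, belongs to the logged-in user, or the request is explicitly read-only.
# Any other access is redirected to the index page.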
def portfolio_manipulation_decorator(view_function):
def view_function_decorated(request, portfolio_id, read_only = False, **args):
portfolio = Portfolio.objects.filter(id = int(portfolio_id))[0]
sample_portfolio_id = request.session.get('sample_portfolio_id')
user_id = request.session.get('user_id')
is_sample = (portfolio.id == sample_portfolio_id)
if is_sample or portfolio.user.id == user_id or read_only:
return view_function(request, portfolio = portfolio, is_sample = is_sample, read_only = read_only, **args)
return redirect("/index.html")
return view_function_decorated
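# Requires a logged-in user: redirects to the index page when the session has no
# user_id, otherwise loads the User and passes it to the view as 'user'.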
def login_required_decorator(view_function):
def view_function_decorated(request, **args):
user_id = request.session.get('user_id')
if user_id == None:
return redirect("/index.html")
else:
user = User.objects.filter(id = user_id)[0]
args['user'] = user
return view_function(request, **args)
return view_function_decorated
def read_only_decorator(view_function):
def view_function_decorated(request, read_only_token, **args):
portfolio = Portfolio.objects.filter(read_only_token__exact = read_only_token)[0]
return view_function(request, portfolio = portfolio, **args)
return view_function_decorated
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
# ===== fxdemolisher/frano :: frano/main/demo.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from datetime import datetime
from datetime import timedelta
from math import ceil
from math import floor
from random import choice
from random import randint
from random import random
from random import sample
from quotes.models import CASH_SYMBOL
from quotes.models import quotes_by_symbols
from transactions.models import Transaction
#-------------\
# CONSTANTS |
#-------------/
# set of instruments a demo portfolio will choose from
DEMO_INSTRUMENTS = [
'AA', # Alcoa
'AAPL', # Apple
'ACWI', # MSCI ACWI
'AGG', # Barclays Aggregate Bond Fund
'BND', # Vanguard Total Bond Market
'DBC', # PowerShares DB Commodity Index
'DBO', # PowerShares DB Oil Fund
'DIA', # Dow Jones Industrial Average
'EEM', # MSCI Emerging Markets
'EFA', # MSCI EAFE
'EMB', # JP Morgan USD Emerging Markets
'FFNOX', # Fidelity Four-in-One
'GE', # General Electric
'GLD', # GLD Trust
'GOOG', # Google
'IJH', # S&P MidCap 400
'INTC', # Intel
'IWM', # Russell 2000
'IWV', # Russell 3000
'IYR', # Dow Jones US Real Estate
'MSFT', # Microsoft
'QQQ', # PowerShares QQQ (Nasdaq)
'SCZ', # MSCI EAFE Small Cap
'SLV', # Silver Trust
'SPY', # S&P 500
'TIP', # Barclays TIPS Bond fund
'XOM', # Exxon Mobil
'YACKX', # Yacktman Fund
]
# demo portfolio commissions, chosen at random once per portfolio
DEMO_COMMISSIONS = [ 4.5, 7.99, 9.99 ]
# Min/max number of instruments in a demo portfolio
DEMO_MIN_INSTRUMENTS = 4
DEMO_MAX_INSTRUMENTS = 8
# Min/max investment in a demo portfolio
DEMO_MIN_INVESTMENT = 10000
DEMO_MAX_INVESTMENT = 100000
# Number of demo portfolios to generate
DEMO_PORTFOLIOS = 10
# Number of days to go back in history to make the demo portfolios
DEMO_DAYS_CUTOFF = 365
# Approximate number of transactions to have in a demo portfolio
DEMO_TARGET_TRANSACTIONS = 40
# Approximate number of deposits in a demo portfolio
DEMO_TARGET_DEPOSITS = 5
# Ratio of buys vs. sells
DEMO_BUY_SELL_RATIO = 4
# List of lists of demo transactions
DEMO = []
#------------\
# SERVICES |
#------------/
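# Lazily generates DEMO_PORTFOLIOS random demo portfolios on first use (cached in
# the module-level DEMO list) and returns the transaction list of one at random.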
def get_demo_transactions():
global DEMO
if len(DEMO) == 0:
print '==============GEN==============='
DEMO = _generate_demo_portfolios(DEMO_PORTFOLIOS)
return choice(DEMO)
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
def _generate_demo_portfolios(count):
out = []
for i in range(count):
instruments = sample(DEMO_INSTRUMENTS, randint(DEMO_MIN_INSTRUMENTS, DEMO_MAX_INSTRUMENTS))
total_investment = randint(DEMO_MIN_INVESTMENT, DEMO_MAX_INVESTMENT)
commission = choice(DEMO_COMMISSIONS)
out.append(_generate_random_transactions(instruments,
total_investment,
commission)
)
return out
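# Simulates a plausible transaction history: walks every trading day since the
# cutoff and, with the configured probabilities, either deposits part of the
# remaining un-deposited cash, buys a random instrument with available cash, or
# sells part of an existing holding. Returns unsaved Transaction objects.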
def _generate_random_transactions(instruments, total_amount, commission):
# Load historic prices
quotes = quotes_by_symbols(instruments)
cutoff_date = datetime.now().date() - timedelta(days = DEMO_DAYS_CUTOFF)
prices = dict([ (quote.symbol, {}) for quote in quotes ])
dates = set([])
quote_map = { }
for quote in quotes:
quote_map[quote.symbol] = quote
for history in quote.pricehistory_set.filter(as_of_date__gte = cutoff_date).order_by('as_of_date'):
cur_date = history.as_of_date.date()
prices.get(quote.symbol)[cur_date] = history.price
dates.add(cur_date)
# portfolio probabilities
transaction_probability = DEMO_TARGET_TRANSACTIONS / float(len(dates))
deposit_probability = DEMO_TARGET_DEPOSITS / float(DEMO_TARGET_TRANSACTIONS)
buy_sell_probability = DEMO_BUY_SELL_RATIO / float(DEMO_BUY_SELL_RATIO + 1)
# generate transactions
transactions = []
quantities = dict([ (symbol, 0.0) for symbol in instruments ])
undeposited_cash = total_amount
cash = 0
for date in sorted(dates):
sell_candidates = [ q[0] for q in quantities.items() if q[1] > 0 ]
# see if there is a transaction today or if we are just starting out
if random() <= transaction_probability or len(transactions) == 0:
# deposits
if undeposited_cash > 1 and random() <= deposit_probability:
deposit = min([ undeposited_cash, round(undeposited_cash * (randint(10, 100) / 100.0), -2), total_amount * 0.5 ])
undeposited_cash -= deposit
cash += deposit
transactions.append(Transaction(type = 'DEPOSIT',
as_of_date = date,
symbol = CASH_SYMBOL,
quantity = deposit,
price = 1.0,
total = deposit,
))
# buys - if we have any cash
elif random() <= buy_sell_probability:
amount = min([ cash, round(cash * (randint(20, 100) / 100.0)), total_amount * 0.1 ])
symbol = choice(instruments)
price = (prices.get(symbol).get(date) if not quote_map.get(symbol).cash_equivalent else 1.0)
quantity = floor((amount - commission) / price)
if quantity > 0:
total = (quantity * price) + commission
cash -= total
quantities[symbol] = quantities.get(symbol) + quantity
transactions.append(Transaction(type = 'BUY',
as_of_date = date,
symbol = symbol,
quantity = quantity,
price = price,
total = total,
))
# sells - if there is anything to sell
elif len(sell_candidates) > 0:
symbol = choice(sell_candidates)
price = (prices.get(symbol).get(date) if not quote_map.get(symbol).cash_equivalent else 1.0)
available_quantity = quantities.get(symbol)
quantity = min(available_quantity, round(available_quantity * (randint(20, 100) / 100.0)))
if quantity > 0:
total = (quantity * price) - commission
cash += total
quantities[symbol] = quantities.get(symbol) - quantity
transactions.append(Transaction(type = 'SELL',
as_of_date = date,
symbol = symbol,
quantity = quantity,
price = price,
total = total,
))
return transactions
# ===== fxdemolisher/frano :: frano/positions/models.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from bisect import insort
from datetime import datetime
from django.db import models
from main.models import Portfolio
from quotes.models import CASH_SYMBOL
from transactions.models import Transaction
#-------------\
# CONSTANTS |
#-------------/
QUANTITY_TOLERANCE = 0.000001
#----------\
# MODELS |
#----------/
class Position(models.Model):
portfolio = models.ForeignKey(Portfolio)
as_of_date = models.DateField()
symbol = models.CharField(max_length = 10)
quantity = models.FloatField()
cost_price = models.FloatField()
cost_basis = models.FloatField()
realized_pl = models.FloatField()
class Meta:
db_table = 'position'
def __unicode__(self):
return "%.2f of %s on %s @ %.2f" % (self.quantity, self.symbol, self.as_of_date.strftime('%m/%d/%Y'), self.cost_price)
class Lot(models.Model):
position = models.ForeignKey(Position)
as_of_date = models.DateField(null = True)
quantity = models.FloatField()
price = models.FloatField()
total = models.FloatField()
sold_as_of_date = models.DateField(null = True)
sold_quantity = models.FloatField()
sold_price = models.FloatField()
sold_total = models.FloatField()
class Meta:
db_table = 'lot'
def __unicode__(self):
return "Lot: Bought %.4f @ %.4f on %s (%.4f), Sold %.4f @ %.4f on %s (%.4f)" % (
self.quantity,
self.price,
self.as_of_date.strftime('%m/%d/%Y') if self.as_of_date != None else None,
self.total,
self.sold_quantity,
self.sold_price,
self.sold_as_of_date.strftime('%m/%d/%Y') if self.sold_as_of_date != None else None,
self.sold_total
)
def __cmp__(self, other):
my_date = min([ date for date in [self.as_of_date, self.sold_as_of_date] if date is not None ])
other_date = min([ date for date in [other.as_of_date, other.sold_as_of_date] if date is not None ])
if my_date == other_date:
return 0
else:
return (-1 if my_date < other_date else 1)
#------------\
# SERVICES |
#------------/
def latest_positions(portfolio):
"""Retrieve a list of latest positions for the given portfolio."""
latest_date = Position.objects.filter(portfolio__id__exact = portfolio.id).dates('as_of_date', 'day', order = 'DESC')[0:1]
if latest_date.count() > 0:
return Position.objects.filter(portfolio__id__exact = portfolio.id, as_of_date = latest_date[0]).order_by('symbol')
else:
return []
def decorate_position_with_prices(position, price, previous_price):
"""Decorate the given position with various pieces of data that require pricing (p/l, market_value)"""
position.price = price
position.previous_price = previous_price
position.market_value = position.quantity * position.price
position.previous_market_value = position.quantity * position.previous_price
position.pl = (position.market_value - position.cost_basis)
position.pl_percent = (((position.pl / position.cost_basis) * 100) if position.cost_basis != 0 else 0)
position.day_pl = (position.market_value - position.previous_market_value)
position.day_pl_percent = (((position.day_pl / position.previous_market_value) * 100) if position.previous_market_value != 0 else 0)
def refresh_positions(portfolio, transactions = None, force = False):
"""Refresh all positions in the given portfolio if needed or if the caller requests a forced refresh"""
if transactions == None:
transactions = Transaction.objects.filter(portfolio__id__exact = portfolio.id)
positions = Position.objects.filter(portfolio__id__exact = portfolio.id)
if (transactions.count() > 0 and positions.count() == 0) or force:
positions.delete()
_refresh_positions_from_transactions(transactions)
#-----------------\
# VALUE OBJECTS |
#-----------------/
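# Matches BUY and SELL transactions into lots. Buys open "long" half lots and
# sells open "short" half lots; a transaction on the opposite side closes
# existing half lots FIFO (fully or partially) into Lot records, with fees
# allocated pro-rata to the quantity being closed.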
class LotBuilder:
def __init__(self):
self.long_half_lots = []
self.short_half_lots = []
self.closed_lots = []
def __repr__(self):
return "Open (Long):\n %s\n\nOpen (Short):\n %s\n\nClosed:\n %s" % (self.long_lots, self.short_lots, self.closed_lots)
def add_transaction(self, transaction):
fees = transaction.total - (transaction.quantity * transaction.price)
quantity = transaction.quantity
poll = self.short_half_lots
push = self.long_half_lots
if transaction.type == 'SELL':
push = poll
poll = self.long_half_lots
while quantity > 0 and len(poll) > 0:
half_lot = poll.pop(0)
if (quantity - half_lot.quantity) > -QUANTITY_TOLERANCE:
partial_fees = fees * (half_lot.quantity / quantity)
fees = fees - partial_fees
quantity -= half_lot.quantity
insort(self.closed_lots, half_lot.close(transaction.as_of_date, transaction.price, partial_fees))
else:
closed = half_lot.partial_close(transaction.as_of_date, quantity, transaction.price, fees)
poll.insert(0, half_lot)
insort(self.closed_lots, closed)
quantity = 0
fees = 0
if quantity > QUANTITY_TOLERANCE:
push.append(HalfLot(type = transaction.type,
as_of_date = transaction.as_of_date,
quantity = quantity,
price = transaction.price,
total = (quantity * transaction.price) + fees
))
return self
def get_lots(self):
out = []
for lot in self.closed_lots:
insort(out, _clone_lot(lot))
for half_lot in (self.long_half_lots + self.short_half_lots):
insort(out, half_lot.to_lot(None, 0.0, 0.0, 0.0))
return out
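# The open (unmatched) side of a lot. close()/partial_close() pair it with the
# opposite side of a trade, and to_lot() maps a BUY half lot onto the bought
# fields of a Lot and a SELL half lot onto the sold fields.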
class HalfLot():
def __init__(self, type, as_of_date, quantity, price, total):
self.type = type
self.as_of_date = as_of_date
self.quantity = quantity
self.price = price
self.total = total
def __repr__(self):
return "%s: %.4f @ %.4f on %s (%.4f)" % (
self.type,
self.quantity,
self.price,
self.as_of_date.strftime('%m/%d/%Y'),
self.total,
)
def close(self, as_of_date, price, fees):
return self.to_lot(as_of_date, self.quantity, price, fees)
def partial_close(self, as_of_date, quantity, price, fees):
split_lot = HalfLot(type = self.type,
as_of_date = self.as_of_date,
quantity = quantity,
price = self.price,
total = None,
)
split_lot.total = self.total * (split_lot.quantity / self.quantity)
self.total -= split_lot.total
self.quantity -= quantity
return split_lot.close(as_of_date, price, fees)
def to_lot(self, as_of_date, quantity, price, fees):
lot = None
if self.type == 'BUY':
lot = Lot(as_of_date = self.as_of_date,
quantity = self.quantity,
price = self.price,
total = self.total,
sold_as_of_date = as_of_date,
sold_quantity = quantity,
sold_price = price,
sold_total = (quantity * price) + fees,
)
else:
lot = Lot(as_of_date = as_of_date,
quantity = quantity,
price = price,
total = (quantity * price) + fees,
sold_as_of_date = self.as_of_date,
sold_quantity= self.quantity,
sold_price = self.price,
sold_total = self.total,
)
return lot
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
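# Rebuilds positions from scratch: transactions are sorted by date (then id) and
# replayed day by day, tracking cash directly and running a LotBuilder per
# symbol. A position snapshot is stored for every transaction date, and the lots
# from the final day are saved against their positions.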
def _refresh_positions_from_transactions(transactions):
if len(transactions) == 0:
return
# presort and bucket transactions
transactions = sorted(transactions, key = (lambda transaction: transaction.id))
transactions = sorted(transactions, key = (lambda transaction: transaction.as_of_date))
dates = sorted(set([ t.as_of_date for t in transactions]))
transactions_by_date = dict([(date, []) for date in dates])
for transaction in transactions:
transactions_by_date.get(transaction.as_of_date).append(transaction)
# prepare trackers
builder_by_symbol = { }
cash = Position(portfolio = transactions[0].portfolio,
as_of_date = datetime.now().date(),
symbol = CASH_SYMBOL,
quantity = 0,
cost_price = 1.0,
cost_basis = 0.0,
realized_pl = 0.0
)
# go through days and build positions
lots = None
positions = []
for date in dates:
current_transactions = transactions_by_date.get(date)
# process transactions either through cash or lot builder
for transaction in current_transactions:
if transaction.type == 'DEPOSIT' or transaction.type == 'SELL':
cash.quantity += transaction.total
elif transaction.type == 'WITHDRAW' or transaction.type == 'BUY':
cash.quantity -= transaction.total
elif transaction.type == 'ADJUST':
cash.quantity += transaction.total
cash.realized_pl += transaction.total
if transaction.type == 'BUY' or transaction.type == 'SELL':
builder = builder_by_symbol.get(transaction.symbol, None)
if builder == None:
builder = LotBuilder()
builder_by_symbol[transaction.symbol] = builder
builder.add_transaction(transaction)
# add current cash to positions.
days_cash = _clone_position(cash, date)
days_cash.cost_basis = days_cash.quantity
positions.append(days_cash)
# compose current lots into a position.
lots = []
for symbol, builder in builder_by_symbol.items():
position = Position(portfolio = transactions[0].portfolio,
as_of_date = date,
symbol = symbol,
quantity = 0.0,
cost_price = 0.0,
cost_basis = 0.0,
realized_pl = 0.0,
)
for lot in builder.get_lots():
lot.position = position
lots.append(lot)
quantity = (lot.quantity - lot.sold_quantity)
if abs(quantity) < QUANTITY_TOLERANCE:
quantity = 0.0
if abs(quantity) > QUANTITY_TOLERANCE:
position.cost_basis += (lot.total - lot.sold_total)
total = (position.quantity * position.cost_price) + (quantity * lot.price)
position.quantity += quantity
position.cost_price = (total / position.quantity if quantity <> 0.0 else 0.0)
else:
position.realized_pl += lot.sold_total - lot.total
if abs(position.quantity) < QUANTITY_TOLERANCE:
position.quantity = 0.0
positions.append(position)
# save positions
for position in positions:
position.save()
# save latest lots
for lot in lots:
if abs(lot.quantity) < QUANTITY_TOLERANCE:
lot.quantity = 0.0
if abs(lot.sold_quantity) < QUANTITY_TOLERANCE:
lot.sold_quantity = 0.0
lot.position = lot.position # hack to reset position_id in lot
lot.save()
def _clone_lot(lot):
return Lot(as_of_date = lot.as_of_date,
quantity = lot.quantity,
price = lot.price,
total = lot.total,
sold_as_of_date = lot.sold_as_of_date,
sold_quantity = lot.sold_quantity,
sold_price = lot.sold_price,
sold_total = lot.sold_total
)
def _clone_position(position, new_as_of_date = None):
out = Position()
out.portfolio = position.portfolio
out.as_of_date = (position.as_of_date if new_as_of_date == None else new_as_of_date)
out.symbol = position.symbol
out.quantity = position.quantity
out.cost_price = position.cost_price
out.realized_pl = position.realized_pl
return out
# ===== fxdemolisher/frano :: frano/quotes/models.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
import json
from datetime import date
from datetime import datetime
from datetime import timedelta
from urllib import quote_plus
from urllib import urlopen
from django.db import models
#-------------\
# CONSTANTS |
#-------------/
CASH_SYMBOL = '*CASH'
PRICE_HISTORY_LIMIT_IN_DAYS = 365 * 10
#----------\
# MODELS |
#----------/
class Quote(models.Model):
symbol = models.CharField(max_length = 10, unique = True)
name = models.CharField(max_length = 255)
price = models.FloatField()
last_trade = models.DateTimeField()
cash_equivalent = models.BooleanField()
class Meta:
db_table = 'quote'
def __unicode__(self):
return '%s - %s' % (self.symbol, self.name)
class PriceHistory(models.Model):
quote = models.ForeignKey(Quote)
as_of_date = models.DateTimeField()
price = models.FloatField()
class Meta:
db_table = 'price_history'
unique_together = ( 'quote', 'as_of_date' )
def __unicode__(self):
return '%s @ %.2f on %s' % (self.quote.symbol, self.price, self.as_of_date.strftime('%Y-%m-%d'))
#------------\
# SERVICES |
#------------/
def price_as_of(quote, as_of):
"""Get the price for quote as of a specific date."""
if quote.cash_equivalent or quote.last_trade.date() == as_of:
return quote.price
else:
candidates = quote.pricehistory_set.filter(as_of_date__lte = as_of.strftime('%Y-%m-%d')).order_by('-as_of_date')[0:1]
return (candidates[0].price if candidates.count() > 0 else 0)
def previous_close_price(quote):
"""Get the previous close price for a quote."""
return price_as_of(quote, quote.last_trade.date() - timedelta(days = 1))
def quote_by_symbol(symbol):
"""Retrieve a quote by symbol."""
return quotes_by_symbols([ symbol ])[0]
def quotes_by_symbols(symbols, force_retrieve = False):
"""Retrieve a quotes by a list of symbols."""
# load or prime quotes for each symbol
existing_quotes = dict([ (q.symbol, q) for q in Quote.objects.filter(symbol__in = symbols) ])
quotes = { }
symbols_to_retrieve = []
for symbol in symbols:
quote = existing_quotes.get(symbol, None)
exists = True
if quote == None:
quote = Quote(symbol = symbol, last_trade = datetime.now())
exists = False
quotes[symbol] = quote
if symbol == CASH_SYMBOL and not exists:
quote.name = '<NAME>ollars'
quote.price = 1.0
quote.cash_equivalent = True
quote.changed = True
elif not exists or force_retrieve:
quote.price = 0.0
quote.changed = True
symbols_to_retrieve.append(symbol)
else:
quote.changed = False
# retrieve fresh prices from yahoo
if len(symbols_to_retrieve) > 0:
csv_url = ('http://download.finance.yahoo.com/d/quotes.csv?s=%s&f=sl1d1t1n&e=.csv' % (','.join(symbols_to_retrieve)))
csv_columns = 'symbol,price,date,time,name'
for row in _yql_csv_to_json(csv_url, csv_columns):
price = row['price']
tradeDate = row['date']
tradeTime = row['time']
quote = quotes.get(row['symbol'])
quote.cash_equivalent = price.endswith('%')
quote.price = (1.0 if quote.cash_equivalent else float(price))
quote.name = row['name']
if tradeDate != 'N/A' and tradeTime != 'N/A':
month, day, year = [int(f) for f in tradeDate.split('/')]
time = datetime.strptime(tradeTime, '%I:%M%p')
quote.last_trade = datetime(year, month, day, time.hour, time.minute, time.second)
# save all changes
for quote in quotes.values():
if quote.changed:
quote.save()
if not quote.cash_equivalent and quote.price > 0.0 and quote.pricehistory_set.count() == 0:
refresh_price_history(quote)
return quotes.values()
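# Pulls up to PRICE_HISTORY_LIMIT_IN_DAYS of daily adjusted closes for the quote
# from Yahoo's iChart CSV (via YQL), creating or updating PriceHistory rows and
# deleting local rows in that window that no longer appear in the feed.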
def refresh_price_history(quote):
start_date = quote.last_trade + timedelta(days = 0 - PRICE_HISTORY_LIMIT_IN_DAYS)
end_date = quote.last_trade + timedelta(days = 1)
csv_columns = 'date,open,high,low,close,volume,adj_close'
csv_url = ('http://ichart.finance.yahoo.com/table.csv?s=%s&a=%.2d&b=%.2d&c=%.4d&d=%.2d&e=%.2d&f=%.4d&g=d&ignore=.csv' % (
quote.symbol,
(start_date.month - 1),
start_date.day,
start_date.year,
(end_date.month - 1),
end_date.day,
end_date.year)
)
to_remove = { }
for history in quote.pricehistory_set.filter(as_of_date__gte = start_date.date()):
to_remove[history.as_of_date.date()] = history
to_save = []
for row in _yql_csv_to_json(csv_url, csv_columns, PRICE_HISTORY_LIMIT_IN_DAYS, 2):
as_of_date = datetime.strptime(row['date'], '%Y-%m-%d')
price = float(row['adj_close'])
history = to_remove.get(as_of_date.date())
if history == None:
history = PriceHistory()
history.quote = quote
history.as_of_date = as_of_date
history.price = price
else:
del(to_remove[as_of_date.date()])
if abs(history.price - price) > 0.0001:
history.price = price
else:
continue
to_save.append(history)
if len(to_remove) > 0:
ids = [ h.id for h in to_remove.values() ]
PriceHistory.objects.filter(id__in = ids).delete()
if len(to_save) > 0:
for history in to_save:
history.save()
quote.history_date = datetime.now()
quote.save()
return quote
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
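# Fetches a remote CSV through the public YQL endpoint ("select * from csv"),
# mapping each row onto the supplied column names and returning a list of dicts.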
def _yql_csv_to_json(csv_url, csv_columns, limit = None, offset = None):
u = None
try:
yql_suffix = ''
if limit != None and offset != None:
yql_suffix = yql_suffix + (' limit %d offset %d' % (limit, offset))
yql_query = ("select * from csv where url='%s' and columns='%s' %s" % (csv_url, csv_columns, yql_suffix))
u = urlopen('http://query.yahooapis.com/v1/public/yql?q=%s&format=json&callback=' % quote_plus(yql_query))
packet = json.loads(u.read())
out = [ ]
if packet.has_key('query'):
count = packet['query']['count']
if count == 1:
out.append(packet['query']['results']['row'])
elif count > 0:
out = packet['query']['results']['row']
return out
finally:
if u != None:
u.close()
# ===== fxdemolisher/frano :: frano/management/commands/refresh_quotes.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from sys import stdout
from django.core.management.base import BaseCommand
from frano.main.demo import DEMO_INSTRUMENTS
from frano.quotes.models import Quote
from frano.quotes.models import quotes_by_symbols
class Command(BaseCommand):
help = 'Refreshes all quotes from yahoo finance'
def handle(self, *args, **options):
symbols = set([ quote.symbol for quote in Quote.objects.all()] + DEMO_INSTRUMENTS)
stdout.write('Found %d quotes to refresh\nStarting...\n' % len(symbols))
quotes_by_symbols(symbols, True)
stdout.write('Successfully refreshed quotes\n')
# ===== fxdemolisher/frano :: frano/transactions/models.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
import codecs
import csv
from exceptions import Exception
from django.db import models
from parsers import FRANO_TRANSACTION_EXPORT_HEADER
from parsers import GOOGLE_TRANSACTION_EXPORT_HEADER
from parsers import AMERITRADE_TRANSACTION_EXPORT_HEADER
from parsers import ZECCO_TRANSACTION_EXPORT_HEADER
from parsers import SCOTTRADE_TRANSACTION_EXPORT_HEADER
from parsers import CHARLES_TRANSACTION_EXPORT_HEADER
from parsers import FIDELITY_TRANSACTION_EXPORT_HEADER
from parsers import MERCER_401_TRANSACTION_EXPORT_HEADER
from parsers import parse_frano_transactions
from parsers import parse_google_transactions
from parsers import parse_ameritrade_transactions
from parsers import parse_zecco_transactions
from parsers import parse_scottrade_transactions
from parsers import parse_charles_transactions
from parsers import parse_fidelity_transactions
from parsers import parse_mercer_401_transactions
from main.models import Portfolio
#-------------\
# CONSTANTS |
#-------------/
HEADER_TO_IMPORT_TYPE_MAP = {
",".join(FRANO_TRANSACTION_EXPORT_HEADER) : 'FRANO',
",".join(GOOGLE_TRANSACTION_EXPORT_HEADER) : 'GOOGLE',
",".join(AMERITRADE_TRANSACTION_EXPORT_HEADER) : 'AMERITRADE',
",".join(ZECCO_TRANSACTION_EXPORT_HEADER) : 'ZECCO',
",".join(SCOTTRADE_TRANSACTION_EXPORT_HEADER) : 'SCOTTRADE',
",".join([ ('"%s"' % v) for v in CHARLES_TRANSACTION_EXPORT_HEADER]) : 'CHARLES',
",".join(FIDELITY_TRANSACTION_EXPORT_HEADER) : 'FIDELITY',
",".join(MERCER_401_TRANSACTION_EXPORT_HEADER) : 'MERCER_401',
}
TRANSACTION_TYPES = (
('BUY', 'Buy'),
('SELL', 'Sell'),
('DEPOSIT', 'Deposit'),
('WITHDRAW', 'Withdraw'),
('ADJUST', 'Adjust'),
)
#----------\
# MODELS |
#----------/
class Transaction(models.Model):
portfolio = models.ForeignKey(Portfolio)
type = models.CharField(max_length = 10, choices = TRANSACTION_TYPES)
as_of_date = models.DateField()
symbol = models.CharField(max_length = 10)
quantity = models.FloatField()
price = models.FloatField()
total = models.FloatField()
linked_symbol = models.CharField(max_length = 10, null = True)
class Meta:
db_table = 'transaction'
ordering = [ '-as_of_date', 'symbol' ]
def __unicode__(self):
return "%.2f-%s @ %.2f on %s" % (self.quantity, self.symbol, self.price, self.as_of_date.strftime('%m/%d/%Y'))
#------------\
# SERVICES |
#------------/
def clone_transaction(transaction, new_portfolio = None):
"""Return a copy of the given transaction, optionally overriding which portfolio the transaction belongs to."""
out = Transaction()
out.portfolio = (new_portfolio if new_portfolio != None else transaction.portfolio)
out.type = transaction.type
out.as_of_date = transaction.as_of_date
out.symbol = transaction.symbol
out.quantity = transaction.quantity
out.price = transaction.price
out.total = transaction.total
out.linked_symbol = transaction.linked_symbol
return out
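# Writes the portfolio's transactions to the given file-like target as CSV,
# using the Frano export header, newest first.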
def transactions_as_csv(target, portfolio):
writer = csv.writer(target)
writer.writerow(FRANO_TRANSACTION_EXPORT_HEADER)
transactions = Transaction.objects.filter(portfolio__id__exact = portfolio.id).order_by('-as_of_date', '-id')
for transaction in transactions:
writer.writerow([transaction.as_of_date.strftime('%m/%d/%Y'), transaction.type, transaction.symbol, transaction.quantity, transaction.price, transaction.total, transaction.linked_symbol])
def parse_transactions(type, file):
# Strip any UTF byte-order mark at the front of the file and drop null bytes.
file = _null_byte_line_filter(codecs.iterdecode(file, 'utf_8_sig'))
parsed = None
if type == 'FRANO':
reader = csv.reader(file)
_verify_transaction_file_header(reader, FRANO_TRANSACTION_EXPORT_HEADER)
parsed = parse_frano_transactions(reader)
elif type == 'GOOGLE':
reader = csv.reader(codecs.iterdecode(file, 'utf_8_sig'))
_verify_transaction_file_header(reader, GOOGLE_TRANSACTION_EXPORT_HEADER)
parsed = parse_google_transactions(reader)
elif type == 'AMERITRADE':
reader = csv.reader(file)
_verify_transaction_file_header(reader, AMERITRADE_TRANSACTION_EXPORT_HEADER)
parsed = parse_ameritrade_transactions(reader)
elif type == 'ZECCO':
reader = csv.reader(file)
_verify_transaction_file_header(reader, ZECCO_TRANSACTION_EXPORT_HEADER)
parsed = parse_zecco_transactions(reader)
elif type == 'SCOTTRADE':
reader = csv.reader(_null_byte_line_filter(file))
_verify_transaction_file_header(reader, SCOTTRADE_TRANSACTION_EXPORT_HEADER)
parsed = parse_scottrade_transactions(reader)
elif type == 'CHARLES':
reader = csv.reader(_null_byte_line_filter(file))
reader.next() # skip the account title line that precedes the header row
_verify_transaction_file_header(reader, CHARLES_TRANSACTION_EXPORT_HEADER)
parsed = parse_charles_transactions(reader)
elif type == 'FIDELITY':
reader = csv.reader(file)
# fidelity leaves three blank lines on top of the file...go figure
for x in range(3):
reader.next()
_verify_transaction_file_header(reader, FIDELITY_TRANSACTION_EXPORT_HEADER)
parsed = parse_fidelity_transactions(reader)
elif type == 'MERCER_401':
reader = csv.reader(file)
_verify_transaction_file_header(reader, MERCER_401_TRANSACTION_EXPORT_HEADER)
parsed = parse_mercer_401_transactions(reader)
transactions = []
for row in parsed:
transaction = Transaction()
transaction.as_of_date = row['date']
transaction.type = row['type']
transaction.symbol = row['symbol'].upper()
transaction.quantity = row['quantity']
transaction.price = row['price']
transaction.total = row['total']
transaction.linked_symbol = row.get('linked_symbol', None)
transactions.append(transaction)
return transactions
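# Detects the import type by reading the first meaningful line of the file
# (skipping blank lines and a leading '"Transactions for account' title line)
# and matching it against HEADER_TO_IMPORT_TYPE_MAP.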
def detect_transaction_file_type(file):
# Strip any UTF byte-order mark at the front of the file and drop null bytes.
file = _null_byte_line_filter(codecs.iterdecode(file, 'utf_8_sig'))
first_line = None
for line in file:
first_line = line
if first_line != None and not first_line.startswith('"Transactions for account') and len(first_line.strip()) != 0:
break
return (HEADER_TO_IMPORT_TYPE_MAP.get(first_line.strip(), None) if first_line != None else None)
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
def _null_byte_line_filter(stream):
for line in stream:
yield line.replace('\x00', '')
def _verify_transaction_file_header(reader, required_header):
header = reader.next()
if len(header) != len(required_header):
raise Exception('Header mismatch for transaction file')
for i in range(len(required_header)):
if header[i] != required_header[i]:
raise Exception("Header mismatch at %d: %s <> %s" % (i, header[i], required_header[i]))
# ===== fxdemolisher/frano :: frano/management/commands/cleanup_sample_portfolios.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from datetime import datetime, timedelta
from sys import stdout
from django.core.management.base import BaseCommand
from frano.main.models import Portfolio
from frano.main.models import User
from frano.main.view_utils import get_demo_user
class Command(BaseCommand):
help = 'Cleanup sample portfolios older than two weeks'
def handle(self, *args, **options):
user = get_demo_user()
cutoff_date = datetime.now() - timedelta(weeks = 2)
portfolios = Portfolio.objects.filter(user__id__exact = user.id, create_date__lte = cutoff_date)
stdout.write('Found %d sample portfolios to clean up\n' % portfolios.count())
portfolios.delete()
stdout.write('Successfully removed old sample portfolios\n')
# ===== fxdemolisher/frano :: frano/positions/views.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from datetime import datetime
from datetime import timedelta
from django.db import connection
from django.db import transaction
from main.decorators import portfolio_manipulation_decorator
from main.decorators import read_only_decorator
from main.models import Portfolio
from main.view_utils import render_page
from models import decorate_position_with_prices
from models import latest_positions
from models import refresh_positions
from quotes.models import CASH_SYMBOL
from quotes.models import quote_by_symbol
from quotes.models import quotes_by_symbols
from quotes.models import previous_close_price
from transactions.models import Transaction
#-------------\
# CONSTANTS |
#-------------/
DAYS_IN_PERFORMANCE_HISTORY = 90
PERFORMANCE_BENCHMARK_SYMBOL = 'ACWI'
#---------\
# VIEWS |
#---------/
@portfolio_manipulation_decorator
def positions(request, portfolio, is_sample, read_only):
transactions = Transaction.objects.filter(portfolio__id__exact = portfolio.id).order_by('-as_of_date', '-id')
refresh_positions(portfolio, transactions)
positions = latest_positions(portfolio)
_decorate_positions_for_display(positions, request.GET.get("showClosedPositions", False))
_decorate_positions_with_lots(positions)
summary = _get_summary(positions, transactions)
performance_history = _get_performance_history(portfolio, DAYS_IN_PERFORMANCE_HISTORY)
context = {
'read_only' : read_only,
'is_sample' : is_sample,
'positions': positions,
'summary' : summary,
'current_tab' : 'positions',
'performance_history' : performance_history,
'benchmark_symbol' : PERFORMANCE_BENCHMARK_SYMBOL,
}
return render_page('positions.html', request, portfolio = portfolio, extra_dictionary = context)
@read_only_decorator
def read_only_positions(request, portfolio):
return positions(request, portfolio.id, read_only = True)
@portfolio_manipulation_decorator
def allocation(request, portfolio, is_sample, read_only):
refresh_positions(portfolio)
positions = latest_positions(portfolio)
_decorate_positions_for_display(positions, False)
context = {
'positions': positions,
'current_tab' : 'allocation',
}
return render_page('allocation.html', request, portfolio = portfolio, extra_dictionary = context)
@portfolio_manipulation_decorator
def income(request, portfolio, is_sample, read_only):
transactions = Transaction.objects.filter(portfolio__id__exact = portfolio.id).order_by('-as_of_date', '-id')
refresh_positions(portfolio, transactions)
positions = latest_positions(portfolio)
_decorate_positions_for_display(positions, request.GET.get("showClosedPositions", False))
summary_map = {}
for position in positions:
summary_map[position.symbol] = IncomeSummary(position.symbol, position.market_value, position.cost_basis, position.pl, position.pl_percent, position.realized_pl, position.show)
total_summary = IncomeSummary('*TOTAL*', 0.0, 0.0, 0.0, 0.0, 0.0, True)
for transaction in transactions:
if transaction.type != 'ADJUST':
continue
symbol = transaction.linked_symbol
if symbol == None or symbol == '':
symbol = transaction.symbol
summary = summary_map.get(symbol)
summary.add_income(transaction.as_of_date, transaction.total)
if summary.show:
total_summary.add_income(transaction.as_of_date, transaction.total)
summaries = sorted(summary_map.values(), key = (lambda summary: summary.symbol))
for summary in summaries:
total_summary.market_value = total_summary.market_value + summary.market_value
total_summary.cost_basis = total_summary.cost_basis + summary.cost_basis
total_summary.unrealized_pl = total_summary.unrealized_pl + summary.unrealized_pl
total_summary.realized_pl = total_summary.realized_pl + summary.realized_pl
total_summary.unrealized_pl_percent = ((total_summary.market_value / total_summary.cost_basis) - 1) * 100
context = {
'read_only' : read_only,
'summaries': summaries,
'total_summary' : total_summary,
'current_tab' : 'income',
}
return render_page('income.html', request, portfolio = portfolio, extra_dictionary = context)
@read_only_decorator
def read_only_income(request, portfolio):
return income(request, portfolio.id, read_only = True)
#---------\
# FORMS |
#---------/
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
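# Attaches display data to each position: current and previous-close prices,
# market value and P/L via decorate_position_with_prices, a 'show' flag for
# closed positions, and each position's allocation as a share of the total
# portfolio market value.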
def _decorate_positions_for_display(positions, showClosedPositions):
symbols = [ position.symbol for position in positions ] + [ CASH_SYMBOL ]
quotes = dict((quote.symbol, quote) for quote in quotes_by_symbols(symbols))
as_of_date = min([quote.last_trade.date() for symbol, quote in quotes.items()])
total_market_value = 0
for position in positions:
price = (1.0 if position.symbol == CASH_SYMBOL else quotes[position.symbol].price)
previous_price = (1.0 if position.symbol == CASH_SYMBOL else previous_close_price(quotes[position.symbol]))
decorate_position_with_prices(position, price, previous_price)
position.show = (showClosedPositions or abs(position.quantity) > 0.01 or position.symbol == CASH_SYMBOL)
total_market_value += position.market_value
for position in positions:
position.allocation = ((position.market_value / total_market_value * 100) if total_market_value != 0 else 0)
position.effective_as_of_date = as_of_date
def _decorate_positions_with_lots(positions):
for position in positions:
lots = []
for lot in position.lot_set.order_by('-as_of_date'):
total = max(lot.total, lot.sold_total)
days_open = (datetime.now().date() - (lot.as_of_date if lot.as_of_date != None else lot.sold_as_of_date)).days
if abs(lot.quantity - lot.sold_quantity) < 0.0001:
lot.status = 'Closed'
lot.pl = lot.sold_total - lot.total
else:
lot.status = 'Open'
lot.pl = ((lot.quantity - lot.sold_quantity) * position.price) - (lot.total - lot.sold_total)
lot.pl_percent = (((lot.pl / total) * 100) if total <> 0.0 else 0)
lots.append(lot)
position.lots = lots
def _get_summary(positions, transactions):
as_of_date = max([position.effective_as_of_date for position in positions]) if len(positions) > 0 else datetime.now().date()
start_date = min([transaction.as_of_date for transaction in transactions]) if len(transactions) > 0 else datetime.now().date()
risk_capital = 0
for transaction in transactions:
if transaction.type == 'DEPOSIT':
risk_capital = risk_capital + transaction.total
elif transaction.type == 'WITHDRAW':
risk_capital = risk_capital - transaction.total
market_value = 0
cost_basis = 0
realized_pl = 0
previous_market_value = 0
for position in positions:
market_value += position.market_value
cost_basis += position.cost_basis
realized_pl += position.realized_pl
previous_market_value += position.previous_market_value
return Summary(as_of_date, start_date, market_value, cost_basis, risk_capital, realized_pl, previous_market_value)
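# Computes a unitized performance series: the raw SQL returns, for each day with
# price history, the portfolio's market value plus that day's deposits and
# withdrawals. The first day's market value seeds the "share" count, later net
# inflows buy or sell shares at the previous unit value, and performance is the
# change in unit value, plotted against the benchmark's own price change.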
def _get_performance_history(portfolio, days):
query = """
SELECT D.portfolio_date,
D.deposit,
D.withdrawal,
SUM(P.quantity * ((CASE WHEN P.symbol = '*CASH' OR Q.cash_equivalent = '1' THEN 1.0 ELSE H.price END))) as market_value
FROM position P
JOIN
(
SELECT D.portfolio_id,
D.portfolio_date,
D.position_date,
SUM(CASE WHEN T.type = 'DEPOSIT' THEN T.total ELSE 0 END) as deposit,
SUM(CASE WHEN T.type = 'WITHDRAW' THEN T.total ELSE 0 END) as withdrawal
FROM (
SELECT P.portfolio_id,
D.portfolio_date,
MAX(P.as_of_date) as position_date
FROM (
SELECT DISTINCT
D.portfolio_id,
DATE(H.as_of_date) as portfolio_date
FROM (
SELECT DISTINCT
P.portfolio_id,
P.symbol
FROM position P
WHERE P.symbol != '*CASH'
AND P.portfolio_id = %(portfolio_id)s
) S
JOIN quote Q ON (Q.symbol = S.symbol)
JOIN price_history H ON (H.quote_id = Q.id)
JOIN
(
SELECT P.portfolio_id,
MIN(as_of_date) as start_date
FROM position P
WHERE P.portfolio_id = %(portfolio_id)s
GROUP BY P.portfolio_id
) D ON (D.portfolio_id = S.portfolio_id AND H.as_of_date >= D.start_date)
WHERE DATEDIFF(NOW(), H.as_of_date) < %(days)s
) D
JOIN
(
SELECT DISTINCT
P.portfolio_id,
P.as_of_date
FROM position P
WHERE P.portfolio_id = %(portfolio_id)s
) P ON (P.portfolio_id = D.portfolio_id AND P.as_of_date <= D.portfolio_date)
GROUP BY D.portfolio_id,
D.portfolio_date
) D
LEFT JOIN
(
SELECT T.portfolio_id,
T.as_of_date,
T.type,
T.total
FROM transaction T
WHERE T.type IN ('DEPOSIT', 'WITHDRAW')
AND T.portfolio_id = %(portfolio_id)s
) T ON (T.portfolio_id = D.portfolio_id AND T.as_of_date = D.portfolio_date)
GROUP BY D.portfolio_id,
D.portfolio_date
) D ON (P.portfolio_id = D.portfolio_id AND P.as_of_date = D.position_date)
LEFT JOIN quote Q ON (Q.symbol = P.symbol)
LEFT JOIN price_history H ON (H.quote_id = Q.id AND H.as_of_date = D.portfolio_date)
WHERE P.quantity <> 0
AND P.portfolio_id = %(portfolio_id)s
GROUP BY D.portfolio_date
ORDER BY D.portfolio_date"""
cursor = connection.cursor()
cursor.execute(query, { 'days' : days, 'portfolio_id' : portfolio.id })
benchmark_quote = quote_by_symbol(PERFORMANCE_BENCHMARK_SYMBOL)
cutoff_date = datetime.now().date() - timedelta(days = DAYS_IN_PERFORMANCE_HISTORY)
benchmark_price = {}
for history in benchmark_quote.pricehistory_set.filter(as_of_date__gte = cutoff_date).order_by('as_of_date'):
benchmark_price[history.as_of_date.date()] = history.price
shares = None
last_price = None
first_benchmark = None
out = []
for row in cursor.fetchall():
as_of_date = row[0]
deposit = float(row[1])
withdraw = float(row[2])
market_value = float(row[3])
benchmark = benchmark_price.get(as_of_date, 0.0)
if first_benchmark == None:
first_benchmark = benchmark
performance = 0
if shares == None:
shares = market_value
else:
net_inflow = deposit - withdraw
shares += net_inflow / last_price
performance = (((market_value / shares) - 1) if shares <> 0 else 0)
last_price = ((market_value / shares) if shares <> 0 else 1.0)
benchmark_performance = (((benchmark / first_benchmark) - 1) if first_benchmark <> 0 else 0)
out.append(PerformanceHistory(as_of_date, performance, benchmark_performance))
cursor.close()
return out
#-----------------\
# VALUE OBJECTS |
#-----------------/
class Summary:
def __init__(self, as_of_date, start_date, market_value, cost_basis, risk_capital, realized_pl, previous_market_value):
self.as_of_date = as_of_date
self.start_date = start_date
self.market_value = market_value
self.cost_basis = cost_basis
self.risk_capital = risk_capital
self.realized_pl = realized_pl
self.previous_market_value = previous_market_value
self.day_pl = market_value - previous_market_value
self.day_pl_percent = ((self.day_pl / previous_market_value) * 100) if previous_market_value != 0 else 0
self.pl = market_value - cost_basis
self.pl_percent = ((self.pl / cost_basis) * 100) if cost_basis != 0 else 0
self.risk_capital_pl = market_value - risk_capital
self.risk_capital_pl_percent = ((self.risk_capital_pl / risk_capital) * 100) if risk_capital != 0 else 0
class PerformanceHistory:
def __init__(self, as_of_date, percent, benchmark_percent):
self.as_of_date = as_of_date
self.percent = percent
self.benchmark_percent = benchmark_percent
class IncomeSummary:
def __init__(self, symbol, market_value, cost_basis, unrealized_pl, unrealized_pl_percent, realized_pl, show):
self.symbol = symbol
self.market_value = market_value
self.cost_basis = cost_basis
self.unrealized_pl = unrealized_pl
self.unrealized_pl_percent = unrealized_pl_percent
self.realized_pl = realized_pl
self.show = show
self.income_one_month = 0.0
self.income_three_months = 0.0
self.income_six_months = 0.0
self.income_one_year = 0.0
self.total_income = 0.0
def add_income(self, as_of_date, amount):
current = datetime.now().date()
self.total_income += amount
if (current - as_of_date).days < 365:
self.income_one_year += amount
if (current - as_of_date).days < 180:
self.income_six_months += amount
if (current - as_of_date).days < 90:
self.income_three_months += amount
if (current - as_of_date).days < 30:
self.income_one_month += amount
# ===== fxdemolisher/frano :: frano/main/views.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
import json
import random
from StringIO import StringIO
from urllib import urlopen
from django.shortcuts import redirect
from demo import get_demo_transactions
from models import Portfolio
from models import User
from models import create_portfolio
from models import create_user
from transactions.models import clone_transaction
from settings import JANRAIN_API_KEY
from view_utils import get_demo_user
from view_utils import logout_user
from view_utils import redirect_to_portfolio_action
from view_utils import render_page
#-------------\
# CONSTANTS |
#-------------/
#---------\
# VIEWS |
#---------/
def index(request):
user_id = request.session.get('user_id')
portfolio = None
if user_id != None and request.GET.get('demo') == None:
portfolio = Portfolio.objects.filter(user__id__exact = user_id)[0]
else:
portfolio_id = request.session.get('sample_portfolio_id')
portfolio = _get_demo_portfolio(portfolio_id)
request.session['sample_portfolio_id'] = portfolio.id
return redirect("/%s/positions.html" % portfolio.id)
def read_only(request, read_only_token):
return redirect("/%s/positions.html" % read_only_token)
def legal(request):
return render_page('legal.html', request)
def feedback(request):
return render_page('feedback.html', request)
def login(request):
token = None
if request.method == 'POST':
token = request.POST.get('token')
else:
token = request.GET.get('token')
if token == None:
return redirect("/demo.html?loginFailed=true")
u = None
try:
u = urlopen('https://rpxnow.com/api/v2/auth_info?apiKey=%s&token=%s' % (JANRAIN_API_KEY, token))
auth_info = json.loads(u.read())
status = auth_info['stat']
if status != 'ok':
return redirect("/demo.html?loginFailed=true")
profile = auth_info['profile']
identifier = profile['identifier']
email = profile['email'] if profile.has_key('email') else None
candidate = User.objects.filter(open_id = identifier)
user = None
portfolio = None
target = 'transactions'
if candidate.count() == 0:
user = create_user(identifier, email)
portfolio = create_portfolio(user, 'Default')
else:
user = candidate[0]
portfolio = Portfolio.objects.filter(user__id__exact = user.id)[0]
target = 'positions'
request.session['user_id'] = user.id
return redirect_to_portfolio_action(target, portfolio)
finally:
if u != None:
u.close()
def logout(request):
return logout_user(request)
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
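# Returns the session's existing sample portfolio if its id still resolves;
# otherwise creates a new 'SAMPLE #...' portfolio under the demo user and clones
# a random set of demo transactions into it.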
def _get_demo_portfolio(portfolio_id):
if portfolio_id != None:
candidate = Portfolio.objects.filter(id = portfolio_id)
if candidate.count() == 1:
return candidate[0]
else:
portfolio = create_portfolio(get_demo_user(), ('SAMPLE #%d' % random.randint(100000000, 999999999)))
for sample_transaction in get_demo_transactions():
transaction = clone_transaction(sample_transaction, portfolio);
transaction.save()
return portfolio
# ===== fxdemolisher/frano :: frano/urls.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
import os
from django.conf.urls.defaults import *
from django.views.static import serve
from settings import SERVE_STATICS
# Main views
urlpatterns = patterns('frano.main.views',
(r'^$', 'index'),
(r'^index.html', 'index'),
(r'^(?P<read_only_token>\w{10})/$', 'read_only'),
(r'^legal.html', 'legal'),
(r'^feedback.html', 'feedback'),
(r'^login.html', 'login'),
(r'^logout.html', 'logout'),
)
# Quote views
urlpatterns += patterns('frano.quotes.views',
(r'^priceQuote.json', 'price_quote'),
)
# Account views
urlpatterns += patterns('frano.account.views',
(r'^account.html', 'account'),
(r'^removeAccount.html', 'remove'),
(r'^createPortfolio.html', 'new_portfolio'),
(r'^(?P<portfolio_id>\d+)/setName.html', 'set_portfolio_name'),
(r'^(?P<portfolio_id>\d+)/remove.html', 'remove_portfolio'),
)
# Transaction views
urlpatterns += patterns('frano.transactions.views',
(r'^(?P<portfolio_id>\d+)/transactions.html', 'transactions'),
(r'^(?P<portfolio_id>\d+)/addTransaction.html', 'add'),
(r'^(?P<portfolio_id>\d+)/(?P<transaction_id>\d+)/remove.html', 'remove'),
(r'^(?P<portfolio_id>\d+)/removeAllTransactions.html', 'remove_all'),
(r'^(?P<portfolio_id>\d+)/(?P<transaction_id>\d+)/update.json', 'update'),
(r'^(?P<portfolio_id>\d+)/exportTransactions.(?P<format>\w{3})', 'export'),
(r'^(?P<portfolio_id>\d+)/importTransactions.html', 'import_form'),
(r'^(?P<portfolio_id>\d+)/processImportTransactions.html', 'process_import'),
(r'^(?P<portfolio_id>\d+)/requestImportType.html', 'request_import_type'),
)
# Position views
urlpatterns += patterns('frano.positions.views',
(r'^(?P<portfolio_id>\d+)/positions.html', 'positions'),
(r'^(?P<read_only_token>\w{10})/positions.html', 'read_only_positions'),
(r'^(?P<portfolio_id>\d+)/allocation.html', 'allocation'),
(r'^(?P<portfolio_id>\d+)/income.html', 'income'),
(r'^(?P<read_only_token>\w{10})/income.html', 'read_only_income'),
)
if SERVE_STATICS:
dir = os.path.realpath(os.path.dirname(__file__)) + "/static/"
urlpatterns += patterns('django.views.static',
(r'^css/(?P<path>.*)$', 'serve', { 'document_root' : dir + '/css' }),
(r'^img/(?P<path>.*)$', 'serve', { 'document_root' : dir + '/img' }),
(r'^js/(?P<path>.*)$', 'serve', { 'document_root' : dir + '/js' }),
)
# ===== fxdemolisher/frano :: frano/management/commands/cleanup_unused_quotes.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from sys import stdout
from django.core.management.base import BaseCommand
from django.db import connection
from frano.main.demo import DEMO_INSTRUMENTS
from frano.quotes.models import Quote
from frano.positions.views import PERFORMANCE_BENCHMARK_SYMBOL
class Command(BaseCommand):
help = 'Cleanup any quotes and price history that are unused'
def handle(self, *args, **options):
query = '''
SELECT DISTINCT
Q.symbol
FROM quote Q
WHERE Q.symbol NOT IN
(
SELECT DISTINCT
T.symbol
FROM transaction T
)
'''
symbols = set([])
cursor = connection.cursor()
cursor.execute(query)
for row in cursor.fetchall():
symbols.add(row[0])
cursor.close()
symbols.difference_update(DEMO_INSTRUMENTS + [ PERFORMANCE_BENCHMARK_SYMBOL ])
unused = Quote.objects.filter(symbol__in = symbols)
stdout.write('Found %d unused quotes\n' % unused.count())
for quote in unused:
stdout.write('Removing quote and price history for: %s\n' % quote.symbol)
quote.delete()
stdout.write('Successfully removed unused quotes and price history\n')
# ===== fxdemolisher/frano :: frano/transactions/parsers.py =====
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from datetime import datetime
from exceptions import Exception
from quotes.models import CASH_SYMBOL
from quotes.models import quote_by_symbol
from quotes.models import price_as_of
#-------------\
# CONSTANTS |
#-------------/
FRANO_TRANSACTION_EXPORT_HEADER = [ 'DATE', 'TYPE', 'SYMBOL', 'QUANTITY', 'PRICE', 'TOTAL', 'LINKED_SYMBOL' ]
GOOGLE_TRANSACTION_EXPORT_HEADER = [ 'Symbol', 'Name', 'Type', 'Date', 'Shares', 'Price', 'Cash value', 'Commission', 'Notes' ]
AMERITRADE_TRANSACTION_EXPORT_HEADER = [ 'DATE', 'TRANSACTION ID', 'DESCRIPTION', 'QUANTITY', 'SYMBOL', 'PRICE', 'COMMISSION', 'AMOUNT', 'NET CASH BALANCE', 'REG FEE', 'SHORT-TERM RDM FEE', 'FUND REDEMPTION FEE', ' DEFERRED SALES CHARGE' ]
ZECCO_TRANSACTION_EXPORT_HEADER = [ 'TradeDate', 'AccountTypeDescription', 'TransactionType', 'Symbol', 'Cusip', 'ActivityDescription', 'SecuritySubDescription', 'Quantity', 'Price', 'Currency', 'PrincipalAmount', 'NetAmount', 'TradeNumber' ]
SCOTTRADE_TRANSACTION_EXPORT_HEADER = [ 'Symbol', 'Quantity', 'Price', 'ActionNameUS', 'TradeDate', 'SettledDate', 'Interest', 'Amount', 'Commission', 'Fees', 'CUSIP', 'Description', 'ActionId', 'TradeNumber', 'RecordType', 'TaxLotNumber' ]
CHARLES_TRANSACTION_EXPORT_HEADER = [ 'Date', 'Action', 'Quantity', 'Symbol', 'Description', 'Price', 'Amount', 'Fees & Comm' ]
FIDELITY_TRANSACTION_EXPORT_HEADER = [ 'Trade Date', 'Action', 'Symbol', 'Security Description', 'Security Type', 'Quantity', 'Price ($)', 'Commission ($)', 'Fees ($)', 'Accrued Interest ($)', 'Amount ($)', 'Settlement Date' ]
MERCER_401_TRANSACTION_EXPORT_HEADER = [ 'Date', 'Source', 'Transaction', 'Ticker', 'Investment', 'Amount', 'Price', 'Shares/Units' ]
GOOGLE_TRANSACTION_TYPE_MAP = {
'Buy' : 'BUY',
'Sell' : 'SELL',
'Deposit Cash' : 'DEPOSIT',
'Withdraw Cash' : 'WITHDRAW',
}
#---------------------\
# EXPOSED FUNCTIONS |
#---------------------/
def parse_frano_transactions(reader):
parsed = []
for row in reader:
parsed.append({
'date' : datetime.strptime(row[0], '%m/%d/%Y').date(),
'type' : row[1],
'symbol' : row[2],
'quantity' : float(row[3]),
'price' : float(row[4]),
'total' : float(row[5]),
'linked_symbol' : (row[6] if row[6] != '' else None),
})
return parsed
def parse_google_transactions(reader):
parsed = []
for row in reader:
type = GOOGLE_TRANSACTION_TYPE_MAP.get(row[2])
if type == None:
raise Exception("Unknown transaction type in google finance file: %s" % row[2])
if type == 'DEPOSIT' or type == 'WITHDRAW':
symbol = CASH_SYMBOL
quantity = abs(float(row[6]))
price = 1.0
commission = 0.0
else:
symbol = row[0]
quantity = float(row[4])
price = float(row[5])
commission = float(row[7])
commission_multiplier = 1.0
if type == 'SELL':
commission_multiplier = -1.0
parsed.append({
'date' : datetime.strptime(row[3], '%b %d, %Y').date(),
'type' : type,
'symbol' : symbol,
'quantity' : quantity,
'price' : price,
'total' : ((quantity * price) + (commission_multiplier * commission)),
})
return parsed
def parse_ameritrade_transactions(reader):
parsed = []
for row in reader:
if len(row) != len(AMERITRADE_TRANSACTION_EXPORT_HEADER):
continue
date_field = row[0]
description_field = row[2]
quantity_field = row[3]
symbol_field = row[4]
price_field = row[5]
commission_field = row[6]
amount_field = row[7]
net_cash_field = row[8]
linked_symbol = None
# money market interest is a special case since it doesn't have a normal amount
if description_field.startswith('MONEY MARKET INTEREST'):
symbol = CASH_SYMBOL
type = 'ADJUST'
quantity = float(quantity_field)
price = 1.0
commission = 0.0
# skip no amount and no net cash transactions...for now
elif abs(float(amount_field)) < 0.01 or abs(float(net_cash_field)) < 0.01:
continue
# skip money market purchases and redemptions
elif description_field.startswith('MONEY MARKET PURCHASE') or description_field.startswith('MONEY MARKET REDEMPTION'):
continue
# symbol and price in place, buy/sell transactions
elif symbol_field != '' and price_field != '':
symbol = symbol_field
type = ('SELL' if float(amount_field) >= 0 else 'BUY')
quantity = float(quantity_field)
price = float(price_field)
commission = (float(commission_field) if len(commission_field) > 0 else 0.0)
# symbol is there, but price is not, dividend
elif symbol_field != '' or description_field.startswith('MONEY MARKET INTEREST'):
symbol = CASH_SYMBOL
type = 'ADJUST'
quantity = float(amount_field)
price = 1.0
commission = 0.0
linked_symbol = symbol_field
# otherwise it's a cash movement
else:
symbol = CASH_SYMBOL
type = ('DEPOSIT' if float(amount_field) >= 0 else 'WITHDRAW')
quantity = (abs(float(amount_field)))
price = 1.0
commission = 0.0
commission_multiplier = 1.0
if type == 'SELL':
commission_multiplier = -1.0
parsed.append({
'date' : datetime.strptime(date_field, '%m/%d/%Y').date(),
'type' : type,
'symbol' : symbol,
'quantity' : quantity,
'price' : price,
'total' : ((quantity * price) + (commission_multiplier * commission)),
'linked_symbol': linked_symbol,
})
return parsed
def parse_zecco_transactions(reader):
split_map = { }
parsed = []
for row in reader:
as_of_date = datetime.strptime(row[0], '%m/%d/%Y').date()
account_type = row[1]
transaction_type = row[2]
description_field = row[5]
symbol_field = row[3]
quantity_field = row[7]
price_field = row[8]
net_amount_field = row[11]
linked_symbol = None
# skip credit sweeps
if description_field.find('Credit Sweep') >= 0:
continue
# deposits/withdrawals happen on the cash journal
elif transaction_type == 'Cash Journal' and (
description_field.startswith('ACH DEPOSIT') or description_field.startswith('ACH DISBURSEMENT') or
description_field.startswith('W/T FRM CUST') or description_field.startswith('W/T TO CUST')
):
symbol = CASH_SYMBOL
type = ('DEPOSIT' if float(net_amount_field) >= 0 else 'WITHDRAW')
quantity = (abs(float(net_amount_field)))
price = 1.0
commission = 0.0
# buys/sells are marked by their transaction types
elif transaction_type == 'B' or transaction_type == 'S':
symbol = symbol_field
type = ('SELL' if transaction_type == 'S' else 'BUY')
quantity = abs(float(quantity_field))
price = float(price_field)
commission = abs(float(net_amount_field)) - (quantity * price)
# everything else on the margin account or cash is an adjustment
elif transaction_type in ['Interest Paid', 'Qualified Dividend', 'Short Term Capital Gain', 'Long Term Capital Gain']:
symbol = CASH_SYMBOL
type = 'ADJUST'
quantity = float(net_amount_field)
price = 1.0
commission = 0.0
linked_symbol = symbol_field
# splits are processed after all the parsing is done, just record and skip them
elif transaction_type in [ 'Security Journal' ] and description_field.endswith('SPLIT'):
_record_split(split_map, as_of_date, symbol_field, float(quantity_field))
continue
# otherwise just skip it for now
else:
continue
commission_multiplier = 1.0
if type == 'SELL':
commission_multiplier = -1.0
parsed.append({
'date' : as_of_date,
'type' : type,
'symbol' : symbol,
'quantity' : quantity,
'price' : price,
'total' : ((quantity * price) + (commission_multiplier * commission)),
'linked_symbol': linked_symbol,
})
splits = [ split for sub_list in split_map.values() for split in sub_list ]
_apply_splits(parsed, splits)
return parsed
def parse_scottrade_transactions(reader):
parsed = []
for row in reader:
action_field = row[3]
symbol_field = row[0]
quantity_field = row[1]
price_field = row[2]
date_field = row[4]
amount_field = row[7]
commission_field = row[8]
linked_symbol = None
# deposits and withdrawals
if action_field == 'IRA Receipt' or action_field == 'Journal':
symbol = CASH_SYMBOL
type = ('DEPOSIT' if float(amount_field) >= 0 else 'WITHDRAW')
quantity = abs(float(amount_field))
price = 1.0
commission = 0.0
# buys and sells
elif action_field == 'Buy' or action_field == 'Sell':
symbol = symbol_field
type = ('SELL' if action_field == 'Sell' else 'BUY')
quantity = abs(float(quantity_field))
price = float(price_field)
commission = abs(float(commission_field))
# incoming transfers mimic a deposit and a buy
elif action_field == 'Transfer In':
quantity = float(quantity_field)
price = float(price_field) / quantity
parsed.append({
'date' : datetime.strptime(date_field, '%m/%d/%Y').date(),
'type' : 'DEPOSIT',
'symbol' : CASH_SYMBOL,
'quantity' : (price * quantity),
'price' : 1.0,
'total' : (price * quantity),
})
symbol = symbol_field
type = 'BUY'
commission = 0.0
# everything else is an adjustment
else:
symbol = CASH_SYMBOL
type = 'ADJUST'
quantity = float(amount_field)
price = 1.0
commission = 0.0
linked_symbol = (symbol_field if symbol_field != 'Cash' else None)
commission_multiplier = 1.0
if type == 'SELL':
commission_multiplier = -1.0
parsed.append({
'date' : datetime.strptime(date_field, '%m/%d/%Y').date(),
'type' : type,
'symbol' : symbol,
'quantity' : quantity,
'price' : price,
'total' : ((quantity * price) + (commission_multiplier * commission)),
'linked_symbol': linked_symbol,
})
return parsed
def parse_charles_transactions(reader):
parsed = []
for row in reader:
date_field = row[0][:10]
action_field = row[1].strip(' ')
quantity_field = row[2].strip(' ')
symbol_field = row[3].strip(' ')
price_field = row[5].replace('$', '').strip(' ')
amount_field = row[6].replace('$', '').strip(' ')
commission_field = row[7].replace('$', '').strip(' ').strip('*')
linked_symbol = None
# deposits and withdrawals have no symbols or prices
if symbol_field == '' and price_field == '':
symbol = CASH_SYMBOL
type = ('DEPOSIT' if float(amount_field) >= 0 else 'WITHDRAW')
quantity = abs(float(amount_field))
price = 1.0
commission = 0.0
# buys and sells
elif action_field == 'Buy' or action_field == 'Sell':
symbol = symbol_field
type = ('SELL' if action_field == 'Sell' else 'BUY')
quantity = float(quantity_field)
price = float(price_field)
commission = (float(commission_field) if commission_field != '' else 0.0)
# transfers have a symbol and quantity, and little else
elif symbol_field != '' and quantity_field != '' and amount_field == '':
as_of_date = datetime.strptime(date_field, '%m/%d/%Y')
symbol = symbol_field
quantity = float(quantity_field)
price = price_as_of(quote_by_symbol(symbol), as_of_date)
parsed.append({
'date' : as_of_date.date(),
'type' : 'DEPOSIT',
'symbol' : CASH_SYMBOL,
'quantity' : (price * quantity),
'price' : 1.0,
'total' : (price * quantity),
})
type = 'BUY'
commission = 0.0
# everything else is an adjustment
else:
symbol = CASH_SYMBOL
type = 'ADJUST'
quantity = float(amount_field)
price = 1.0
commission = 0.0
linked_symbol = symbol_field
commission_multiplier = 1.0
if type == 'SELL':
commission_multiplier = -1.0
parsed.append({
'date' : datetime.strptime(date_field, '%m/%d/%Y').date(),
'type' : type,
'symbol' : symbol,
'quantity' : quantity,
'price' : price,
'total' : ((quantity * price) + (commission_multiplier * commission)),
'linked_symbol': linked_symbol,
})
return parsed
def parse_fidelity_transactions(reader):
parsed = []
for row in reader:
if len(row) < 11:
continue
date_field = row[0].strip()
action_field = row[1].strip()
symbol_field = row[2].strip()
symbol_description_field = row[3].strip()
quantity_field = row[5].strip()
price_field = row[6].strip()
amount_field = row[10].strip()
linked_symbol = None
# deposits and withdrawals have no symbols or prices
if symbol_field == '' and price_field == '':
symbol = CASH_SYMBOL
type = ('DEPOSIT' if float(amount_field) >= 0 else 'WITHDRAW')
quantity = abs(float(amount_field))
price = 1.0
total = quantity
# buys and sells
elif action_field.startswith('YOU BOUGHT') or action_field.startswith('YOU SOLD') or (symbol_description_field != 'CASH' and (action_field in [ 'PURCHASE INTO CORE ACCOUNT', 'REDEMPTION FROM CORE ACCOUNT', 'REINVESTMENT' ])):
symbol = symbol_field
type = ('SELL' if (action_field.startswith('YOU SOLD') or action_field == 'REDEMPTION FROM CORE ACCOUNT') else 'BUY')
quantity = abs(float(quantity_field))
price = float(price_field)
total = abs(float(amount_field))
# certain known actions are adjustments
elif action_field in [ 'SHORT-TERM CAP GAIN', 'LONG-TERM CAP GAIN', 'DIVIDEND RECEIVED', 'INTEREST EARNED' ]:
symbol = CASH_SYMBOL
type = 'ADJUST'
quantity = float(amount_field)
price = 1.0
total = abs(float(amount_field))
linked_symbol = (symbol_field if symbol_description_field != 'CASH' else None)
# ignore everything else
else:
continue
parsed.append({
'date' : datetime.strptime(date_field, '%m/%d/%Y').date(),
'type' : type,
'symbol' : symbol,
'quantity' : quantity,
'price' : price,
'total' : total,
'linked_symbol': linked_symbol,
})
return parsed
def parse_mercer_401_transactions(reader):
parsed = []
for row in reader:
if len(row) != 8 or row[0] == 'Total':
continue
as_of_date = datetime.strptime(row[0].strip(' '), '%m/%d/%Y').date()
action = row[2].strip(' ')
symbol = row[3].strip(' ')
amount_field = row[5].replace('$', '').replace(',', '').strip(' ')
price = float(row[6].replace('$', '').replace(',', '').strip(' '))
quantity = float(row[7].replace('$', '').replace(',', '').strip(' '))
linked_symbol = None
if amount_field[:1] == '(' and amount_field[-1:] == ')':
amount = 0 - float(amount_field[1:-1])
else:
amount = float(amount_field)
# deposits are contributions or conversions and are treated like transfers
if action in [ 'CONTRIBUTIONS', 'CONVERSION' ]:
parsed.append({
'date' : as_of_date,
'type' : 'DEPOSIT',
'symbol' : CASH_SYMBOL,
'quantity' : amount,
'price' : 1.0,
'total' : amount,
})
type = 'BUY'
# buys and sells are transfer in/out actions
elif action in [ 'TRANSFER OUT', 'TRANSFER IN' ] and symbol != None and symbol != '':
type = ('SELL' if action == 'TRANSFER OUT' else 'BUY')
quantity = abs(quantity)
amount = abs(amount)
# dividends are adjustments and reinvestments. fees are sells and negative adjustments.
elif action in [ 'DIVIDEND', 'FEE' ]:
parsed.append({
'date' : as_of_date,
'type' : 'ADJUST',
'symbol' : CASH_SYMBOL,
'quantity' : amount,
'price' : 1.0,
'total' : amount,
'linked_symbol' : symbol,
})
type = ('BUY' if action == 'DIVIDEND' else 'SELL')
quantity = abs(quantity)
amount = abs(amount)
else:
continue
parsed.append({
'date' : as_of_date,
'type' : type,
'symbol' : symbol,
'quantity' : quantity,
'price' : price,
'total' : amount,
'linked_symbol' : linked_symbol,
})
return parsed
#-----------------\
# VALUE OBJECTS |
#-----------------/
class Split:
  def __init__(self, as_of_date, symbol, quantity):
    self.as_of_date = as_of_date

    # initialize both sides so a partially recorded split can be matched up later
    # without raising AttributeError in _record_split/_apply_splits
    self.from_symbol = None
    self.from_quantity = None
    self.to_symbol = None
    self.to_quantity = None

    if quantity > 0:
      self.to_symbol = symbol
      self.to_quantity = quantity
    else:
      self.from_symbol = symbol
      self.from_quantity = abs(quantity)

  def __repr__(self):
    return "Split: %.4f of %s to %.4f of %s" % (self.from_quantity, self.from_symbol, self.to_quantity, self.to_symbol)
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
def _record_split(split_map, as_of_date, symbol, quantity):
splits_on_date = split_map.get(as_of_date, None)
if splits_on_date == None:
splits_on_date = [ Split(as_of_date, symbol, quantity) ]
split_map[as_of_date] = splits_on_date
else:
found = False
for split in splits_on_date:
if quantity > 0 and split.from_symbol != None and split.from_symbol.startswith(symbol):
split.to_symbol = symbol
split.to_quantity = quantity
found = True
break
elif split.to_symbol != None and symbol.startswith(split.to_symbol):
split.from_symbol = symbol
split.from_quantity = abs(quantity)
found = True
break
if not found:
splits_on_date.append(Split(as_of_date, symbol, quantity))
def _apply_splits(parsed, splits):
"""
split processing - adjust price and quantity of all pre-split transactions
the double loop is intentional since a stock can split more than once, start processing by earliest date
"""
for split in sorted(splits, key = (lambda split: split.as_of_date)):
for transaction in parsed:
if transaction.get('symbol') == split.from_symbol and transaction.get('date') <= split.as_of_date:
factor = split.to_quantity / split.from_quantity
transaction['symbol'] = split.to_symbol
transaction['quantity'] = transaction.get('quantity') * factor
transaction['price'] = transaction.get('price') / factor
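# Illustrative example (not part of the original module; dates and symbols are
# made up). A Zecco export records a 2-for-1 split as two rows on the same day:
#
#   split_map = { }
#   _record_split(split_map, date(2011, 3, 1), 'FOO', -100.0)  # 100 old shares removed
#   _record_split(split_map, date(2011, 3, 1), 'FOO', 200.0)   # 200 new shares added
#   splits = [ s for sub_list in split_map.values() for s in sub_list ]
#
#   # a pre-split BUY of 100 FOO @ 10.00 becomes 200 FOO @ 5.00 (total unchanged)
#   transactions = [ { 'symbol' : 'FOO', 'date' : date(2011, 1, 15), 'quantity' : 100.0, 'price' : 10.0 } ]
#   _apply_splits(transactions, splits)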
|
fxdemolisher/frano
|
frano/account/views.py
|
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from django import forms
from django.shortcuts import redirect
from main.decorators import login_required_decorator
from main.decorators import portfolio_manipulation_decorator
from main.models import Portfolio
from main.models import create_portfolio
from main.view_utils import logout_user
from main.view_utils import redirect_to_portfolio_action
from main.view_utils import render_page
#-------------\
# CONSTANTS |
#-------------/
#---------\
# VIEWS |
#---------/
@login_required_decorator
def account(request, user):
return render_page('account.html', request)
@login_required_decorator
def remove(request, user):
user.delete()
return logout_user(request)
@login_required_decorator
def new_portfolio(request, user):
form = PortfolioNameForm(request.POST)
if not form.is_valid():
return redirect("/account.html")
name = _get_effective_portfolio_name(user, form.cleaned_data.get('portfolioName').encode('UTF-8'))
portfolio = create_portfolio(user, name)
return redirect_to_portfolio_action('importTransactions', portfolio)
@login_required_decorator
@portfolio_manipulation_decorator
def set_portfolio_name(request, user, portfolio, is_sample, read_only):
form = PortfolioNameForm(request.POST)
if form.is_valid():
portfolio.name = _get_effective_portfolio_name(user, form.cleaned_data.get('portfolioName').encode('UTF-8'))
portfolio.save()
return redirect("/account.html")
@login_required_decorator
@portfolio_manipulation_decorator
def remove_portfolio(request, user, portfolio, is_sample, read_only):
portfolio.delete()
return redirect("/account.html")
#---------\
# FORMS |
#---------/
class PortfolioNameForm(forms.Form):
portfolioName = forms.CharField(min_length = 3, max_length = 30)
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
def _get_effective_portfolio_name(user, name):
portfolios = Portfolio.objects.filter(user__id__exact = user.id)
names = set([ p.name for p in portfolios])
index = 0
new_name = name
while new_name in names:
index = index + 1
new_name = "%s-%d" % (name, index)
return new_name
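# Example: if the user already has portfolios named "Main" and "Main-1",
# requesting another "Main" yields "Main-2".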
|
otchlan/Pytkon_wprowadzenie
|
src/main.py
|
<filename>src/main.py
import podstawy as po
import toto as tt
import tablica_np as tn
if __name__ == '__main__':
# print("Witaj świecie")
a = 7
b = 5.5
"""SKTÓTY KLAWISZOWE"""
"""cTRL+d - duplikownie lini kodu"""
"""Ctrl + / - komentowanie lini/linijek kodu"""
# print ("to jest liczbe całokowita ",a)
# print (b)
# print(a + b)
# po.suma(a, b)
# po.jezeli(55, 3 , 2)
# po.petla_while()
# po.petla_for()
# tt.losowanie()
# tt.lotek()
"""pokazać wprowadzanie z klawiatury i rzutowanie liczby na inta"""
# zKlaw = int(input("Podaj liczbe "))
# print(zKlaw)
"""Tablica wbudowana w Pythona"""
# tablica = [1,2,3]
# print(tablica)
"""Klasa tablica_np"""
# tn.prosta()
# tn.wybierz_elem()
# tn.wybierz_wk()
# tn.losowanie()
# tn.zamien()
|
otchlan/Pytkon_wprowadzenie
|
src/tablica_np.py
|
<reponame>otchlan/Pytkon_wprowadzenie<gh_stars>0
import numpy as np
def prosta():
    print()
    tablica = np.array([4, 5, 6])
    print("One-dimensional array ", tablica)
    print()
    tablica_dwu = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    print("Two-dimensional array:\n", tablica_dwu)
    print()
    tablica_m = np.array([[1, 2, 3], ["one", "two", "three"]])
    print("Two-dimensional mixed array:\n", tablica_m)
    print()
    print("Array shapes:")
    print("one-dimensional:", tablica.shape)
    print("two-dimensional:", tablica_dwu.shape)


def wybierz_elem(wiersz=0, kolumna=0):
    print()
    tablica = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 0]])
    x = tablica.shape
    # print("Array shape", x)
    print("Array:\n", tablica)
    print()
    print("Specific value from the array (0,0) - ", tablica[wiersz, kolumna])
    print("Enter a row and a column; the array has shape:", x)
    wiersz = int(input("Row: "))
    kolumna = int(input("Column: "))
    print("You picked:", tablica[wiersz, kolumna])


def wybierz_wk():
    tablica = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    print("3x3 array\n", tablica)
    print()
    print("Row no. 1", tablica[0, :])
    print("Column no. 3", tablica[:, 2])


# def losowanie():
#     tablica = np.random.rand()
#     print()
#     print("Randomly generated array\n", tablica)


def zamien():
    tab_1_wym = np.zeros(3)
    tab_2_wym = np.zeros((5, 5))
    tabnn = np.zeros((2, 2, 2, 2))
    print()
    print("One-dimensional array:\n", tab_1_wym)
    print("Two-dimensional array:\n", tab_2_wym)
    # print("n-dimensional array:\n", tabnn)
    a = 5
    tab_2_wym[2, 3] = a
    print()
    print(tab_2_wym)
|
otchlan/Pytkon_wprowadzenie
|
src/podstawy.py
|
def suma(a, b):
    print("in the module", a + b)


def jezeli(a, b, c):
    ab = a + b
    if ab == c:
        print("if-1 a+b=c", a + b)
    elif a + c == b:
        print("if-2 a+c=b", a + c)
    elif b + c == a:
        print("if-3 b+c=a", b + c)
    else:
        print("no combination matches")
    """1. True"""
    """2. <"""
    """3. <="""


def petla_while():
    x = 1
    while x <= 10:
        print("Keep going. x = ", x)
        x += 1


def petla_for():
    for x in range(2, 5):
        print(x)
|
otchlan/Pytkon_wprowadzenie
|
src/toto.py
|
<gh_stars>0
# 1. import rand -> create the losowanie function -> call it in main
# 2. create the lotek function:
#    create a list and a FOR loop over it -> call it in main
#    import numpy -> install it via pip -> create arrays
import random
import numpy as np


def losowanie():
    liczba = random.randint(1, 10)
    print("drawn number = ", liczba)


def lotek(x=6):
    lista = []
    # start from an empty array; assigning bare np.array would store the function itself
    tablica = np.array([])
    for i in range(x):
        liczba = random.randint(1, 49)
        print(i + 1, liczba)
        # lista.append(liczba)  # appending to the list
        """Append the number to the array tablica"""
        tablica = np.append(tablica, liczba)
        """Numbers may repeat; if there is time left"""
        """we could add an IF that checks for duplicates and draws again when one occurs"""
    # print("The lucky numbers are: ", *lista)
    print("Lucky numbers in the array", tablica)
|
ztaylor2/sr-problems
|
problems.py
|
<gh_stars>0
"""Problem solutions."""
"""Qualitative questions:
1. What are your goals for the next five years?
I am currently very focused on getting my first software engineering job,
and my hope is that after I get that job I will be able to move
vertically within that organization. I would like to be something
like a Senior Lead Software Engineer five years from now.
2. What is your reason for leaving your previous position,
or why do you want to leave your
current position (if applicable)?
What do you hope to get out of this position?
The first part of the question is not applicable.
I hope to gain quality experience and growth as a software engineer.
I'm seeking a position that will facilitate learning and career growth.
3. Can you share your biggest professional or academic accomplishment? Can you share
your biggest professional or academic failure?
My biggest academic accomplishment is graduating Cum Laude with a
mechanical engineering degree. This is not an easy thing to do, and I'm
very proud of the hard work and dedication that accomplishment highlights.
My biggest professional failure is probably not receiving a job offer
from the startup Rover. They are currently only hiring senior engineers
but they decided to give me a chance at the interview process after I
passed a coding test for them. I then passed an hour long live coding
phone screen and was assigned a take home coding project. I spent around
30 hours on the project and ended up not moving forward in the interview
process. This was fairly discouraging for me, but on the bright side I
learned a great deal while building out the project and became a
better software engineer in the process.
4. What would your peers say are your strengths and weaknesses?
My peers would say my strengths include my work ethic, drive,
ability to get along with people, and technical aptitude. My weaknesses
include organization and first impressions.
"""
"""
Coding Questions:
1. Implement a function to check if a binary tree is balanced. For the purposes of this question,
a balanced tree is defined to be a tree such that the heights of the two subtrees of any node
never differ by more than one.
2. Implement an algorithm to find the kth to last element of a singly linked list.
3. Write a program that outputs all possibilities to put the operators ‘+’, ‘-’, or nothing between
the numbers 1,2,...,9 (in this order) such that the result is 100. For example 1 + 2 + 3 - 4 + 5
+ 6 + 78 + 9 = 100.
"""
"""1. Check if a bst is balanced at every node."""
def is_balanced(root):
"""Check if balanced."""
return is_balanced_int(root) >= 0
def is_balanced_int(root):
"""Check if balanced."""
if root is None:
return 0
left = is_balanced_int(root.left)
right = is_balanced_int(root.right)
if left < 0 or right < 0 or abs(left - right) > 1:
return -1
return max((left, right)) + 1
"""2. Implement an algorithm to find the kth to last element of a singly linked list."""
class Node(object):
"""The node object."""
def __init__(self, data, next):
"""Build node attributes on init."""
self.data = data
self.next = next
class LinkedList(object):
"""Create linked list object."""
    def __init__(self, iterable=None):
        """Head node is none on init."""
        self.head = None
        self._counter = 0
        if isinstance(iterable, (tuple, list)):
            for item in iterable:
                self.push(item)

    def push(self, data):
        """Prepend a node to the list (assumed helper; the snippet calls it but omits its definition)."""
        self.head = Node(data, self.head)
        self._counter += 1
def kth_to_last(self, k):
"""Return the kth to last elements in linked list."""
if k > self._counter:
raise IndexError('K greater than length of list.')
current_node = self.head
for i in range(k - 1):
current_node = current_node.next
kth_to_last_list = []
while current_node:
kth_to_last_list.append(current_node.data)
current_node = current_node.next
return kth_to_last_list
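# Illustrative usage (push is assumed to prepend, so LinkedList([1, 2, 3]) links 3 -> 2 -> 1):
#   LinkedList([1, 2, 3]).kth_to_last(2)  ->  [2, 1]
#   LinkedList([1, 2, 3]).kth_to_last(5)  ->  raises IndexError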
"""3. Write a program that outputs all possibilities to put the operators ‘+’, ‘-’, or nothing between
the numbers 1,2,...,9 (in this order) such that the result is 100. For example 1 + 2 + 3 - 4 + 5
+ 6 + 78 + 9 = 100."""
def find_all_paths():
    """Return every way to place '+', '-', or nothing between 1..9 so the expression equals 100."""
    digits = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    search_sum = 100
    paths = []

    def find_paths(index, total, pending, path):
        """total holds the finalized terms; pending is the term still being built."""
        if index == len(digits):
            if total + pending == search_sum:
                paths.append(path)
            return
        digit = digits[index]
        # '+' finalizes the pending term and starts a new positive one
        find_paths(index + 1, total + pending, digit, path + ' + ' + str(digit))
        # '-' finalizes the pending term and starts a new negative one
        find_paths(index + 1, total + pending, -digit, path + ' - ' + str(digit))
        # no operator: extend the pending term with this digit, keeping its sign
        extended = pending * 10 + digit if pending >= 0 else pending * 10 - digit
        find_paths(index + 1, total, extended, path + str(digit))

    find_paths(1, 0, digits[0], str(digits[0]))
    return paths
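# Example: find_all_paths() includes "1 + 2 + 34 - 5 + 67 - 8 + 9" among its
# results, since 1 + 2 + 34 - 5 + 67 - 8 + 9 = 100.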
|
Operation-Falcon/sectrails
|
dns.py
|
import argparse
import sys
from banner.banner import banner_design
from function import *
banner=banner_design()
parser=argparse.ArgumentParser(description="Subdomain enumeration", formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument("-d", "--domain", help="domain name for subdomain enumeration", type=str, required=True)
parser.add_argument("-o","--output", help="output filename to save the results", type=str, required=True)
parser.add_argument("-a", "--api", help="Security trails api key", type=str, required=True)
args = parser.parse_args()

# argparse already enforces the three required flags, so the positional
# sys.argv checks are unnecessary and break when the flags are reordered
dns(args.domain, args.api, args.output)
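# Example invocation (API key is a placeholder):
#   python dns.py -d example.com -a <SECURITYTRAILS_API_KEY> -o results.txt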
|
Operation-Falcon/sectrails
|
function.py
|
<reponame>Operation-Falcon/sectrails
import requests
def subdomain(domain, api, output):
param={"children_only":"false",
"include_inactive":"true"}
header={"Accept": "application/json",
"APIKEY": f"{api}"}
r=requests.get(f"https://api.securitytrails.com/v1/domain/{domain}/subdomains?children_only=false&include_inactive=true", params=param, headers=header)
data=r.json()["subdomains"]
with open(output, 'w') as file:
for i in range(0, len(data)):
try:
file.writelines("%s%s%s\n"% (data[i], ".", domain))
print("%s%s%s\n" %(data[i], ".", domain))
except Exception as e:
pass
def tags(domain, api, output):
headers={"Accept":"application/json",
"APIKEY": f"{api}"}
r=requests.get(f"https://api.securitytrails.com/v1/domain/{domain}/tags", headers=headers)
data=r.json()["tags"]
with open(output, "w") as file:
for i in range(0, len(data)):
try:
file.writelines("%s\n" % data[i])
print("%s\n" % data[i])
except Exception as e:
pass
def details(domain, api, output):
    headers = {"Accept": "application/json",
               "APIKEY": f"{api}"}
    r = requests.get(f"https://api.securitytrails.com/v1/domain/{domain}", headers=headers)
    data = r.json()["current_dns"]
    # iterate the record types instead of repeating the TXT block for each one;
    # entries without a "value" field are written exactly as the API returns them
    with open(output, "a") as file:
        for record_type in ["txt", "soa", "ns", "mx", "aaaa", "a"]:
            file.writelines("%s:\n" % record_type.upper())
            values = data.get(record_type, {}).get("values", [])
            for value in values:
                try:
                    entry = value.get("value", value)
                    file.writelines("%s\n" % entry)
                    print("%s\n" % entry)
                except Exception as e:
                    pass
def dns(domain, api, output):
headers={"Accept":"application/json",
"APIKEY": f"{api}"}
r=requests.get(f"https://api.securitytrails.com/v1/history/{domain}/dns/a", headers=headers)
data=r.json()["records"]
with open(output, "w") as file:
for i in range(0, len(data)):
for j in range(0, len(data[i]["values"])):
try:
file.writelines("%s\n" % data[i]["values"][j]["ip"])
print("%s\n" % data[i]["values"][j]["ip"])
except Exception as e:
pass
def whois(domain, api, output):
headers={"Accept":"application/json",
"APIKEY": f"{api}"}
r=requests.get(f"https://api.securitytrails.com/v1/history/{domain}/whois", headers=headers)
data=r.json()["result"]["items"]
with open(output, "w") as file:
for i in range(0, len(data)):
for j in range(0, len(data[i]["contact"])):
try:
file.writelines("%s\n" % data[i]["contact"][j]["type"])
file.writelines("%s\n" % data[i]["contact"][j]["telephone"])
file.writelines("%s\n" % data[i]["contact"][j]["street1"])
file.writelines("%s\n" % data[i]["contact"][j]["state"])
file.writelines("%s\n" % data[i]["contact"][j]["postalCode"])
file.writelines("%s\n" % data[i]["contact"][j]["organization"])
file.writelines("%s\n" % data[i]["contact"][j]["name"])
file.writelines("%s\n" % data[i]["contact"][j]["email"])
file.writelines("%s\n" % data[i]["contact"][j]["country"])
file.writelines("%s\n" % data[i]["contact"][j]["city"])
file.writelines("\n\n")
print("%s\n" % data[i]["contact"][j]["type"])
print("%s\n" % data[i]["contact"][j]["telephone"])
print("%s\n" % data[i]["contact"][j]["street1"])
print("%s\n" % data[i]["contact"][j]["state"])
print("%s\n" % data[i]["contact"][j]["postalCode"])
print("%s\n" % data[i]["contact"][j]["organization"])
print("%s\n" % data[i]["contact"][j]["name"])
print("%s\n" % data[i]["contact"][j]["email"])
print("%s\n" % data[i]["contact"][j]["country"])
print("%s\n" % data[i]["contact"][j]["city"])
print("\n\n")
except Exception as e:
pass
def neighbours(ip, api, output):
headers={"Accept":"application/json",
"APIKEY": f"{api}"}
r=requests.get(f"https://api.securitytrails.com/v1/ips/nearby/{ip}", headers=headers)
data=r.json()["blocks"]
with open(output, "w") as file:
for i in range(0, len(data)):
try:
file.writelines("%s\n" % (data[i]["ip"]))
print("%s\n" % (data[i]["ip"]))
except Exception as e:
pass
|
miniyus/finance-data-auto-posting
|
fdap/app/autopost/template.py
|
def make_subject(sector: str, year: str, q: str):
return f'[업종: {sector}] {year}년도 {q}분기'
def make_strong_tag(value: str):
return f'<strong>{value}</strong>'
def make_p_tag(value: str):
return f'<p>{value}</p>'
def make_img_tag(name: str, src: str):
return f'<img src="{src}" alt="{name}">'
def make_new_line(repeat: int = 1):
return '<p> </p>' * repeat
def replace_template_str(template: str, values: dict, parameter_format: list = None):
    import re

    if parameter_format is None:
        parameter_format = ['{{', '}}']

    # substitute into the accumulated result; substituting into the original
    # template each time would keep only the last key's replacement
    result = template
    for key, value in values.items():
        re_key = parameter_format[0] + key + parameter_format[1]
        result = re.sub(re_key, value, result)
    return result
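# Illustrative usage (made-up values):
#   replace_template_str('<p>{{title}} - {{year}}</p>', {'title': 'Report', 'year': '2021'})
#   -> '<p>Report - 2021</p>'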
class Template:
_title: str = ''
_description: str = ''
_image: str = ''
def __init__(self, title: str, image: dict, description: str = ''):
"""
Args:
title:
image:
description:
"""
self.set_title(title)
self.set_image(image['name'], image['src'])
self.set_description(description)
def set_title(self, title: str):
if title:
self._title = make_p_tag(title)
return self
def set_image(self, name: str, src: str):
if name and src:
self._image = make_img_tag(name, src)
return self
def set_description(self, description: str):
if description:
self._description = make_p_tag(description)
return self
def make(self):
res = ''
res += self._title
res += make_new_line(1)
res += self._description
res += self._image
res += make_new_line(3)
return res
|
miniyus/finance-data-auto-posting
|
fdap_manage/views.py
|
<filename>fdap_manage/views.py
from django.shortcuts import render
from django.shortcuts import redirect
from django.urls import reverse
from django.http import HttpResponse
from django.utils import timezone
from .models import Posts
from fdap.definitions import LOG_PATH
from fdap.utils.util import config_json, write_config_json
from fdap.config.config import Config
from .forms import get_form
import os
# Create your views here.
def index(request):
post_list = Posts.objects.all()
return render(request, 'home.html', {'post_list': post_list})
def posts(request):
post_list = Posts.objects.all()
return render(request, 'posts.html', {'post_list': post_list})
def logs(request):
log_list = os.listdir(LOG_PATH)
logs_list = []
recent = {}
for con in log_list:
if 'log' in con:
try:
name, ext, date = con.split('.')
logs_list.append({'date': date, 'subject': con})
except ValueError as e:
name, ext = con.split('.')
date = timezone.now().strftime('%Y-%m-%d')
recent = {'date': date, 'subject': con}
    logs_list.reverse()
    if recent:
        logs_list.insert(0, recent)
    return render(request, 'log_list.html', {'log_list': logs_list})
def log(request, f_name):
    # use a context manager so the log file handle is always closed
    with open(LOG_PATH + '/' + f_name, 'r', encoding='utf-8') as f:
        contents = f.read()
    return render(request, 'log.html', {'f_name': f_name, 'contents': contents})
def config(request, module):
config_list = Config.list()
conf = None
if module in config_list:
conf = config_json(module)
return render(request, 'config_form.html', {'config': conf, 'module': module})
def write_config(request, module):
if module in Config.list():
data = get_form(module, request.POST)
conf = write_config_json(module, data.to_dict())
return redirect('config', module=module)
|
miniyus/finance-data-auto-posting
|
fdap/contracts/webdriver.py
|
<gh_stars>0
from abc import ABC, abstractmethod
from selenium.webdriver.remote.webdriver import WebDriver
from selenium.common.exceptions import WebDriverException
from typing import Union
from fdap.utils.loggeradapter import LoggerAdapter
from fdap.contracts.blog_client import BlogLoginInfo
class WebDriverHandler(ABC):
_web_driver: WebDriver
_logger: LoggerAdapter
_config: dict
_login_data: BlogLoginInfo
def __init__(
self,
web_driver: WebDriver,
logger: LoggerAdapter,
config: dict = None,
login_data: BlogLoginInfo = None
):
self._web_driver = web_driver
self._logger = logger
self._config = config
self._login_data = login_data
def run(self, url: str) -> Union[bool, any]:
try:
self._web_driver.get(url)
result = self.handle()
self._web_driver.close()
return result
except WebDriverException as e:
self._logger.error(e.msg)
self._logger.error(e.stacktrace)
return False
@abstractmethod
def handle(self):
pass
|
miniyus/finance-data-auto-posting
|
fdap/__init__.py
|
<reponame>miniyus/finance-data-auto-posting
from fdap.application import Application
from fdap.core.containter import Container
app = Application(Container())
|
miniyus/finance-data-auto-posting
|
fdap/app/opendart/__init__.py
|
<reponame>miniyus/finance-data-auto-posting
# Resolve dependency injection
# use: from fdap.app.opendart import OpenDartService
def service():
from fdap.app.opendart.opendart_service import OpenDartService
from fdap.config.config import Config
config = Config.OPENDART
return OpenDartService(config['api']['url'], config['api']['api_key'])
|
miniyus/finance-data-auto-posting
|
fdap/application.py
|
<gh_stars>0
from dependency_injector import containers
from fdap.config.config import Config
from fdap.utils.customlogger import CustomLogger
from fdap.utils.loggeradapter import LoggerAdapter
from typing import Callable
class Application:
_container: containers.DeclarativeContainer
_logger: LoggerAdapter
def __init__(self, container: containers.DeclarativeContainer, callback: Callable = None):
self._logger = CustomLogger.logger('root', 'application')
self._container = container
self._container.config.from_dict(Config.all())
self._container.init_resources()
if callback is not None:
self.bootstrap(callback)
def get(self, name: str) -> any:
return self._container.providers.get(name)()
def bootstrap(self, callback: Callable):
try:
callback(self)
except Exception as e:
self._logger.error('Failed Bootstrapping...')
self._logger.error(e)
|
miniyus/finance-data-auto-posting
|
fdap/database/database.py
|
<gh_stars>0
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from fdap.config.config import Config
config = Config.DATABASE['mysql']
access_info = 'mysql://{id}:{passwd}@{host}/{db}?charset=utf8'.format(
id=config['id'],
passwd=config['password'],
host=config['host'],
db=config['db']
)
engine = create_engine(access_info, convert_unicode=False)
db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
Base.metadata.create_all(engine)
|
miniyus/finance-data-auto-posting
|
fdap/app/tistory/__init__.py
|
<filename>fdap/app/tistory/__init__.py
# Resolve dependency injection
# use: from fdap.app.tistory import TistoryClient
def service():
from fdap.app.tistory.tistory_client import TistoryClient, LoginInfo
from fdap.config.config import Config
config = Config.TISTORY
api_config = config['api']
kakao_config = config['kakao']
client = TistoryClient(api_config['url'], config)
login_info = LoginInfo(
client_id=api_config['client_id'],
client_secret=api_config['client_secret'],
redirect_uri=api_config['redirect_uri'],
response_type=api_config['response_type'],
login_id=kakao_config['id'],
        login_password=kakao_config['password'],
state=api_config['state']
)
client.login(login_info)
return client
|
miniyus/finance-data-auto-posting
|
fdap.py
|
<gh_stars>0
# execute fdap
from fdap.application import Application
def run(app: Application):
from fdap.database.database import init_db
from fdap.app.autopost.autopost import AutoPost
from fdap.app.tistory.tistory_client import TistoryClient
init_db()
tistory_login_info = app.get('tistory_login_info')
tistory_client = app.get('tistory_client')
if isinstance(tistory_client, TistoryClient):
tistory_client.login(tistory_login_info)
# print(tistory_client)
service = app.get('auto_post')
if isinstance(service, AutoPost):
service.auto()
# print(service)
if __name__ == "__main__":
import fdap
fdap.app.bootstrap(run)
|
miniyus/finance-data-auto-posting
|
fdap/app/kiwoom/__init__.py
|
<filename>fdap/app/kiwoom/__init__.py
# Resolve dependency injection
# use: from fdap.app.kiwoom import KiwoomService
def service():
from fdap.app.kiwoom.kiwoom_service import KiwoomService
from fdap.config.config import Config
return KiwoomService(Config.KOAPY['account']['id'], Config.KOAPY['account']['password'])
|
miniyus/finance-data-auto-posting
|
fdap/prototype/auto_post.py
|
from fdap.prototype.handler import Handler
from fdap.app.autopost import instance
class AutoPost(Handler):
TAG: str = 'auto-post'
@staticmethod
def make_module():
return instance()
def handle(self):
module = self.make_module()
# result = module.run(
# Parameters(
# sector_code='20',
# sector_name='통신업',
# year='2021',
# quarter=3
# )
# )
result = module.auto()
# kiwoom api 연결 종료를 위해...
module.close()
return result
|
miniyus/finance-data-auto-posting
|
fdap/app/autopost/autopost.py
|
<gh_stars>0
from fdap.contracts.service import Service
from fdap.app.opendart.report_code import ReportCode
from fdap.app.kiwoom.kiwoom_service import KiwoomService
from fdap.app.opendart.opendart_service import OpenDartService
from fdap.app.tistory.tistory_client import TistoryClient
from fdap.app.kiwoom.basic_info import BasicInfo
from fdap.app.refine.refine import Refine
from fdap.app.infographic.table import Table
from fdap.app.infographic.chart import Chart
from fdap.definitions import RESOURCE_PATH
from fdap.app.autopost.template import *
from fdap.app.repositories.post_repository import PostsRepository
from fdap.app.tistory.tistory_data import PostData
from fdap.app.tistory.tistory_data import PostDto
from fdap.app.autopost.parameters import Parameters
from fdap.app.autopost.rotation import RotationSector, StockCondition
from fdap.utils.util import get_quarter
from typing import Dict, Union, List
from datetime import datetime
import os
class AutoPost(Service):
_kiwoom: KiwoomService
_opendart: OpenDartService
_tistory: TistoryClient
_refine: Refine
_repo: PostsRepository
def __init__(
self,
kiwoom: KiwoomService,
opendart: OpenDartService,
tistory: TistoryClient,
refine: Refine,
repo: PostsRepository
):
super().__init__()
self._kiwoom = kiwoom
self._opendart = opendart
self._tistory = tistory
self._refine = refine
self._repo = repo
def __del__(self):
self.close()
def close(self):
self._kiwoom.disconnect()
def _make_data(self, parameters: Parameters):
sector = parameters.sector_code
year = parameters.year
q = ReportCode.get_by_index(parameters.quarter)
condition = parameters.stock_condition
self._logger.debug('make_data')
stock_list = self._kiwoom.get_stock_list_by_sector(sector)
self._logger.debug('stock_list: ' + str(len(stock_list)))
corp_codes = []
for basic_info in stock_list:
if isinstance(basic_info, BasicInfo):
corp_code = self._opendart.get_corp_code_by_stock_code(basic_info.code)
if corp_code is not None:
corp_codes.append(corp_code.corp_code)
collect = self._opendart.get_multi(corp_codes, year, q)
self._logger.debug(f"finance_data: {str(len(collect))}")
refine_collection = self._refine.refine_multiple(stock_list, collect)
self._logger.debug(f"refine_data: {str(refine_collection.count())}")
table = Table(refine_collection)
df = table.make_dataframe(condition.value)
if df is None:
self._logger.debug('make_data is None')
return None
table_file_path = os.path.join(RESOURCE_PATH, f'{sector}_{year}_{q.value}_table.png')
chart_dict = {}
if table.save_img(table_file_path):
chart = Chart(df)
y_label = chart.get_ko_col_names()
# y_label.pop('rank')
y_label.pop('stock_name')
y_label.pop('stock_code')
for key, ko in y_label.items():
chart_file_path = os.path.join(RESOURCE_PATH, f'{sector}_{year}_{q.value}_{key}_chart.png')
if chart.save_img(chart_file_path, '종목명', ko):
chart_dict[ko] = chart_file_path
return {
'table': table_file_path,
'chart': chart_dict
}
def _upload_file(self, file_path: str):
with open(file_path, 'rb') as f:
filename = os.path.basename(file_path)
contents = f.read()
res = self._tistory.apis().post().attach(filename, contents)
return res['tistory']['url']
def _upload_images(self, upload_files: Dict[str, Union[str, Union[str, dict]]]) -> dict:
self._logger.debug('ready for post')
img_url = {
'table': None,
'chart': {}
}
if 'table' in upload_files:
table = upload_files['table']
img_url['table'] = self._upload_file(table)
if 'chart' in upload_files:
chart_dict = upload_files['chart']
for ko, chart in chart_dict.items():
img_url['chart'][ko] = self._upload_file(chart)
return img_url
def run(self, parameters: Parameters) -> Union[int, None]:
self._logger.debug(f"Parameters: {parameters.to_json()}")
sector_name = parameters.sector_name
sector = parameters.sector_code
year = parameters.year
report_code = ReportCode.get_by_index(parameters.quarter)
stock_condition = parameters.stock_condition
data = self._make_data(parameters)
urls = self._upload_images(data)
subject = make_subject(sector_name, year, str(ReportCode.get_index(report_code.value)))
table = urls['table']
if stock_condition == StockCondition.UP:
condition_txt = '상위'
else:
condition_txt = '하위'
contents = Template(
title=f'{sector_name} {condition_txt} 10개 종목',
image={'name': f'{sector_name} {condition_txt} 10개 종목', 'src': table}
).make()
for ko, chart in urls['chart'].items():
contents += Template(
title=f'{sector_name}: {ko}',
image={'name': f'{sector_name}: {ko}', 'src': chart},
description=f'{ko}: is test'
).make()
post = PostData(
title=subject,
content=contents,
visibility=0
)
post_api = self._tistory.apis().post()
res = post_api.write(post)
post_dto = PostDto(title=subject, content=contents)
post_dto.sector = sector
post_dto.report_code = report_code.value
post_dto.year = year
if post_api.is_success():
post_dto.url = res['tistory']['url']
post_dto.is_success = True
else:
post_dto.url = None
post_dto.is_success = False
return self._repo.create(post_dto)
@staticmethod
def _make_parameters(rotator: RotationSector) -> Parameters:
date = datetime.now()
quarter = get_quarter(date) - 1
report_code = ReportCode.get_by_index(quarter)
sector = rotator.get_sector(str(date.year), report_code.value)
return Parameters(
sector_code=sector['code'],
sector_name=sector['name'],
year=str(date.year),
quarter=quarter,
stock_condition=sector['condition']
)
def auto(self, sector_list: List[dict] = None, rules: dict = None) -> Union[int, None]:
if sector_list is None or len(sector_list) == 0:
sector_list = self._kiwoom.get_sector_list()
rotator = RotationSector(self._repo, sector_list, rules)
parameter = self._make_parameters(rotator)
return self.run(parameter)
|
miniyus/finance-data-auto-posting
|
fdap_manage/forms.py
|
from django import forms
from abc import abstractmethod
class ConfigForm(forms.Form):
@abstractmethod
def to_dict(self) -> dict:
pass
class KoapyForm(ConfigForm):
kw_id = forms.CharField(label='koapy id')
kw_pass = forms.CharField(label='koapy pass')
def to_dict(self) -> dict:
return {
'account': {
'id': self.data['kw_id'],
'password': self.data['kw_pass']
}
}
class DataBaseForm(ConfigForm):
db_name = forms.CharField(label='db name')
db_id = forms.CharField(label='db id')
db_pass = forms.CharField(label='db pass')
dbms = 'mysql'
host = 'localhost'
def to_dict(self) -> dict:
return {
self.dbms: {
'id': self.data['db_id'],
                'password': self.data['db_pass'],
'host': self.host,
'db': self.data['db_name']
}
}
class OpenDartForm(ConfigForm):
op_key = forms.CharField(label='open dart api key')
op_url = forms.CharField(label='open dart url')
def to_dict(self) -> dict:
return {
'api': {
'url': self.data['op_url'],
'api_key': self.data['op_key']
}
}
class TistoryForm(ConfigForm):
kakao_id = forms.CharField()
kakao_pass = forms.CharField()
ts_url = forms.CharField()
client_id = forms.CharField()
client_secret = forms.CharField()
redirect_uri = forms.CharField()
blog_name = forms.CharField()
driver = forms.CharField()
confirm_btn = forms.CharField()
kakao_login_link = forms.CharField()
kakao_email_input = forms.CharField()
kakao_pass_input = forms.CharField()
kakao_login_submit = forms.CharField()
USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36"
def to_dict(self) -> dict:
return {
'kakao': {
'id': self.data['kakao_id'],
                'password': self.data['kakao_pass']
},
'api': {
'url': self.data['ts_url'],
'client_id': self.data['client_id'],
'client_secret': self.data['client_secret'],
'redirect_uri': self.data['redirect_uri'],
'response_type': 'code',
'state': self.data['blog_name'],
'blog_name': self.data['blog_name'],
'user_agent': self.USER_AGENT,
},
'webdriver': {
'driver_name': self.data['driver'],
'confirm_btn': self.data['confirm_btn'],
'kakao_login_link': self.data['kakao_login_link'],
'kakao_email_input': self.data['kakao_email_input'],
                'kakao_pass_input': self.data['kakao_pass_input'],
'kakao_login_submit': self.data['kakao_login_submit']
}
}
def get_form(module: str, data) -> ConfigForm:
config_dict = {
'koapy': KoapyForm,
'opendart': OpenDartForm,
'database': DataBaseForm,
'tistory': TistoryForm
}
return config_dict[module](data)
|
miniyus/finance-data-auto-posting
|
fdap/contracts/core.py
|
<reponame>miniyus/finance-data-auto-posting
from abc import ABC
class Container(ABC):
def run(self, **kwargs):
pass
|
miniyus/finance-data-auto-posting
|
fdap/app/opendart/opendart_service.py
|
<reponame>miniyus/finance-data-auto-posting<filename>fdap/app/opendart/opendart_service.py
import xmltodict
import os
from fdap.app.opendart.opendart_client import OpenDartClient
from fdap.app.opendart.opendart_data import *
from fdap.utils.util import currency_to_int
from fdap.config.config import Config
from fdap.app.opendart.report_code import ReportCode
from fdap.contracts.service import Service
from typing import List, Dict, Union
class OpenDartService(Service):
"""
Open Dart Api
"""
Q1: ReportCode.Q1 = ReportCode.Q1
Q2: ReportCode.Q2 = ReportCode.Q2
Q3: ReportCode.Q3 = ReportCode.Q3
Q4: ReportCode.Q4 = ReportCode.Q4
QUARTERS: dict = ReportCode.__members__
_config: dict
_client: OpenDartClient
def __init__(self, url: str = None, api_key: str = None):
"""
:param url: open dart api access url
:param api_key: open dart api key
"""
super().__init__()
if url is None or api_key is None:
self._config = Config.OPENDART
url = self._config['api']['url']
api_key = self._config['api']['api_key']
self._client = OpenDartClient(host=url, api_key=api_key)
self._logger.info('init: %s', __name__)
def get_corp_codes(self) -> List[CorpCode]:
"""
        Fetch the unique corporation codes of listed companies
:return: List[CorpCode]
"""
filename = 'CORPCODE.xml'
self._logger.debug('get corp codes')
if os.path.isfile(filename):
self._logger.debug('exists CORPCODE.xml')
with open(filename, 'r', encoding='utf-8') as f:
corp_code_xml = f.read()
corp_code_dict = xmltodict.parse(corp_code_xml).get('result').get('list')
else:
self._logger.debug('request open dart get corp-codes...')
corp_code_dict = self._client.get_corp_codes()
if self._client.is_success():
self._logger.debug('success request')
else:
self._logger.warning('fail request: %s', self._client.get_error())
return CorpCode().map_list(corp_code_dict)
def get_corp_code_by_stock_code(self, stock_code: str) -> Union[CorpCode, None]:
for corp_code in self.get_corp_codes():
if stock_code == corp_code.stock_code:
return corp_code
return None
def get_single(self, corp_code: str, year: str, report_code: ReportCode) -> Union[Dict[str, AcntCollection], None]:
self._logger.debug('request get single corp accounts')
self._logger.debug(f"corp_code: {corp_code}, year: {year}, report_code: {report_code.value}")
single_acnt = self._client.get_single(corp_code, year, report_code.value)
if self._client.is_success():
self._logger.debug('success request')
self._logger.debug('corp_code: ' + corp_code)
else:
self._logger.warning('fail request: %s', self._client.get_error())
single = None
if isinstance(single_acnt, dict):
if 'list' in single_acnt:
single = self._div_by_stock(Acnt().map_list(single_acnt['list']))
return single
def get_recent(self, corp_code: str, year: str, report_code: ReportCode) -> Union[Dict[str, AcntCollection], None]:
single = self.get_single(corp_code, year, report_code)
if single is None:
max_count = 8
count = 0
while count < max_count:
re_rc = ReportCode.sub(report_code.value, 1)
if ReportCode.Q4 == re_rc:
re_year = int(year) - 1
else:
re_year = year
single = self.get_single(corp_code, str(re_year), re_rc)
if single is not None:
return single
count += 1
return single
def get_multi(self, corp_codes: list, year: str, report_code: ReportCode) -> Dict[str, AcntCollection]:
self._logger.debug('request get multiple corp accounts')
multi = {}
for corp_code in corp_codes:
single = self.get_single(corp_code, year, report_code)
if single is not None:
multi.update(single)
return multi
def get_deficit_count(self, corp_code: str, year: str, count: int = 3):
deficit_count = 0
for i in range(count):
for q in self.QUARTERS.values():
acnt = self.get_single(corp_code, str(int(year) - i), q)
if acnt is not None:
for acnt_collect in acnt.values():
account = acnt_collect.get_by_account_nm('당기순')
if account is not None:
if account.thstrm_amount is not None:
if currency_to_int(account.thstrm_amount) < 0:
deficit_count += 1
return deficit_count
@staticmethod
def _div_by_stock(multi_acnt: List[Acnt]) -> Dict[str, AcntCollection]:
rs_dict = {}
for acnt in multi_acnt:
if acnt.stock_code:
if acnt.stock_code not in rs_dict:
rs_dict[acnt.stock_code] = AcntCollection()
rs_dict[acnt.stock_code].push(acnt)
return rs_dict
|
miniyus/finance-data-auto-posting
|
fdap/app/opendart/finance_data.py
|
import dataclasses
from fdap.app.opendart.opendart_service import OpenDartService
from fdap.utils.data import BaseData
from typing import Dict, Union
from fdap.app.opendart.opendart_data import AcntCollection
from fdap.utils.customlogger import CustomLogger
from fdap.utils.util import currency_to_int
@dataclasses.dataclass
class FinanceData(BaseData):
date: str = None
reprt_code: str = None
current_assets: int = 0
total_assets: int = 0
floating_debt: int = 0
total_debt: int = 0
total_capital: int = 0
net_income: int = 0
deficit_count: int = 0
flow_rate: float = 0.0
debt_rate: float = 0.0
pbr: float = 0.0
per: float = 0.0
roe: float = 0.0
@staticmethod
def get_map_table() -> Dict[str, Dict[str, str]]:
return {
'account_nm': {
'current_assets': '유동자산',
'total_assets': '자산총계',
'floating_debt': '유동부채',
'total_debt': '부채총계',
'total_capital': '자본총계',
'net_income': '당기순이익'
}
}
def map(self, acnt: AcntCollection) -> __name__:
"""
:param acnt: List[Acnt]
:return FinanceData:
"""
for key, name in self.get_map_table()['account_nm'].items():
account = acnt.get_by_account_nm(account_nm=name, fs_div='CFS')
if account is None:
account = acnt.get_by_account_nm(account_nm=name, fs_div='OFS')
if account is not None:
self.date = account.thstrm_dt
self.reprt_code = account.reprt_code
self.__setattr__(key, currency_to_int(account.thstrm_amount))
if '당기순' in account.account_nm.replace(' ', ''):
od_service = OpenDartService()
corp_code = od_service.get_corp_code_by_stock_code(account.stock_code)
self.deficit_count = od_service.get_deficit_count(corp_code.corp_code, account.bsns_year)
return self
@staticmethod
def __logger():
return CustomLogger.logger('automatic-posting', __name__)
def calculate_flow_rate(self):
"""
        Current ratio: (current assets / current liabilities) * 100
Returns:
FinanceData:
"""
try:
self.flow_rate = round(self.current_assets / self.floating_debt * 100, 2)
except ZeroDivisionError:
logger = self.__logger()
logger.debug('flow_rate:{} / {} * 100'.format(self.current_assets, self.floating_debt))
self.flow_rate = 0.0
return self
def calculate_debt_rate(self):
"""
        Debt ratio: (total debt / total assets) * 100
Returns:
FinanceData:
"""
try:
self.debt_rate = round(self.total_debt / self.total_assets * 100, 2)
except ZeroDivisionError:
logger = self.__logger()
logger.debug('debt_rate:{} / {} * 100'.format(self.total_debt, self.total_assets))
self.debt_rate = 0.0
return self
def get_eps(self, issue_cnt: int) -> Union[int, float]:
"""
        EPS: net income / number of issued shares
        Args:
            issue_cnt: number of issued shares
Returns:
Union[int, float]:
"""
try:
return round(self.net_income / issue_cnt, 2)
except ZeroDivisionError:
logger = self.__logger()
logger.debug('eps: {} / {}'.format(self.net_income, issue_cnt))
return 0
def calculate_per(self, current_price: int, issue_cnt: int):
"""
        PER: share price / EPS (net income / number of issued shares)
        Args:
            current_price: current share price
            issue_cnt: number of issued shares
Returns:
"""
try:
self.per = round(current_price / self.get_eps(issue_cnt), 2)
except ZeroDivisionError:
logger = self.__logger()
logger.debug('per:{} / ({} / {})'.format(current_price, self.net_income, issue_cnt))
self.per = 0.0
return self
def get_bps(self, issue_cnt) -> Union[int, float]:
"""
        BPS: (total assets - total debt) / number of issued shares
Args:
issue_cnt:
Returns:
Union[int, float]
"""
        try:
            # the formula above uses total assets, not current assets
            return round((self.total_assets - self.total_debt) / issue_cnt, 2)
        except ZeroDivisionError:
            logger = self.__logger()
            logger.debug('bps:({} - {}) / {})'.format(self.total_assets, self.total_debt, issue_cnt))
            return 0
def calculate_pbr(self, current_price: int, issue_cnt: int):
"""
        PBR: share price / BPS ((total assets - total debt) / number of issued shares)
Args:
current_price:
issue_cnt:
Returns:
"""
try:
self.pbr = round(current_price / self.get_bps(issue_cnt), 2)
except ZeroDivisionError:
logger = self.__logger()
logger.debug(
                'pbr:{} / (({} - {}) / {})'.format(current_price, self.total_assets, self.total_debt, issue_cnt))
self.pbr = 0.0
return self
def calculate_roe(self, current_price: int, issue_cnt: int):
"""
ROE: PBR / PER
Returns:
"""
try:
if self.pbr and self.per:
self.roe = round(self.pbr / self.per, 2)
else:
self.calculate_per(current_price, issue_cnt)
self.calculate_pbr(current_price, issue_cnt)
self.roe = round(self.pbr / self.per, 2)
except ZeroDivisionError:
logger = self.__logger()
logger.debug(
'roe:({} / {}) * 100'.format(self.net_income, self.total_capital))
self.roe = 0.0
return self
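# Illustrative example (hypothetical numbers, not from the original module):
# with net_income=2000, total_assets=30000, total_debt=10000, 10000 issued
# shares and a share price of 5:
#   EPS = 2000 / 10000 = 0.2             -> PER = 5 / 0.2 = 25.0
#   BPS = (30000 - 10000) / 10000 = 2.0  -> PBR = 5 / 2.0 = 2.5
#   ROE = PBR / PER = 2.5 / 25.0 = 0.1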
|
miniyus/finance-data-auto-posting
|
fdap/app/opendart/report_code.py
|
<reponame>miniyus/finance-data-auto-posting
from enum import Enum, unique
@unique
class ReportCode(Enum):
Q1 = '11013'
Q2 = '11012'
Q3 = '11014'
Q4 = '11011'
@classmethod
def get_by_str(cls, q: str) -> __name__:
for k, v in cls.__members__.items():
if q == v.value:
return v
@classmethod
def get_by_index(cls, i: int) -> __name__:
for k, v in enumerate(cls.__members__.values()):
if k + 1 == i:
return v
@classmethod
def get_index(cls, q: str) -> int:
for idx, v in enumerate(cls.__members__.values()):
if v.value == q:
return idx + 1
@classmethod
def sub(cls, quarter: str, sub_num: int) -> __name__:
has_attr = False
for q in cls.__members__.values():
if quarter == q.value:
has_attr = True
if has_attr is False:
raise AttributeError(quarter + ' has not attribute')
if sub_num >= 4:
remainder = sub_num % 4
return cls.sub(quarter, remainder)
member_list = list(cls.__members__)
rs = 0
for idx, member in enumerate(member_list):
if cls.__members__[member].value == quarter:
if idx == 0:
if sub_num != 0:
tmp = sub_num - 1
rs = (len(cls.__members__) - 1) - tmp
else:
rs = idx
else:
rs = idx - sub_num
return cls.__members__[member_list[rs]]
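# Illustrative examples of the quarter arithmetic above:
#   ReportCode.sub(ReportCode.Q3.value, 1) -> ReportCode.Q2
#   ReportCode.sub(ReportCode.Q1.value, 1) -> ReportCode.Q4 (the previous year's fourth quarter)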
|
miniyus/finance-data-auto-posting
|
fdap/contracts/blog_client.py
|
from abc import ABC, abstractmethod
from dataclasses import dataclass
from fdap.app.client.client import Client
from typing import Dict
@dataclass(frozen=False)
class BlogPostData(ABC):
title: str
content: str
@dataclass(frozen=True)
class BlogLoginInfo(ABC):
login_id: str
login_password: str
class BlogResource(ABC, Client):
access_token: str = None
_resource: str
_config: Dict[str, any]
def __init__(self, host: str, config: Dict[str, any]):
super().__init__(host)
self._config = config
def set_access_token(self, access_token: str):
self.access_token = access_token
class BlogPost(BlogResource):
@abstractmethod
def list(self):
pass
@abstractmethod
def write(self, data: BlogPostData):
pass
@abstractmethod
def modify(self, data: BlogPostData):
pass
@abstractmethod
def attach(self, filename: str, content: str):
pass
class BlogEndPoint(ABC, Client):
_end_point: str
_resources: Dict[type, BlogResource] = {}
_classes: Dict[str, type] = {}
_config: Dict[str, any] = {}
access_token: str = None
def __init__(self, host: str, config: dict):
super().__init__(host)
self._config = config
def set_resource(self, name: type, res: BlogResource):
self._resources[name] = res
return self
def get_resource(self, name: type):
return self._resources[name]
def set_access_token(self, access_token: str):
self.access_token = access_token
for cls in self._classes.values():
resource = self.get_resource(cls)
if isinstance(resource, BlogResource):
resource.set_access_token(self.access_token)
class BlogClient(ABC, Client):
_end_points: Dict[type, BlogEndPoint] = {}
_classes: Dict[str, type] = {}
_config: Dict[str, any] = {}
access_token: str
def __init__(self, host: str, config: dict):
super().__init__(host)
self._config = config
@abstractmethod
def login(self, obj: object):
pass
def set_end_point(self, name: type, end_point: BlogEndPoint):
self._end_points[name] = end_point
return self
def get_end_point(self, name: type = None):
if name is None:
return self._end_points
if name in self._end_points:
return self._end_points[name]
return None
def set_access_token(self, access_token: str):
self.access_token = access_token
for cls in self._classes.values():
end_point = self.get_end_point(cls)
if isinstance(end_point, BlogEndPoint):
end_point.set_access_token(self.access_token)
class BlogLogin(BlogEndPoint):
@abstractmethod
def login(self, login_info: BlogLoginInfo):
pass
|
miniyus/finance-data-auto-posting
|
fdap/app/autopost/parameters.py
|
<reponame>miniyus/finance-data-auto-posting
from fdap.app.autopost.rotation import StockCondition
from fdap.utils.data import BaseData
from dataclasses import dataclass
@dataclass
class Parameters(BaseData):
sector_name: str
sector_code: str
year: str
quarter: int
stock_condition: StockCondition
|
miniyus/finance-data-auto-posting
|
fdap/config/config.py
|
<filename>fdap/config/config.py
from fdap.utils.util import config_json
class Config:
DATABASE: dict = config_json('database')
KOAPY: dict = config_json('koapy')
LOGGER: dict = config_json('logger')
OPENDART: dict = config_json('opendart')
TISTORY: dict = config_json('tistory')
@classmethod
def all(cls) -> dict:
return {
'database': cls.DATABASE,
'koapy': cls.KOAPY,
'logger': cls.LOGGER,
'opendart': cls.OPENDART,
'tistory': cls.TISTORY
}
@classmethod
def get(cls, name: str) -> dict:
return cls.all()[name]
|
miniyus/finance-data-auto-posting
|
fdap/app/tistory/webdriver.py
|
from fdap.contracts.blog_client import BlogLoginInfo
from fdap.contracts.webdriver import WebDriverHandler as Handler
from fdap.utils.loggeradapter import LoggerAdapter
from fdap.utils.util import get_query_str_dict
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.remote.webdriver import WebDriver
from dataclasses import dataclass
import time
import selenium.common.exceptions
@dataclass()
class CustomElement:
by: str
value: str
action: str
func: str = 'find_element'
class WebDriverController:
_history: list
_web_driver: WebDriver
def __init__(self, driver: WebDriver):
        self._web_driver = driver
        # Initialize the action history so add()/add_element() can append to it.
        self._history = []
def add_element(self, element: CustomElement):
class_method = getattr(WebDriver, element.func)
obj = class_method(self._web_driver, element.by, element.value)
if isinstance(obj, WebElement):
            # element.action names a bound method on the element (e.g. 'click'), so call it without arguments.
            obj_method = getattr(obj, element.action)
            self._history.append(obj_method())
return self
def add(self, func_name, **kwargs):
class_method = getattr(WebDriver, func_name)
self._history.append(class_method(self._web_driver, **kwargs))
return self
def get_history(self):
return self._history
class CustomHandler(Handler):
TYPES = [
'element',
'function'
]
FUNCS = [
'find_element',
'get'
]
_dynamic: list = [
{
'type': 'element',
'name': 'find_element',
'by': 'css selector',
'value': '.confirm',
'action': 'click'
},
{
'type': 'function',
'name': 'get',
'args': {
'url': 'url'
}
}
]
def __init__(
self,
web_driver: WebDriver,
logger: LoggerAdapter,
config_list: list = None,
):
super().__init__(web_driver, logger)
        # Keep the class-level default configuration when no explicit list is provided.
        if config_list is not None:
            self._dynamic = config_list
def handle(self):
controller = WebDriverController(self._web_driver)
for item in self._dynamic:
if item['name'] not in self.FUNCS:
self._logger.warning(f"{item['name']} is not support function")
continue
if item['type'] not in self.TYPES:
self._logger.warning(f"{item['type']} is not support type")
continue
elif item['type'] == 'element':
controller.add_element(
CustomElement(
by=item['by'],
value=item['value'],
action=item['action'],
func=item['name']
)
)
else:
controller.add(func_name=item['name'], **item['args'])
return controller.get_history()
class WebDriverHandler(Handler):
_web_driver: WebDriver
_logger: LoggerAdapter
_config: dict
_login_data: BlogLoginInfo
def handle(self):
try:
element = self._web_driver.find_element(By.CSS_SELECTOR, self._config['confirm_btn'])
element.click()
except selenium.common.exceptions.NoSuchElementException as e:
self._logger.warning('No Such Element 1: confirm_btn')
self._logger.warning(e.msg)
try:
self._web_driver.find_element(By.CSS_SELECTOR, self._config['kakao_login_link']).click()
self._logger.info('redirect kakao login: ' + self._web_driver.current_url)
except selenium.common.exceptions.NoSuchElementException as e:
self._logger.warning('fail redirect kakao login: ' + self._web_driver.current_url)
self._logger.warning(e.msg)
try:
self._web_driver.get(self._web_driver.current_url)
self._logger.info('request: ' + self._web_driver.current_url)
except selenium.common.exceptions.NoSuchElementException as e:
self._logger.warning(e.stacktrace)
self._logger.info('sleep 3s')
time.sleep(3)
try:
self._web_driver.find_element(By.CSS_SELECTOR, self._config['kakao_email_input']) \
.send_keys(self._login_data.login_id)
self._logger.info('input email')
time.sleep(1)
self._web_driver.find_element(By.CSS_SELECTOR, self._config['kakao_pass_input']) \
.send_keys(self._login_data.login_password)
self._logger.info('input password')
self._web_driver.find_element(By.CSS_SELECTOR, self._config['kakao_login_submit']).click()
self._logger.info('submit login form')
self._logger.info('sleep 3s')
time.sleep(3)
except selenium.common.exceptions.NoSuchElementException as e:
self._logger.warning(e.msg)
try:
self._web_driver.find_element(By.CSS_SELECTOR, self._config['confirm_btn']).click()
self._logger.info('success login: ' + self._web_driver.current_url)
except selenium.common.exceptions.NoSuchElementException as e:
self._logger.warning('fail login: ' + self._web_driver.current_url)
url = self._web_driver.current_url
self._logger.info('close webdriver')
return get_query_str_dict(url)
|
miniyus/finance-data-auto-posting
|
fdap/core/containter.py
|
from dependency_injector import containers, providers
from fdap.app.kiwoom.kiwoom_service import KiwoomService
from fdap.app.opendart.opendart_service import OpenDartService
from fdap.app.refine.refine import Refine
from fdap.database.database import db_session
from fdap.app.repositories.post_repository import PostsRepository
from fdap.app.tistory.tistory_client import TistoryClient, LoginInfo
from fdap.app.autopost.autopost import AutoPost
class Container(containers.DeclarativeContainer):
config = providers.Configuration()
# database
database = providers.Singleton(db_session)
# Services
kiwoom_service = providers.Singleton(
KiwoomService,
_id=config.koapy.account.id,
password=<PASSWORD>
)
opendart_service = providers.Singleton(
OpenDartService,
url=config.opendart.api.url,
api_key=config.opendart.api.api_key
)
refine = providers.Singleton(
Refine
)
post_repository = providers.Singleton(
PostsRepository,
session=database
)
tistory_login_info = providers.Singleton(
LoginInfo,
client_id=config.tistory.api.client_id,
client_secret=config.tistory.api.client_secret,
redirect_uri=config.tistory.api.redirect_uri,
response_type=config.tistory.api.response_type,
login_id=config.tistory.kakao.id,
login_password=<PASSWORD>,
state=config.tistory.api.state
)
tistory_client = providers.Singleton(
TistoryClient,
host=config.tistory.api.url,
config=config.tistory
)
auto_post = providers.Singleton(
AutoPost,
kiwoom=kiwoom_service,
opendart=opendart_service,
refine=refine,
tistory=tistory_client,
repo=post_repository
)
|
miniyus/finance-data-auto-posting
|
fdap/app/tistory/tistory_client.py
|
<filename>fdap/app/tistory/tistory_client.py
import requests
from typing import Union
from fdap.utils.util import make_url
from selenium import webdriver
from fdap.definitions import ROOT_DIR
from fdap.utils.customlogger import CustomLogger
from fdap.app.tistory.tistory_data import *
from fdap.app.tistory.webdriver import WebDriverHandler
from fdap.contracts.blog_client import *
class TistoryLogin(BlogLogin):
_end_point = '/oauth'
def __init__(self, host, config: dict):
super().__init__(host=host, config=config['webdriver'])
self._logger = CustomLogger.logger('automatic-posting', __name__)
def login(self, login_info: LoginInfo) -> Union[None, dict]:
res = self._authorize(login_info)
if 'code' in res:
self._logger.info('code: ' + res['code'])
req = AccessTokenRequest(
client_id=login_info.client_id,
client_secret=login_info.client_secret,
redirect_uri=login_info.redirect_uri,
code=res['code']
)
token = self._access_token(req)
else:
self._logger.warning('fail issue token')
return None
[name, token] = token.split('=')
self._logger.debug(name + ': ' + token)
self.access_token = token
return {name: self.access_token}
def _access_token(self, req: AccessTokenRequest):
self._logger.info('request access_token')
method = self._end_point + '/access_token'
url = make_url(self.get_host(), method, {
'client_id': req.client_id,
'client_secret': req.client_secret,
'redirect_uri': req.redirect_uri,
'code': req.code,
'grant_type': req.grant_type
})
self._logger.debug(url)
return self._set_response(requests.get(url, verify=False))
def _authorize(self, login_info: LoginInfo):
options = webdriver.ChromeOptions()
# options.add_argument('--headless')
web_driver = webdriver.Chrome(executable_path=ROOT_DIR + '\\' + self._config['driver_name'],
chrome_options=options)
handler = WebDriverHandler(
web_driver,
self._logger,
self._config,
login_info
)
url = self.get_host()
method = self._end_point + '/authorize'
url = make_url(url, method, {
'client_id': login_info.client_id,
'redirect_uri': login_info.redirect_uri,
'response_type': login_info.response_type,
'state': login_info.state
})
return handler.run(url)
class Post(BlogPost):
blog_name: str
_resource: str = '/post'
_user_agent: Dict[str, str] = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36'
}
def __init__(self, host: str, config: Dict[str, any]):
super().__init__(host=host, config=config['api'])
self.blog_name = self._config['blog_name']
if self._config['user_agent'] is not None:
self._user_agent = {'User-Agent': self._config['user_agent']}
self._logger = CustomLogger.logger('automatic-posting', __name__)
def list(self, page: int = 1):
method = self._resource + '/list'
url = make_url(self.get_host(), method, {
'access_token': self.access_token,
'blogName': self.blog_name,
'output': 'json',
'page': page
})
self._logger.debug(url)
return self._set_response(requests.get(url, verify=False))
def read(self, post_id: int):
method = self._resource + '/read'
url = make_url(self.get_host(), method, {
'access_token': self.access_token,
'blogName': self.blog_name,
'postId': post_id
})
return self._set_response(requests.get(url, verify=False))
def write(self, post: BlogPostData):
method = self._resource + '/write'
post_data = post.__dict__
post_data.update({
'access_token': self.access_token,
'blogName': self.blog_name,
'output': 'json'
})
url = make_url(self.get_host(), method)
return self._set_response(requests.post(url, data=post_data, headers=self._user_agent, verify=False))
def modify(self, obj: BlogPostData):
pass
def attach(self, filename: str, contents: bytes):
method = self._resource + '/attach'
files = {'uploadedfile': contents}
url = make_url(self.get_host(), method, {
'access_token': self.access_token,
'blogName': self.blog_name,
'output': 'json'
})
return self._set_response(requests.post(url, files=files, headers=self._user_agent, verify=False))
class Apis(BlogEndPoint):
blog_name: str
_end_point = '/apis'
_classes = {
'post': Post
}
def __init__(self, host, config: Dict[str, any]):
super().__init__(host, config)
for name, cls in self._classes.items():
res = cls(host + self._end_point, config)
self.set_resource(res.__class__, res)
def set_post(self, post_api: Post):
self.set_resource(post_api.__class__, post_api)
return self
def post(self) -> Post:
return self.get_resource(self._classes['post'])
class TistoryClient(BlogClient):
_config: dict
    # Dependency bindings: only classes registered here can be retrieved.
    # Override this property in a subclass to swap in other implementations
    # (polymorphism); see the illustrative sketch after this class.
_classes: Dict[str, type] = {
'login': TistoryLogin,
'apis': Apis
}
access_token: str = None
def __init__(self, host: str, config: Dict[str, any]):
super().__init__(host=host, config=config)
self._config = config
self._logger = CustomLogger.logger('automatic-posting', __name__)
for name, cls in self._classes.items():
end_point = cls(host, config)
self.set_end_point(end_point.__class__, end_point)
def set_login(self, login: TistoryLogin):
return self.set_end_point(login.__class__, login)
def login(self, login_info: LoginInfo) -> Union[Dict[str, str], None]:
login = self.get_end_point(self._classes['login'])
if isinstance(login, BlogLogin):
login.login(login_info)
self.set_access_token(login.access_token)
return {'access_token': self.access_token}
return None
def set_apis(self, apis: Apis):
return self.set_end_point(apis.__class__, apis)
def apis(self) -> Apis:
return self.get_end_point(self._classes['apis'])
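# Illustrative sketch, not part of the original source: because _classes acts as a binding
# table, a subclass can swap in alternative endpoint implementations while the rest of the
# client keeps working unchanged. CustomApis and CustomTistoryClient are hypothetical names.
class CustomApis(Apis):
    """Hypothetical Apis variant; it could, for example, register a different Post resource."""
class CustomTistoryClient(TistoryClient):
    # Override the bindings so get_end_point()/apis() resolve to the custom endpoint class.
    _classes: Dict[str, type] = {
        'login': TistoryLogin,
        'apis': CustomApis
    }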
|
miniyus/finance-data-auto-posting
|
fdap/app/refine/__init__.py
|
# Resolve dependency injection
# use: from fdap.app.refine import Refine
def service():
from fdap.app.refine.refine import Refine
return Refine()
|
miniyus/finance-data-auto-posting
|
fdap/app/autopost/__init__.py
|
# Resolve dependency injection
# use: from fdap.app.autopost import AutoPost
def service():
from fdap.app.autopost.autopost import AutoPost
from fdap.app.kiwoom import service as kiwoom
from fdap.app.opendart import service as opendart
from fdap.app.refine import service as refine
from fdap.app.tistory import service as tistory
from fdap.app.repositories.post_repository import PostsRepository
return AutoPost(
kiwoom=kiwoom(),
opendart=opendart(),
refine=refine(),
tistory=tistory(),
repo=PostsRepository()
)
|
miniyus/finance-data-auto-posting
|
fdap_manage/models.py
|
<gh_stars>0
from django.db import models
# Create your models here.
class Posts(models.Model):
post_id = models.AutoField(primary_key=True)
post_subject = models.CharField(max_length=255, null=False)
post_contents = models.TextField(null=False)
post_category = models.CharField(max_length=255, null=True)
post_tags = models.CharField(max_length=255, null=True)
post_sector = models.CharField(max_length=255, null=False)
post_year = models.CharField(max_length=255, null=False)
report_code = models.CharField(max_length=10, null=False)
stock_condition = models.IntegerField(default=1, null=True)
is_success = models.BooleanField(default=False, null=False)
post_url = models.CharField(max_length=255, null=True)
created_at = models.DateTimeField(null=True)
|
miniyus/finance-data-auto-posting
|
fdap/app/client/client.py
|
<reponame>miniyus/finance-data-auto-posting
import requests
import json
from typing import Union
from fdap.contracts.logging import Logging
class Client(Logging):
"""
requests wrapper
"""
def __init__(self, host: str = None):
"""
Args:
            host(str): Host information (URL) of the web server to request, e.g. https://www.example.com
"""
super().__init__()
self._host = host
self._response = None
self._error = {}
def is_success(self) -> bool:
"""
        Check whether the response succeeded.
        Returns:
            bool: True if the response status code is 200 (OK), otherwise False
"""
if self._response is None:
return False
elif isinstance(self._response, requests.Response):
if self._response.status_code == requests.codes.ok:
return True
return False
def _set_response(self, response: requests.Response) -> Union[str, dict]:
"""
        Register the response object and check it for errors.
        Args:
            response(requests.Response): Response received via requests; it is stored on this wrapper, checked for errors, and converted to JSON or text
Returns:
Union[str, dict]
"""
self._response = response
if self.is_success():
try:
return self._response.json()
except json.decoder.JSONDecodeError:
return self._response.text
else:
self._error['status_code'] = self._response.status_code
self._error['message'] = self._response.text
return self._response.text
def get_error(self) -> dict:
"""
        Return the error registered by the _set_response method.
        Returns:
            dict: 'status_code': response status code, 'message': response message
"""
return self._error
def get_response(self) -> requests.Response:
"""
        Get the raw response object received from the requests package.
        Returns:
            requests.Response: the original response object
"""
return self._response
def get_host(self) -> str:
"""
        Getter for the host.
Returns:
str: ex) https://www.example.com
"""
return self._host
def set_host(self, host: str):
"""
Args:
            host(str): host information to set (setter)
Returns:
Client:
"""
self._host = host
return self
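# Minimal usage sketch, not part of the original module: a concrete client typically issues a
# request, funnels it through _set_response(), then checks is_success()/get_error().
# ExampleClient and the /ping endpoint below are hypothetical.
class ExampleClient(Client):
    def ping(self) -> Union[str, dict]:
        # GET <host>/ping and let the wrapper convert the body to json or text.
        return self._set_response(requests.get(self.get_host() + '/ping'))
# client = ExampleClient('https://www.example.com')
# body = client.ping()
# if not client.is_success():
#     print(client.get_error())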
|
AndyKhang404/noimport
|
noimport.py
|
import sys
#No importing
sys.path = []
sys.meta_path = []
for key in sys.modules.keys():
sys.modules[key] = None
sys.builtin_module_names = []
sys.path_hooks = []
for key in sys.path_importer_cache.keys():
sys.path_importer_cache[key] = None
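# Illustrative note, not part of the original script: once the import machinery above has been
# emptied, later imports fail, e.g.
# import os  # raises ImportError because its sys.modules entry was replaced with None
# any module not already cached also fails because sys.meta_path and sys.path are empty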
|
UBC-MDS/game_sales
|
src/platforms.py
|
import altair as alt
import pandas as pd
import numpy as np
alt.data_transformers.disable_max_rows()
sales = pd.read_csv("data/raw/vgsales.csv")
def plotPlatforms(top_val, daterange):
top_platforms = sales["Platform"].value_counts().head(int(top_val)).index.tolist()
top_platforms_df = sales.query("Platform in @top_platforms").head(4900)
top_platforms_df = top_platforms_df.loc[(top_platforms_df['Year'] > daterange[0]) & (top_platforms_df['Year'] < daterange[1])]
chart = alt.Chart(top_platforms_df, title='Game Copies Sold by Platform').mark_bar().encode(
x=alt.X("count()", title='Copies Sold (M)'),
y=alt.Y("Platform", sort="-x")
)
return chart.to_html()
|
SimonBoothroyd/beflow
|
beflow/services/coordinator/models.py
|
from typing import List, Optional
from openff.bespokefit.schema.fitting import BespokeOptimizationSchema
from pydantic import BaseModel, Field
from beflow.services.coordinator.stages import StageType
from beflow.utilities.typing import Status
class CoordinatorGETStageStatus(BaseModel):
stage_type: str = Field(..., description="")
stage_status: Status = Field(..., description="")
stage_error: Optional[str] = Field(..., description="")
stage_ids: Optional[List[str]] = Field(..., description="")
@classmethod
def from_stage(cls, stage: StageType):
stage_ids = stage.id if hasattr(stage, "id") else stage.ids
if isinstance(stage_ids, str):
stage_ids = [stage_ids]
elif isinstance(stage_ids, dict):
stage_ids = [
stage_id
for dict_values in stage_ids.values()
for stage_id in dict_values
]
return CoordinatorGETStageStatus(
stage_type=stage.type,
stage_status=stage.status,
stage_error=stage.error,
stage_ids=stage_ids,
)
class CoordinatorGETResponse(BaseModel):
optimization_id: str = Field(
..., description="The ID associated with the bespoke optimization."
)
smiles: str = Field(..., description="")
stages: List[CoordinatorGETStageStatus] = Field(..., description="")
@classmethod
def from_task(cls, task: "CoordinatorTask"):
stages = [
CoordinatorGETStageStatus.from_stage(stage)
for stage in task.pending_stages + task.completed_stages
]
if task.running_stage is not None:
stages.insert(
len(task.pending_stages),
CoordinatorGETStageStatus.from_stage(task.running_stage),
)
return CoordinatorGETResponse(
optimization_id=task.id,
smiles=task.input_schema.smiles,
stages=stages,
)
class CoordinatorPOSTBody(BaseModel):
input_schema: BespokeOptimizationSchema = Field(..., description="")
class CoordinatorPOSTResponse(BaseModel):
optimization_id: str = Field(
..., description="The ID associated with the optimization."
)
class CoordinatorTask(BaseModel):
"""An internal model that tracks a task (i.e. a bespoke optimization) that is being
executed by the executor.
"""
id: str = Field(..., description="The unique ID associated with this task.")
input_schema: BespokeOptimizationSchema = Field(..., description="")
pending_stages: List[StageType] = Field(..., description="")
running_stage: Optional[StageType] = Field(None, description="")
completed_stages: List[StageType] = Field([], description="")
@property
def status(self) -> Status:
if any(stage.status == "errored" for stage in self.completed_stages):
return "errored"
if len(self.pending_stages) > 0 or self.running_stage is not None:
return "running"
return "success"
|
SimonBoothroyd/beflow
|
beflow/tests/services/qcgenerator/test_app.py
|
from typing import List
import numpy
import pytest
from celery.result import AsyncResult
from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask
from openff.toolkit.topology import Molecule
from pydantic import parse_raw_as
from qcelemental.models import AtomicResult, AtomicResultProperties, DriverEnum
from qcelemental.models.common_models import Model, Provenance
from beflow.services.qcgenerator import worker
from beflow.services.qcgenerator.app import _retrieve_qc_result
from beflow.services.qcgenerator.models import (
QCGeneratorGETResponse,
QCGeneratorPOSTBody,
QCGeneratorPOSTResponse,
)
from beflow.tests.mocking.celery import mock_celery_task
@pytest.fixture()
def mock_atomic_result() -> AtomicResult:
molecule: Molecule = Molecule.from_smiles("C")
molecule.generate_conformers(n_conformers=1)
return AtomicResult(
molecule=molecule.to_qcschema(),
driver=DriverEnum.hessian,
model=Model(method="rdkit", basis=None),
return_result=5.2,
success=True,
provenance=Provenance(creator="pytest"),
properties=AtomicResultProperties(),
)
@pytest.mark.parametrize(
"task_status, task_result, expected_state",
[
("PENDING", {}, "waiting"),
("STARTED", {}, "running"),
("FAILURE", {"error_message": "error"}, "errored"),
],
)
def test_retrieve_qc_result_pending_running_errored(
redis_connection, monkeypatch, task_status, task_result, expected_state
):
monkeypatch.setattr(
AsyncResult,
"_get_task_meta",
lambda self: {"status": task_status, "result": task_result},
)
redis_connection.hset("qcgenerator:types", "1", "torsion1d")
result = QCGeneratorGETResponse.parse_obj(_retrieve_qc_result("1", True))
assert result.qc_calc_status == expected_state
assert result.qc_calc_result is None
assert result.qc_calc_type == "torsion1d"
assert result.qc_calc_id == "1"
def test_retrieve_qc_result_success(
qcgenerator_client, redis_connection, monkeypatch, mock_atomic_result
):
monkeypatch.setattr(
AsyncResult,
"_get_task_meta",
lambda self: {"status": "SUCCESS", "result": mock_atomic_result.json()},
)
redis_connection.hset("qcgenerator:types", "1", "hessian")
result = QCGeneratorGETResponse.parse_obj(_retrieve_qc_result("1", True))
assert result.qc_calc_status == "success"
assert result.qc_calc_result is not None
assert result.qc_calc_type == "hessian"
assert result.qc_calc_id == "1"
assert result.qc_calc_result.driver == DriverEnum.hessian
assert numpy.isclose(result.qc_calc_result.return_result, 5.2)
def test_get_qc_result(
qcgenerator_client, redis_connection, monkeypatch, mock_atomic_result
):
monkeypatch.setattr(
AsyncResult,
"_get_task_meta",
lambda self: {"status": "SUCCESS", "result": mock_atomic_result.json()},
)
redis_connection.hset("qcgenerator:types", "1", "hessian")
request = qcgenerator_client.get("/qc-calc/1")
request.raise_for_status()
result = QCGeneratorGETResponse.parse_raw(request.text)
assert result.qc_calc_status == "success"
assert result.qc_calc_result is not None
assert result.qc_calc_type == "hessian"
assert result.qc_calc_id == "1"
assert result.qc_calc_result.driver == DriverEnum.hessian
assert numpy.isclose(result.qc_calc_result.return_result, 5.2)
@pytest.mark.parametrize(
"task, compute_function",
[
(
Torsion1DTask(
smiles="[CH2:1][CH2:2]",
central_bond=(1, 2),
program="rdkit",
model=Model(method="uff", basis=None),
),
"compute_torsion_drive",
),
(
OptimizationTask(
smiles="[CH2:1][CH2:2]",
n_conformers=1,
program="rdkit",
model=Model(method="uff", basis=None),
),
"compute_optimization",
),
(
HessianTask(
smiles="[CH2:1][CH2:2]",
program="rdkit",
model=Model(method="uff", basis=None),
),
"compute_hessian",
),
],
)
def test_post_qc_result(
qcgenerator_client, redis_connection, monkeypatch, task, compute_function
):
submitted_task_kwargs = mock_celery_task(worker, compute_function, monkeypatch)
request = qcgenerator_client.post(
"/qc-calc", data=QCGeneratorPOSTBody(input_schema=task).json()
)
request.raise_for_status()
assert submitted_task_kwargs["task_json"] == task.json()
assert redis_connection.hget("qcgenerator:types", "1").decode() == task.type
result = QCGeneratorPOSTResponse.parse_raw(request.text)
assert result.qc_calc_id == "1"
@pytest.mark.parametrize("include_result", [True, False])
def test_get_qc_results(
qcgenerator_client,
redis_connection,
monkeypatch,
mock_atomic_result,
include_result,
):
monkeypatch.setattr(
AsyncResult,
"_get_task_meta",
lambda self: {"status": "SUCCESS", "result": mock_atomic_result.json()},
)
redis_connection.hset("qcgenerator:types", "1", "hessian")
redis_connection.hset("qcgenerator:types", "2", "hessian")
request = qcgenerator_client.get(
f"/qc-calcs?ids=1&ids=2&results={str(include_result).lower()}"
)
request.raise_for_status()
results = parse_raw_as(List[QCGeneratorGETResponse], request.text)
assert len(results) == 2
for i, result in enumerate(results):
assert result.qc_calc_status == "success"
assert (result.qc_calc_result is not None) == include_result
assert result.qc_calc_type == "hessian"
assert result.qc_calc_id == f"{i + 1}"
|
SimonBoothroyd/beflow
|
beflow/utilities/depiction.py
|
import functools
from typing import Optional, Tuple
IMAGE_UNAVAILABLE_SVG = """
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" style="isolation:isolate" viewBox="0 0 200 200" width="200pt" height="200pt"><defs><clipPath id="_clipPath_eSdCSpw1sB1xWp7flmMoZ0WjTPwPpzQh"><rect width="200" height="200"/></clipPath></defs><g clip-path="url(#_clipPath_eSdCSpw1sB1xWp7flmMoZ0WjTPwPpzQh)"><g clip-path="url(#_clipPath_LvpdWbrYj1cREqoXz8Lwbk3ZilfC6tg9)"><text transform="matrix(1,0,0,1,44.039,91.211)" style="font-family:'Open Sans';font-weight:400;font-size:30px;font-style:normal;fill:#000000;stroke:none;">Preview</text><text transform="matrix(1,0,0,1,17.342,132.065)" style="font-family:'Open Sans';font-weight:400;font-size:30px;font-style:normal;fill:#000000;stroke:none;">Unavailable</text></g><defs><clipPath id="_clipPath_LvpdWbrYj1cREqoXz8Lwbk3ZilfC6tg9"><rect x="0" y="0" width="166" height="81.709" transform="matrix(1,0,0,1,17,59.146)"/></clipPath></defs></g></svg>
"""
def _oe_smiles_to_image(smiles: str, highlight_atoms: Tuple[int]) -> str:
from openeye import oechem, oedepict
image = oedepict.OEImage(200, 200)
opts = oedepict.OE2DMolDisplayOptions(200, 200, oedepict.OEScale_AutoScale)
opts.SetAromaticStyle(oedepict.OEAromaticStyle_Circle)
opts.SetMargins(5)
oe_molecule = oechem.OEGraphMol()
oechem.OESmilesToMol(oe_molecule, smiles)
highlighted_atoms = [
atom
for atom in oe_molecule.GetAtoms()
if atom.GetMapIdx() > 0 and atom.GetMapIdx() in highlight_atoms
]
for atom in oe_molecule.GetAtoms():
atom.SetMapIdx(0)
oedepict.OEPrepareDepiction(oe_molecule)
display = oedepict.OE2DMolDisplay(oe_molecule, opts)
highlighted_set = oechem.OEAtomBondSet()
for atom in highlighted_atoms:
highlighted_set.AddAtom(atom)
oedepict.OEAddHighlighting(
display,
oechem.OEColor(oechem.OELimeGreen),
oedepict.OEHighlightStyle_BallAndStick,
highlighted_set,
)
oedepict.OERenderMolecule(image, display)
svg_content = oedepict.OEWriteImageToString("svg", image)
return svg_content.decode()
def _rd_smiles_to_image(smiles: str, highlight_atoms: Tuple[int]) -> str:
from rdkit import Chem
from rdkit.Chem.Draw import rdMolDraw2D
smiles_parser = Chem.rdmolfiles.SmilesParserParams()
smiles_parser.removeHs = True
rdkit_molecule = Chem.MolFromSmiles(smiles, smiles_parser)
highlight_atom_indices = [
atom.GetIdx()
for atom in rdkit_molecule.GetAtoms()
if atom.GetAtomMapNum() in highlight_atoms
]
for atom in rdkit_molecule.GetAtoms():
atom.SetAtomMapNum(0)
if not rdkit_molecule.GetNumConformers():
Chem.rdDepictor.Compute2DCoords(rdkit_molecule)
drawer = rdMolDraw2D.MolDraw2DSVG(200, 200, 150, 200)
drawer.drawOptions().padding = 0.05
drawer.SetOffset(25, 0)
drawer.DrawMolecule(rdkit_molecule, highlightAtoms=highlight_atom_indices)
drawer.FinishDrawing()
svg_content = drawer.GetDrawingText()
return svg_content
@functools.lru_cache(100000)
def smiles_to_image(smiles: str, highlight_atoms: Optional[Tuple[int]] = None):
highlight_atoms = tuple() if highlight_atoms is None else highlight_atoms
try:
return _oe_smiles_to_image(smiles, highlight_atoms)
except (ModuleNotFoundError, ImportError):
return _rd_smiles_to_image(smiles, highlight_atoms)
|
SimonBoothroyd/beflow
|
beflow/tests/services/optimizer/test_app.py
|
from celery.result import AsyncResult
from openff.bespokefit.schema.fitting import BespokeOptimizationSchema
from openff.bespokefit.schema.optimizers import ForceBalanceSchema
from openff.bespokefit.schema.results import BespokeOptimizationResults
from openff.fragmenter.fragment import WBOFragmenter
from beflow.services.optimizer import worker
from beflow.services.optimizer.models import (
OptimizerGETResponse,
OptimizerPOSTBody,
OptimizerPOSTResponse,
)
from beflow.tests.mocking.celery import mock_celery_task
def test_get_optimize(optimizer_client, redis_connection, monkeypatch):
mock_optimization_result = BespokeOptimizationResults(
provenance={}, status="running"
)
monkeypatch.setattr(
AsyncResult,
"_get_task_meta",
lambda self: {"status": "SUCCESS", "result": mock_optimization_result.json()},
)
request = optimizer_client.get("/optimizer/1")
request.raise_for_status()
result = OptimizerGETResponse.parse_raw(request.text)
assert result.optimization_status == "success"
assert result.optimization_result is not None
assert result.optimization_id == "1"
assert result.optimization_result.status == mock_optimization_result.status
assert result.optimization_result.provenance == mock_optimization_result.provenance
def test_post_optimize(optimizer_client, redis_connection, monkeypatch):
submitted_task_kwargs = mock_celery_task(worker, "optimize", monkeypatch)
input_schema = BespokeOptimizationSchema(
smiles="CC",
initial_force_field="openff-2.0.0.offxml",
parameters=[],
parameter_hyperparameters=[],
fragmentation_engine=WBOFragmenter(),
targets=[],
optimizer=ForceBalanceSchema(max_iterations=1),
)
request = optimizer_client.post(
"/optimizer", data=OptimizerPOSTBody(input_schema=input_schema).json()
)
request.raise_for_status()
assert submitted_task_kwargs is not None
assert submitted_task_kwargs["optimization_input_json"] == input_schema.json()
result = OptimizerPOSTResponse.parse_raw(request.text)
assert result.optimization_id == "1"
|
SimonBoothroyd/beflow
|
beflow/services/models.py
|
<reponame>SimonBoothroyd/beflow
from typing import Optional
from pydantic import BaseModel, Field
class Error(BaseModel):
type: str = Field(..., description="The type of exception that was raised.")
message: str = Field(..., description="The message associated with the exception.")
traceback: Optional[str] = Field(
None, description="The traceback associated with the exception"
)
|
SimonBoothroyd/beflow
|
beflow/utilities/typing.py
|
<reponame>SimonBoothroyd/beflow
from typing_extensions import Literal
Status = Literal["waiting", "running", "errored", "success"]
|
SimonBoothroyd/beflow
|
beflow/services/qcgenerator/models.py
|
from typing import Optional, Union
from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask
from pydantic import BaseModel, Field
from qcelemental.models import AtomicResult, FailedOperation, OptimizationResult
from qcengine.procedures.torsiondrive import TorsionDriveResult
from typing_extensions import Literal
from beflow.utilities.typing import Status
class QCGeneratorBaseResponse(BaseModel):
qc_calc_id: str = Field(
..., description="The ID associated with the QC calculation."
)
qc_calc_type: Literal["torsion1d", "optimization", "hessian"] = Field(
..., description="The type of QC calculation being run."
)
class QCGeneratorGETStatusResponse(BaseModel):
qc_calc_status: Status = Field(
"waiting", description="The status of the QC calculation."
)
class QCGeneratorGETResultResponse(BaseModel):
qc_calc_result: Optional[
Union[AtomicResult, OptimizationResult, TorsionDriveResult, FailedOperation]
] = Field(..., description="The result of the QC calculation if any was produced.")
class QCGeneratorGETErrorResponse(BaseModel):
qc_calc_error: Optional[str] = Field(
..., description="The error raised while running the QC calculation if any."
)
class QCGeneratorGETResponse(
QCGeneratorBaseResponse,
QCGeneratorGETStatusResponse,
QCGeneratorGETResultResponse,
QCGeneratorGETErrorResponse,
):
"""The object model returned by a GET request."""
class QCGeneratorPOSTBody(BaseModel):
input_schema: Union[HessianTask, OptimizationTask, Torsion1DTask] = Field(
..., description="The schema that fully defines the QC data to generate."
)
class QCGeneratorPOSTResponse(QCGeneratorBaseResponse):
"""The object model returned by a POST request."""
|
SimonBoothroyd/beflow
|
beflow/services/fragmenter/models.py
|
<reponame>SimonBoothroyd/beflow
from typing import List, Optional, Union
from openff.fragmenter.fragment import (
FragmentationResult,
PfizerFragmenter,
WBOFragmenter,
)
from pydantic import BaseModel, Field
from beflow.utilities.typing import Status
class FragmenterBaseResponse(BaseModel):
fragmentation_id: str = Field(
..., description="The ID associated with the fragmentation."
)
class FragmenterGETStatusResponse(BaseModel):
fragmentation_status: Status = Field(
"waiting", description="The status of the fragmentation."
)
class FragmenterGETResultResponse(BaseModel):
fragmentation_result: Optional[FragmentationResult] = Field(
..., description="The result of the fragmentation if any was produced."
)
class FragmenterGETErrorResponse(BaseModel):
fragmentation_error: Optional[str] = Field(
..., description="The error raised while fragmenting if any."
)
class FragmenterGETResponse(
FragmenterBaseResponse,
FragmenterGETStatusResponse,
FragmenterGETResultResponse,
FragmenterGETErrorResponse,
):
"""The object model returned by a GET request."""
class FragmenterPOSTBody(BaseModel):
"""The object model expected by a POST request."""
cmiles: str = Field(
..., description="The CMILES representation of the molecule to fragment."
)
fragmenter: Union[PfizerFragmenter, WBOFragmenter] = Field(
..., description="The fragmentation engine to use."
)
target_bond_smarts: List[str] = Field(
...,
description="A list of SMARTS patterns that should be used to identify the "
"bonds within the parent molecule to grow fragments around. Each SMARTS pattern "
"should include **two** indexed atoms that correspond to the two atoms involved "
"in the central bond.",
)
class FragmenterPOSTResponse(FragmenterBaseResponse):
"""The object model returned by a POST request."""
|
SimonBoothroyd/beflow
|
beflow/services/qcgenerator/worker.py
|
<reponame>SimonBoothroyd/beflow
from typing import List
import qcengine
import redis
from openff.bespokefit.schema.tasks import OptimizationTask, Torsion1DTask
from openff.toolkit.topology import Molecule
from qcelemental.models import AtomicResult
from qcelemental.models.common_models import DriverEnum
from qcelemental.models.procedures import (
OptimizationInput,
OptimizationResult,
OptimizationSpecification,
QCInputSpecification,
TDKeywords,
TorsionDriveInput,
TorsionDriveResult,
)
from qcelemental.util import serialize
from beflow.services import settings
from beflow.utilities.celery import configure_celery_app
redis_connection = redis.Redis(
host=settings.BEFLOW_REDIS_ADDRESS,
port=settings.BEFLOW_REDIS_PORT,
db=settings.BEFLOW_REDIS_DB,
)
celery_app = configure_celery_app("qcgenerator", redis_connection)
@celery_app.task
def compute_torsion_drive(task_json: str) -> TorsionDriveResult:
"""Runs a torsion drive using QCEngine."""
task = Torsion1DTask.parse_raw(task_json)
molecule: Molecule = Molecule.from_smiles(task.smiles)
# TODO: Ask Josh about multiple conformers.
molecule.generate_conformers(n_conformers=1)
map_to_atom_index = {
map_index: atom_index
for atom_index, map_index in molecule.properties["atom_map"].items()
}
index_2 = map_to_atom_index[task.central_bond[0]]
index_3 = map_to_atom_index[task.central_bond[1]]
index_1 = [
atom.molecule_atom_index
for atom in molecule.atoms[index_2].bonded_atoms
if atom.molecule_atom_index != index_3
][0]
index_4 = [
atom.molecule_atom_index
for atom in molecule.atoms[index_3].bonded_atoms
if atom.molecule_atom_index != index_2
][0]
del molecule.properties["atom_map"]
input_schema = TorsionDriveInput(
keywords=TDKeywords(
dihedrals=[(index_1, index_2, index_3, index_4)],
grid_spacing=[task.grid_spacing],
dihedral_ranges=[task.scan_range],
),
extras={
"canonical_isomeric_explicit_hydrogen_mapped_smiles": molecule.to_smiles(
isomeric=True, explicit_hydrogens=True, mapped=True
)
},
initial_molecule=molecule.to_qcschema(),
input_specification=QCInputSpecification(
model=task.model, driver=DriverEnum.gradient
),
optimization_spec=OptimizationSpecification(
procedure=task.optimization_spec.procedure,
keywords={
"coordsys": "dlc",
"maxiter": task.optimization_spec.max_iterations,
"program": task.program,
},
),
)
return_value = qcengine.compute_procedure(
input_schema, "torsiondrive", raise_error=True
)
if isinstance(return_value, TorsionDriveResult):
return_value = TorsionDriveResult(
**return_value.dict(exclude={"optimization_history", "stdout", "stderr"}),
optimization_history={},
)
# noinspection PyTypeChecker
return return_value.json()
@celery_app.task
def compute_optimization(
task_json: str,
) -> List[OptimizationResult]:
"""Runs a set of geometry optimizations using QCEngine."""
task = OptimizationTask.parse_raw(task_json)
molecule: Molecule = Molecule.from_smiles(task.smiles)
molecule.generate_conformers(n_conformers=task.n_conformers)
input_schemas = [
OptimizationInput(
keywords={
"coordsys": "dlc",
"maxiter": task.optimization_spec.max_iterations,
"program": task.program,
},
extras={
"canonical_isomeric_explicit_hydrogen_mapped_smiles": molecule.to_smiles(
isomeric=True, explicit_hydrogens=True, mapped=True
)
},
input_specification=QCInputSpecification(
model=task.model, driver=DriverEnum.gradient
),
initial_molecule=molecule.to_qcschema(conformer=i),
)
for i in range(molecule.n_conformers)
]
return_values = []
for input_schema in input_schemas:
return_value = qcengine.compute_procedure(
input_schema, task.optimization_spec.procedure, raise_error=True
)
if isinstance(return_value, OptimizationResult):
# Strip the extra **heavy** data
return_value = OptimizationResult(
**return_value.dict(exclude={"trajectory", "stdout", "stderr"}),
trajectory=[],
)
return_values.append(return_value)
# noinspection PyTypeChecker
return serialize(return_values, "json")
@celery_app.task
def compute_hessian(task_json: str) -> AtomicResult:
"""Runs a set of hessian evaluations using QCEngine."""
raise NotImplementedError()
|
SimonBoothroyd/beflow
|
beflow/tests/utilities/test_redis.py
|
<filename>beflow/tests/utilities/test_redis.py
import os.path
import shutil
import pytest
from redis import Redis
from beflow.utilities.redis import is_redis_available, launch_redis
def test_launch_redis(tmpdir):
assert not is_redis_available("localhost", 1234)
redis_process = launch_redis(port=1234, directory=str(tmpdir), persistent=True)
assert is_redis_available("localhost", 1234)
redis_connection = Redis(port=1234)
assert redis_connection.get("test-key") is None
redis_connection.set("test-key", "set")
assert redis_connection.get("test-key") is not None
redis_process.terminate()
redis_process.wait()
assert not is_redis_available("localhost", 1234)
assert os.path.isfile(os.path.join(tmpdir, "redis.db"))
def test_launch_redis_already_exists(tmpdir):
assert not is_redis_available("localhost", 1234)
redis_process = launch_redis(port=1234, directory=str(tmpdir), persistent=True)
assert is_redis_available("localhost", 1234)
with pytest.raises(RuntimeError, match="here is already a server running"):
launch_redis(port=1234, directory=str(tmpdir))
redis_process.terminate()
redis_process.wait()
assert not is_redis_available("localhost", 1234)
@pytest.mark.parametrize("missing_command", ["redis-server", "redis-cli"])
def test_launch_redis_missing_command(tmpdir, monkeypatch, missing_command):
monkeypatch.setattr(
shutil, "which", lambda x: None if x == missing_command else "some/path"
)
redis_process = None
with pytest.raises(RuntimeError, match=f"The `{missing_command}`"):
redis_process = launch_redis(port=1234, directory=str(tmpdir))
if redis_process is not None:
redis_process.terminate()
redis_process.wait()
|
SimonBoothroyd/beflow
|
beflow/services/optimizer/app.py
|
<filename>beflow/services/optimizer/app.py<gh_stars>0
import json
from fastapi import APIRouter
from qcelemental.util import serialize
from beflow.services import settings
from beflow.services.optimizer import worker
from beflow.services.optimizer.models import (
OptimizerGETResponse,
OptimizerPOSTBody,
OptimizerPOSTResponse,
)
from beflow.utilities.celery import get_task_information
router = APIRouter()
@router.get("/" + settings.BEFLOW_OPTIMIZER_PREFIX + "/{optimization_id}")
def get_optimization(optimization_id: str) -> OptimizerGETResponse:
task_info = get_task_information(worker.celery_app, optimization_id)
# noinspection PyTypeChecker
return {
"optimization_id": optimization_id,
"optimization_status": task_info["status"],
"optimization_result": task_info["result"],
"optimization_error": json.dumps(task_info["error"]),
}
@router.post("/" + settings.BEFLOW_OPTIMIZER_PREFIX)
def post_optimization(body: OptimizerPOSTBody) -> OptimizerPOSTResponse:
# We use celery delay method in order to enqueue the task with the given
# parameters
task = worker.optimize.delay(
optimization_input_json=serialize(body.input_schema, "json")
)
return OptimizerPOSTResponse(optimization_id=task.id)
|
SimonBoothroyd/beflow
|
beflow/utilities/redis.py
|
import atexit
import functools
import shlex
import shutil
import subprocess
import time
from typing import IO, Optional, Union
import redis
def is_redis_available(host: str, port: int = 6379) -> bool:
"""Returns whether a server running on the local host on a particular port is
available.
"""
redis_client = redis.Redis(host=host, port=port)
try:
redis_client.get("null")
except (redis.exceptions.ConnectionError, redis.exceptions.BusyLoadingError):
return False
return True
def _cleanup_redis(redis_process: subprocess.Popen):
redis_process.terminate()
def launch_redis(
port: int = 6379,
stderr_file: Optional[Union[IO, int]] = None,
stdout_file: Optional[Union[IO, int]] = None,
directory: Optional[str] = None,
persistent: bool = True,
):
redis_server_path = shutil.which("redis-server")
if redis_server_path is None:
raise RuntimeError(
"The `redis-server` command could not be found. Please make sure `redis` is "
"correctly installed."
)
redis_cli_path = shutil.which("redis-cli")
if redis_cli_path is None:
raise RuntimeError(
"The `redis-cli` command could not be found. Please make sure `redis` is "
"correctly installed."
)
if is_redis_available("localhost", port):
raise RuntimeError(f"There is already a server running at localhost:{port}")
redis_command = f"redis-server --port {str(port)} --dbfilename redis.db"
if directory:
redis_command = f"{redis_command} --dir {directory}"
if persistent:
redis_command = (
f"{redis_command} --save 900 1 --save 300 100 --save 60 200 --save 15 1000"
)
redis_process = subprocess.Popen(
shlex.split(redis_command), stderr=stderr_file, stdout=stdout_file
)
atexit.register(functools.partial(_cleanup_redis, redis_process))
timeout = True
for i in range(0, 60):
if is_redis_available("localhost", port):
timeout = False
break
time.sleep(1.0)
if timeout:
raise RuntimeError("The redis server failed to start.")
return redis_process
|
SimonBoothroyd/beflow
|
beflow/services/optimizer/models.py
|
from typing import Optional
from openff.bespokefit.schema.fitting import BespokeOptimizationSchema
from openff.bespokefit.schema.results import BespokeOptimizationResults
from pydantic import BaseModel, Field
from beflow.utilities.typing import Status
class OptimizerBaseResponse(BaseModel):
optimization_id: str = Field(
..., description="The ID associated with the optimization."
)
class OptimizerGETStatusResponse(BaseModel):
optimization_status: Status = Field(
"waiting", description="The status of the optimization."
)
class OptimizerGETResultResponse(BaseModel):
optimization_result: Optional[BespokeOptimizationResults] = Field(
..., description="The result of the optimization if any was produced."
)
class OptimizerGETErrorResponse(BaseModel):
optimization_error: Optional[str] = Field(
..., description="The error raised while optimizing if any."
)
class OptimizerGETResponse(
OptimizerBaseResponse,
OptimizerGETStatusResponse,
OptimizerGETResultResponse,
OptimizerGETErrorResponse,
):
"""The object model returned by a GET request."""
class OptimizerPOSTBody(BaseModel):
"""The object model expected by a POST request."""
input_schema: BespokeOptimizationSchema = Field(
..., description="The schema that fully defines optimization to perform."
)
class OptimizerPOSTResponse(OptimizerBaseResponse):
"""The object model returned by a POST request."""
|
SimonBoothroyd/beflow
|
beflow/services/settings.py
|
<gh_stars>0
from pydantic import BaseSettings
class Settings(BaseSettings):
BEFLOW_API_V1_STR: str = "/api/v1"
BEFLOW_GATEWAY_PORT: int = 8000
BEFLOW_GATEWAY_LOG_LEVEL: str = "error"
BEFLOW_REDIS_ADDRESS: str = "localhost"
BEFLOW_REDIS_PORT: int = 6379
BEFLOW_REDIS_DB: int = 0
BEFLOW_COORDINATOR_PREFIX = "optimization"
BEFLOW_COORDINATOR_ROUTER = "beflow.services.coordinator.app:router"
BEFLOW_COORDINATOR_WORKER = "beflow.services.coordinator.worker:"
BEFLOW_FRAGMENTER_PREFIX = "fragmenter"
BEFLOW_FRAGMENTER_ROUTER = "beflow.services.fragmenter.app:router"
BEFLOW_FRAGMENTER_WORKER = "beflow.services.fragmenter.worker"
BEFLOW_QC_COMPUTE_PREFIX = "qc-calc"
BEFLOW_QC_COMPUTE_ROUTER = "beflow.services.qcgenerator.app:router"
BEFLOW_QC_COMPUTE_WORKER = "beflow.services.qcgenerator.worker"
BEFLOW_OPTIMIZER_PREFIX = "optimizer"
BEFLOW_OPTIMIZER_ROUTER = "beflow.services.optimizer.app:router"
BEFLOW_OPTIMIZER_WORKER = "beflow.services.optimizer.worker"
|
SimonBoothroyd/beflow
|
examples/launch-executor.py
|
from openff.bespokefit.schema.data import BespokeQCData
from openff.bespokefit.schema.fitting import BespokeOptimizationSchema
from openff.bespokefit.schema.optimizers import ForceBalanceSchema
from openff.bespokefit.schema.smirnoff import (
ProperTorsionHyperparameters,
ProperTorsionSMIRKS,
)
from openff.bespokefit.schema.targets import TorsionProfileTargetSchema
from openff.bespokefit.schema.tasks import Torsion1DTaskSpec
from openff.fragmenter.fragment import WBOFragmenter
from openff.toolkit.topology import Molecule
from qcelemental.models.common_models import Model
from qcelemental.util import serialize
from beflow.executor import BespokeExecutor
def main():
smiles = Molecule.from_smiles("c1ccc(cc1)c2ccccc2").to_smiles(mapped=True)
input_schema = BespokeOptimizationSchema(
# Define the molecule to generate the bespoke parameters for.
smiles=smiles,
# and the initial force field to source most parameters from.
initial_force_field="openff_unconstrained-2.0.0.offxml",
# Define the bespoke parameters that should be created and trained.
parameters=[
ProperTorsionSMIRKS(
smirks="[*:1]~[#6X3:2]-[#6X3:3]~[*:4]", attributes={"k1"}
)
],
parameter_hyperparameters=[ProperTorsionHyperparameters()],
#
fragmentation_engine=WBOFragmenter(),
# Define the targets that the parameters should be trained to.
targets=[
TorsionProfileTargetSchema(
reference_data=BespokeQCData(
spec=Torsion1DTaskSpec(
program="rdkit", model=Model(method="uff", basis=None)
)
)
)
],
optimizer=ForceBalanceSchema(max_iterations=1),
)
with BespokeExecutor(
n_fragmenter_workers=1, n_qc_compute_workers=4, n_optimizer_workers=1
) as executor:
optimization_id = executor.submit(input_schema)
response = executor.wait_until_complete(optimization_id)
with open("output.json", "w") as file:
file.write(serialize(response, "json"))
if __name__ == "__main__":
main()
|
SimonBoothroyd/beflow
|
beflow/tests/__init__.py
|
<reponame>SimonBoothroyd/beflow<gh_stars>0
from contextlib import contextmanager
import redis
from beflow.services import settings
try:
from pytest_cov.embed import cleanup_on_sigterm
except ImportError:
pass
else:
cleanup_on_sigterm()
@contextmanager
def patch_settings(redis_connection: redis.Redis):
old_settings = settings.copy(deep=True)
settings.BEFLOW_REDIS_ADDRESS = redis_connection.connection_pool.connection_kwargs[
"host"
]
settings.BEFLOW_REDIS_PORT = redis_connection.connection_pool.connection_kwargs[
"port"
]
settings.BEFLOW_REDIS_DB = redis_connection.connection_pool.connection_kwargs["db"]
yield
settings.BEFLOW_REDIS_ADDRESS = old_settings.BEFLOW_REDIS_ADDRESS
settings.BEFLOW_REDIS_PORT = old_settings.BEFLOW_REDIS_PORT
settings.BEFLOW_REDIS_DB = old_settings.BEFLOW_REDIS_DB
|
SimonBoothroyd/beflow
|
beflow/services/coordinator/app.py
|
<reponame>SimonBoothroyd/beflow
import asyncio
import logging
import os
import pickle
import signal
import urllib.parse
from typing import List, Optional
from fastapi import APIRouter, HTTPException
from fastapi.responses import Response
from openff.toolkit.topology import Molecule
from beflow.services import settings
from beflow.services.coordinator import worker
from beflow.services.coordinator.models import (
CoordinatorGETResponse,
CoordinatorPOSTBody,
CoordinatorPOSTResponse,
CoordinatorTask,
)
from beflow.services.coordinator.stages import (
FragmentationStage,
OptimizationStage,
QCGenerationStage,
)
from beflow.utilities.depiction import smiles_to_image
router = APIRouter()
_logger = logging.getLogger(__name__)
_worker_task: Optional[asyncio.Future] = None
@router.get("/" + settings.BEFLOW_COORDINATOR_PREFIX + "/{optimization_id}")
def get_optimization(optimization_id: str) -> CoordinatorGETResponse:
"""Retrieves a bespoke optimization that has been submitted to this server
using its unique id."""
task_pickle = worker.redis_connection.get(
f"coordinator:optimization:{optimization_id}"
)
if task_pickle is None:
raise HTTPException(status_code=404, detail=f"{optimization_id} not found")
task = CoordinatorTask.parse_obj(pickle.loads(task_pickle))
return CoordinatorGETResponse.from_task(task)
@router.post("/" + settings.BEFLOW_COORDINATOR_PREFIX)
def post_optimization(body: CoordinatorPOSTBody) -> CoordinatorPOSTResponse:
"""Submit a bespoke optimization to be performed by the server."""
try:
# Make sure the input SMILES does not have any atoms mapped as these may
# cause issues for certain stages such as fragmentation.
molecule = Molecule.from_smiles(body.input_schema.smiles)
molecule.properties.pop("atom_map", None)
body.input_schema.smiles = molecule.to_smiles(mapped=True)
except BaseException:
# TODO: Custom exception handling rather than 500 error. 400 / 402?
raise
task_id = worker.redis_connection.incr("coordinator:id-counter")
task_key = f"coordinator:optimization:{task_id}"
task = CoordinatorTask(
id=task_id,
input_schema=body.input_schema,
pending_stages=[FragmentationStage(), QCGenerationStage(), OptimizationStage()],
)
worker.redis_connection.set(task_key, pickle.dumps(task.dict()))
worker.redis_connection.zadd("coordinator:optimizations", {task_key: task_id})
return CoordinatorPOSTResponse(optimization_id=task_id)
@router.get("/" + settings.BEFLOW_COORDINATOR_PREFIX + "s")
def get_optimizations(skip: int = 0, limit: int = 10) -> List[CoordinatorGETResponse]:
"""Retrieves all bespoke optimizations that have been submitted to this server."""
optimization_keys = worker.redis_connection.zrange(
"coordinator:optimizations", skip * limit, (skip + 1) * limit - 1
)
response = []
for optimization_key in optimization_keys:
task_pickle = worker.redis_connection.get(optimization_key)
if task_pickle is None:
raise HTTPException(status_code=404, detail=f"{optimization_key} not found")
task = CoordinatorTask.parse_obj(pickle.loads(task_pickle))
response.append(CoordinatorGETResponse.from_task(task))
return response
@router.get("/" + settings.BEFLOW_COORDINATOR_PREFIX + "/{optimization_id}/image")
async def get_molecule_image(optimization_id: str):
"""Render the molecule associated with a particular bespoke optimization to an
SVG file."""
task_pickle = worker.redis_connection.get(
f"coordinator:optimization:{optimization_id}"
)
if task_pickle is None:
raise HTTPException(status_code=404, detail=f"{optimization_id} not found")
task = CoordinatorTask.parse_obj(pickle.loads(task_pickle))
svg_content = smiles_to_image(urllib.parse.unquote(task.input_schema.smiles))
svg_response = Response(svg_content, media_type="image/svg+xml")
return svg_response
@router.on_event("startup")
def startup():
main_loop = asyncio.get_event_loop()
global _worker_task
_worker_task = asyncio.ensure_future(worker.cycle(), loop=main_loop)
def _handle_task_result(task: asyncio.Task) -> None:
# noinspection PyBroadException
try:
task.result()
except asyncio.CancelledError:
pass
except Exception:
_logger.exception(
"Exception raised by the main loop. This should never happen."
)
os.kill(os.getpid(), signal.SIGINT)
_worker_task.add_done_callback(_handle_task_result)
@router.on_event("shutdown")
def shutdown():
if _worker_task is not None:
_worker_task.cancel()
|
SimonBoothroyd/beflow
|
beflow/tests/services/fragmenter/test_app.py
|
<reponame>SimonBoothroyd/beflow
from celery.result import AsyncResult
from openff.fragmenter.fragment import FragmentationResult, PfizerFragmenter
from beflow.services.fragmenter import worker
from beflow.services.fragmenter.models import (
FragmenterGETResponse,
FragmenterPOSTBody,
FragmenterPOSTResponse,
)
from beflow.tests.mocking.celery import mock_celery_task
def test_get_fragment(fragmenter_client, redis_connection, monkeypatch):
mock_fragmentation_result = FragmentationResult(
parent_smiles="[Ar:1]", fragments=[], provenance={"version": "mock"}
)
monkeypatch.setattr(
AsyncResult,
"_get_task_meta",
lambda self: {"status": "SUCCESS", "result": mock_fragmentation_result.json()},
)
request = fragmenter_client.get("/fragmenter/1")
request.raise_for_status()
result = FragmenterGETResponse.parse_raw(request.text)
assert result.fragmentation_status == "success"
assert result.fragmentation_result is not None
assert result.fragmentation_id == "1"
assert (
result.fragmentation_result.parent_smiles
== mock_fragmentation_result.parent_smiles
)
assert (
result.fragmentation_result.provenance == mock_fragmentation_result.provenance
)
def test_post_fragment(fragmenter_client, redis_connection, monkeypatch):
submitted_task_kwargs = mock_celery_task(worker, "fragment", monkeypatch)
request = fragmenter_client.post(
"/fragmenter",
data=FragmenterPOSTBody(
cmiles="[CH2:1]=[CH2:2]",
fragmenter=PfizerFragmenter(),
target_bond_smarts=["[#6:1]-[#6:2]"],
).json(),
)
request.raise_for_status()
assert submitted_task_kwargs is not None
assert submitted_task_kwargs["cmiles"] == "[CH2:1]=[CH2:2]"
assert submitted_task_kwargs["target_bond_smarts"] == ["[#6:1]-[#6:2]"]
result = FragmenterPOSTResponse.parse_raw(request.text)
assert result.fragmentation_id == "1"
|
SimonBoothroyd/beflow
|
beflow/services/__init__.py
|
<filename>beflow/services/__init__.py<gh_stars>0
from beflow.services.settings import Settings
settings = Settings()
__all__ = [settings]
|
SimonBoothroyd/beflow
|
beflow/tests/services/qcgenerator/test_worker.py
|
<gh_stars>0
import json
import pytest
from openff.bespokefit.schema.tasks import OptimizationTask, Torsion1DTask
from openff.toolkit.topology import Molecule
from qcelemental.models.common_models import Model
from qcelemental.models.procedures import OptimizationResult, TorsionDriveResult
from beflow.services.qcgenerator import worker
def test_compute_torsion_drive():
task = Torsion1DTask(
smiles="[CH3:1][CH3:2]",
central_bond=(1, 2),
grid_spacing=180,
scan_range=(-180, 180),
program="rdkit",
model=Model(method="uff", basis=None),
)
result_json = worker.compute_torsion_drive(task.json())
assert isinstance(result_json, str)
result_dict = json.loads(result_json)
assert isinstance(result_dict, dict)
result = TorsionDriveResult.parse_obj(result_dict)
assert result.success
cmiles = result.final_molecules["180"].extras[
"canonical_isomeric_explicit_hydrogen_mapped_smiles"
]
# Make sure a molecule can be created from CMILES
final_molecule = Molecule.from_mapped_smiles(cmiles)
assert Molecule.are_isomorphic(final_molecule, Molecule.from_smiles("CC"))[0]
def test_compute_optimization():
task = OptimizationTask(
smiles="CCCCC",
n_conformers=2,
program="rdkit",
model=Model(method="uff", basis=None),
)
result_json = worker.compute_optimization(task.json())
assert isinstance(result_json, str)
result_dicts = json.loads(result_json)
assert isinstance(result_dicts, list)
assert 0 < len(result_dicts) <= 2
for result_dict in result_dicts:
result = OptimizationResult.parse_obj(result_dict)
assert result.success
cmiles = result.final_molecule.extras[
"canonical_isomeric_explicit_hydrogen_mapped_smiles"
]
# Make sure a molecule can be created from CMILES
final_molecule = Molecule.from_mapped_smiles(cmiles)
assert Molecule.are_isomorphic(final_molecule, Molecule.from_smiles("CCCCC"))
@pytest.mark.parametrize(
"compute_function, task",
[
(
worker.compute_torsion_drive,
Torsion1DTask(
smiles="[CH2:1]=[CH2:2]",
central_bond=(1, 2),
grid_spacing=180,
scan_range=(-180, 180),
program="non-existent-program",
model=Model(method="uff", basis=None),
),
),
(
worker.compute_optimization,
OptimizationTask(
smiles="CCCCC",
n_conformers=1,
program="non-existent-program",
model=Model(method="uff", basis=None),
),
),
],
)
def test_compute_failure(compute_function, task, celery_worker):
with pytest.raises(ValueError, match="non-existent-program"):
compute_function(task.json())
|
SimonBoothroyd/beflow
|
beflow/executor.py
|
import functools
import importlib
import logging
import multiprocessing
import os
import time
from typing import Optional
import celery
import requests
from openff.bespokefit.schema.fitting import BespokeOptimizationSchema
from openff.utilities import temporary_cd
from beflow.services import settings
from beflow.services.coordinator.models import (
CoordinatorGETResponse,
CoordinatorPOSTBody,
CoordinatorPOSTResponse,
)
from beflow.services.gateway import launch as launch_gateway
from beflow.services.gateway import wait_for_gateway
from beflow.utilities.celery import spawn_worker
from beflow.utilities.redis import is_redis_available, launch_redis
_logger = logging.getLogger(__name__)
class BespokeExecutor:
def __init__(
self,
n_fragmenter_workers: int = 0,
n_qc_compute_workers: int = 0,
n_optimizer_workers: int = 0,
directory: str = "beflow-executor",
launch_redis_if_unavailable: bool = True,
):
self._n_fragmenter_workers = n_fragmenter_workers
self._n_qc_compute_workers = n_qc_compute_workers
self._n_optimizer_workers = n_optimizer_workers
self._directory = directory
self._launch_redis_if_unavailable = launch_redis_if_unavailable
self._started = False
self._gateway_process: Optional[multiprocessing.Process] = None
def _launch_redis(self):
if self._launch_redis_if_unavailable and not is_redis_available(
host=settings.BEFLOW_REDIS_ADDRESS, port=settings.BEFLOW_REDIS_PORT
):
redis_log_file = open("redis.log", "w")
launch_redis(settings.BEFLOW_REDIS_PORT, redis_log_file, redis_log_file)
def _launch_workers(self):
for import_path, n_workers in {
(settings.BEFLOW_FRAGMENTER_WORKER, self._n_fragmenter_workers),
(settings.BEFLOW_QC_COMPUTE_WORKER, self._n_qc_compute_workers),
(settings.BEFLOW_OPTIMIZER_WORKER, self._n_optimizer_workers),
}:
worker_module = importlib.import_module(import_path)
worker_app = getattr(worker_module, "celery_app")
assert isinstance(worker_app, celery.Celery), "workers must be celery based"
spawn_worker(worker_app, concurrency=n_workers)
def start(self, asynchronous=False):
if self._started:
raise RuntimeError("This executor is already running.")
self._started = True
if self._directory is not None and len(self._directory) > 0:
os.makedirs(self._directory, exist_ok=True)
with temporary_cd(self._directory):
self._launch_redis()
self._launch_workers()
if asynchronous:
self._gateway_process = multiprocessing.Process(
target=functools.partial(launch_gateway, self._directory), daemon=True
)
self._gateway_process.start()
wait_for_gateway()
else:
launch_gateway(self._directory)
def stop(self):
if not self._started:
raise ValueError("The executor is not running.")
self._started = False
if self._gateway_process is not None and self._gateway_process.is_alive():
self._gateway_process.terminate()
self._gateway_process.join()
def submit(self, input_schema: BespokeOptimizationSchema) -> str:
assert self._started, "the executor is not running"
request = requests.post(
"http://127.0.0.1:8000/api/v1/optimization",
data=CoordinatorPOSTBody(input_schema=input_schema).json(),
)
return CoordinatorPOSTResponse.parse_raw(request.text).optimization_id
def wait_until_complete(
self, optimization_id: str, frequency: int = 10
) -> CoordinatorGETResponse:
while True:
try:
request = requests.get(
f"http://127.0.0.1:8000/api/v1/optimization/{optimization_id}"
)
request.raise_for_status()
response = CoordinatorGETResponse.parse_raw(request.text)
# TODO: Return the actual result
if all(
stage.stage_status == "success" for stage in response.stages
) or any(stage.stage_status == "errored" for stage in response.stages):
return response
time.sleep(frequency)
except KeyboardInterrupt:
break
def __enter__(self):
self.start(asynchronous=True)
return self
def __exit__(self, *args):
self.stop()
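# Illustrative usage sketch (not part of the original module). Run as a script it simply
# starts one worker of each kind and serves the gateway in the foreground; submitting a
# ``BespokeOptimizationSchema`` and polling it would use ``submit`` / ``wait_until_complete``.
if __name__ == "__main__":  # pragma: no cover - usage sketch
    BespokeExecutor(
        n_fragmenter_workers=1, n_qc_compute_workers=1, n_optimizer_workers=1
    ).start(asynchronous=False)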
|
SimonBoothroyd/beflow
|
beflow/services/fragmenter/app.py
|
import json
from fastapi import APIRouter
from fastapi.responses import Response
from openff.fragmenter.depiction import _oe_render_fragment
from openff.fragmenter.fragment import FragmentationResult
from beflow.services import settings
from beflow.services.fragmenter import worker
from beflow.services.fragmenter.models import (
FragmenterGETResponse,
FragmenterPOSTBody,
FragmenterPOSTResponse,
)
from beflow.utilities.celery import get_task_information
from beflow.utilities.depiction import IMAGE_UNAVAILABLE_SVG
router = APIRouter()
@router.get("/" + settings.BEFLOW_FRAGMENTER_PREFIX + "/{fragmentation_id}")
def get_fragment(fragmentation_id: str) -> FragmenterGETResponse:
task_info = get_task_information(worker.celery_app, fragmentation_id)
return FragmenterGETResponse(
fragmentation_id=fragmentation_id,
fragmentation_status=task_info["status"],
fragmentation_result=task_info["result"],
fragmentation_error=json.dumps(task_info["error"]),
)
@router.post("/" + settings.BEFLOW_FRAGMENTER_PREFIX)
def post_fragment(body: FragmenterPOSTBody) -> FragmenterPOSTResponse:
# We use celery delay method in order to enqueue the task with the given
# parameters
task = worker.fragment.delay(
cmiles=body.cmiles,
fragmenter_json=body.fragmenter.json(),
target_bond_smarts=body.target_bond_smarts,
)
return FragmenterPOSTResponse(fragmentation_id=task.id)
@router.get(
"/"
+ settings.BEFLOW_FRAGMENTER_PREFIX
+ "/{fragmentation_id}/fragment/{fragment_id}/image"
)
def get_fragment_image(fragmentation_id: str, fragment_id: int) -> Response:
task_info = get_task_information(worker.celery_app, fragmentation_id)
if task_info["status"] != "success":
return Response(IMAGE_UNAVAILABLE_SVG, media_type="image/svg+xml")
result = FragmentationResult.parse_obj(task_info["result"])
    if fragment_id < 0 or fragment_id >= len(result.fragments):
return Response(IMAGE_UNAVAILABLE_SVG, media_type="image/svg+xml")
fragment = result.fragments[fragment_id]
svg_content = _oe_render_fragment(
result.parent_molecule,
fragment.molecule,
fragment.bond_indices,
image_width=200,
image_height=200,
)
return Response(svg_content, media_type="image/svg+xml")
|
SimonBoothroyd/beflow
|
beflow/services/fragmenter/worker.py
|
from typing import List, Union
import redis
from openff.fragmenter.fragment import PfizerFragmenter, WBOFragmenter
from pydantic import parse_raw_as
from beflow.services import settings
from beflow.utilities.celery import configure_celery_app
redis_connection = redis.Redis(
host=settings.BEFLOW_REDIS_ADDRESS,
port=settings.BEFLOW_REDIS_PORT,
db=settings.BEFLOW_REDIS_DB,
)
celery_app = configure_celery_app("fragmenter", redis_connection)
@celery_app.task
def fragment(cmiles: str, fragmenter_json: str, target_bond_smarts: List[str]) -> str:
from openff.toolkit.topology import Molecule
fragmenter = parse_raw_as(Union[PfizerFragmenter, WBOFragmenter], fragmenter_json)
molecule = Molecule.from_mapped_smiles(cmiles)
return fragmenter.fragment(molecule, target_bond_smarts=target_bond_smarts).json()
|
SimonBoothroyd/beflow
|
beflow/tests/test_beflow.py
|
import importlib
def test_importable():
importlib.import_module("beflow")
|
SimonBoothroyd/beflow
|
beflow/services/qcgenerator/__init__.py
|
from beflow.services.qcgenerator.app import router
__all__ = ["router"]
|
SimonBoothroyd/beflow
|
beflow/__init__.py
|
"""
beflow
A proof of concept refactoring of the OpenFF bespoke executor to use something closer to a microservice pattern.
"""
from ._version import get_versions
versions = get_versions()
__version__ = versions["version"]
__git_revision__ = versions["full-revisionid"]
del get_versions, versions
|
SimonBoothroyd/beflow
|
beflow/services/coordinator/stages.py
|
import abc
import json
from collections import defaultdict
from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union
import httpx
from chemper.graphs.cluster_graph import ClusterGraph
from openff.bespokefit.schema.data import BespokeQCData, LocalQCData
from openff.bespokefit.schema.fitting import BespokeOptimizationSchema
from openff.bespokefit.schema.results import BespokeOptimizationResults
from openff.bespokefit.schema.smirnoff import ProperTorsionSMIRKS, SMIRNOFFParameter
from openff.bespokefit.schema.tasks import Torsion1DTask
from openff.bespokefit.utilities.molecule import (
get_atom_symmetries,
group_valence_by_symmetry,
)
from openff.fragmenter.fragment import FragmentationResult
from openff.fragmenter.utils import get_map_index
from openff.toolkit.topology import Molecule
from openff.toolkit.typing.engines.smirnoff import ForceField
from pydantic import BaseModel, Field, parse_raw_as
from qcelemental.models import AtomicResult, OptimizationResult
from qcelemental.util import serialize
from qcengine.procedures.torsiondrive import TorsionDriveResult
from typing_extensions import Literal
from beflow.services import settings
from beflow.services.fragmenter.models import (
FragmenterGETResponse,
FragmenterPOSTBody,
FragmenterPOSTResponse,
)
from beflow.services.optimizer.models import (
OptimizerGETResponse,
OptimizerPOSTBody,
OptimizerPOSTResponse,
)
from beflow.services.qcgenerator.models import (
QCGeneratorGETResponse,
QCGeneratorPOSTBody,
QCGeneratorPOSTResponse,
)
from beflow.utilities.typing import Status
if TYPE_CHECKING:
from beflow.services.coordinator.models import CoordinatorTask
class _Stage(BaseModel, abc.ABC):
type: Literal["base-stage"] = "base-stage"
status: Status = Field("waiting", description="The status of this stage.")
error: Optional[str] = Field(
None, description="The error raised, if any, while running this stage."
)
@abc.abstractmethod
async def enter(self, task: "CoordinatorTask"):
pass
@abc.abstractmethod
async def update(self):
pass
class FragmentationStage(_Stage):
type: Literal["fragmentation"] = "fragmentation"
id: Optional[str] = Field(None, description="")
result: Optional[FragmentationResult] = Field(None, description="")
@staticmethod
def _generate_target_bond_smarts(
smiles: str, parameters: List[SMIRNOFFParameter]
) -> List[str]:
"""Attempts to find all of the bonds in the molecule around which a bespoke
torsion parameter is being trained."""
molecule = Molecule.from_mapped_smiles(smiles)
all_central_bonds = {
tuple(sorted(central_bond))
for parameter in parameters
if isinstance(parameter, ProperTorsionSMIRKS)
for (_, *central_bond, _) in molecule.chemical_environment_matches(
parameter.smirks
)
}
grouped_central_bonds = group_valence_by_symmetry(
molecule, sorted(all_central_bonds)
)
unique_central_bonds = [group[0] for group in grouped_central_bonds.values()]
target_bond_smarts = set()
for central_bond in unique_central_bonds:
molecule.properties["atom_map"] = {
i: (j + 1) for j, i in enumerate(central_bond)
}
target_bond_smarts.add(molecule.to_smiles(mapped=True))
return sorted(target_bond_smarts)
async def enter(self, task: "CoordinatorTask"):
async with httpx.AsyncClient() as client:
raw_response = await client.post(
f"http://127.0.0.1:"
f"{settings.BEFLOW_GATEWAY_PORT}"
f"{settings.BEFLOW_API_V1_STR}/"
f"{settings.BEFLOW_FRAGMENTER_PREFIX}",
data=FragmenterPOSTBody(
cmiles=task.input_schema.smiles,
fragmenter=task.input_schema.fragmentation_engine,
target_bond_smarts=self._generate_target_bond_smarts(
task.input_schema.smiles, task.input_schema.parameters
),
).json(),
)
if raw_response.status_code != 200:
self.error = json.dumps(raw_response.text)
self.status = "errored"
return
contents = raw_response.text
post_response = FragmenterPOSTResponse.parse_raw(contents)
self.id = post_response.fragmentation_id
async def update(self):
if self.status == "errored":
return
async with httpx.AsyncClient() as client:
raw_response = await client.get(
f"http://127.0.0.1:"
f"{settings.BEFLOW_GATEWAY_PORT}"
f"{settings.BEFLOW_API_V1_STR}/"
f"{settings.BEFLOW_FRAGMENTER_PREFIX}/{self.id}"
)
if raw_response.status_code != 200:
self.error = json.dumps(raw_response.text)
self.status = "errored"
return
contents = raw_response.text
get_response = FragmenterGETResponse.parse_raw(contents)
self.result = get_response.fragmentation_result
self.error = get_response.fragmentation_error
self.status = get_response.fragmentation_status
class QCGenerationStage(_Stage):
type: Literal["qc-generation"] = "qc-generation"
ids: Optional[Dict[int, List[str]]] = Field(None, description="")
results: Optional[
Dict[str, Union[AtomicResult, OptimizationResult, TorsionDriveResult]]
] = Field(None, description="")
async def enter(self, task: "CoordinatorTask"):
fragment_stage = next(
iter(
stage
for stage in task.completed_stages
if stage.type == "fragmentation"
),
None,
)
fragments = [] if fragment_stage is None else fragment_stage.result.fragments
target_qc_tasks = defaultdict(list)
for i, target in enumerate(task.input_schema.targets):
if not isinstance(target.reference_data, BespokeQCData):
continue
if target.bespoke_task_type() == "torsion1d":
target_qc_tasks[i].extend(
Torsion1DTask(
smiles=fragment.smiles,
central_bond=fragment.bond_indices,
**target.reference_data.spec.dict(),
)
for fragment in fragments
)
else:
raise NotImplementedError()
qc_calc_ids = defaultdict(set)
async with httpx.AsyncClient() as client:
for i, qc_tasks in target_qc_tasks.items():
for qc_task in qc_tasks:
raw_response = await client.post(
f"http://127.0.0.1:"
f"{settings.BEFLOW_GATEWAY_PORT}"
f"{settings.BEFLOW_API_V1_STR}/"
f"{settings.BEFLOW_QC_COMPUTE_PREFIX}",
data=QCGeneratorPOSTBody(input_schema=qc_task).json(),
)
if raw_response.status_code != 200:
self.error = json.dumps(raw_response.text)
self.status = "errored"
return
response = QCGeneratorPOSTResponse.parse_raw(raw_response.text)
qc_calc_ids[i].add(response.qc_calc_id)
self.ids = {i: sorted(ids) for i, ids in qc_calc_ids.items()}
async def update(self):
if self.status == "errored":
return
async with httpx.AsyncClient() as client:
id_query = "&ids=".join(qc_id for i in self.ids for qc_id in self.ids[i])
raw_response = await client.get(
f"http://127.0.0.1:"
f"{settings.BEFLOW_GATEWAY_PORT}"
f"{settings.BEFLOW_API_V1_STR}/"
f"{settings.BEFLOW_QC_COMPUTE_PREFIX}s?ids={id_query}"
)
contents = raw_response.text
if raw_response.status_code != 200:
self.error = json.dumps(raw_response.text)
self.status = "errored"
return
get_responses = parse_raw_as(List[QCGeneratorGETResponse], contents)
statuses = {get_response.qc_calc_status for get_response in get_responses}
errors = [
json.loads(get_response.qc_calc_error)
for get_response in get_responses
if get_response.qc_calc_error is not None
]
self.error = json.dumps(errors)
self.status = "running"
if "errored" in statuses:
self.status = "errored"
elif statuses == {"waiting"}:
self.status = "waiting"
elif statuses == {"success"}:
self.status = "success"
self.results = {
get_response.qc_calc_id: get_response.qc_calc_result
for get_response in get_responses
}
class OptimizationStage(_Stage):
type: Literal["optimization"] = "optimization"
id: Optional[str] = Field(
None, description="The id of the optimization associated with this stage."
)
result: Optional[BespokeOptimizationResults] = Field(
None, description="The result of the optimization."
)
@staticmethod
def _regenerate_torsion_parameters(
original_parameters: List[SMIRNOFFParameter],
fragmentation_result: FragmentationResult,
) -> List[Tuple[ProperTorsionSMIRKS, ProperTorsionSMIRKS]]:
parent = fragmentation_result.parent_molecule
parent_atom_symmetries = get_atom_symmetries(parent)
parent_map_symmetries = {
get_map_index(parent, i): parent_atom_symmetries[i]
for i in range(parent.n_atoms)
}
fragments = [fragment.molecule for fragment in fragmentation_result.fragments]
fragment_by_symmetry = {
tuple(
sorted(parent_map_symmetries[i] for i in result.bond_indices)
): fragment
for fragment, result in zip(fragments, fragmentation_result.fragments)
}
assert len(fragment_by_symmetry) == len(fragmentation_result.fragments)
fragment_map_to_atom_index = [
{j: i for i, j in fragment.properties.get("atom_map", {}).items()}
for fragment in fragments
]
return_value = []
for original_parameter in original_parameters:
if not isinstance(original_parameter, ProperTorsionSMIRKS):
continue
matches = parent.chemical_environment_matches(original_parameter.smirks)
matches = list(
set(
match if match[1] < match[2] else tuple(reversed(match))
for match in matches
)
)
# Figure out which fragments need to be matched by this parameter and
# update the parameter so it matches these AND the parent.
match_symmetries = {
tuple(sorted(parent_atom_symmetries[i] for i in match[1:3]))
for match in matches
}
match_fragments = [
fragment_by_symmetry[match_symmetry]
for match_symmetry in match_symmetries
]
target_atoms = [matches]
target_molecules = [parent]
for fragment, map_to_atom_index in zip(
match_fragments, fragment_map_to_atom_index
):
match_atoms = [
tuple(
map_to_atom_index.get(get_map_index(parent, i), None)
for i in match
)
for match in matches
]
                fragment_matches = [
                    match for match in match_atoms if all(i is not None for i in match)
                ]
                # Skip fragments that this parameter does not map onto at all, so the
                # atoms lists stay aligned with ``target_molecules``.
                if len(fragment_matches) == 0:
                    continue
                target_atoms.append(fragment_matches)
                target_molecules.append(fragment)
parameter = original_parameter.copy(deep=True)
parameter.smirks = ClusterGraph(
mols=[molecule.to_rdkit() for molecule in target_molecules],
smirks_atoms_lists=target_atoms,
layers="all",
).as_smirks(compress=False)
return_value.append((original_parameter, parameter))
return return_value
async def _regenerate_parameters(
self,
fragmentation_stage: FragmentationStage,
input_schema: BespokeOptimizationSchema,
):
initial_force_field = ForceField(
input_schema.initial_force_field, allow_cosmetic_attributes=True
)
torsion_parameters = self._regenerate_torsion_parameters(
input_schema.parameters, fragmentation_stage.result
)
torsion_handler = initial_force_field["ProperTorsions"]
for original_parameter, new_parameter in torsion_parameters:
force_field_parameter = torsion_handler.parameters[
original_parameter.smirks
]
force_field_parameter.smirks = new_parameter.smirks
input_schema.parameters = [
*[
parameter
for parameter in input_schema.parameters
if not isinstance(parameter, ProperTorsionSMIRKS)
],
*[parameter for _, parameter in torsion_parameters],
]
input_schema.initial_force_field = initial_force_field.to_string()
@staticmethod
async def _inject_bespoke_qc_data(
qc_generation_stage: QCGenerationStage,
input_schema: BespokeOptimizationSchema,
):
for i, target in enumerate(input_schema.targets):
local_qc_data = LocalQCData(
qc_records=[
qc_generation_stage.results[result_id]
for result_id in qc_generation_stage.ids[i]
]
)
target.reference_data = local_qc_data
async def enter(self, task: "CoordinatorTask"):
completed_stages = {stage.type: stage for stage in task.completed_stages}
input_schema = task.input_schema.copy(deep=True)
# Regenerate any parameters that should target both the parent molecule and
# its fragments
fragmentation_stage: FragmentationStage = completed_stages["fragmentation"]
# TODO: Move these methods onto the celery worker.
try:
await self._regenerate_parameters(fragmentation_stage, input_schema)
except BaseException as e:
self.status = "errored"
self.error = json.dumps(
f"Failed to generate SMIRKS patterns that match both the parent and "
f"torsion fragments: {str(e)}"
)
return
# Map the generated QC results into a local QC data class and update the schema
# to target these.
qc_generation_stage: QCGenerationStage = completed_stages["qc-generation"]
try:
await self._inject_bespoke_qc_data(qc_generation_stage, input_schema)
except BaseException as e:
self.status = "errored"
self.error = json.dumps(
f"Failed to inject the bespoke QC data into the optimization "
f"schema: {str(e)}"
)
return
async with httpx.AsyncClient() as client:
raw_response = await client.post(
f"http://127.0.0.1:"
f"{settings.BEFLOW_GATEWAY_PORT}"
f"{settings.BEFLOW_API_V1_STR}/"
f"{settings.BEFLOW_OPTIMIZER_PREFIX}",
data=serialize(
OptimizerPOSTBody(input_schema=input_schema), encoding="json"
),
)
if raw_response.status_code != 200:
self.error = json.dumps(raw_response.text)
self.status = "errored"
return
response = OptimizerPOSTResponse.parse_raw(raw_response.text)
self.id = response.optimization_id
async def update(self):
if self.status == "errored":
return
async with httpx.AsyncClient() as client:
raw_response = await client.get(
f"http://127.0.0.1:"
f"{settings.BEFLOW_GATEWAY_PORT}"
f"{settings.BEFLOW_API_V1_STR}/"
f"{settings.BEFLOW_OPTIMIZER_PREFIX}/{self.id}"
)
contents = raw_response.text
if raw_response.status_code != 200:
self.error = json.dumps(raw_response.text)
self.status = "errored"
return
get_response = OptimizerGETResponse.parse_raw(contents)
self.result = get_response.optimization_result
self.error = get_response.optimization_error
self.status = get_response.optimization_status
StageType = Union[FragmentationStage, QCGenerationStage, OptimizationStage]
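# Note added for clarity (not in the original file): each stage model tags itself with a
# ``Literal`` ``type`` field, so a serialized stage can be parsed back through the
# ``StageType`` union and pydantic will pick the matching class, e.g.:
#
#     raw = FragmentationStage().json()
#     stage = parse_raw_as(StageType, raw)
#     assert isinstance(stage, FragmentationStage)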
|
SimonBoothroyd/beflow
|
beflow/services/optimizer/worker.py
|
from typing import Union
import redis
from openff.bespokefit.optimizers import get_optimizer
from openff.bespokefit.schema.fitting import BespokeOptimizationSchema
from pydantic import parse_raw_as
from qcelemental.util import serialize
from beflow.services import settings
from beflow.utilities.celery import configure_celery_app
redis_connection = redis.Redis(
host=settings.BEFLOW_REDIS_ADDRESS,
port=settings.BEFLOW_REDIS_PORT,
db=settings.BEFLOW_REDIS_DB,
)
celery_app = configure_celery_app("optimizer", redis_connection)
@celery_app.task
def optimize(optimization_input_json: str) -> str:
input_schema = parse_raw_as(
Union[BespokeOptimizationSchema], optimization_input_json
)
optimizer = get_optimizer(input_schema.optimizer.type)
result = optimizer.optimize(input_schema, keep_files=True)
return serialize(result, encoding="json")
|
SimonBoothroyd/beflow
|
beflow/services/fragmenter/__init__.py
|
from beflow.services.fragmenter.app import router
__all__ = ["router"]
|
SimonBoothroyd/beflow
|
beflow/services/coordinator/__init__.py
|
from beflow.services.coordinator.app import router
__all__ = ["router"]
|
SimonBoothroyd/beflow
|
beflow/services/optimizer/__init__.py
|
from beflow.services.optimizer.app import router
__all__ = ["router"]
|
SimonBoothroyd/beflow
|
beflow/tests/utilities/test_depiction.py
|
import pytest
from beflow.utilities.depiction import (
_oe_smiles_to_image,
_rd_smiles_to_image,
smiles_to_image,
)
@pytest.mark.parametrize(
"to_image", [_oe_smiles_to_image, _rd_smiles_to_image, smiles_to_image]
)
def test_smiles_to_image(to_image):
try:
svg_contents = to_image("C", tuple())
except ModuleNotFoundError as e:
pytest.skip(f"missing optional dependency - {e.name}")
return
assert len(svg_contents) > 0 and "svg" in svg_contents
|
SimonBoothroyd/beflow
|
beflow/tests/services/fragmenter/test_worker.py
|
import json
from openff.fragmenter.fragment import FragmentationResult, PfizerFragmenter
from openff.toolkit.topology import Molecule
from beflow.services.fragmenter import worker
def test_fragment():
molecule = Molecule.from_smiles("CCCCCC")
result_json = worker.fragment(
cmiles=molecule.to_smiles(mapped=True),
fragmenter_json=PfizerFragmenter().json(),
target_bond_smarts=["[#6]-[#6]-[#6:1]-[#6:2]-[#6]-[#6]"],
)
assert isinstance(result_json, str)
result_dict = json.loads(result_json)
assert isinstance(result_dict, dict)
result = FragmentationResult.parse_obj(result_dict)
are_isomorphic, _ = Molecule.are_isomorphic(
molecule, result.parent_molecule, return_atom_map=True
)
assert are_isomorphic
assert len(result.fragments) == 1
assert result.provenance["options"]["scheme"] == "Pfizer"
|
SimonBoothroyd/beflow
|
beflow/tests/services/optimizer/test_worker.py
|
import json
from openff.bespokefit.optimizers import ForceBalanceOptimizer
from openff.bespokefit.schema.fitting import BespokeOptimizationSchema
from openff.bespokefit.schema.optimizers import ForceBalanceSchema
from openff.bespokefit.schema.results import BespokeOptimizationResults
from openff.fragmenter.fragment import WBOFragmenter
from beflow.services.optimizer import worker
def test_optimize(monkeypatch):
input_schema = BespokeOptimizationSchema(
smiles="CC",
initial_force_field="openff-2.0.0.offxml",
parameters=[],
parameter_hyperparameters=[],
fragmentation_engine=WBOFragmenter(),
targets=[],
optimizer=ForceBalanceSchema(max_iterations=1),
)
input_schema_json = input_schema.json()
expected_output = BespokeOptimizationResults(
input_schema=input_schema, provenance={}, status="running"
)
received_schema = None
def mock_optimize(schema, keep_files=False):
nonlocal received_schema
received_schema = schema
return expected_output
monkeypatch.setattr(ForceBalanceOptimizer, "optimize", mock_optimize)
result_json = worker.optimize(optimization_input_json=input_schema_json)
assert isinstance(result_json, str)
result_dict = json.loads(result_json)
assert isinstance(result_dict, dict)
result = BespokeOptimizationResults.parse_obj(result_dict)
assert result.status == expected_output.status
assert received_schema.json() == input_schema_json
|
SimonBoothroyd/beflow
|
beflow/tests/conftest.py
|
import subprocess
import pytest
import redis
from beflow.utilities.redis import launch_redis
@pytest.fixture(scope="session")
def redis_session(tmpdir_factory):
redis_exists_error = RuntimeError(
"It looks like a redis server is already running with the test "
"settings. Exiting early in-case this is a production redis server."
)
try:
connection = redis.Redis(port=5678, db=0)
keys = connection.keys("*")
assert len(keys) == 0
except redis.ConnectionError:
pass
except AssertionError:
raise redis_exists_error
else:
raise redis_exists_error
launch_redis(
port=5678,
stderr_file=subprocess.DEVNULL,
stdout_file=subprocess.DEVNULL,
persistent=False,
directory=str(tmpdir_factory.mktemp("redis")),
)
@pytest.fixture(scope="session")
def redis_connection(redis_session) -> redis.Redis:
return redis.Redis(port=5678, db=0)
@pytest.fixture(scope="function", autouse=True)
def reset_redis(redis_connection, monkeypatch):
redis_connection.flushdb()
|
SimonBoothroyd/beflow
|
beflow/services/qcgenerator/app.py
|
import json
from typing import List, Optional, Union
from fastapi import APIRouter, Query
from fastapi.responses import Response
from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask
from pydantic import parse_obj_as
from qcelemental.models import AtomicResult, OptimizationResult
from qcengine.procedures.torsiondrive import TorsionDriveResult
from beflow.services import settings
from beflow.services.qcgenerator import worker
from beflow.services.qcgenerator.models import (
QCGeneratorGETResponse,
QCGeneratorPOSTBody,
QCGeneratorPOSTResponse,
)
from beflow.utilities.celery import get_task_information
from beflow.utilities.depiction import IMAGE_UNAVAILABLE_SVG, smiles_to_image
router = APIRouter()
def _retrieve_qc_result(qc_calc_id: str, results: bool) -> QCGeneratorGETResponse:
qc_task_info = get_task_information(worker.celery_app, qc_calc_id)
qc_calc_type = worker.redis_connection.hget("qcgenerator:types", qc_calc_id)
# Because QCElemental models contain numpy arrays that aren't natively JSON
# serializable we need to work with plain dicts of primitive types here.
# noinspection PyTypeChecker
return {
"qc_calc_id": qc_calc_id,
"qc_calc_status": qc_task_info["status"],
"qc_calc_type": qc_calc_type.decode(),
"qc_calc_result": None if not results else qc_task_info["result"],
"qc_calc_error": json.dumps(qc_task_info["error"]),
}
@router.get("/" + settings.BEFLOW_QC_COMPUTE_PREFIX + "s")
def get_qc_results(
ids: Optional[List[str]] = Query(None), results: bool = True
) -> List[QCGeneratorGETResponse]:
if ids is None:
raise NotImplementedError()
return [_retrieve_qc_result(qc_calc_id, results) for qc_calc_id in ids]
@router.get("/" + settings.BEFLOW_QC_COMPUTE_PREFIX + "/{qc_calc_id}")
def get_qc_result(qc_calc_id: str, results: bool = True) -> QCGeneratorGETResponse:
return _retrieve_qc_result(qc_calc_id, results)
@router.post("/" + settings.BEFLOW_QC_COMPUTE_PREFIX)
def post_qc_result(body: QCGeneratorPOSTBody) -> QCGeneratorPOSTResponse:
if isinstance(body.input_schema, Torsion1DTask):
compute = worker.compute_torsion_drive
elif isinstance(body.input_schema, OptimizationTask):
compute = worker.compute_optimization
elif isinstance(body.input_schema, HessianTask):
compute = worker.compute_hessian
else:
raise NotImplementedError()
task = compute.delay(task_json=body.input_schema.json())
worker.redis_connection.hset("qcgenerator:types", task.id, body.input_schema.type)
return QCGeneratorPOSTResponse(
qc_calc_id=task.id, qc_calc_type=body.input_schema.type
)
@router.get("/" + settings.BEFLOW_QC_COMPUTE_PREFIX + "/{qc_calc_id}/image/molecule")
def get_qc_result_molecule_image(qc_calc_id: str):
task_info = get_task_information(worker.celery_app, qc_calc_id)
if task_info["status"] != "success":
return Response(IMAGE_UNAVAILABLE_SVG, media_type="image/svg+xml")
qc_result = parse_obj_as(
Union[TorsionDriveResult, OptimizationResult, AtomicResult], task_info["result"]
)
if isinstance(qc_result, (OptimizationResult, TorsionDriveResult)):
highlight_atoms = (
None
if isinstance(qc_result, OptimizationResult)
else tuple(i + 1 for i in qc_result.keywords["dihedrals"][0])
)
svg_content = smiles_to_image(
qc_result.initial_molecule.extras[
"canonical_isomeric_explicit_hydrogen_mapped_smiles"
],
highlight_atoms=highlight_atoms,
)
elif isinstance(qc_result, AtomicResult):
svg_content = smiles_to_image(
qc_result.molecule.extras[
"canonical_isomeric_explicit_hydrogen_mapped_smiles"
]
)
else:
raise NotImplementedError()
return Response(svg_content, media_type="image/svg+xml")
|
SimonBoothroyd/beflow
|
beflow/tests/services/test_gateway.py
|
import functools
from multiprocessing import Process
import pytest
from beflow.services import settings
from beflow.services.gateway import app, launch, wait_for_gateway
def test_default_routes_loaded():
found_routes = [router.path for router in app.routes]
assert all(
route in found_routes
for route in [
"/api/v1/fragmenter",
"/api/v1/optimizer",
"/api/v1/qc-calc",
"/api/v1/optimization",
]
)
@pytest.mark.parametrize("directory", [None, "."])
def test_launch(directory):
process = Process(target=functools.partial(launch, directory))
process.start()
wait_for_gateway()
process.terminate()
process.join()
def test_wait_for_gateway_timeout(monkeypatch):
monkeypatch.setattr(settings, "BEFLOW_GATEWAY_PORT", 111)
with pytest.raises(RuntimeError, match="The gateway could not be reached"):
wait_for_gateway(n_retries=1)
|
SimonBoothroyd/beflow
|
beflow/tests/utilities/test_celery.py
|
import functools
import celery
import pytest
from celery import shared_task
from beflow.tests.mocking.celery import mock_celery_result
from beflow.utilities.celery import (
_spawn_worker,
configure_celery_app,
get_status,
get_task_information,
spawn_worker,
)
@shared_task
def mock_task_success():
return '{"key": "value"}'
@shared_task
def mock_task_error():
    raise RuntimeError("mock error occurred")
@pytest.mark.parametrize(
"task_result, expected_status",
[
(mock_celery_result(status="PENDING"), "waiting"),
(mock_celery_result(status="STARTED"), "running"),
(mock_celery_result(status="RETRY"), "running"),
(mock_celery_result(status="FAILURE"), "errored"),
(mock_celery_result(status="SUCCESS"), "success"),
],
)
def test_get_status(task_result, expected_status):
assert get_status(task_result) == expected_status
def test_configure_celery_app(redis_connection):
celery_app = configure_celery_app(
app_name="test-app-name",
redis_connection=redis_connection,
include=["beflow.services.fragmenter.worker"],
)
assert isinstance(celery_app, celery.Celery)
assert celery_app.main == "test-app-name"
assert celery_app.conf.task_track_started is True
assert celery_app.conf.task_default_queue == "test-app-name"
assert (
celery_app.backend.client.connection_pool.connection_kwargs["port"]
== redis_connection.connection_pool.connection_kwargs["port"]
)
def test_spawn_no_worker(celery_app):
assert spawn_worker(celery_app, concurrency=0) is None
@pytest.mark.parametrize(
"spawn_function",
[_spawn_worker, functools.partial(spawn_worker, asynchronous=False)],
)
def test_spawn_worker(spawn_function, celery_app, monkeypatch):
started = False
def mock_start(self):
nonlocal started
started = True
monkeypatch.setattr(celery_app.Worker, "start", mock_start)
spawn_function(celery_app, concurrency=1)
assert started
def test_get_task_information_success(celery_app, celery_worker):
task_result = mock_task_success.delay()
task_result.get(timeout=10)
task_info = get_task_information(celery_app, task_result.id)
assert task_info["id"] == task_result.id
assert task_info["status"] == "success"
assert task_info["error"] is None
assert task_info["result"] == {"key": "value"}
def test_get_task_information_error(celery_app, celery_worker):
task_result = mock_task_error.delay()
task_result.get(propagate=False, timeout=10)
task_info = get_task_information(celery_app, task_result.id)
assert task_info["id"] == task_result.id
assert task_info["status"] == "errored"
assert task_info["result"] is None
assert task_info["error"]["type"] == "RuntimeError"
assert task_info["error"]["message"] == "mock error occured"
assert task_info["error"]["traceback"] is not None
|
Bruck1701/Windhoek-Weather-Bot
|
SensorReader.py
|
import time
import serial
import sys
from datetime import datetime
import ast
class ArduinoSensor:
def __init__(self):
        self.ser = serial.Serial('/dev/ttyACM0', 9600, timeout=5)
    # Block until a complete data frame (a line starting with '{') is read from the serial port.
def ReadInput(self):
line=""
dataframe_columns=['Temp','Hum','Vis','IR']
date_hour=datetime.now()
while True:
line =self.ser.readline().decode("utf-8")
if len(line)!=0:
if(line[0]=='{'):
#print(line)
frame_dict=ast.literal_eval(line)
frame_dict['date']=date_hour
#print(dict(frame_dict))
return(frame_dict)
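# Minimal usage sketch (not part of the original script): it assumes an Arduino is
# attached on /dev/ttyACM0 and emits '{...}' formatted lines as parsed above.
if __name__ == "__main__":
    sensor = ArduinoSensor()
    frame = sensor.ReadInput()
    print(frame)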
|
Bruck1701/Windhoek-Weather-Bot
|
LambdaFunctions/subscriberDDB.py
|
import json
import boto3
import time
import os
from decimal import Decimal
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table(os.environ['TABLE_NAME'])
def lambda_handler(event, context):
ts = time.time()
message_attrs = event['Records'][0]['Sns']['MessageAttributes']
print(ts)
print(message_attrs)
delta = 2592000 # ts+delta: 30 days from today
response = table.put_item(
Item={
'timestamp': Decimal(str(ts)),
'expire_on': Decimal(str(ts+delta)),
'datetime': message_attrs['datetime']['Value'],
'temp': Decimal(message_attrs['temp']['Value']),
'hum': Decimal(message_attrs['hum']['Value']),
'ir': Decimal(message_attrs['ir']['Value']),
'vis': Decimal(message_attrs['vis']['Value'])
}
)
return 0
|
Bruck1701/Windhoek-Weather-Bot
|
Publisher.py
|
import config
import tweepy
import datetime
from SensorReader import ArduinoSensor
import requests
import boto3
def getWeatherDataAPI():
temp_data={}
url = "https://api.openweathermap.org/data/2.5/weather?q=Windhoek,na&appid="+config.WEATHER_API
response = requests.get(url)
if "main" in response.json():
temp_data["temp"]= "{:.2f}".format((float(response.json()["main"]["temp"])- 273.15))
temp_data["pressure"] = response.json()["main"]["pressure"]
temp_data["humidity"] = response.json()["main"]["humidity"]
return temp_data
return None
if __name__ == "__main__":
arduino = ArduinoSensor()
data = arduino.ReadInput()
api_data = getWeatherDataAPI()
info = ""
degree_sign= u'\N{DEGREE SIGN}'
msg=f"[Windhoek/NA]: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\nSensor_Temp: {data['Temp']}{degree_sign}, Humidity: {data['Hum']}%\nCaptured_IR: {data['IR']}, Visible_Light: {data['Vis']}\n"
msg+=f"\nOpenWeather_data: Temp: {api_data['temp']}{degree_sign}, Hum: {api_data['humidity']}%, Press: {api_data['pressure']}"
client = boto3.client('sns')
response = client.publish(
TopicArn=config.TOPIC_ARN,
Message=msg,
Subject='Weather Data',
MessageAttributes={
'datetime': {
'DataType': 'String',
'StringValue': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
},
'temp': {
'DataType': 'String',
'StringValue': str(data['Temp'])
},
'hum': {
'DataType': 'String',
'StringValue': str(data['Hum'])
},
'ir': {
'DataType': 'String',
'StringValue': str(data['IR'])
},
'vis': {
'DataType': 'String',
'StringValue': str(data['Vis'])
}
}
)
print(response)
|
Bruck1701/Windhoek-Weather-Bot
|
LambdaFunctions/subscriberTwitter.py
|
import json
import boto3
import tweepy
ssm = boto3.client('ssm',region_name='us-east-1')
def lambda_handler(event, context):
params = ssm.get_parameters(Names=['/wwbot-app/dev/twitter/api_key',
'/wwbot-app/dev/twitter/api_secret_key',
'/wwbot-app/dev/twitter/access_token',
'/wwbot-app/dev/twitter/access_token_secret'], WithDecryption=True)['Parameters']
    # Look the parameters up by name rather than relying on the order of the
    # GetParameters response matching the order of the Names list.
    values = {param['Name']: param['Value'] for param in params}
    access_token = values['/wwbot-app/dev/twitter/access_token']
    access_token_secret = values['/wwbot-app/dev/twitter/access_token_secret']
    api_key = values['/wwbot-app/dev/twitter/api_key']
    api_secret_key = values['/wwbot-app/dev/twitter/api_secret_key']
auth = tweepy.OAuthHandler(api_key, api_secret_key)
auth.set_access_token(access_token, access_token_secret)
con = tweepy.API(auth)
message = event['Records'][0]['Sns']['Message']
con.update_status("☁️ "+message)
return 0
|
piotrwolinski-intel/incubator-mxnet
|
tests/python/dnnl/subgraphs/test_fc_subgraph.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
import pytest
from subgraph_common import check_fusion, check_neg_fusion, check_neg_fusion_quantized, check_quantize
from subgraph_common import CustomNormalInit, DATA_SHAPE, TailNegBlock
from mxnet.contrib import quantization
from mxnet.gluon import nn
from mxnet.test_utils import assert_almost_equal_with_err
fc_post_ops_list=['relu', 'sigmoid', 'log_sigmoid', 'mish', 'tanh', 'softrelu', 'gelu', 'elu', 'leaky',
'square', 'square_root', 'abs', 'exp', 'bounded_relu']
def test_float64_fallback():
dtype = 'float64'
net = nn.Dense(units=3, dtype=dtype)
in_data = mx.np.random.normal(size=[3,3,3,3], dtype=dtype)
net.initialize()
out = net(in_data)
out.wait_to_read()
assert in_data.dtype == out.dtype
@mx.util.use_np
@pytest.mark.parametrize('data_shape', DATA_SHAPE)
@pytest.mark.parametrize('use_bias', [True, False])
@pytest.mark.parametrize('flatten', [True, False])
def test_single_fc(data_shape, use_bias, flatten):
class SingleFC(nn.HybridBlock):
def __init__(self, use_bias, flatten, **kwargs):
super(SingleFC, self).__init__(**kwargs)
self.fc = nn.Dense(units=64, use_bias=use_bias, flatten=flatten)
def forward(self, x):
return self.fc(x)
attrs = {'fc': {}}
net = SingleFC(use_bias, flatten)
check_fusion(net, data_shape, attrs, check_quantization=flatten)
@mx.util.use_np
@pytest.mark.parametrize('data_shape', DATA_SHAPE)
@pytest.mark.parametrize('use_bias', [True, False])
@pytest.mark.parametrize('flatten', [True, False])
@pytest.mark.parametrize('out_type', ['int8', 'auto'])
@pytest.mark.parametrize('module', [mx.npx, mx.nd])
def test_fc_reshape(data_shape, use_bias, out_type, flatten, module):
class FC_Reshape(nn.HybridBlock):
def __init__(self, use_bias, flatten, **kwargs):
super(FC_Reshape, self).__init__(**kwargs)
self.fc = nn.Dense(units=64, use_bias=use_bias, flatten=flatten)
def forward(self, x):
out = self.fc(x)
if module == mx.npx:
attrs = {"newshape": (1,-1)}
else:
attrs = {"shape": (1,-1)}
out = out.as_nd_ndarray()
out = getattr(module, "reshape")(out, **attrs)
return out.as_np_ndarray()
net = FC_Reshape(use_bias, flatten)
check_quantize(net, data_shape, out_type, name='fc')
@mx.util.use_np
@pytest.mark.parametrize('data_shape', DATA_SHAPE)
@pytest.mark.parametrize('use_bias', [True, False])
@pytest.mark.parametrize('out_type', ['int8', 'auto'])
@pytest.mark.parametrize('module', [mx.np, mx.nd])
def test_fc_transpose(data_shape, use_bias, out_type, module):
class FC_Transpose(nn.HybridBlock):
def __init__(self, use_bias, **kwargs):
super(FC_Transpose, self).__init__(**kwargs)
self.fc = nn.Dense(units=64, use_bias=use_bias)
def forward(self, x):
out = self.fc(x)
if module == mx.nd:
out = out.as_nd_ndarray()
out = module.transpose(out)
return out.as_np_ndarray()
net = FC_Transpose(use_bias)
check_quantize(net, data_shape, out_type, name='fc')
@mx.util.use_np
@pytest.mark.parametrize('data_shape', DATA_SHAPE)
@pytest.mark.parametrize('use_bias', [True, False])
@pytest.mark.parametrize('flatten', [True, False])
@pytest.mark.parametrize('alg', fc_post_ops_list)
def test_fc_eltwise(data_shape, use_bias, flatten, alg):
# fc + eltwise fusion case
class FCEltwise(nn.HybridBlock):
def __init__(self, use_bias, flatten, alg, **kwargs):
super(FCEltwise, self).__init__(**kwargs)
self.fc = nn.Dense(units=64, use_bias=use_bias, flatten=flatten,
weight_initializer=CustomNormalInit(mean=0.5, sigma=0.1, bounded=True) if alg == 'square_root' else None)
#avoid calculating square root of negative values
self.alg = alg
def forward(self, x):
if self.alg == 'square_root':
x = abs(x)
fc_out = self.fc(x)
if self.alg in ['relu', 'sigmoid', 'log_sigmoid', 'mish', 'tanh', 'softrelu']:
out = mx.npx.activation(fc_out, act_type=self.alg)
elif self.alg in ['gelu', 'elu', 'leaky']:
out = mx.npx.leaky_relu(fc_out, act_type=self.alg)
elif self.alg == 'square':
out = mx.np.square(fc_out)
elif self.alg == 'square_root':
out = mx.np.sqrt(fc_out)
elif self.alg == 'abs':
out = mx.np.abs(fc_out)
elif self.alg == 'exp':
out = mx.np.exp(fc_out)
else:
out = mx.np.clip(fc_out, 0, 1.0)
return out
attrs = {'fc': {'with_eltwise': 'true'}}
net = FCEltwise(use_bias, flatten, alg)
check_fusion(net, data_shape, attrs, check_quantization=flatten)
@mx.util.use_np
@pytest.mark.parametrize('data_shape', DATA_SHAPE)
@pytest.mark.parametrize('use_bias', [True, False])
@pytest.mark.parametrize('flatten', [True, False])
def test_neg_fc_relu(data_shape, use_bias, flatten):
# fc + relu can't be fusion case
# eg.1
# fc -----------> relu
# |
# |
# ---------------> [custom op]
class NegFCReLU(nn.HybridBlock):
def __init__(self, use_bias, flatten, **kwargs):
super(NegFCReLU, self).__init__(**kwargs)
self.fc = nn.Dense(units=64, use_bias=use_bias, flatten=flatten)
self.act1 = nn.Activation('relu')
self.act2 = nn.Activation('sigmoid')
self.tail_neg = TailNegBlock()
def forward(self, x):
fc_out = self.fc(x)
return self.tail_neg(self.act1(fc_out), self.act2(fc_out))
attrs, excluded_attrs = [], []
net = NegFCReLU(use_bias, flatten)
check_neg_fusion(net, attrs, excluded_attrs, data_shape, name='fc')
@mx.util.use_np
@pytest.mark.parametrize('data_min,data_max,weight_min,weight_max', [
(-1, 1, 0, 0),
(-1, 1, -1e-6, +1e-6),
(0, 0, 1, 1),
(-1e-6, +1e-6, -1, 1),
(-1e-6, +1e-6, -1e-6, +1e-6),
(0, 0, 0, 0)
])
def test_quantized_fc_bias_overflow(data_min, data_max, weight_min, weight_max):
data_shape = (1, 32)
data_nd = mx.np.random.uniform(data_min, data_max, size=data_shape, device=mx.cpu())
weight_nd = mx.np.random.uniform(weight_min, weight_max, size=[64, 32], device=mx.cpu())
bias_nd = mx.np.random.uniform(-1, +1, size=[64], device=mx.cpu())
class FCBiasOverflow(nn.HybridBlock):
def __init__(self, dtype='float32', **kwargs):
super(FCBiasOverflow, self).__init__(**kwargs)
self.weight = mx.gluon.Parameter('weight', dtype=dtype, allow_deferred_init=True)
self.bias = mx.gluon.Parameter('bias', dtype=dtype, allow_deferred_init=True)
def forward(self, x):
conv1 = mx.npx.fully_connected(x, num_hidden=64, weight=self.weight.data(x.device),
no_bias=False, bias=self.bias.data(x.device))
return conv1
def infer_shape(self, x, *args):
self.weight.shape = (64, x.shape[x.ndim-1])
self.bias.shape = (64,)
net = FCBiasOverflow()
net.initialize()
net(data_nd) # dummy run
net.weight.data()[:] = weight_nd
net.bias.data()[:] = bias_nd
out = net(data_nd)
calib_data = mx.gluon.data.DataLoader(data_nd, batch_size=1)
qnet = quantization.quantize_net(net,
device=mx.cpu(),
exclude_layers=None,
exclude_operators=None,
quantized_dtype='int8',
calib_mode='naive',
calib_data=calib_data,
num_calib_batches=1,
quantize_mode='full')
out_quantized = qnet(data_nd)
assert_almost_equal_with_err(out.asnumpy(), out_quantized.asnumpy(),
rtol=1e-2, atol=1e-2, etol=0.01)
@mx.util.use_np
@pytest.mark.parametrize('data_shape', DATA_SHAPE)
@pytest.mark.parametrize('flatten', [True, False])
def test_fc_int8_and_fp32_outputs(data_shape, flatten):
# /---> Quantizable op
# Input ---> FC -|
# \---> Non quantizable op
class MultiOutputFC(nn.HybridBlock):
def __init__(self, **kwargs):
super(MultiOutputFC, self).__init__(**kwargs)
self.dense0 = nn.Dense(64, flatten=flatten)
self.dense1 = nn.Dense(64, flatten=flatten)
def forward(self, x):
x = self.dense0(x)
y = self.dense1(x) # quantizable
z = mx.npx.softmax(x) # non quantizable
return y + z
attrs = {'fc': {}}
net = MultiOutputFC()
check_fusion(net, data_shape, attrs, check_quantization=flatten)
@mx.util.use_np
@pytest.mark.parametrize('identity_node', ['dropout', 'copy'])
def test_fc_identity_eltwise(identity_node):
class FCIdentityEltwise(nn.HybridBlock):
def __init__(self, identity_node, **kwargs):
super(FCIdentityEltwise, self).__init__(**kwargs)
self.fc1 = nn.Dense(units=64, use_bias=False, weight_initializer=None, flatten=True)
self.fc2 = nn.Dense(units=64, use_bias=False, weight_initializer=None, flatten=True)
self.identity_node = identity_node
def forward(self, x):
out = self.fc1(x)
if self.identity_node == 'copy':
out = mx.np.copy(out)
else:
out = mx.npx.dropout(out)
out = mx.npx.activation(out, act_type='relu')
out = self.fc2(out)
if self.identity_node == 'copy':
out = mx.np.copy(out)
else:
out = mx.npx.dropout(out)
out = mx.npx.activation(out, act_type='relu')
return out
data_shape = (64, 4, 10, 10)
attrs = {'sg_onednn_fully_connected_eltwise_0' : {'with_eltwise': 'true'},
'sg_onednn_fully_connected_eltwise_1' : {'with_eltwise': 'true'}}
net = FCIdentityEltwise(identity_node)
check_fusion(net, data_shape, attrs, check_quantization=False)
def function_fc_add(data_shape, add_op, quantize_mode, fc_out_add, flatten, relu, out_type):
class FCWithSumExample(nn.HybridBlock):
def __init__(self, num_hidden, add_op, fc_out_add, **kwargs):
super(FCWithSumExample, self).__init__(**kwargs)
self.fca = nn.Dense(units=num_hidden, flatten=flatten)
self.elemwise_add = (add_op == 'elemwise_add')
self.fc_out_as_rhs = (fc_out_add == 'rhs')
self.relu = (relu == 'leaky_relu')
def forward(self, data1a, data2):
fc_out = self.fca(data1a)
if self.relu:
fc_out = mx.npx.leaky_relu(fc_out, act_type='gelu')
if self.fc_out_as_rhs:
if self.elemwise_add:
sum1 = mx.nd.elemwise_add(data2.as_nd_ndarray(), fc_out.as_nd_ndarray()).as_np_ndarray()
else:
sum1 = data2 + fc_out
else:
if self.elemwise_add:
sum1 = mx.nd.elemwise_add(fc_out.as_nd_ndarray(), data2.as_nd_ndarray()).as_np_ndarray()
else:
sum1 = fc_out + data2
return sum1
attrs = {'fc': {'with_sum': 'true'}}
if quantize_mode is not None:
attrs['fc']['quantized'] = 'true'
if quantize_mode == 'smart':
attrs['fc']['enable_float_output'] = 'true'
num_hidden=10
net = FCWithSumExample(num_hidden, add_op, fc_out_add)
if flatten:
data_shapes = [data_shape, (data_shape[0], num_hidden)]
else:
data_shapes = [data_shape, (*data_shape[0:-1], num_hidden)]
check_fusion(net, data_shapes, attrs,
out_types=[out_type],
check_fp32_fusion=(quantize_mode is None),
check_quantization=(quantize_mode is not None) and flatten,
quantize_mode=quantize_mode)
@mx.util.use_np
@pytest.mark.parametrize('data_shape', DATA_SHAPE)
@pytest.mark.parametrize('relu', ['noleaky_re', 'leaky_relu'])
@pytest.mark.parametrize('flatten', ['flat', 'nofl'])
@pytest.mark.parametrize('fc_out_add', ['lhs', 'rhs'])
@pytest.mark.parametrize('add_op', ['elemwise_add'])
def test_fc_add(data_shape, add_op, fc_out_add, flatten, relu):
function_fc_add(data_shape, add_op, None, fc_out_add, flatten=='flat', relu, None)
@mx.util.use_np
@pytest.mark.seed(1234) # Seed set because the test is not robust enough to operate on random data
@pytest.mark.parametrize('data_shape', DATA_SHAPE)
@pytest.mark.parametrize('quantize_mode', ['full', 'smart'])
@pytest.mark.parametrize('out_type', ['int8', 'auto'])
@pytest.mark.parametrize('fc_out_add', ['lhs', 'rhs'])
@pytest.mark.parametrize('add_op', ['elemwise_add'])
def test_fc_add_quantized(data_shape, add_op, quantize_mode, fc_out_add, out_type):
function_fc_add(data_shape, add_op, quantize_mode, fc_out_add, True, 'noleaky_re', out_type)
class NegFCAdd(nn.HybridBlock):
#
# data --------------------------> 'add_op' ------------>
# / \
# sg_oned_dnn_fully_connected ----> npi_add -->
# \ /
# npi_multiply_scalar -->
def __init__(self, num_hidden, add_op, fc_out_add, scaled_fc_out, flatten, **kwargs):
super(NegFCAdd, self).__init__(**kwargs)
self.fca = nn.Dense(units=num_hidden, flatten=flatten)
self.elemwise_add = (add_op == 'elemwise_add')
self.fc_out_as_rhs = (fc_out_add == 'rhs')
self.scaled_fc_out_as_rhs = (scaled_fc_out == 's_rhs')
def forward(self, data1a, data2):
fc_out = self.fca(data1a)
scaled_fc_out = fc_out * 200.0
if self.fc_out_as_rhs:
if self.elemwise_add:
sum1 = mx.nd.elemwise_add(data2.as_nd_ndarray(), fc_out.as_nd_ndarray()).as_np_ndarray()
else:
sum1 = data2 + fc_out
else:
if self.elemwise_add:
sum1 = mx.nd.elemwise_add(fc_out.as_nd_ndarray(), data2.as_nd_ndarray()).as_np_ndarray()
else:
sum1 = fc_out + data2
if self.scaled_fc_out_as_rhs:
sum2 = sum1 + scaled_fc_out
else:
sum2 = scaled_fc_out + sum1
return sum2
@mx.util.use_np
@pytest.mark.parametrize('add_op', ['elemwise_add'])
@pytest.mark.parametrize('data_shape', [DATA_SHAPE[0]])
@pytest.mark.parametrize('flatten', ['flat', 'nofl'])
@pytest.mark.parametrize('fc_out_add', ['lhs', 'rhs'])
@pytest.mark.parametrize('scaled_fc_out', ['s_lhs', 's_rhs'])
def test_neg_fc_add(data_shape, add_op, flatten, fc_out_add, scaled_fc_out):
'''
    Test that a FullyConnected operator whose output is not consumed by only the one 'add_op' input is not fused.
    See NegFCAdd for the graph used in this test.
'''
flatten = (flatten == 'flat')
num_hidden = 10
net = NegFCAdd(num_hidden, add_op, fc_out_add, scaled_fc_out, flatten)
if flatten:
data_shapes = [data_shape, (data_shape[0], num_hidden)]
else:
data_shapes = [data_shape, (*data_shape[0:-1], num_hidden)]
attrs = []
excluded_attrs = ['with_sum']
check_neg_fusion(net, attrs, excluded_attrs, data_shapes, name='fc')
@mx.util.use_np
@pytest.mark.parametrize('add_op', ['elemwise_add'])
@pytest.mark.parametrize('data_shape', [DATA_SHAPE[1]])
@pytest.mark.parametrize('fc_out_add', ['lhs', 'rhs'])
@pytest.mark.parametrize('scaled_fc_out', ['s_lhs', 's_rhs'])
def test_neg_fc_add_quantized(data_shape, add_op, fc_out_add, scaled_fc_out):
'''
    Test that a FullyConnected operator whose output is not consumed by only the one 'add_op' input
    is not fused in the quantized model.
    See NegFCAdd for the graph used in this test.
'''
num_hidden = 10
net = NegFCAdd(num_hidden, add_op, fc_out_add, scaled_fc_out, True)
data_shapes = [data_shape, (data_shape[0], num_hidden)]
attrs = []
excluded_attrs = ['with_sum']
check_neg_fusion_quantized(net, attrs, excluded_attrs, data_shapes, name='fc')
|
piotrwolinski-intel/incubator-mxnet
|
tests/python/gpu/test_amp.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
from pathlib import Path
curr_path = Path(__file__).resolve().parent
sys.path.insert(0, str(curr_path.parent))
sys.path.insert(0, str(curr_path.parent/'unittest'))
import mxnet as mx
import pytest
from mxnet import amp
from mxnet.test_utils import set_default_device
from mxnet.gluon import nn, rnn
import amp.common as amp_common_tests
from common import assert_raises_cudnn_not_satisfied
AMP_DTYPE = 'float16'
set_default_device(mx.gpu(0))
def test_fp16_coverage():
amp_common_tests.test_amp_coverage(AMP_DTYPE, 'FP16')
@mx.util.use_np
def test_fp16_basic_use():
amp_common_tests.test_amp_basic_use(AMP_DTYPE)
@mx.util.use_np
def test_fp16_offline_casting():
amp_common_tests.test_amp_offline_casting(AMP_DTYPE)
@mx.util.use_np
def test_fp16_offline_casting_shared_params():
amp_common_tests.test_amp_offline_casting_shared_params(AMP_DTYPE)
@mx.util.use_np
def test_fp16_fp32_ops_order_independence():
amp_common_tests.test_lp16_fp32_ops_order_independence(AMP_DTYPE)
@mx.util.use_np
def test_fp16_test_node_excluding():
amp_common_tests.test_amp_node_excluding(AMP_DTYPE)
@pytest.mark.skip(reason='Error during waitall(). Tracked in #18099')
@assert_raises_cudnn_not_satisfied(min_version='5.1.10')
def test_amp_conversion_rnn(amp_tests):
with mx.Device(mx.gpu(0)):
model = nn.HybridSequential()
model.add(rnn.LSTM(hidden_size=10, num_layers=2, bidirectional=True))
model.add(nn.Dense(2))
model.initialize()
model.hybridize()
out = model(mx.nd.ones((2, 3, 4)))
new_model = amp.convert_hybrid_block(model)
out2 = new_model(mx.nd.ones((2, 3, 4)))
mx.test_utils.assert_almost_equal(out.asnumpy(), out2.asnumpy(), atol=1e-2, rtol=1e-2)
|
piotrwolinski-intel/incubator-mxnet
|
tests/python/amp/common.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import warnings
import collections
import mxnet as mx
from mxnet import amp
from mxnet.gluon import nn
from mxnet.operator import get_all_registered_operators_grouped
def test_amp_coverage(lp_dtype, lp_name):
conditional = [item[0] for item in amp.list_conditional_fp32_ops(lp_dtype)]
lp16_ops = amp.list_lp16_ops(lp_dtype)
lp16_fp32_ops = amp.list_lp16_fp32_ops(lp_dtype)
fp32_ops = amp.list_fp32_ops(lp_dtype)
widest_ops = amp.list_widest_type_cast(lp_dtype)
all_lp_lists = [lp16_ops, lp16_fp32_ops, fp32_ops, widest_ops, conditional]
# Check for duplicates
for op_list in all_lp_lists:
ret = [op for op, count in collections.Counter(op_list).items() if count > 1]
assert ret == [], "Elements " + str(ret) + " are duplicated in the AMP lists."
all_lp_ops = [op for op_list in all_lp_lists for op in op_list]
ret = [op for op, count in collections.Counter(all_lp_ops).items() if count > 1]
assert ret == [], "Elements " + str(ret) + " exist in more than 1 AMP list."
# Check the coverage
covered_ops = set(all_lp_ops)
all_mxnet_ops = get_all_registered_operators_grouped()
required_ops = {op for op in all_mxnet_ops if not "backward" in op}
extra_ops = covered_ops - required_ops
assert not extra_ops, f"{len(extra_ops)} operators are not needed in the AMP lists: {sorted(extra_ops)}"
guidelines = f"""Please follow these guidelines for choosing a proper list:
- if your operator is not to be used in a computational graph
(e.g. image manipulation operators, optimizers) or does not have
inputs, put it in {lp_name.upper()}_FP32_FUNCS list,
- if your operator requires FP32 inputs or is not safe to use with lower
precision, put it in FP32_FUNCS list,
- if your operator supports both FP32 and lower precision, has
multiple inputs and expects all inputs to be of the same
type, put it in WIDEST_TYPE_CASTS list,
- if your operator supports both FP32 and lower precision and has
either a single input or supports inputs of different type,
put it in {lp_name.upper()}_FP32_FUNCS list,
- if your operator is both safe to use in lower precision and
it is highly beneficial to use it in lower precision, then
put it in {lp_name.upper()}_FUNCS (this is unlikely for new operators)
- If you are not sure which list to choose, FP32_FUNCS is the
safest option"""
missing_ops = required_ops - covered_ops
if len(missing_ops) > 0:
warnings.warn(f"{len(missing_ops)} operators {sorted(missing_ops)} do not exist in AMP lists "
f"(in python/mxnet/amp/lists/symbol_{lp_name.lower()}.py) - please add them. \n{guidelines}")
def test_amp_basic_use(lp_dtype):
class TestNet(nn.HybridBlock):
def __init__(self):
super().__init__()
self.fc1 = nn.Dense(4)
self.fc2 = nn.Dense(4)
def forward(self, x):
x = self.fc1(x)
x = self.fc2(x)
return x.reshape((-1, 2, 2))
data_example = mx.np.random.uniform(-1, 1, (4, 4))
net = TestNet()
net.initialize()
net = amp.convert_hybrid_block(net, data_example, lp_dtype)
lp16_casts = 1 # cast for network input
lp16_casts += 2 # cast for weights and bias of `fc1`
lp16_casts += 2 # cast for weights and bias of `fc2`
other_casts = 1 # cast for the network output (from lp16 to f32)
lp16_tensors = 1 # cast network input
lp16_tensors += 3 # cast weights and bias of `fc1`, `fc1` output
lp16_tensors += 3 # cast weights and bias of `fc2`, `fc2` output
lp16_tensors += 1 # reshape output
check_amp_net_stats(lp_dtype, net, data_example, lp16_tensors_num=lp16_tensors, lp16_casts_num=lp16_casts,
other_casts_num=other_casts)
def test_amp_offline_casting(lp_dtype):
class TestNet(nn.HybridBlock):
def __init__(self):
super().__init__()
self.lp16_op1 = nn.Conv2D(4, 3)
self.lp16_op2 = nn.Conv2DTranspose(4, 3)
self.fp32_op = nn.Dense(4)
def forward(self, x):
x = self.lp16_op1(x)
x = self.lp16_op2(x)
x = x.reshape(x.shape[0], -1)
with nn.HybridBlock.OptConstraint.disable_amp():
x = self.fp32_op(x)
return x
net = TestNet()
net.initialize()
data_example = mx.np.random.uniform(-1, 1, (4, 3, 16, 16))
lp_net = amp.convert_hybrid_block(net, data_example, lp_dtype, cast_params_offline=True)
check_amp_net_stats(lp_dtype, lp_net, data_example, lp16_tensors_num=4,
lp16_casts_num=1, other_casts_num=1)
for name, data in lp_net.collect_params().items():
assert mx.nd.get_dtype_name(data.dtype) == ('float32' if 'fp32_op' in name else lp_dtype)
def test_amp_offline_casting_shared_params(lp_dtype):
COMMON_SIZE = 4
class TestNet(nn.HybridBlock):
def __init__(self):
super().__init__()
self.lp16_op1 = nn.Dense(COMMON_SIZE)
self.lp16_op2 = nn.Dense(COMMON_SIZE)
self.lp16_op2.share_parameters({'weight': self.lp16_op1.weight})
self.fp32_op = nn.Dense(COMMON_SIZE)
self.fp32_op.share_parameters({'bias': self.lp16_op2.bias})
def forward(self, x):
x = self.lp16_op1(x)
x1 = self.lp16_op2(x)
with nn.HybridBlock.OptConstraint.disable_amp():
x2 = self.fp32_op(x)
x = mx.np.concat((x1, x2), axis=1)
return x
net = TestNet()
net.initialize()
data_example = mx.np.random.uniform(-1, 1, (4, COMMON_SIZE))
lp_net = amp.convert_hybrid_block(net, data_example, lp_dtype, cast_params_offline=True)
check_amp_net_stats(lp_dtype, lp_net, data_example, lp16_tensors_num=4,
lp16_casts_num=2, other_casts_num=2)
for name, data in lp_net.collect_params().items():
assert mx.nd.get_dtype_name(data.dtype) == ('float32' if 'fp32_op' in name else lp_dtype)
def test_lp16_fp32_ops_order_independence(lp_dtype):
class TestNet(nn.HybridBlock):
def __init__(self, lp16_fp32_is_first):
super().__init__()
if lp16_fp32_is_first:
self.first = mx.npx.batch_flatten # lp16_fp32_op
self.second = nn.Dense(4)
else:
self.first = nn.Dense(4)
self.second = mx.npx.batch_flatten # lp16_fp32_op
def forward(self, x):
x = 2**x
x1 = self.first(x)
x2 = self.second(x)
return x1, x2
data_example = mx.np.random.uniform(-1, 1, (4, 16))
for lp16_fp32_is_second in [False, True]:
net = TestNet(lp16_fp32_is_second)
net.initialize()
net = amp.convert_hybrid_block(net, data_example, lp_dtype, cast_params_offline=True)
check_amp_net_stats(lp_dtype, net, data_example, lp16_tensors_num=3,
lp16_casts_num=1, other_casts_num=2)
def test_amp_node_excluding(lp_dtype):
DISABLE_AMP_ATTR_DICT = {'__opt_constraint__': str(
mx.gluon.HybridBlock.OptConstraint.Flag.DisableAMP.value)}
data = mx.sym.var('data')
wei = mx.sym.var('weights')
bias = mx.sym.var('bias')
# manually excluded
fc1 = mx.sym.FullyConnected(data, wei, bias, num_hidden=4, name='fc1', attr=DISABLE_AMP_ATTR_DICT)
# to be excluded using the conversion API
fc2 = mx.sym.FullyConnected(data, wei, bias, num_hidden=4, name='fc2')
symnet = mx.sym.Group([fc1, fc2])
net = mx.gluon.SymbolBlock(symnet, [data])
net.initialize()
# exclude only nodes with set attribute (only 1 node - `fc1`)
data_example = mx.np.random.uniform(-1, 1, (4, 16))
net_1_excluded = amp.convert_hybrid_block(net, data_example, lp_dtype)
lp16_tensors = 4 # cast `data`, weights and bias of `fc1`, `fc1` output
lp16_casts = 3 # `data` cast, casts for weights and bias of `fc1`
other_casts = 1 # cast for the network output (from lp16 to f32)
check_amp_net_stats(lp_dtype, net_1_excluded, data_example, lp16_tensors_num=lp16_tensors,
lp16_casts_num=lp16_casts, other_casts_num=other_casts)
# exclude using the `excluded_sym_names` argument (both nodes)
net_2_excluded = amp.convert_hybrid_block(net, data_example, lp_dtype,
excluded_sym_names=['fc1', 'fc2'])
check_amp_net_stats(lp_dtype, net_2_excluded, data_example, lp16_tensors_num=0,
lp16_casts_num=0, other_casts_num=0)
def check_amp_net_stats(lp_dtype, net, data_example, lp16_tensors_num, lp16_casts_num, other_casts_num):
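    """Run `net` on `data_example` with an op hook attached and assert that the numbers
    of low-precision tensors, low-precision casts and other casts (e.g. back to fp32)
    match the expected counts."""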
lp16_tensors = set()
lp16_casts = set()
other_casts = set()
def inspect_output(tensor_name, op_name, tensor):
dtype = mx.nd.get_dtype_name(tensor.dtype)
if op_name == 'amp_cast':
if dtype == lp_dtype:
lp16_casts.add(tensor_name)
else:
other_casts.add(tensor_name)
if dtype == lp_dtype:
lp16_tensors.add(tensor_name)
net.register_op_hook(inspect_output)
net(data_example)
assert len(lp16_tensors) == lp16_tensors_num, f'Bad lp16 tensors! Present tensors: {sorted(lp16_tensors)}'
assert len(lp16_casts) == lp16_casts_num, f'Bad lp16 casts! Present casts: {sorted(lp16_casts)}'
assert len(other_casts) == other_casts_num, f'Bad casts! Present casts: {sorted(other_casts)}'
|
piotrwolinski-intel/incubator-mxnet
|
python/mxnet/amp/lists/symbol_bf16.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
"""Lists of functions whitelisted/blacklisted for automatic mixed precision in symbol API."""
from ...runtime import Features
# Functions that should be cast to lower precision
BF16_FUNCS = [
'Convolution',
'Deconvolution',
'FullyConnected'
]
if Features.instance.is_enabled('ONEDNN'):
BF16_FUNCS.extend([
'_sg_onednn_conv',
'_sg_onednn_fully_connected',
'_sg_onednn_selfatt_qk',
'_sg_onednn_selfatt_valatt'
])
# Functions that should not be cast, either because
# they are irrelevant (not used in the network itself,
# e.g. image transformations or optimizers) or because
# they are dtype-neutral (they work in both bf16 and fp32)
BF16_FP32_FUNCS = [
'abs',
'BatchNorm',
'clip',
'Flatten',
'LRN',
'Pooling',
'relu',
'_shuffle',
'sqrt',
'square',
'tanh',
'_contrib_quantize_v2',
]
# Functions that, even when running in bfloat16, still need some of their params in float32.
BF16_USE_FP32_PARAMS = {
'BatchNormWithReLU': ["", "gamma", "beta", "moving_mean", "moving_var"],
'BatchNorm': ["", "gamma", "beta", "moving_mean", "moving_var"],
}
# Functions that have to be cast to FP32 due to possible
# overflows
FP32_FUNCS = [
'RNN',
'BilinearSampler',
'BlockGrad',
'Cast',
'cast_storage',
'Crop',
'Dropout',
'Embedding',
'GridGenerator',
'Pad',
'ROIPooling',
'Reshape',
'SequenceLast',
'SequenceMask',
'SequenceReverse',
'SliceChannel',
'SpatialTransformer',
'SwapAxis',
'UpSampling',
'_CachedOp',
'_CrossDeviceCopy',
'_CustomFunction',
'_NoGradient',
'_adamw_update',
'_arange',
'_cond',
'_contrib_interleaved_matmul_selfatt_qk',
'_contrib_interleaved_matmul_selfatt_valatt',
'_contrib_AdaptiveAvgPooling2D',
'_contrib_BilinearResize2D',
'_contrib_bipartite_matching',
'_contrib_dequantize',
'_contrib_div_sqrt_dim',
'_contrib_boolean_mask',
'_contrib_getnnz',
'_contrib_gradientmultiplier',
'_contrib_group_adagrad_update',
'_contrib_index_array',
'_contrib_index_copy',
'_contrib_quadratic',
'_contrib_quantize',
'_contrib_quantize_asym',
'_contrib_quantized_concat',
'_contrib_quantized_conv',
'_contrib_quantized_flatten',
'_contrib_quantized_fully_connected',
'_contrib_quantized_pooling',
'_contrib_quantized_elemwise_add',
'_contrib_quantized_act',
'_contrib_quantized_rnn',
'_image_crop',
'_linspace',
'_contrib_requantize',
'_copy',
'_copyto',
'_cvcopyMakeBorder',
'_cvimdecode',
'_cvimread',
'_cvimresize',
'_div_scalar',
'_equal_scalar',
'_eye',
'_foreach',
'_while_loop',
'_full',
'_grad_add',
'_greater_scalar',
'_greater_equal_scalar',
'_histogram',
'_identity_with_attr_like_rhs',
'_image_adjust_lighting',
'_image_flip_left_right',
'_image_flip_top_bottom',
'_image_normalize',
'_image_random_brightness',
'_image_random_color_jitter',
'_image_random_contrast',
'_image_random_flip_left_right',
'_image_random_flip_top_bottom',
'_image_random_hue',
'_image_random_lighting',
'_image_random_saturation',
'_image_resize',
'_image_to_tensor',
'_imdecode',
'_lesser_scalar',
'_lesser_equal_scalar',
'_logical_and_scalar',
'_logical_or_scalar',
'_logical_xor_scalar',
'_maximum_scalar',
'_minimum_scalar',
'_minus_scalar',
'_mod_scalar',
'_mp_adamw_update',
'_mul_scalar',
'_not_equal_scalar',
'_onehot_encode',
'_ones',
'_plus_scalar',
'_random_exponential',
'_random_exponential_like',
'_random_gamma',
'_random_gamma_like',
'_random_generalized_negative_binomial',
'_random_generalized_negative_binomial_like',
'_random_negative_binomial',
'_random_negative_binomial_like',
'_random_normal',
'_random_normal_like',
'_random_poisson',
'_random_poisson_like',
'_random_randint',
'_random_uniform',
'_random_uniform_like',
'_ravel_multi_index',
'_rminus_scalar',
'_rmod_scalar',
'_rnn_param_concat',
'_sample_exponential',
'_sample_gamma',
'_sample_generalized_negative_binomial',
'_sample_multinomial',
'_sample_negative_binomial',
'_sample_normal',
'_sample_poisson',
'_sample_uniform',
'_sample_unique_zipfian',
'_scatter_set_nd',
'_set_value',
'_slice_assign',
'_slice_assign_scalar',
'_sparse_adagrad_update',
'_sparse_retain',
'_split_v2',
'_unravel_index',
'_zeros',
'_zeros_without_dtype',
'adam_update',
'all_finite',
# 'amp_cast',
# 'amp_multicast',
'arccosh',
'arcsinh',
'arctan',
'argmax',
'argmax_channel',
'argmin',
'batch_take',
'broadcast_axis',
'broadcast_like',
'broadcast_to',
'cbrt',
'ceil',
'cos',
'degrees',
'depth_to_space',
'diag',
'erf',
'expand_dims',
'fill_element_0index',
'fix',
'floor',
'ftml_update',
'ftrl_update',
'gather_nd',
'hard_sigmoid',
'logical_not',
'log_sigmoid',
'max',
'min',
'mish',
'mp_sgd_mom_update',
'mp_sgd_update',
'multi_all_finite',
'multi_mp_sgd_mom_update',
'multi_mp_sgd_update',
'multi_sgd_mom_update',
'multi_sgd_update',
'negative',
'one_hot',
'ones_like',
'pick',
'radians',
'repeat',
'reshape_like',
'reverse',
'rint',
'rmsprop_update',
'rmspropalex_update',
'round',
'scatter_nd',
'sgd_mom_update',
'sgd_update',
'shape_array',
'sigmoid',
'sign',
'signsgd_update',
'signum_update',
'sin',
'size_array',
'slice',
'slice_axis',
'slice_like',
'softsign',
'sort',
'space_to_depth',
'squeeze',
'take',
'tile',
'transpose',
'trunc',
'zeros_like',
'broadcast_mul',
'IdentityAttachKLSparseReg',
'arccos',
'arcsin',
'cosh',
'erfinv',
'sinh',
'tan',
'arctanh',
# Exponents
'exp',
'expm1',
'log',
'log10',
'log2',
'log1p',
# Powers
'broadcast_power',
'reciprocal',
'_rdiv_scalar',
'rsqrt',
'rcbrt',
'_power',
'_power_scalar',
'_rpower_scalar',
'_hypot',
'_hypot_scalar',
'broadcast_hypot',
'_square_sum',
'_contrib_hawkesll',
# Reductions
'sum',
'nansum',
'prod',
'nanprod',
'mean',
'norm',
'softmin',
'khatri_rao',
'moments',
# Misc
'gamma',
'gammaln',
'_linalg_gelqf',
'_linalg_gemm',
'_linalg_gemm2',
'_linalg_potrf',
'_linalg_potri',
'_linalg_sumlogdiag',
'_linalg_syevd',
'_linalg_syrk',
'_linalg_trmm',
'_linalg_trsm',
'_linalg_makediag',
'_linalg_extractdiag',
'_linalg_maketrian',
'_linalg_extracttrian',
'_linalg_inverse',
'_linalg_det',
'_linalg_slogdet',
'_NDArray',
'_Native',
'_contrib_count_sketch',
'_contrib_SyncBatchNorm',
'_contrib_fft',
'argsort',
'topk',
# Neural network
'softmax',
'log_softmax',
'masked_softmax',
'masked_log_softmax',
'InstanceNorm',
'LayerNorm',
'GroupNorm',
'L2Normalization',
'SoftmaxActivation',
'softmax_cross_entropy',
'smooth_l1',
'MakeLoss',
'make_loss',
'Custom',
'CTCLoss',
'_npx_deformable_convolution',
'_contrib_DeformablePSROIPooling',
]
# Functions that have to be cast to FP32 only for
# some values of their parameters
CONDITIONAL_FP32_FUNCS = [
('Activation', 'act_type', ['softrelu']),
('LeakyReLU', 'act_type', ['elu', 'selu']),
]
# Functions with multiple inputs that need all of
# their inputs to be of the same type
WIDEST_TYPE_CASTS = [
'_npi_add',
'Concat',
'_equal',
'_greater',
'_greater_equal',
'_lesser',
'_lesser_equal',
'_logical_and',
'_logical_or',
'_logical_xor',
'_maximum',
'_minimum',
'_mod',
'_not_equal',
'Correlation',
'add_n',
'batch_dot',
'broadcast_add',
'broadcast_div',
'broadcast_equal',
'broadcast_greater',
'broadcast_greater_equal',
'broadcast_lesser',
'broadcast_lesser_equal',
'broadcast_logical_and',
'broadcast_logical_or',
'broadcast_logical_xor',
'broadcast_maximum',
'broadcast_minimum',
'broadcast_mod',
'broadcast_not_equal',
'broadcast_sub',
'dot',
'elemwise_add',
'elemwise_div',
'elemwise_mul',
'elemwise_sub',
'stack',
'_contrib_MultiBoxDetection',
'_contrib_MultiBoxPrior',
'_contrib_MultiBoxTarget',
'_contrib_MultiProposal',
'_contrib_PSROIPooling',
'_contrib_Proposal',
'_contrib_ROIAlign',
'_contrib_box_iou',
'_contrib_box_nms',
'_contrib_dgl_adjacency',
'_contrib_dgl_csr_neighbor_non_uniform_sample',
'_contrib_dgl_csr_neighbor_uniform_sample',
'_contrib_dgl_graph_compact',
'_contrib_dgl_subgraph',
'_contrib_edge_id',
'where',
'_random_pdf_gamma',
'_random_pdf_exponential',
'_random_pdf_uniform',
'_random_pdf_negative_binomial',
'_random_pdf_generalized_negative_binomial',
'_random_pdf_dirichlet',
'_random_pdf_normal',
'_random_pdf_poisson',
]
LOSS_OUTPUT_FUNCTIONS = [
]
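# Note: the AMP helpers used in the tests above (e.g. amp.list_lp16_ops('bfloat16'),
# amp.list_widest_type_cast('bfloat16')) are presumably backed by the lists defined in
# this module (BF16_FUNCS, WIDEST_TYPE_CASTS, ...).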
|
piotrwolinski-intel/incubator-mxnet
|
tests/python/dnnl/subgraphs/test_amp_subgraph.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import mxnet as mx
from mxnet import amp
from mxnet.gluon import nn
from mxnet.test_utils import assert_almost_equal
from subgraph_common import SG_PASS_NAME, QUANTIZE_SG_PASS_NAME
from test_matmul_subgraph import MultiHeadAttention
import sys
from pathlib import Path
curr_path = Path(__file__).resolve().parent
sys.path.insert(0, str(curr_path.parent.parent))
from amp.common import check_amp_net_stats
AMP_SG_PASS_NAME = '<PASSWORD>'
AMP_DTYPE = 'bfloat16'
# Checks whether AMP (after the AMP_SG_PASS_NAME fuse pass) changes the names of the tensors used for calibration
def check_amp_with_quantization(net, data_example, quantized_nodes):
net.optimize_for(data_example, backend=QUANTIZE_SG_PASS_NAME)
symnet = net.export(None)[0]
nodes = {n['name'] for n in json.loads(symnet.tojson())['nodes'] if n['op'] != 'null'}
quant_excluded_nodes = list(nodes - set(quantized_nodes))
_, calib_tensors1 = mx.contrib.quantization._quantize_symbol(
symnet, mx.current_context(), excluded_symbols=quant_excluded_nodes)
lp_net = amp.convert_hybrid_block(net, data_example, target_dtype=AMP_DTYPE,
excluded_sym_names=quantized_nodes, cast_params_offline=True,
device=mx.current_context())
lp_net.optimize_for(data_example, backend=AMP_SG_PASS_NAME)
lp_symnet = lp_net.export(None, remove_amp_cast=False)[0]
_, calib_tensors2 = mx.contrib.quantization._quantize_symbol(
lp_symnet, mx.cpu(), excluded_symbols=quant_excluded_nodes)
assert calib_tensors1 == calib_tensors2
def same_graph_structure(symnet_observed, symnet_expected, expected):
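    """Compare two symbols node by node and assert that their graph structures match
    when `expected` is True (or differ when `expected` is False)."""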
nodes_obs = json.loads(symnet_observed.tojson(remove_amp_cast=False))['nodes']
nodes_exp = json.loads(symnet_expected.tojson(remove_amp_cast=False))['nodes']
assert (len(nodes_obs) == len(nodes_exp)) == expected
for node_obs, node_exp in zip(nodes_obs, nodes_exp):
if node_obs['op'] != node_exp['op'] or node_obs['inputs'] != node_exp['inputs']:
assert expected == False
break
def check_amp_fuse(net, data_example, expected_sym=None, quantized_nodes=[], rtol=0.05):
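    """Compare the outputs of `net` before and after the AMP conversion and fuse passes,
    optionally compare the fused graph against `expected_sym`, and verify that AMP does
    not change the tensor names used for quantization calibration."""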
net.hybridize()
out_ref = net(*data_example)
net.optimize_for(data_example, backend=SG_PASS_NAME) # amp pass works only on oneDNN nodes
lp_net = amp.convert_hybrid_block(net, data_example, target_dtype=AMP_DTYPE,
excluded_sym_names=quantized_nodes, cast_params_offline=True,
device=mx.current_context())
lp_net.optimize_for(data_example, backend=AMP_SG_PASS_NAME)
out_lp_net = lp_net(*data_example)
# check outputs
out_ref = [out_ref] if not isinstance(out_ref, list) else out_ref
    out_lp_net = [out_lp_net] if not isinstance(out_lp_net, list) else out_lp_net
for ref_out, lp_out in zip(out_ref, out_lp_net):
assert_almost_equal(ref_out, lp_out, rtol=rtol, atol=1.0)
# check graph
if expected_sym is not None:
lp_symnet = lp_net.export(None, remove_amp_cast=False)[0]
same_graph_structure(lp_symnet, expected_sym, True)
# check amp with quantization
check_amp_with_quantization(net, data_example, quantized_nodes)
@mx.util.use_np
def test_amp_fc():
class TestNet(nn.HybridBlock):
def __init__(self):
super(TestNet, self).__init__()
self.fc1 = nn.Dense(16)
self.fc2 = nn.Dense(16)
def forward(self, x):
x = self.fc1(x)
x = self.fc2(x)
return x
net = TestNet()
net.initialize()
exp_data = mx.symbol.Variable('data')
exp_weight = [mx.symbol.Variable('weight{}'.format(i)) for i in range(2)]
exp_bias = [mx.symbol.Variable('bias{}'.format(i)) for i in range(2)]
exp_sym = mx.symbol.amp_cast(exp_data, dtype=AMP_DTYPE)
for weight, bias in zip(exp_weight, exp_bias):
exp_sym = mx.symbol.FullyConnected(exp_sym, weight, bias, num_hidden=1)
exp_sym = exp_sym.get_backend_symbol(SG_PASS_NAME)
data_example = mx.np.random.uniform(-1, 1, (1, 8))
check_amp_fuse(net, [data_example], exp_sym)
check_amp_fuse(net, [data_example], exp_sym, ['sg_onednn_fully_connected_1'])
@mx.util.use_np
def test_amp_conv():
class TestNet(nn.HybridBlock):
def __init__(self):
super(TestNet, self).__init__()
self.conv1 = nn.Conv2D(16, (3, 3))
self.conv2 = nn.Conv2D(16, (3, 3))
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
return x
net = TestNet()
net.initialize()
data_example = mx.np.random.uniform(-1, 1, (1, 3, 8, 8))
exp_data = mx.symbol.Variable('data')
exp_weight = [mx.symbol.Variable('weight{}'.format(i)) for i in range(2)]
exp_bias = [mx.symbol.Variable('bias{}'.format(i)) for i in range(2)]
exp_sym = mx.symbol.amp_cast(exp_data, dtype=AMP_DTYPE)
for weight, bias in zip(exp_weight, exp_bias):
exp_sym = mx.symbol.Convolution(exp_sym, weight, bias, kernel=(3, 3), num_filter=1)
exp_sym = exp_sym.get_backend_symbol(SG_PASS_NAME)
check_amp_fuse(net, [data_example], exp_sym)
exp_sym = mx.symbol.amp_cast(exp_data, dtype=AMP_DTYPE)
for weight, bias in zip(exp_weight, exp_bias):
exp_sym = mx.symbol.Convolution(exp_sym, weight, bias, kernel=(3, 3), num_filter=1)
exp_sym = exp_sym.get_backend_symbol(SG_PASS_NAME)
check_amp_fuse(net, [data_example], exp_sym, ['sg_onednn_conv_1'])
@mx.util.use_np
def test_amp_transformers():
batch_size = 16
seq_length = 32
units = 8
num_heads = 8
in_data = mx.np.random.uniform(size=(batch_size, seq_length, units), dtype='float32')
mask = mx.np.random.randint(0, 2, (batch_size, seq_length, seq_length), dtype='int32')
net = MultiHeadAttention(units, num_heads)
net.initialize()
check_amp_fuse(net, [in_data, mask], None)
check_amp_fuse(net, [in_data, mask], None, ['sg_onednn_fully_connected_0'])
@mx.util.use_np
def test_amp_concat():
class TestNet(nn.HybridBlock):
def __init__(self):
super(TestNet, self).__init__()
self.fc1 = nn.Dense(16)
self.fc2 = nn.Dense(16)
self.fc2.share_parameters(self.fc1.collect_params())
def forward(self, x):
x1 = self.fc1(x)
x2 = self.fc2(x)
x = mx.np.concat((x1, x2), axis=1)
return x
net = TestNet()
net.initialize()
data_example = mx.np.random.uniform(-1, 1, (1, 16))
exp_data = mx.symbol.Variable('data')
exp_amp_data = mx.symbol.amp_cast(exp_data, dtype=AMP_DTYPE)
exp_weight = mx.symbol.Variable('weight')
exp_bias = mx.symbol.Variable('bias')
exp_fc = [mx.symbol.FullyConnected(exp_amp_data, exp_weight, exp_bias, num_hidden=1)
for _ in range(2)]
exp_sym = mx.symbol.Concat(*exp_fc)
exp_sym = mx.symbol.amp_cast(exp_sym, dtype='float32')
exp_sym = exp_sym.get_backend_symbol(SG_PASS_NAME)
check_amp_fuse(net, [data_example], exp_sym)
amp_weight = mx.symbol.amp_cast(exp_weight, dtype=AMP_DTYPE)
amp_bias = mx.symbol.amp_cast(exp_bias, dtype=AMP_DTYPE)
exp_fc[0] = mx.symbol.FullyConnected(exp_amp_data, amp_weight, amp_bias, num_hidden=1)
exp_fc[1] = mx.symbol.FullyConnected(exp_data, exp_weight, exp_bias, num_hidden=1)
exp_sym = mx.symbol.Concat(*exp_fc)
exp_sym = exp_sym.get_backend_symbol(SG_PASS_NAME)
check_amp_fuse(net, [data_example], exp_sym, ['sg_onednn_fully_connected_1'])
@mx.util.use_np
def test_amp_fuse_with_branch():
class TestNet(nn.HybridBlock):
def __init__(self, **kwargs):
super(TestNet, self).__init__(**kwargs)
self.fc1 = nn.Dense(16)
self.fc2 = nn.Dense(16)
def forward(self, x, *args):
out = self.fc1(x)
out1 = self.fc2(out)
out1 = nn.Activation('relu')(out1)
out2 = mx.npx.softmax(out)
return out1, out2
net = TestNet()
net.initialize()
data_example = mx.np.ones((10,))
    #               |---> lp16_op_2
    # lp16_op_1 ---|
    #               |---> f32_amp_cast ---> f32_op
#
# `lp16_op_1` cannot fuse the `f32_amp_cast` node, since `lp16_op_2` already uses its lp16 output
exp_data = mx.sym.Variable('data')
exp_weight = [mx.symbol.Variable('weight{}'.format(i)) for i in range(2)]
exp_bias = [mx.symbol.Variable('bias{}'.format(i)) for i in range(2)]
amp_data = mx.sym.amp_cast(exp_data, dtype=AMP_DTYPE)
lp16_op_1 = mx.sym.FullyConnected(amp_data, exp_weight[0], exp_bias[0], num_hidden=16)
lp16_op_2 = mx.sym.FullyConnected(lp16_op_1, exp_weight[1], exp_bias[1], num_hidden=16)
f32_amp_cast = mx.sym.amp_cast(lp16_op_1, dtype='float32')
f32_op = mx.sym.softmax(f32_amp_cast)
exp_sym = mx.sym.Group([lp16_op_2, f32_op])
exp_sym = exp_sym.get_backend_symbol(SG_PASS_NAME)
check_amp_fuse(net, [data_example], exp_sym)
def test_amp_excluding_after_graph_pass():
class TestNet(nn.HybridBlock):
def __init__(self):
super(TestNet, self).__init__()
self.fc1 = nn.Dense(16)
self.fc2 = nn.Dense(16)
def forward(self, x):
x = self.fc1(x)
with nn.HybridBlock.OptConstraint.disable_amp():
x = self.fc2(x)
return x
data_example = mx.np.random.uniform(-1, 1, (1, 8))
net = TestNet()
net.initialize()
net_before = amp.convert_hybrid_block(net, data_example, AMP_DTYPE, cast_params_offline=True)
check_amp_net_stats(AMP_DTYPE, net_before, data_example, lp16_tensors_num=2,
lp16_casts_num=1, other_casts_num=1)
net.optimize_for(data_example, backend=SG_PASS_NAME) # introduces new nodes
net_after = amp.convert_hybrid_block(net, data_example, AMP_DTYPE, cast_params_offline=True)
check_amp_net_stats(AMP_DTYPE, net_after, data_example, lp16_tensors_num=2,
lp16_casts_num=1, other_casts_num=1)
|
piotrwolinski-intel/incubator-mxnet
|
tests/python/dnnl/test_amp.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
from pathlib import Path
curr_path = Path(__file__).resolve().parent
sys.path.insert(0, str(curr_path.parent))
import mxnet as mx
import amp.common as amp_common_tests
AMP_DTYPE = 'bfloat16'
def test_bf16_coverage():
amp_common_tests.test_amp_coverage(AMP_DTYPE, 'BF16')
@mx.util.use_np
def test_bf16_basic_use():
amp_common_tests.test_amp_basic_use(AMP_DTYPE)
@mx.util.use_np
def test_bf16_offline_casting():
amp_common_tests.test_amp_offline_casting(AMP_DTYPE)
@mx.util.use_np
def test_bf16_offline_casting_shared_params():
amp_common_tests.test_amp_offline_casting_shared_params(AMP_DTYPE)
@mx.util.use_np
def test_bf16_fp32_ops_order_independence():
amp_common_tests.test_lp16_fp32_ops_order_independence(AMP_DTYPE)
@mx.util.use_np
def test_bf16_test_node_excluding():
amp_common_tests.test_amp_node_excluding(AMP_DTYPE)
|
CornellLenard/Deep-Learning-Course-Exercises
|
Exercise 06/exercise_code/hyperparameter_tuning.py
|
<filename>Exercise 06/exercise_code/hyperparameter_tuning.py
import random
from math import log10
from itertools import product
from exercise_code.solver import Solver
from exercise_code.networks.layer import Sigmoid, Relu
from exercise_code.networks import (ClassificationNet, BCE, CrossEntropyFromLogits)
ALLOWED_RANDOM_SEARCH_PARAMS = ["log", "int", "float", "item"]
def grid_search(train_loader, val_loader,
grid_search_spaces={
"learning_rate": [0.0001, 0.001, 0.01, 0.1],
"reg": [1e-4, 1e-5, 1e-6]
},
model_class=ClassificationNet, epochs=20, patience=5):
"""
A simple grid search based on nested loops to tune learning rate and
regularization strengths.
Keep in mind that you should not use grid search for higher-dimensional
parameter tuning, as the search space explodes quickly.
Required arguments:
- train_loader: A generator object returning training data
- val_loader: A generator object returning validation data
Optional arguments:
    - grid_search_spaces: a dictionary where every key corresponds to a
    hyper-parameter to tune and every value contains a list of possible
    values. Our function will test all value combinations, which can take
    quite a long time. If we don't specify a value here, we will use the
    default values of both our chosen model and our solver
    - model_class: the model class we want to tune in this exercise
    - epochs: number of epochs we train each model for
    - patience: early-stopping patience used by our solver
Returns:
- The best performing model
- A list of all configurations and results
"""
configs = []
"""
# Simple implementation with nested loops
for lr in grid_search_spaces["learning_rate"]:
for reg in grid_search_spaces["reg"]:
configs.append({"learning_rate": lr, "reg": reg})
"""
# More general implementation using itertools
for instance in product(*grid_search_spaces.values()):
configs.append(dict(zip(grid_search_spaces.keys(), instance)))
return findBestConfig(train_loader, val_loader, configs, epochs, patience,
model_class)
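# Illustrative usage of grid_search above (a sketch; assumes `train_loader` and
# `val_loader` generator objects already exist):
#
#   best_model, results = grid_search(
#       train_loader, val_loader,
#       grid_search_spaces={"learning_rate": [1e-3, 1e-2], "reg": [1e-5, 1e-4]},
#       epochs=10, patience=3)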
def random_search(train_loader, val_loader,
random_search_spaces={
"learning_rate": ([0.0001, 0.1], "log"),
"hidden_size": ([100, 400], "int"),
"activation": ([Sigmoid(), Relu()], "item"),
},
model_class=ClassificationNet, num_search=20, epochs=20,
patience=5):
"""
    Samples num_search hyper-parameter sets within the provided search spaces
and returns the best model.
See the grid search documentation above.
Additional/different optional arguments:
    - random_search_spaces: similar to grid search but values are of the form
    (<list of values>, <mode as specified in ALLOWED_RANDOM_SEARCH_PARAMS>)
    - num_search: number of hyper-parameter configurations to sample
"""
configs = []
for _ in range(num_search):
configs.append(random_search_spaces_to_config(random_search_spaces))
return findBestConfig(train_loader, val_loader, configs, epochs, patience,
model_class)
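# Illustrative usage of random_search above (a sketch; assumes `train_loader` and
# `val_loader` generator objects already exist):
#
#   best_model, results = random_search(train_loader, val_loader,
#                                       num_search=10, epochs=10, patience=3)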
def findBestConfig(train_loader, val_loader, configs, epochs, patience,
model_class):
"""
    Takes a list of hyper-parameter configs (from random search or grid search),
    trains a model for each config and returns the one performing best
    on the validation set
"""
best_val = None
best_config = None
best_model = None
results = []
for i in range(len(configs)):
print("\nEvaluating Config #{} [of {}]:\n".format((i+1), len(configs)), configs[i])
model = model_class(**configs[i])
solver = Solver(model, train_loader, val_loader, **configs[i])
solver.train(epochs=epochs, patience=patience)
results.append(solver.best_model_stats)
if not best_val or solver.best_model_stats["val_loss"] < best_val:
best_val, best_model, best_config = solver.best_model_stats["val_loss"], model, configs[i]
print("\nSearch done. Best Val Loss = {}".format(best_val))
print("Best Config:", best_config)
return best_model, list(zip(configs, results))
def random_search_spaces_to_config(random_search_spaces):
""""
Takes search spaces for random search as input; samples accordingly
from these spaces and returns the sampled hyper-params as a config-object,
which will be used to construct solver & network
"""
config = {}
for key, (rng, mode) in random_search_spaces.items():
if mode not in ALLOWED_RANDOM_SEARCH_PARAMS:
print("'{}' is not a valid random sampling mode. "
"Ignoring hyper-param '{}'".format(mode, key))
elif mode == "log":
if rng[0] <= 0 or rng[-1] <= 0:
print("Invalid value encountered for logarithmic sampling "
"of '{}'. Ignoring this hyper param.".format(key))
continue
sample = random.uniform(log10(rng[0]), log10(rng[-1]))
config[key] = 10 ** sample
elif mode == "int":
config[key] = random.randint(rng[0], rng[-1])
elif mode == "float":
config[key] = random.uniform(rng[0], rng[-1])
elif mode == "item":
config[key] = random.choice(rng)
return config
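# For illustration, with the default `random_search_spaces` above a sampled config might
# look like {"learning_rate": 0.0042, "hidden_size": 237, "activation": Relu()} (made-up
# values): "log" samples the exponent uniformly, "int" and "float" sample uniformly from
# the range, and "item" picks one element of the list.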
|
CornellLenard/Deep-Learning-Course-Exercises
|
Exercise 09/exercise_code/data/base_dataset.py
|
<gh_stars>0
"""Dataset Base Class"""
from torch.utils.data import Dataset
from .download_utils import download_dataset
class BaseDataset(Dataset):
"""
Abstract Dataset Base Class
All subclasses must define __getitem__() and __len__()
"""
def __init__(self, root, download_url=None, force_download=False):
self.root_path = root
# The actual archive name should be all the text of the url after the
# last '/'.
if download_url is not None:
dataset_zip_name = download_url[download_url.rfind('/')+1:]
self.dataset_zip_name = dataset_zip_name
download_dataset(
url=download_url,
data_dir=root,
dataset_zip_name=dataset_zip_name,
force_download=force_download,
)
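# Minimal sketch of a possible subclass (illustrative only; the class name and the
# `samples` attribute are assumptions, not part of this exercise):
#
# class DummyDataset(BaseDataset):
#     def __init__(self, root, samples, **kwargs):
#         super().__init__(root, **kwargs)
#         self.samples = samples  # e.g. a list of (image, label) pairs
#
#     def __getitem__(self, index):
#         return self.samples[index]
#
#     def __len__(self):
#         return len(self.samples)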