text
stringlengths
0
828
if lc:
freshthresh = self.freshthresh
nw = dt.datetime.now()
freshness = (nw - lc).total_seconds() / 60.0
if freshness <= freshthresh:
docache = False
smrp = SymbolReport(self.name)
if docache:
data = []
cols = ['final', 'override_feed000', 'failsafe_feed999']
if len(self.feeds) == 0:
err_msg = ""Symbol has no Feeds. Can't cache a feed-less Symbol.""
raise Exception(err_msg)
try:
datt = datadefs[self.dtype.datadef]
indtt = indexingtypes[self.index.indimp]
indkwargs = self.index.getkwargs()
indt = indtt(self.index.case, **indkwargs)
rp = ReportPoint('datadef', 'class', datt)
smrp.add_reportpoint(rp)
for afeed in self.feeds:
fdrp = afeed.cache(allowraise)
smrp.add_feedreport(fdrp)
tmp = datt(afeed.data).converted
tmp = indt.process_post_feed_cache(tmp)
data.append(tmp)
cols.append(afeed.data.name)
except:
point = ""caching""
smrp = self._generic_exception(point, smrp, allowraise)
try:
data = pd.concat(data, axis=1)
except:
point = ""concatenation""
smrp = self._generic_exception(point, smrp, allowraise)
# We shouldn't need to do anything here, as the concatenation
# should be smooth...
# preindlen = len(data)
#
#
# if preindlen > 0 :
# #indt = indtt(data, self.index.case, indkwargs)
# #data = indt.final_dataframe()
# data = indt.process_post_concat(data)
#
# postindlen = len(data)
# if postindlen == 0 and preindlen > 0:
# raise Exception(""Indexing Implementer likely poorly designed"")
# else:
# postindlen = 0
def build_hi_df(which, colname):
    # For each index value of `which` (Override or FailSafe rows scoped to
    # this symbol's name), pull the row with the most recent dt_log and
    # shape the result into a one-column DataFrame named `colname`.
    # NOTE(review): assumes `which` exposes .ind / .dt_log / .val mapped
    # columns and a `symname` attribute — confirm against the model defs.
    session = object_session(self)

    # Subquery: latest log timestamp per index value for this symbol.
    latest = session.query(which.ind,
                           func.max(which.dt_log).label('max_dt_log'))
    latest = latest.filter_by(symname=self.name)
    latest = latest.group_by(which.ind).subquery()

    # Join back to the full table to recover the complete winning rows.
    join_cond = and_(which.ind == latest.c.ind,
                     which.dt_log == latest.c.max_dt_log)
    rows = session.query(which).join((latest, join_cond)).all()

    if not rows:
        # No overrides/failsafes logged: empty frame, but keep the column
        # so downstream outer-merges still see a consistent schema.
        return pd.DataFrame(columns=[colname])

    inds = [row.ind for row in rows]
    vals = [row.val for row in rows]
    return indt.build_ordf(inds, vals, colname)
ordf = build_hi_df(Override, 'override_feed000')
fsdf = build_hi_df(FailSafe, 'failsafe_feed999')
orfsdf = pd.merge(ordf, fsdf, how='outer', left_index=True, right_index=True)
data = pd.merge(orfsdf, data, how='outer', left_index=True, right_index=True)
data = indt.process_post_orfs(data)
try:
data = data.fillna(value=pd.np.nan)
data = data[sorted_feed_cols(data)]
data['final'] = FeedAggregator(self.agg_method).aggregate(data)
except:
point = ""aggregation""