)
fig.add_trace(go.Scatter(x=[dt.fromordinal(t - 366).strftime('%Y-%m-%d') for t in Time],
                         y=Res["Res"]["Z"][:, 0] * Res["Res"]["C"][idxSeries, 0],
                         mode='lines',
                         name="Common Factor",
                         line=dict(color='black', width=1.5))
              )

# Plot common factor and standardized data
fig.update_layout({'plot_bgcolor': 'rgba(0, 0, 0, 0)'},
                  title_text="Common Factor and Standardized Data")
fig.show()
# ------------------------------------------------- Plot projection of common factor onto Payroll Employment and GDP
# Two plots in one graph
fig = make_subplots(rows=2, cols=1,
                    subplot_titles=("Payroll Employment", "Real Gross Domestic Product"))

# Data series that we loop through to plot the projection
series = ["PAYEMS", "GDPC1"]

# For each series:
# 1.) plot the common factor
# 2.) plot the data series (with NAs removed)
for i in range(len(series)):
    idxSeries = np.where(Spec.SeriesID == series[i])[0][0]
    t_obs = ~np.isnan(X[:, idxSeries])
    # Projection of the common factor onto the series: loadings (C) times factors (Z),
    # with the standardization undone via the series' scale (Wx) and level (Mx)
    CommonFactor = np.matmul(Res["Res"]["C"][idxSeries, :5].reshape(1, -1), Res["Res"]["Z"][:, :5].T) * \
                   Res["Res"]["Wx"][idxSeries] + Res["Res"]["Mx"][idxSeries]
    # 1.) common factor projection
    fig.append_trace(go.Scatter(
        x=[dt.fromordinal(t - 366).strftime('%Y-%m-%d') for t in Time],
        y=CommonFactor[0, :],
        name="Common Factor ({})".format(series[i])
    ), row=i+1, col=1)
    # 2.) observed data with NAs removed
    fig.append_trace(go.Scatter(
        x=[dt.fromordinal(t - 366).strftime('%Y-%m-%d') for t in Time[t_obs]],
        y=X[t_obs, idxSeries],
        name="Data ({})".format(series[i])
    ), row=i+1, col=1)
    fig.update_yaxes(title_text=Spec.Units[idxSeries] + " ({})".format(Spec.UnitsTransformed[idxSeries]),
                     row=i+1, col=1)

fig.update_layout({'plot_bgcolor': 'rgba(0, 0, 0, 0)'},
                  title_text="Projection of Common Factor")
fig.show()
# <FILESEP>
import pytorch_lightning as pl
import torch
import os
from math import pi
from PIL import Image
from munch import Munch
from torchvision.transforms import ToPILImage, ToTensor
from networks import find_model_using_name, create_model
from argparse import ArgumentParser as AP


def main(ap):
    CHECKPOINT = ap.checkpoint
    OUTPUT_DIR = ap.output_dir
    INPUT_DIR = ap.input_dir
    EXEMPLAR_IMAGE = ap.exemplar_image

    # Load the hyperparameters stored in the checkpoint
    # with open(os.path.join(root_dir, 'hparams.yaml')) as cfg_file:
    ckpt = torch.load(CHECKPOINT, map_location='cpu')
    hparams = ckpt['hyper_parameters']
    opt = Munch(hparams).opt
    opt.phase = 'val'
    opt.no_flip = True

    # Build the model from the stored options, then load the checkpoint weights
    model = create_model(opt)
    model = model.load_from_checkpoint(CHECKPOINT)

    # Transfer the model to the GPU and switch to inference mode
    model.to('cuda')
    model.eval()

    # Exemplar image: resize, rescale to [-1, 1], add a batch dimension
    val_ds = INPUT_DIR
    im_ref = Image.open(EXEMPLAR_IMAGE).resize((480, 256), Image.BILINEAR)
    im_ref = ToTensor()(im_ref) * 2 - 1
    im_ref = im_ref.cuda().unsqueeze(0)

    os.makedirs('{}/exemplar'.format(OUTPUT_DIR), exist_ok=True)
    # Translate every image in the input directory, using the exemplar image as the style reference
    for index, im_path in enumerate(os.listdir(val_ds)):
        print(index)
        im = Image.open(os.path.join(val_ds, im_path)).resize((480, 256), Image.BILINEAR)
        im = ToTensor()(im) * 2 - 1
        im = im.cuda().unsqueeze(0)
        style_array = torch.randn(1, 8, 1, 1).cuda()
        with torch.no_grad():
            result = model.forward(im, style_array, type='exemplar', ref_image=im_ref)
        # Map the output from [-1, 1] back to [0, 1] before saving
        result = torch.clamp(result, -1, 1)
        img_global = ToPILImage()((result[0].cpu() + 1) / 2)
        img_global.save('{}/exemplar/{}'.format(OUTPUT_DIR, im_path))


if __name__ == '__main__':
    ap = AP()
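    # NOTE: minimal sketch of the CLI wiring; the flag names below are assumptions
    # inferred from the attributes read in main() (checkpoint, output_dir,
    # input_dir, exemplar_image), not confirmed by the source.
    ap.add_argument('--checkpoint', type=str, required=True)
    ap.add_argument('--output_dir', type=str, required=True)
    ap.add_argument('--input_dir', type=str, required=True)
    ap.add_argument('--exemplar_image', type=str, required=True)
    main(ap.parse_args())
    # Example invocation (script name and paths are placeholders):
    #   python exemplar_inference.py --checkpoint model.ckpt --input_dir ./val_images \
    #       --output_dir ./results --exemplar_image ./reference.png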