text
stringlengths
1
93.6k
"""
# Prompt sent to the image-captioning model: requests a detailed,
# English-only description that opens with the phrase "In the image,".
M_ImageCaptioner2_Prompt = """
Describe this image in detail please.
The language of reply is English only!!!
Starts with "In the image,"
"""
# System prompt for the caption post-processing step: extracts a short
# scene description ('short_describes') plus tag arrays (moods, styles,
# lights, elements, objects) from the caption text referred to as Content.
ImageCaptionerPostProcessing_System = """
I want you to write me a detailed list of tips for Content.
Write a very short description of the scene and put it in the 'short_describes' field
Write complete [moods, styles, lights, elements, objects] of the word array and put it in the '$_tags' field
Don't include anything that isn't in Content.
The language of reply is English only!!!
"""
# <FILESEP>
# Loading the RDT into a hash table.
# Note that this is less memory- and CPU-efficient
# than using rdt.pkl (based on marisa_trie and numpy).
import codecs
from collections import defaultdict
from traceback import format_exc
from time import time
import gzip
from os.path import splitext
# Path to the distributional-thesaurus file (edit to point at your data).
# A ".gz" extension triggers gzip decompression on load.
dt_fpath = "all.norm-sz500-w10-cb0-it3-min5.w2v.vocab_1100000_similar250.gz"
VERBOSE = False   # when True, echo each malformed input line
SEP = "\t"        # separates word_i from its neighbor list
SEP_SCORE = ":"   # separates word_j from its similarity score
SEP_LIST = ","    # separates entries within the neighbor list
UNSEP = "_"       # replacement for stray separator characters inside words
MIN_SIM = 0.0     # neighbors with similarity below this threshold are dropped
# Load the distributional thesaurus into a nested dict:
# dt[word_i][word_j] -> similarity score; rel_num counts kept relations.
tic = time()
# Bug fix: gzip.open() defaults to binary mode, so every line would be
# bytes and line.split(SEP) with a str separator would raise TypeError.
# Open in text mode ("rt") so both branches yield str lines.
input_file = (
    gzip.open(dt_fpath, "rt", encoding="utf-8")
    if splitext(dt_fpath)[-1] == ".gz"
    else codecs.open(dt_fpath, "r", "utf-8")
)
with input_file:
    dt = defaultdict(lambda: defaultdict(float))
    rel_num = 0
    for i, line in enumerate(input_file):
        # Narrowed from a bare `except:`: only field-count / unpacking /
        # float-parsing problems are expected here; anything else should
        # surface instead of being silently swallowed.
        try:
            word_i, neighbors = line.split(SEP)
            word_i = word_i.replace(SEP, UNSEP)
            for word_j_sim_ij in neighbors.split(SEP_LIST):
                word_j, sim_ij = word_j_sim_ij.split(SEP_SCORE)
                word_j = word_j.replace(SEP, UNSEP)
                sim_ij = float(sim_ij)
                if sim_ij < MIN_SIM:
                    continue
                rel_num += 1
                dt[word_i][word_j] = sim_ij
        except ValueError:
            # Malformed line: report it and keep loading the rest.
            print(format_exc())
            if VERBOSE:
                print("bad line:", i, line)
print(time() - tic, "sec.")
# Print up to ~1000 sample (word_i, word_j, similarity) entries from dt.
print("Sample entries:")
shown = 0
for w1 in dt:
    for w2 in dt[w1]:
        print(w1, w2, dt[w1][w2])
        shown += 1
        if shown > 1000:
            # Bug fix: the original `break` only exited the inner loop,
            # so one extra entry was printed for every remaining w1.
            break
    else:
        continue  # inner loop finished normally -> keep scanning
    break  # inner loop hit the limit -> stop entirely
# <FILESEP>
"""
main.py: Main code to drive LSC-CNN
Authors : svp, mns, dbs
"""
import argparse
import random
from data_reader import DataReader
import matplotlib
from matplotlib import pyplot as plt
import cv2
import numpy as np
import os
import random, string
import math
import pickle
from collections import OrderedDict
import torch
from torch import nn as nn, optim as optim
from torch.autograd import Variable
import datetime
from error_function import offset_sum
from scipy.misc import imsave, imresize
from utils import apply_nms
from network import LSCCNN
from utils.logging_tools import *
from utils.loss_weights import *
################ Architecture Hyper-parameters ################