text stringlengths 1 93.6k |
|---|
# Learning-rate exponential-decay configuration.
tf.app.flags.DEFINE_float(
    'learning_rate_decay_factor', 0.96, 'Learning rate decay factor.')
tf.app.flags.DEFINE_float(
    'decay_steps', 1000,
    # NOTE: despite being a float flag, this is a step count (not epochs);
    # the original help text said "epochs", which contradicted the flag name.
    'Number of steps after which the learning rate decays.')
# For learning-rate piecewise_constant decay.
tf.app.flags.DEFINE_string(
    'decay_boundaries', '70000, 90000',
    'Learning rate decay boundaries by global_step (comma-separated list).')
tf.app.flags.DEFINE_string(
    'lr_decay_factors', '1, 0.8, 0.1',
    'The values of learning_rate decay factor for each segment between boundaries (comma-separated list).')
# Checkpoint related configuration.
tf.app.flags.DEFINE_string(
    'checkpoint_path', './model/resnet50',  # set to None to train from scratch
    'The path to a checkpoint from which to fine-tune.')
tf.app.flags.DEFINE_string(
    'checkpoint_model_scope', '',
    'Model scope in the checkpoint. None if the same as the trained model.')
tf.app.flags.DEFINE_string(
    'model_scope', 'xdet_resnet',
    'Model scope name used to replace the name_scope in checkpoint.')
tf.app.flags.DEFINE_string(
    'checkpoint_exclude_scopes',
    # set to None to restore every variable from the checkpoint
    'xdet_resnet/xdet_head, xdet_resnet/xdet_multi_path, xdet_resnet/xdet_additional_conv',
    'Comma-separated list of scopes of variables to exclude when restoring from a checkpoint.')
tf.app.flags.DEFINE_boolean(
    'ignore_missing_vars', True,
    'When restoring a checkpoint would ignore missing variables.')
tf.app.flags.DEFINE_boolean(
    'run_on_cloud', True,
    'Whether we will train on cloud (pre-trained model will be placed in the "data_dir/cloud_checkpoint_path").')
tf.app.flags.DEFINE_string(
    'cloud_checkpoint_path', 'resnet50/model.ckpt',
    'The path to a checkpoint from which to fine-tune.')
# GPU visibility is controlled externally via CUDA_VISIBLE_DEVICES.
FLAGS = tf.app.flags.FLAGS
|
def input_pipeline():
    """Build and return an Estimator-style ``input_fn`` for training.

    Wires together the image preprocessing function, the anchor
    creator/encoder and the dataset reader configured through FLAGS.
    """
    # Preprocessing closure: resizes to the square training size and selects
    # the tensor layout that matches FLAGS.data_format. The ``shape_``
    # argument is accepted but unused, matching the dataset's callback shape.
    def image_preprocessing_fn(image_, shape_, glabels_, gbboxes_):
        preprocess = preprocessing_factory.get_preprocessing(
            'xdet_resnet', is_training=True)
        layout = 'NCHW' if FLAGS.data_format == 'channels_first' else 'NHWC'
        return preprocess(image_, glabels_, gbboxes_,
                          out_shape=[FLAGS.train_image_size] * 2,
                          data_format=layout)

    # Single 40x40 feature layer with step 8 over the square training image.
    anchor_creator = anchor_manipulator.AnchorCreator(
        [FLAGS.train_image_size] * 2,
        layers_shapes=[(40, 40)],
        anchor_scales=[[0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]],
        extra_anchor_scales=[[0.1]],
        anchor_ratios=[[1., 2., 3., .5, 0.3333]],
        layer_steps=[8])

    def input_fn():
        all_anchors, num_anchors_list = anchor_creator.get_all_anchors()
        anchor_encoder_decoder = anchor_manipulator.AnchorEncoder(
            all_anchors,
            num_classes=FLAGS.num_classes,
            allowed_borders=[0.05],
            positive_threshold=FLAGS.match_threshold,
            ignore_threshold=FLAGS.neg_threshold,
            prior_scaling=[0.1, 0.1, 0.2, 0.2])
        batch_tensors, _ = dataset_factory.get_dataset(
            FLAGS.dataset_name,
            FLAGS.dataset_split_name,
            FLAGS.data_dir,
            image_preprocessing_fn,
            file_pattern=None,
            reader=None,
            batch_size=FLAGS.batch_size,
            num_readers=FLAGS.num_readers,
            num_preprocessing_threads=FLAGS.num_preprocessing_threads,
            num_epochs=FLAGS.train_epochs,
            anchor_encoder=anchor_encoder_decoder.encode_all_anchors)
        # The last element of the batch is the features tensor; everything
        # before it is a regression/classification target.
        features = batch_tensors[-1]
        labels = {
            'targets': batch_tensors[:-1],
            'decode_fn':
                lambda pred: anchor_encoder_decoder.decode_all_anchors([pred])[0],
            'num_anchors_list': num_anchors_list,
        }
        return features, labels

    return input_fn
|
def modified_smooth_l1(bbox_pred, bbox_targets, bbox_inside_weights = 1., bbox_outside_weights = 1., sigma = 1.):
    """Element-wise modified smooth-L1 loss (Fast R-CNN style).

    ResultLoss = outside_weights * SmoothL1(inside_weights * (bbox_pred - bbox_targets))
    SmoothL1(x) = 0.5 * (sigma * x)^2   if |x| < 1 / sigma^2
                  |x| - 0.5 / sigma^2   otherwise
    """
    sigma_sq = sigma * sigma
    # Weighted prediction error; the inside weights gate which coordinates
    # contribute to the loss at all.
    diff = tf.multiply(bbox_inside_weights, tf.subtract(bbox_pred, bbox_targets))
    abs_diff = tf.abs(diff)
    # 1.0 where the quadratic branch applies, 0.0 where the linear one does.
    quadratic_mask = tf.cast(tf.less(abs_diff, 1.0 / sigma_sq), tf.float32)
    quadratic_term = tf.multiply(tf.multiply(diff, diff), 0.5 * sigma_sq)
    linear_term = tf.subtract(abs_diff, 0.5 / sigma_sq)
    # Blend the two branches with the mask instead of a conditional so the
    # whole computation stays element-wise on the graph.
    blended = tf.add(tf.multiply(quadratic_term, quadratic_mask),
                     tf.multiply(linear_term, tf.abs(tf.subtract(quadratic_mask, 1.0))))
    return tf.multiply(bbox_outside_weights, blended)
|
def xdet_model_fn(features, labels, mode, params):
|
"""Our model_fn for ResNet to be used with our Estimator."""
|
num_anchors_list = labels['num_anchors_list']
|
num_feature_layers = len(num_anchors_list)
|
shape = labels['targets'][-1]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.