{
"module": "keras_hub.src.models.parseq.parseq_causal_lm",
"class_name": "PARSeqCausalLM",
"config": {
"backbone": {
"module": "keras_hub.src.models.parseq.parseq_backbone",
"class_name": "PARSeqBackbone",
"config": {
"name": "par_seq_backbone",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"image_encoder": {
"module": "keras_hub.src.models.vit.vit_backbone",
"class_name": "ViTBackbone",
"config": {
"name": "encoder",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"image_shape": [
32,
128,
3
],
"patch_size": [
4,
8
],
"num_layers": 12,
"num_heads": 6,
"hidden_dim": 384,
"mlp_dim": 1536,
"dropout_rate": 0.0,
"attention_dropout": 0.0,
"layer_norm_epsilon": 1e-06,
"use_mha_bias": true,
"use_mlp_bias": true,
"use_class_token": false,
"use_patch_bias": true
},
"registered_name": "keras_hub>ViTBackbone"
},
"vocabulary_size": 97,
"max_label_length": 25,
"decoder_hidden_dim": 384,
"num_decoder_layers": 1,
"num_decoder_heads": 12,
"decoder_mlp_dim": 1536,
"dropout_rate": 0.1,
"attention_dropout": 0.1
},
"registered_name": "keras_hub>PARSeqBackbone"
},
"preprocessor": {
"module": "keras_hub.src.models.parseq.parseq_causal_lm_preprocessor",
"class_name": "PARSeqCausalLMPreprocessor",
"config": {
"name": "par_seq_causal_lm_preprocessor",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"tokenizer": {
"module": "keras_hub.src.models.parseq.parseq_tokenizer",
"class_name": "PARSeqTokenizer",
"config": {
"name": "par_seq_tokenizer",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "int32"
},
"registered_name": null
},
"config_file": "tokenizer.json"
},
"registered_name": "keras_hub>PARSeqTokenizer"
},
"image_converter": {
"module": "keras_hub.src.models.parseq.parseq_image_converter",
"class_name": "PARSeqImageConverter",
"config": {
"name": "par_seq_image_converter",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"image_size": [
32,
128
],
"scale": 0.00784313725490196,
"offset": -1.0,
"interpolation": "bicubic",
"antialias": false,
"crop_to_aspect_ratio": true,
"pad_to_aspect_ratio": false,
"bounding_box_format": "yxyx"
},
"registered_name": "keras_hub>PARSeqImageConverter"
},
"config_file": "preprocessor.json",
"sequence_length": 25,
"add_start_token": true,
"add_end_token": true
},
"registered_name": "keras_hub>PARSeqCausalLMPreprocessor"
},
"name": "par_seq_causal_lm",
"num_perms": 6,
"add_forward_perms": true,
"add_mirrored_perms": true,
"seed": null,
"end_token_id": 0
},
"registered_name": "keras_hub>PARSeqCausalLM"
}