{ "_name_or_path": "./checkpoints/qwen14b-finetune_all-3m-ocr-448-1024-zero3/checkpoint-4500", "architectures": [ "HuazangQWenForCausalLM" ], "attn_dropout_prob": 0.0, "auto_map": { "AutoConfig": "configuration_qwen.QWenConfig", "AutoModelForCausalLM": "modeling_qwen.QWenLMHeadModel" }, "bf16": true, "emb_dropout_prob": 0.0, "fp16": false, "fp32": false, "hidden_size": 5120, "image_aspect_ratio": "pad", "initializer_range": 0.02, "intermediate_size": 27392, "kv_channels": 128, "layer_norm_epsilon": 1e-06, "max_position_embeddings": 8192, "mm_hidden_size": 1024, "mm_projector_lr": null, "mm_projector_type": "mlp2x_gelu", "mm_use_im_patch_token": false, "mm_use_im_start_end": false, "mm_vision_select_feature": "patch", "mm_vision_select_layer": -2, "mm_vision_tower": "./huazang_sxp/pretrain_weight/qwen_vit_448", "model_type": "Huazang_Qwen", "no_bias": true, "num_attention_heads": 40, "num_hidden_layers": 40, "onnx_safe": null, "rotary_emb_base": 10000, "rotary_pct": 1.0, "scale_attn_weights": true, "seq_length": 2048, "softmax_in_fp32": false, "tie_word_embeddings": false, "tokenizer_class": "QWenTokenizer", "tokenizer_model_max_length": 2048, "tokenizer_padding_side": "right", "torch_dtype": "bfloat16", "transformers_version": "4.34.0", "tune_mm_mlp_adapter": true, "use_cache": true, "use_cache_kernel": false, "use_cache_quantization": false, "use_dynamic_ntk": true, "use_flash_attn": true, "use_logn_attn": true, "use_mm_proj": true, "vocab_size": 152064 }