From 50cc88034729449d0c8fa104af1b4619b1666947 Mon Sep 17 00:00:00 2001
From: YYJ-aaaa <1058387004@qq.com>
Date: Thu, 5 Dec 2024 14:32:35 +0800
Subject: [PATCH] first commit

---
 config.json              | 44 ++++++++++++++++++++++++++++++++++++++++
 preprocessor_config.json | 15 ++++++++++++++
 pytorch_model.bin        |  3 +++
 3 files changed, 62 insertions(+)
 create mode 100644 config.json
 create mode 100644 preprocessor_config.json
 create mode 100644 pytorch_model.bin

diff --git a/config.json b/config.json
new file mode 100644
index 0000000..c015d3a
--- /dev/null
+++ b/config.json
@@ -0,0 +1,44 @@
+{
+  "_name_or_path": "google/vit-base-patch16-224-in21k",
+  "architectures": [
+    "ViTForImageClassification"
+  ],
+  "attention_probs_dropout_prob": 0.0,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.0,
+  "hidden_size": 768,
+  "id2label": {
+    "0": "LABEL_0",
+    "1": "LABEL_1",
+    "2": "LABEL_2",
+    "3": "LABEL_3",
+    "4": "LABEL_4",
+    "5": "LABEL_5",
+    "6": "LABEL_6",
+    "7": "LABEL_7",
+    "8": "LABEL_8",
+    "9": "LABEL_9"
+  },
+  "image_size": 224,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "label2id": {
+    "LABEL_0": 0,
+    "LABEL_1": 1,
+    "LABEL_2": 2,
+    "LABEL_3": 3,
+    "LABEL_4": 4,
+    "LABEL_5": 5,
+    "LABEL_6": 6,
+    "LABEL_7": 7,
+    "LABEL_8": 8,
+    "LABEL_9": 9
+  },
+  "layer_norm_eps": 1e-12,
+  "model_type": "vit",
+  "num_attention_heads": 12,
+  "num_channels": 3,
+  "num_hidden_layers": 12,
+  "patch_size": 16,
+  "transformers_version": "4.5.0.dev0"
+}
diff --git a/preprocessor_config.json b/preprocessor_config.json
new file mode 100644
index 0000000..70fbc14
--- /dev/null
+++ b/preprocessor_config.json
@@ -0,0 +1,15 @@
+{
+  "do_normalize": true,
+  "do_resize": true,
+  "image_mean": [
+    0.5,
+    0.5,
+    0.5
+  ],
+  "image_std": [
+    0.5,
+    0.5,
+    0.5
+  ],
+  "size": 224
+}
diff --git a/pytorch_model.bin b/pytorch_model.bin
new file mode 100644
index 0000000..e01bce1
--- /dev/null
+++ b/pytorch_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5888ea39698a0fe8bb8ddeb8b41aa0d62e26585bcfc119948be1999d1fc49698
+size 343306743
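
The patch above adds a standard Vision Transformer checkpoint (ViT-Base/16 at 224x224 with a 10-way classification head): config.json describes the architecture, preprocessor_config.json the image preprocessing (resize plus mean/std 0.5 normalization), and pytorch_model.bin is the Git LFS pointer to the weights. Below is a minimal, non-authoritative sketch of how such a checkpoint is typically loaded with the Hugging Face Transformers library; the local directory name, the example image path, and the choice of ViTImageProcessor are assumptions, not part of this commit.

# Minimal usage sketch (assumptions: the three files above sit in a local
# directory "./vit-10-class" and an input image "example.jpg" exists;
# neither name comes from the commit itself).
import torch
from PIL import Image
from transformers import ViTForImageClassification, ViTImageProcessor

model_dir = "./vit-10-class"  # hypothetical local checkout of this repo

# preprocessor_config.json: resize to 224x224, normalize with mean/std 0.5
processor = ViTImageProcessor.from_pretrained(model_dir)
# config.json + pytorch_model.bin: ViT-Base/16 with a 10-label classifier head
model = ViTForImageClassification.from_pretrained(model_dir)

image = Image.open("example.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

predicted_id = logits.argmax(-1).item()
print(model.config.id2label[predicted_id])  # one of LABEL_0 ... LABEL_9

The generic LABEL_0 through LABEL_9 names come directly from id2label/label2id in config.json; a downstream user would normally remap them to the actual class names of the fine-tuning dataset.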