first commit

commit d5571c97a5 (parent 3f4df1f268)
Author: xxl
Date:   2024-12-25 10:43:07 +08:00

11 changed files with 64108 additions and 2 deletions

2818  README.md (new file)
File diff suppressed because it is too large.

BIN  model.safetensors (Stored with Git LFS, new file)
Binary file not shown.
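The safetensors checkpoint is tracked through Git LFS, so only a pointer lives in git history. A minimal sketch for peeking at the downloaded file without loading it fully; the tensor names and shapes printed are whatever the checkpoint actually contains, not something this diff specifies:

# Sketch: inspect the LFS-tracked weights; safe_open memory-maps the file.
from safetensors import safe_open

with safe_open("model.safetensors", framework="pt", device="cpu") as f:
    for key in list(f.keys())[:5]:
        print(key, f.get_slice(key).get_shape())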

20  modules.json (new file)
@@ -0,0 +1,20 @@
[
{
"idx": 0,
"name": "0",
"path": "",
"type": "sentence_transformers.models.Transformer"
},
{
"idx": 1,
"name": "1",
"path": "1_Pooling",
"type": "sentence_transformers.models.Pooling"
},
{
"idx": 2,
"name": "2",
"path": "2_Normalize",
"type": "sentence_transformers.models.Normalize"
}
]
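modules.json declares the three-stage sentence-transformers pipeline: a Transformer backbone, a Pooling layer configured in 1_Pooling/, and a final L2 Normalize. SentenceTransformer(path) assembles exactly this from the file; the explicit construction below is an equivalent sketch, where the mean-pooling default is an assumption (the real mode lives in 1_Pooling/config.json):

from sentence_transformers import SentenceTransformer, models

# Explicit form of the pipeline modules.json describes.
word = models.Transformer(".", max_seq_length=8192)          # module 0: Transformer
pool = models.Pooling(word.get_word_embedding_dimension())   # module 1: Pooling (mode assumed mean; see 1_Pooling/config.json)
norm = models.Normalize()                                    # module 2: Normalize
model = SentenceTransformer(modules=[word, pool, norm])

emb = model.encode(["an example sentence"])  # unit-length vectors, thanks to Normalize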

BIN  onnx/model.onnx (Stored with Git LFS, new file)
Binary file not shown.

BIN  onnx/model_quantized.onnx (Stored with Git LFS, new file)
Binary file not shown.
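Both ONNX exports are LFS blobs; the quantized one trades a little accuracy for smaller size and faster CPU inference. A hedged sketch of running it with onnxruntime, assuming the export uses standard BERT-style input names and emits token embeddings first (check session.get_inputs()/get_outputs() on the real file):

import numpy as np
import onnxruntime as ort
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")                 # repo root
session = ort.InferenceSession("onnx/model_quantized.onnx")

enc = tokenizer("an example sentence", return_tensors="np")
# Feed only the inputs this export actually declares.
feeds = {i.name: enc[i.name] for i in session.get_inputs() if i.name in enc}
token_embeddings = session.run(None, feeds)[0]                 # assumed (1, seq_len, hidden)

# Mean pooling + L2 normalization, mirroring the modules.json pipeline above.
mask = enc["attention_mask"][..., None]
emb = (token_embeddings * mask).sum(1) / mask.sum(1)
emb = emb / np.linalg.norm(emb, axis=1, keepdims=True)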

BIN  pytorch_model.bin (Stored with Git LFS, new file)
Binary file not shown.

4  sentence_bert_config.json (new file)
@@ -0,0 +1,4 @@
{
"max_seq_length": 8192,
"do_lower_case": false
}
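This file is how sentence-transformers learns the 8192-token window: the library reads it at load time, and the attribute can be lowered afterwards. A small sketch, assuming the repo is checked out locally:

from sentence_transformers import SentenceTransformer

model = SentenceTransformer(".")   # reads sentence_bert_config.json
print(model.max_seq_length)        # 8192
model.max_seq_length = 512         # optional: shorter window, faster encoding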

7  special_tokens_map.json (new file)
@@ -0,0 +1,7 @@
{
"cls_token": "[CLS]",
"mask_token": "[MASK]",
"pad_token": "[PAD]",
"sep_token": "[SEP]",
"unk_token": "[UNK]"
}
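These five markers are the classic BERT special tokens; their ids (0, 100, 101, 102, 103) appear in added_tokens_decoder in tokenizer_config.json below. A quick sketch to confirm the mapping, again assuming a local checkout:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
for name in ("pad_token", "unk_token", "cls_token", "sep_token", "mask_token"):
    token = getattr(tok, name)
    print(name, token, tok.convert_tokens_to_ids(token))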

30672  tokenizer.json (new file)
File diff suppressed because it is too large.

55  tokenizer_config.json (new file)
@@ -0,0 +1,55 @@
{
"added_tokens_decoder": {
"0": {
"content": "[PAD]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"100": {
"content": "[UNK]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"101": {
"content": "[CLS]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"102": {
"content": "[SEP]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"103": {
"content": "[MASK]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
}
},
"clean_up_tokenization_spaces": true,
"cls_token": "[CLS]",
"do_lower_case": true,
"mask_token": "[MASK]",
"model_max_length": 8192,
"pad_token": "[PAD]",
"sep_token": "[SEP]",
"strip_accents": null,
"tokenize_chinese_chars": true,
"tokenizer_class": "BertTokenizer",
"unk_token": "[UNK]"
}
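Note that tokenizer_config.json sets "do_lower_case": true while sentence_bert_config.json above says false; when both are present, the tokenizer's own config is what BertTokenizer applies. A sketch of the practical effect (local path assumed):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
print(tok.model_max_length)               # 8192
ids = tok("Hello, World!")["input_ids"]
print(tok.convert_ids_to_tokens(ids))     # ['[CLS]', 'hello', ',', 'world', '!', '[SEP]']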

30522  vocab.txt (new file)
File diff suppressed because it is too large.
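vocab.txt is the plain WordPiece vocabulary, one token per line, with the line index serving as the token id; 30522 entries is the stock BERT-base vocabulary size. A tiny sketch, where ids 101/102 should match the [CLS]/[SEP] entries listed in tokenizer_config.json above:

with open("vocab.txt", encoding="utf-8") as f:
    vocab = [line.rstrip("\n") for line in f]
print(len(vocab), vocab[101], vocab[102])   # 30522 [CLS] [SEP]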