{
  "_name_or_path": "/jxm/cde/cde-small-v2/checkpoint-2635",
  "architecture": "transductive",
  "architectures": [
    "ContextualDocumentEmbeddingTransformer"
  ],
  "attn_implementation": null,
  "auto_map": {
    "AutoConfig": "model.ContextualModelConfig",
    "AutoModel": "model.ContextualDocumentEmbeddingTransformer"
  },
  "autoregressive_backbone": false,
  "cache_dir": null,
  "config_name": null,
  "dataset_backbone": null,
  "disable_dropout": true,
  "disable_transductive_rotary_embedding": true,
  "embedder": "answerdotai/ModernBERT-base",
  "embedder_rerank": "sentence-transformers/gtr-t5-base",
  "embedding_output_dim": null,
  "limit_layers": null,
  "limit_layers_first_stage": null,
  "logit_scale": 50.0,
  "max_seq_length": 512,
  "model_revision": "main",
  "pool_ignore_contextual_tokens": true,
  "pool_ignore_instruction_tokens": true,
  "pooling_strategy": "mean",
  "tokenizer_name": null,
  "torch_dtype": "float32",
  "transductive_corpus_size": 512,
  "transductive_sequence_dropout_prob": 0.0,
  "transductive_tie_token_embeddings": false,
  "transductive_tokens_per_document": 1,
  "transformers_version": "4.48.0.dev0"
}