first commit
parent 22f5a55802
commit 673f3a3a5f
@@ -0,0 +1,21 @@
{
  "architectures": [
    "RobertaForMaskedLM"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "type_vocab_size": 1,
  "vocab_size": 50265
}
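The hunk above has the familiar shape of a Hugging Face config.json for a RoBERTa-base-sized masked-LM checkpoint (12 layers, 12 heads, hidden size 768, 50265-token vocabulary). As a minimal sketch of how such a file is consumed, assuming the transformers library and a hypothetical local checkout at ./model (neither is named in this commit):

    from transformers import RobertaConfig, RobertaForMaskedLM

    # Hypothetical path: assumes this config file sits in ./model.
    config = RobertaConfig.from_pretrained("./model")

    # Building from the config alone yields randomly initialized weights;
    # RobertaForMaskedLM.from_pretrained("./model") would instead also load
    # the committed binary weight file.
    model = RobertaForMaskedLM(config)

    print(config.num_hidden_layers, config.num_attention_heads, config.hidden_size)
    # -> 12 12 768

Note that max_position_embeddings is 514 rather than 512: RoBERTa starts position ids at pad_token_id + 1, so two embedding slots are reserved and 512 usable positions remain, matching the model_max_length committed below.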
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -0,0 +1 @@
{"model_max_length": 512}
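This one-line file has the shape of a tokenizer config: model_max_length caps how many tokens the tokenizer emits when truncation is requested. A minimal usage sketch, again assuming transformers and the hypothetical ./model path:

    from transformers import AutoTokenizer

    # Hypothetical path: assumes the committed vocabulary files live in ./model.
    tokenizer = AutoTokenizer.from_pretrained("./model")

    # With truncation enabled, output length is capped at
    # tokenizer.model_max_length (512 here).
    encoded = tokenizer("a long document " * 1000, truncation=True)
    print(len(encoded["input_ids"]))  # <= 512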
File diff suppressed because one or more lines are too long