pos_quechua / config.json
{
  "_name_or_path": "Llamacha/QuBERTa",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "s",
    "1": "adj.",
    "2": "v",
    "3": "adv.",
    "4": "det.",
    "5": "pron.demostrativo",
    "6": "pron.",
    "7": "conj.",
    "8": "pron.interrog.",
    "9": "pron.definido",
    "10": "loc.",
    "11": "num",
    "12": "interj.",
    "13": "S",
    "14": "det. numeral",
    "15": "adv. interr",
    "16": "adv. lugar",
    "17": "prep",
    "18": "V",
    "19": "pron.indef.",
    "20": "adj. indefinido"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "S": 13,
    "V": 18,
    "adj.": 1,
    "adj. indefinido": 20,
    "adv.": 3,
    "adv. interr": 15,
    "adv. lugar": 16,
    "conj.": 7,
    "det.": 4,
    "det. numeral": 14,
    "interj.": 12,
    "loc.": 10,
    "num": 11,
    "prep": 17,
    "pron.": 6,
    "pron.definido": 9,
    "pron.demostrativo": 5,
    "pron.indef.": 19,
    "pron.interrog.": 8,
    "s": 0,
    "v": 2
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 6,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.21.3",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 52000
}
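
Below is a minimal usage sketch (not part of the original repository) showing how a RobertaForTokenClassification checkpoint described by this config could be loaded with the transformers library for Quechua POS tagging. The repo id "rjzevallos/pos_quechua" is an assumption inferred from the file path above, and the sample sentence is illustrative only; the tags returned by the pipeline correspond to the id2label entries in the config.

from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

# Assumed Hub repo id, inferred from the page path; adjust if the model lives elsewhere.
model_id = "rjzevallos/pos_quechua"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# The token-classification pipeline maps each predicted class index back to a
# POS tag string through the id2label table defined in this config.json.
pos_tagger = pipeline("token-classification", model=model, tokenizer=tokenizer)
print(pos_tagger("Ñuqa wasiyman rini"))  # illustrative Quechua sentence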