{ "_name_or_path": "microsoft/deberta-base", "architectures": [ "DebertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "event organization", "1": "executive statement", "2": "regulatory approval", "3": "hiring", "4": "foundation", "5": "closing", "6": "partnerships & alliances", "7": "expanding industry", "8": "new initiatives or programs", "9": "m&a", "10": "service & product providing", "11": "event organisation", "12": "new initiatives & programs", "13": "subsidiary establishment", "14": "product launching & presentation", "15": "product updates", "16": "executive appointment", "17": "alliance & partnership", "18": "ipo exit", "19": "article publication", "20": "clinical trial sponsorship", "21": "company description", "22": "investment in public company", "23": "other", "24": "expanding geography", "25": "participation in an event", "26": "support & philanthropy", "27": "department establishment", "28": "funding round", "29": "patent publication" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "alliance & partnership": 17, "article publication": 19, "clinical trial sponsorship": 20, "closing": 5, "company description": 21, "department establishment": 27, "event organisation": 11, "event organization": 0, "executive appointment": 16, "executive statement": 1, "expanding geography": 24, "expanding industry": 7, "foundation": 4, "funding round": 28, "hiring": 3, "investment in public company": 22, "ipo exit": 18, "m&a": 9, "new initiatives & programs": 12, "new initiatives or programs": 8, "other": 23, "participation in an event": 25, "partnerships & alliances": 6, "patent publication": 29, "product launching & presentation": 14, "product updates": 15, "regulatory approval": 2, "service & product providing": 10, "subsidiary establishment": 13, "support & philanthropy": 26 }, "layer_norm_eps": 1e-07, "max_position_embeddings": 512, "max_relative_positions": -1, "model_type": "deberta", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "pooler_dropout": 0, "pooler_hidden_act": "gelu", "pooler_hidden_size": 768, "pos_att_type": [ "c2p", "p2c" ], "position_biased_input": false, "problem_type": "multi_label_classification", "relative_attention": true, "torch_dtype": "float32", "transformers_version": "4.38.2", "type_vocab_size": 0, "vocab_size": 50265 }