biobert-ehr-ner / config.json
{
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "B-DRUG",
    "1": "I-DRUG",
    "2": "B-STR",
    "3": "I-STR",
    "4": "B-DUR",
    "5": "I-DUR",
    "6": "B-ROU",
    "7": "I-ROU",
    "8": "B-FOR",
    "9": "I-FOR",
    "10": "B-ADE",
    "11": "I-ADE",
    "12": "B-DOS",
    "13": "I-DOS",
    "14": "B-REA",
    "15": "I-REA",
    "16": "B-FRE",
    "17": "I-FRE",
    "18": "O"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "B-ADE": 10,
    "B-DOS": 12,
    "B-DRUG": 0,
    "B-DUR": 4,
    "B-FOR": 8,
    "B-FRE": 16,
    "B-REA": 14,
    "B-ROU": 6,
    "B-STR": 2,
    "I-ADE": 11,
    "I-DOS": 13,
    "I-DRUG": 1,
    "I-DUR": 5,
    "I-FOR": 9,
    "I-FRE": 17,
    "I-REA": 15,
    "I-ROU": 7,
    "I-STR": 3,
    "O": 18
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 0,
  "type_vocab_size": 2,
  "vocab_size": 58996
}
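
This config describes a BERT-large-style encoder (24 layers, hidden size 1024, 16 attention heads) with a token-classification head over 19 BIO labels: 9 medication-related entity types (DRUG, STR, DUR, ROU, FOR, ADE, DOS, REA, FRE) plus "O". Below is a minimal sketch of loading the checkpoint with Hugging Face Transformers and running it on a clinical sentence. The model id "biobert-ehr-ner" is taken from the file path above and may need the full hub namespace; the example sentence and the aggregation setting are illustrative assumptions, not part of the config.

# Minimal NER sketch, assuming the checkpoint is published under the repo name above.
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

model_name = "biobert-ehr-ner"  # assumed repo id; prepend the owner namespace if required

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForTokenClassification.from_pretrained(model_name)

# num_labels follows directly from the id2label map in config.json (18 BIO tags + "O").
assert model.config.num_labels == 19

ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",  # merge B-/I- word pieces into whole entity spans
)

# Hypothetical input; output spans would carry labels such as DRUG, STR, DOS, FRE, REA
# depending on how the model was trained.
print(ner("Patient was started on metformin 500 mg twice daily for diabetes."))

Note that max_position_embeddings is 512, so longer EHR notes need to be chunked (for example by sentence or by sliding window) before being passed to the pipeline.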