danschr committed
Commit 2e14ea0
1 Parent(s): 1366939

Upload DebertaArgClassifier

config.json CHANGED
@@ -30,7 +30,6 @@
   },
   "label2id": null,
   "model_type": "deberta_arg_classifier",
-  "number_labels": 20,
   "torch_dtype": "float32",
-  "transformers_version": "4.26.1"
+  "transformers_version": "4.27.1"
 }
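With "number_labels" removed and no "num_labels" key written in its place, the label count now has to come from PretrainedConfig's standard mechanism: an id2label mapping in the config, an explicit num_labels argument, or the library default of 2. A minimal sketch of restoring the old 20-label setup at load time (the repo id is a placeholder and the value 20 simply mirrors the removed "number_labels": 20; both are assumptions, not part of this commit):

from transformers import AutoConfig

# Hypothetical usage sketch: supply num_labels explicitly, since config.json
# no longer carries a custom "number_labels" entry after this commit.
config = AutoConfig.from_pretrained(
    "danschr/deberta-arg-classifier",  # placeholder repo id, not confirmed by the diff
    trust_remote_code=True,            # needed to load the custom DebertaConfig class
    num_labels=20,                     # mirrors the removed "number_labels": 20
)
print(config.num_labels)  # 20; without the kwarg (and without id2label) the default is 2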
configuration_deberta_arg_classifier.py CHANGED
@@ -3,6 +3,7 @@ from transformers import PretrainedConfig
 class DebertaConfig(PretrainedConfig):
     model_type = "deberta_arg_classifier"
 
-    def __init__(self, num_labels: int=20, **kwargs):
-        self.number_labels = num_labels
+    def __init__(self, **kwargs):
         super().__init__(**kwargs)
+
+#%%
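After this change DebertaConfig no longer defines its own label-count field; anything passed as num_labels flows through **kwargs into PretrainedConfig, which exposes it via its num_labels property. A minimal sketch of the resulting behaviour (the value 20 is an assumption carried over from the old default, not set anywhere in the new code):

from transformers import PretrainedConfig

class DebertaConfig(PretrainedConfig):
    model_type = "deberta_arg_classifier"

    def __init__(self, **kwargs):
        # Label handling (num_labels, id2label, label2id) is now left entirely
        # to PretrainedConfig instead of a custom "number_labels" attribute.
        super().__init__(**kwargs)

cfg = DebertaConfig(num_labels=20)  # 20 mirrors the old default; assumption for illustration
assert cfg.num_labels == 20         # served by PretrainedConfig's num_labels property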
modeling_deberta_arg_classifier.py CHANGED
@@ -11,7 +11,7 @@ class DebertaArgClassifier(PreTrainedModel):
     def __init__(self, config):
         super().__init__(config)
         self.bert = AutoModel.from_pretrained("microsoft/deberta-large")
-        self.classifier = nn.Linear(self.bert.config.hidden_size, config.number_labels)
+        self.classifier = nn.Linear(self.bert.config.hidden_size, config.num_labels)
         self.criterion = nn.BCEWithLogitsLoss()
 
 
@@ -20,10 +20,11 @@ class DebertaArgClassifier(PreTrainedModel):
         output = self._cls_embeddings(output)
         output_cls = self.classifier(output)
         output = torch.sigmoid(output_cls)
+        loss = None
         if labels is not None:
             loss = self.cirterion(output_cls, labels)
-            return {"loss": loss, "logits": output}
-        return {"logits": output}
+            return {"loss": loss, "output": output}
+        return {"loss": loss, "output": output}
 
 
     def _cls_embeddings(self, output):
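Two details of the updated forward path are worth flagging: self.cirterion is a typo for the self.criterion defined in __init__ (the labels branch would raise AttributeError as committed), and the two identical added return statements (one inside the if, one after it, judging from the lines they replace) make the first redundant. A sketch of how the method inside DebertaArgClassifier could look with those points addressed, assuming a standard input_ids/attention_mask signature that this hunk does not show:

def forward(self, input_ids=None, attention_mask=None, labels=None):
    # Signature is assumed; the diff only shows the body from the classifier head onward.
    output = self.bert(input_ids=input_ids, attention_mask=attention_mask)
    output = self._cls_embeddings(output)     # pooled [CLS] representation
    output_cls = self.classifier(output)      # raw logits, shape (batch, num_labels)
    probs = torch.sigmoid(output_cls)         # per-label probabilities for multi-label output

    loss = None
    if labels is not None:
        # BCEWithLogitsLoss is applied to the raw logits, not the sigmoid output;
        # "cirterion" in the committed code is a typo for this attribute.
        loss = self.criterion(output_cls, labels)
    # A single return covers both branches; the duplicated return in the commit is redundant.
    return {"loss": loss, "output": probs}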