Kowsher committed on
Commit
97c8b2e
1 Parent(s): e755826

Update ChatFalcon.py

Browse files
Files changed (1) hide show
  1. ChatFalcon.py +3 -4
ChatFalcon.py CHANGED
@@ -23,6 +23,7 @@ from torch import nn
23
  from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, LayerNorm, MSELoss
24
  from torch.nn import functional as F
25
  from transformers import AutoModel,AutoModelForCausalLM, AutoConfig
 
26
  from transformers.modeling_outputs import (
27
  BaseModelOutputWithPastAndCrossAttentions,
28
  CausalLMOutputWithCrossAttentions,
@@ -33,7 +34,7 @@ from transformers.modeling_outputs import (
33
  from transformers.modeling_utils import PreTrainedModel
34
  from transformers.utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
35
  from .configuration_falcon import FalconConfig
36
- from transformers.models.auto import modeling_auto
37
 
38
  logger = logging.get_logger(__name__)
39
 
@@ -1266,6 +1267,4 @@ class FalconForQuestionAnswering(FalconPreTrainedModel):
1266
  attentions=outputs.attentions,
1267
  )
1268
 
1269
- AutoConfig.register("falcon", FalconConfig)
1270
- AutoModel.register(FalconConfig, FalconModel)
1271
- AutoModelForCausalLM.register(FalconConfig, FalconForCausalLM)
 
23
  from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, LayerNorm, MSELoss
24
  from torch.nn import functional as F
25
  from transformers import AutoModel,AutoModelForCausalLM, AutoConfig
26
+ from transformers.models.auto import modeling_auto
27
  from transformers.modeling_outputs import (
28
  BaseModelOutputWithPastAndCrossAttentions,
29
  CausalLMOutputWithCrossAttentions,
 
34
  from transformers.modeling_utils import PreTrainedModel
35
  from transformers.utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
36
  from .configuration_falcon import FalconConfig
37
+
38
 
39
  logger = logging.get_logger(__name__)
40
 
 
1267
  attentions=outputs.attentions,
1268
  )
1269
 
1270
+