Yaofu3 committed
Commit 3020792
1 Parent(s): 5fd4d0a

add base class

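This commit replaces the direct lm_eval HFLM parent of both model wrappers with a shared base class, HFLMWithMeasurement, imported from src/backend/hflm_with_measurement.py; that file itself is not part of this diff. A minimal sketch of what such a base class might look like, assuming it subclasses HFLM and records wall-clock timings around generation (the attribute and method bodies here are hypothetical):

import time

from lm_eval.models.huggingface import HFLM


class HFLMWithMeasurement(HFLM):
    # Hypothetical sketch: the real src/backend/hflm_with_measurement.py
    # is not shown in this commit.

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.measurements = []  # assumed: per-call wall-clock durations

    def generate_until(self, requests, *args, **kwargs):
        # Time each batch of generation requests before delegating to HFLM.
        start = time.perf_counter()
        results = super().generate_until(requests, *args, **kwargs)
        self.measurements.append(time.perf_counter() - start)
        return results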
backend-cli.py CHANGED
@@ -406,8 +406,8 @@ if __name__ == "__main__":
     if local_debug:
         debug_model_names = ["mistralai/Mixtral-8x7B-Instruct-v0.1"]
         # debug_model_names = ["TheBloke/Mixtral-8x7B-v0.1-GPTQ"]
-        # debug_task_name = 'selfcheck'
-        debug_task_name = "mmlu"
+        debug_task_name = 'selfcheckgpt'
+        # debug_task_name = "mmlu"
         task_lst = TASKS_HARNESS.copy()
         for task in task_lst:
             for debug_model_name in debug_model_names:
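The debug task switches from mmlu to selfcheckgpt (a SelfCheckGPT-style hallucination evaluation). For context, a model registered with lm-eval-harness, such as hf-chat below, can be driven through the harness's standard entry point; a minimal sketch, assuming lm-eval-harness v0.4's simple_evaluate API and that selfcheckgpt is registered as a task in this repo:

import lm_eval

# Hypothetical invocation; the exact arguments backend-cli.py passes are not shown here.
results = lm_eval.simple_evaluate(
    model="hf-chat",  # registered in src/backend/huggingface_generate_until.py
    model_args="pretrained=mistralai/Mixtral-8x7B-Instruct-v0.1",
    tasks=["selfcheckgpt"],
)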
src/backend/huggingface_generate_until.py CHANGED
@@ -2,12 +2,13 @@ from typing import List, Literal, Optional, Tuple, Union
 import torch
 import transformers
 
-from lm_eval.models.huggingface import HFLM
 from lm_eval.api.registry import register_model
 
+from src.backend.hflm_with_measurement import HFLMWithMeasurement
+
 
 @register_model("hf-chat")
-class HFLMwithChatTemplate(HFLM):
+class HFLMwithChatTemplate(HFLMWithMeasurement):
     def __init__(self, use_chat_template=True, **kwargs):
         super().__init__(**kwargs)
         self.use_chat_template = use_chat_template
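The hf-chat wrapper keeps its use_chat_template flag; only its parent class changes. For reference, chat-template formatting in transformers (4.34+) is typically applied as below; this is a sketch only, since the class's actual prompt handling is outside this diff, and format_prompt is a hypothetical helper:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")


def format_prompt(prompt: str, use_chat_template: bool = True) -> str:
    # Wrap a raw prompt as a single-turn chat message when the flag is set.
    if use_chat_template:
        messages = [{"role": "user", "content": prompt}]
        return tokenizer.apply_chat_template(
            messages, tokenize=False, add_generation_prompt=True
        )
    return prompt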
src/backend/moe_infinity.py CHANGED
@@ -5,12 +5,13 @@ from transformers import AutoModelForCausalLM
 from moe_infinity import MoE
 from typing import List, Tuple, Optional, Union
 
-from lm_eval.models.huggingface import HFLM
 from lm_eval.api.registry import register_model
 
+from src.backend.hflm_with_measurement import HFLMWithMeasurement
+
 
 @register_model("moe-infinity")
-class MoEHFLM(HFLM):
+class MoEHFLM(HFLMWithMeasurement):
     def __init__(
         self,
         pretrained: str = "mistralai/Mixtral-8x7B-Instruct-v0.1",
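With both wrappers now inheriting from HFLMWithMeasurement, whatever measurement bookkeeping the base class performs applies uniformly to plain Hugging Face models and MoE-Infinity-offloaded ones. A minimal usage sketch, assuming lm-eval-harness v0.4's registry lookup (get_model); constructor arguments beyond pretrained are omitted:

from lm_eval.api.registry import get_model

# Look up the class registered above and build an instance of it.
MoEModel = get_model("moe-infinity")
lm = MoEModel(pretrained="mistralai/Mixtral-8x7B-Instruct-v0.1")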