Rafal committed on
Commit
c2cd680
1 Parent(s): 6419576

Added back LLAMA 7b and updated the model loading

Browse files
Files changed (2) hide show
  1. app.py +1 -1
  2. mgr_bias_scoring.py +4 -4
app.py CHANGED
@@ -872,7 +872,7 @@ with gr.Blocks(theme=soft, title="Social Bias Testing in Language Models",
872
  gen_title = gr.Markdown("### Select Tested Model", visible=True)
873
 
874
  # Tested Model Selection - "openlm-research/open_llama_7b", "tiiuae/falcon-7b"
875
- tested_model_name = gr.Dropdown( ["bert-base-uncased","bert-large-uncased","gpt2","gpt2-medium","gpt2-large","emilyalsentzer/Bio_ClinicalBERT","microsoft/biogpt","openlm-research/open_llama_3b"], value="bert-base-uncased",
876
  multiselect=None,
877
  interactive=True,
878
  label="Tested Language Model",
 
872
  gen_title = gr.Markdown("### Select Tested Model", visible=True)
873
 
874
  # Tested Model Selection - "openlm-research/open_llama_7b", "tiiuae/falcon-7b"
875
+ tested_model_name = gr.Dropdown( ["bert-base-uncased","bert-large-uncased","gpt2","gpt2-medium","gpt2-large","emilyalsentzer/Bio_ClinicalBERT","microsoft/biogpt","openlm-research/open_llama_3b","openlm-research/open_llama_7b"], value="bert-base-uncased",
876
  multiselect=None,
877
  interactive=True,
878
  label="Tested Language Model",
mgr_bias_scoring.py CHANGED
@@ -70,8 +70,8 @@ def _getModel(model_name, device):
70
  torch_dtype=torch.bfloat16,
71
  low_cpu_mem_usage=True, ##
72
  #use_safetensors=True, ##
73
- offload_folder="offload",
74
- offload_state_dict = True,
75
  device_map='auto')
76
  elif "falcon" in model_name.lower():
77
  print(f"Getting FALCON model: {model_name}")
@@ -81,8 +81,8 @@ def _getModel(model_name, device):
81
  trust_remote_code=True,
82
  low_cpu_mem_usage=True, ##
83
  #use_safetensors=True, ##
84
- offload_folder="offload",
85
- offload_state_dict = True,
86
  device_map='auto')
87
  #model.tie_weights()
88
  if model == None:
 
70
  torch_dtype=torch.bfloat16,
71
  low_cpu_mem_usage=True, ##
72
  #use_safetensors=True, ##
73
+ #offload_folder="offload",
74
+ #offload_state_dict = True,
75
  device_map='auto')
76
  elif "falcon" in model_name.lower():
77
  print(f"Getting FALCON model: {model_name}")
 
81
  trust_remote_code=True,
82
  low_cpu_mem_usage=True, ##
83
  #use_safetensors=True, ##
84
+ #offload_folder="offload",
85
+ #offload_state_dict = True,
86
  device_map='auto')
87
  #model.tie_weights()
88
  if model == None: