hanzla committed
Commit
e1e2d64
1 Parent(s): db2e21a
Files changed (1)
  1. app.py +2 -3
app.py CHANGED
@@ -9,14 +9,13 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
  model_name = "ModularityAI/gemma-2b-datascience-it-raft"
  tokenizer_name = "google/gemma-2b-it"
 
- model = AutoModelForCausalLM.from_pretrained(model_name,torch_dtype=torch.bfloat16,device='cuda')
- tokenizer = AutoTokenizer.from_pretrained(tokenizer_name,device='cuda')
+ model = AutoModelForCausalLM.from_pretrained(model_name,torch_dtype=torch.bfloat16,device_map='cuda')
+ tokenizer = AutoTokenizer.from_pretrained(tokenizer_name,device_map='cuda')
 
  pipeline = transformers.pipeline(
      "text-generation",
      model=model,
      tokenizer=tokenizer,
-     device="cuda",
  )
 
  def format_test_question(q):
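
In context, the patch swaps the device= keyword for device_map= when loading the model and drops the explicit device="cuda" from the pipeline call: device_map is the placement argument that from_pretrained actually understands, and a model placed that way already carries its device, so the pipeline does not need one. Below is a minimal sketch of how the patched loading code reads, assuming app.py imports torch and transformers as the hunk header suggests; note that tokenizers take no device argument at all, so the device_map kwarg kept on the tokenizer line in this commit is effectively a no-op.

import torch
import transformers
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "ModularityAI/gemma-2b-datascience-it-raft"
tokenizer_name = "google/gemma-2b-it"

# device_map is the supported placement argument for from_pretrained
# (it relies on the accelerate package being installed).
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.bfloat16,
    device_map="cuda",
)

# Tokenizers hold no weights and accept no device argument, so they can
# be loaded without any placement kwarg.
tokenizer = AutoTokenizer.from_pretrained(tokenizer_name)

# With the model already placed via device_map, the pipeline reuses the
# model's device; passing device="cuda" here as well is unnecessary.
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
)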