John6666 committed on
Commit
9402170
1 Parent(s): e677307

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +2 -6
  2. mod.py +6 -3
app.py CHANGED
@@ -9,7 +9,7 @@ import copy
9
  import random
10
  import time
11
  from mod import (models, clear_cache, get_repo_safetensors, change_base_model,
12
- description_ui, num_loras, compose_lora_json, is_valid_lora, fuse_loras, get_trigger_word)
13
  from flux import (search_civitai_lora, select_civitai_lora, search_civitai_lora_json,
14
  download_my_lora, get_all_lora_tupled_list, apply_lora_prompt,
15
  update_loras)
@@ -22,10 +22,6 @@ from tagger.fl2flux import predict_tags_fl2_flux
22
  with open('loras.json', 'r') as f:
23
  loras = json.load(f)
24
 
25
- # Initialize the base model
26
- base_model = models[0]
27
- pipe = DiffusionPipeline.from_pretrained(base_model, torch_dtype=torch.bfloat16)
28
-
29
  MAX_SEED = 2**32-1
30
 
31
  class calculateDuration:
@@ -228,7 +224,7 @@ with gr.Blocks(theme=gr.themes.Soft(), fill_width=True, css=css) as app:
228
  outputs=[result, seed]
229
  )
230
 
231
- model_name.change(change_base_model, [model_name], None)
232
 
233
  gr.on(
234
  triggers=[lora_search_civitai_submit.click, lora_search_civitai_query.submit],
 
9
  import random
10
  import time
11
  from mod import (models, clear_cache, get_repo_safetensors, change_base_model,
12
+ description_ui, num_loras, compose_lora_json, is_valid_lora, fuse_loras, get_trigger_word, pipe)
13
  from flux import (search_civitai_lora, select_civitai_lora, search_civitai_lora_json,
14
  download_my_lora, get_all_lora_tupled_list, apply_lora_prompt,
15
  update_loras)
 
22
  with open('loras.json', 'r') as f:
23
  loras = json.load(f)
24
 
 
 
 
 
25
  MAX_SEED = 2**32-1
26
 
27
  class calculateDuration:
 
224
  outputs=[result, seed]
225
  )
226
 
227
+ model_name.change(change_base_model, [model_name], None, queue=False)
228
 
229
  gr.on(
230
  triggers=[lora_search_civitai_submit.click, lora_search_civitai_query.submit],
mod.py CHANGED
@@ -68,12 +68,15 @@ def get_repo_safetensors(repo_id: str):
68
  else: return gr.update(value=files[0], choices=files)
69
 
70
 
 
 
 
 
 
71
  def change_base_model(repo_id: str):
72
- from huggingface_hub import HfApi
73
  global pipe
74
- api = HfApi()
75
  try:
76
- if " " in repo_id or not api.repo_exists(repo_id): return
77
  clear_cache()
78
  pipe = DiffusionPipeline.from_pretrained(repo_id, torch_dtype=torch.bfloat16)
79
  except Exception as e:
 
68
  else: return gr.update(value=files[0], choices=files)
69
 
70
 
71
+ # Initialize the base model
72
+ base_model = models[0]
73
+ pipe = DiffusionPipeline.from_pretrained(base_model, torch_dtype=torch.bfloat16)
74
+
75
+
76
  def change_base_model(repo_id: str):
 
77
  global pipe
 
78
  try:
79
+ if not is_repo_name(repo_id) or not is_repo_exists(repo_id): return
80
  clear_cache()
81
  pipe = DiffusionPipeline.from_pretrained(repo_id, torch_dtype=torch.bfloat16)
82
  except Exception as e: