Commit f51d9da (parent: 61b4672): fix lora detection issues
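This commit broadens LoRA weight detection in both the previewing service and the preview tab. A root-level pytorch_lora_weights.safetensors remains the primary location; if it is absent, the code now scans numeric checkpoint subdirectories under lora_weights/ for several known weight filenames (falling back to any *.safetensors file in the newest checkpoint), and finally looks in finetrainers_step_* checkpoint directories.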
vms/ui/project/services/previewing.py

@@ -35,22 +35,44 @@ class PreviewingService:
     def find_latest_lora_weights(self) -> Optional[str]:
         """Find the latest LoRA weights file"""
         try:
+            # Check if the root level file exists (this should be the primary location)
             lora_path = self.app.output_path / "pytorch_lora_weights.safetensors"
             if lora_path.exists():
                 return str(lora_path)
 
-            # …
+            # Check in lora_weights directory
+            lora_weights_dir = self.app.output_path / "lora_weights"
+            if lora_weights_dir.exists():
+                # Look for the latest checkpoint directory in lora_weights
+                lora_checkpoints = [d for d in lora_weights_dir.glob("*") if d.is_dir() and d.name.isdigit()]
+                if lora_checkpoints:
+                    latest_lora_checkpoint = max(lora_checkpoints, key=lambda x: int(x.name))
+
+                    # Check for weights in the latest LoRA checkpoint
+                    possible_weight_files = [
+                        "pytorch_lora_weights.safetensors",
+                        "adapter_model.safetensors",
+                        "pytorch_model.safetensors",
+                        "model.safetensors"
+                    ]
+
+                    for weight_file in possible_weight_files:
+                        weight_path = latest_lora_checkpoint / weight_file
+                        if weight_path.exists():
+                            return str(weight_path)
+
+                    # Check if any .safetensors files exist
+                    safetensors_files = list(latest_lora_checkpoint.glob("*.safetensors"))
+                    if safetensors_files:
+                        return str(safetensors_files[0])
 
+            # If not found in lora_weights, try to find in finetrainers checkpoints
+            checkpoints = list(self.app.output_path.glob("finetrainers_step_*"))
+            if checkpoints:
+                latest_checkpoint = max(checkpoints, key=lambda x: int(x.name.split("_")[-1]))
+                lora_path = latest_checkpoint / "pytorch_lora_weights.safetensors"
+                if lora_path.exists():
+                    return str(lora_path)
 
             return None
         except Exception as e:
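The search order introduced above is easy to exercise in isolation. Below is a minimal, self-contained sketch of the same lookup; the standalone signature is illustrative rather than the service's actual API (the real method reads self.app.output_path), and it assumes only a training output directory laid out as in the diff:

    from pathlib import Path
    from typing import Optional

    def find_latest_lora_weights(output_path: Path) -> Optional[str]:
        """Replicate the lookup order from PreviewingService.find_latest_lora_weights."""
        # 1. Root-level file (the primary location)
        root = output_path / "pytorch_lora_weights.safetensors"
        if root.exists():
            return str(root)

        # 2. Newest numeric checkpoint directory under lora_weights/
        lora_weights_dir = output_path / "lora_weights"
        if lora_weights_dir.exists():
            checkpoints = [d for d in lora_weights_dir.glob("*") if d.is_dir() and d.name.isdigit()]
            if checkpoints:
                latest = max(checkpoints, key=lambda d: int(d.name))
                for name in ("pytorch_lora_weights.safetensors", "adapter_model.safetensors",
                             "pytorch_model.safetensors", "model.safetensors"):
                    candidate = latest / name
                    if candidate.exists():
                        return str(candidate)
                # Otherwise accept any .safetensors file in that checkpoint
                leftovers = list(latest.glob("*.safetensors"))
                if leftovers:
                    return str(leftovers[0])

        # 3. Highest-numbered finetrainers_step_* checkpoint
        steps = list(output_path.glob("finetrainers_step_*"))
        if steps:
            latest_step = max(steps, key=lambda d: int(d.name.split("_")[-1]))
            candidate = latest_step / "pytorch_lora_weights.safetensors"
            if candidate.exists():
                return str(candidate)

        return None

Note the precedence: an exact root-level file always wins; within the newest lora_weights checkpoint the known filenames are preferred over an arbitrary *.safetensors match; and finetrainers_step_* checkpoints are consulted last.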
vms/ui/project/tabs/preview_tab.py

@@ -225,12 +225,34 @@ class PreviewTab(BaseTab):
         if lora_path.exists():
             return True
 
-        # …
+        # Check in lora_weights directory
+        lora_weights_dir = self.app.output_path / "lora_weights"
+        if lora_weights_dir.exists():
+            # Look for the latest checkpoint directory in lora_weights
+            lora_checkpoints = [d for d in lora_weights_dir.glob("*") if d.is_dir() and d.name.isdigit()]
+            if lora_checkpoints:
+                latest_lora_checkpoint = max(lora_checkpoints, key=lambda x: int(x.name))
+
+                # Check for weights in the latest LoRA checkpoint
+                possible_weight_files = [
+                    "pytorch_lora_weights.safetensors",
+                    "adapter_model.safetensors",
+                    "pytorch_model.safetensors",
+                    "model.safetensors"
+                ]
+
+                for weight_file in possible_weight_files:
+                    weight_path = latest_lora_checkpoint / weight_file
+                    if weight_path.exists():
+                        return True
+
+                # Check if any .safetensors files exist
+                safetensors_files = list(latest_lora_checkpoint.glob("*.safetensors"))
+                if safetensors_files:
+                    return True
 
+        # If not found in lora_weights, try to find in finetrainers checkpoints
+        checkpoints = list(self.app.output_path.glob("finetrainers_step_*"))
         for checkpoint in checkpoints:
             lora_path = checkpoint / "pytorch_lora_weights.safetensors"
             if lora_path.exists():
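A quick way to sanity-check the detection order is to fabricate the directory layouts the code expects and assert on the results. The harness below drives the standalone sketch from the previous section, not the Space itself, so it is purely illustrative:

    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        out = Path(tmp)

        # Nothing present yet: no weights should be found
        assert find_latest_lora_weights(out) is None

        # A finetrainers checkpoint with weights is found as a last resort
        step_dir = out / "finetrainers_step_200"
        step_dir.mkdir()
        (step_dir / "pytorch_lora_weights.safetensors").touch()
        assert "finetrainers_step_200" in find_latest_lora_weights(out)

        # A lora_weights checkpoint takes precedence over finetrainers steps
        ckpt = out / "lora_weights" / "500"
        ckpt.mkdir(parents=True)
        (ckpt / "adapter_model.safetensors").touch()
        assert find_latest_lora_weights(out).endswith("adapter_model.safetensors")

        # The root-level file wins over everything
        (out / "pytorch_lora_weights.safetensors").touch()
        assert find_latest_lora_weights(out) == str(out / "pytorch_lora_weights.safetensors")

Since the preview tab duplicates the service's search (returning booleans rather than paths), any future change to the checkpoint layout has to be mirrored in both hunks above.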