# Source: product-image-update-port-10 / tests / test_full_pipeline.py
# Deployed via GitHub Actions workflow:
#   "Deploy to Hugging Face Space: product-image-update-port-10"
# Commit: 18faf97
# ----------------------------------------------------------------------
# FULL PIPELINE TEST
# ----------------------------------------------------------------------
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import time
import json
from PIL import Image
from config import TEST_IMAGE_URL, TEST_PRODUCT_TYPE
# ----------------------------------------------------------------------
# INTEGRATION TEST - PROCESS REAL IMAGE WITH ALL MODELS
# ----------------------------------------------------------------------
def test_process_image_through_entire_pipeline():
    """Run one real image through the entire pipeline and print a report.

    Integration smoke test: downloads TEST_IMAGE_URL, executes every step in
    PIPELINE_STEPS against a single ProcessingContext, then reports what each
    step produced. It deliberately makes no hard assertions because in the
    Zero GPU environment (SPACE_ID set) models load on-demand and several
    steps are expected to be skipped.
    """
    print(f"\n{'='*60}")
    print(f"TESTING FULL PIPELINE WITH: {TEST_IMAGE_URL}")
    print(f"Product Type: {TEST_PRODUCT_TYPE}")
    print(f"{'='*60}\n")

    # Imported lazily so merely importing this test module does not pull in
    # the (heavy) pipeline/model machinery.
    from src.utils import ProcessingContext
    from src.pipeline import run_functions_in_sequence, PIPELINE_STEPS
    from src.models import model_loader

    _report_model_state(model_loader)

    # ------------------------------------------------------------------
    # PREPARE TEST DATA
    # ------------------------------------------------------------------
    print("\n📦 PREPARING TEST DATA...")
    contexts = [ProcessingContext(url=TEST_IMAGE_URL,
                                  product_type=TEST_PRODUCT_TYPE,
                                  keywords=[])]

    # ------------------------------------------------------------------
    # RUN PIPELINE
    # ------------------------------------------------------------------
    print("\n🚀 RUNNING FULL PIPELINE...")
    print(f" Pipeline steps: {[step.__name__ for step in PIPELINE_STEPS]}")
    start_time = time.time()
    batch_logs = run_functions_in_sequence(contexts, PIPELINE_STEPS)
    processing_time = time.time() - start_time

    # ------------------------------------------------------------------
    # ANALYZE RESULTS
    # ------------------------------------------------------------------
    ctx = contexts[0]
    print(f"\n📊 PROCESSING RESULTS:")
    print(f" Total processing time: {processing_time:.2f} seconds")
    _report_step_results(ctx)
    _show_logs(ctx, batch_logs)
    _print_summary(ctx, processing_time)


def _report_model_state(model_loader):
    """Print model-loading state; locally, attempt an eager load (best-effort)."""
    print("\n🔍 CHECKING MODEL LOADING STATE...")
    if os.getenv("SPACE_ID"):
        # Zero GPU Space: the pipeline loads models on demand, so just report.
        print("✅ Running in Zero GPU environment - models will load on-demand")
        print(f" MODELS_LOADED: {model_loader.MODELS_LOADED}")
        print(f" LOAD_ERROR: {model_loader.LOAD_ERROR}")
    else:
        print("🔥 LOADING ALL MODELS...")
        try:
            model_loader.ensure_models_loaded()
            if model_loader.MODELS_LOADED:
                print("✅ Models loaded successfully!")
            else:
                print("⚠️ Models not fully loaded but continuing...")
        except Exception as e:
            # Loading failures are reported, not fatal: the step report below
            # will show exactly which stages were skipped as a result.
            print(f"⚠️ Model loading encountered issues: {e}")
            print("Continuing with test anyway...")


def _report_step_results(ctx):
    """Print a pass/skip/fail line for each of the five pipeline steps."""
    if hasattr(ctx, 'skip_processing') and ctx.skip_processing:
        print(f"❌ Processing was skipped")
        if hasattr(ctx, 'error') and ctx.error:
            print(f" Error: {ctx.error}")

    print("\n📋 STEP-BY-STEP RESULTS:")
    # 1. Image download
    if "original" in ctx.pil_img:
        print(f"✅ Step 1: Image downloaded - Size: {ctx.pil_img['original'].size}")
    else:
        print(f"❌ Step 1: Image download failed")
    # 2. Background removal
    if "background_removed" in ctx.pil_img:
        print(f"✅ Step 2: Background removed successfully")
    else:
        print(f"⚠️ Step 2: Background removal skipped")
        if os.getenv("SPACE_ID"):
            print(f" (Expected in Zero GPU - models load on-demand)")
    # 3. Object detection — detection_result maps detection type to a list
    # of detections (presumably; confirm against src.pipeline).
    if hasattr(ctx, 'detection_result') and ctx.detection_result:
        print(f"✅ Step 3: Objects detected - {len(ctx.detection_result)} detections")
        for det_type, detections in ctx.detection_result.items():
            if detections:
                print(f" - {det_type}: {len(detections)} objects")
    else:
        print(f"⚠️ Step 3: Object detection skipped")
    # 4. Cropping / padding
    if "cropped" in ctx.pil_img:
        print(f"✅ Step 4: Image cropped - Size: {ctx.pil_img['cropped'].size}")
    elif "final" in ctx.pil_img:
        print(f"✅ Step 4: Final image created - Size: {ctx.pil_img['final'].size}")
    else:
        print(f"⚠️ Step 4: Cropping/padding skipped")
    # 5. Base64 encoding
    if hasattr(ctx, 'result_image') and ctx.result_image:
        print(f"✅ Step 5: Image encoded to base64 - Length: {len(ctx.result_image)}")
    else:
        print(f"❌ Step 5: Base64 encoding failed")


def _show_logs(ctx, batch_logs):
    """Dump the batch logs (as JSON) and the tail of the per-context logs."""
    if batch_logs:
        print(f"\n📝 BATCH LOGS ({len(batch_logs)} entries):")
        for i, log in enumerate(batch_logs):
            print(f"\nLog {i+1}:")
            print(json.dumps(log, indent=2))
    if hasattr(ctx, 'processing_logs') and ctx.processing_logs:
        print(f"\n📜 PROCESSING LOGS ({len(ctx.processing_logs)} entries):")
        # Only the last 10 entries: full logs can be very long.
        for i, log in enumerate(ctx.processing_logs[-10:]):
            print(f" {i+1}. {log}")


def _print_summary(ctx, processing_time):
    """Print the overall pass/warn verdict; success == a base64 result exists."""
    print(f"\n{'='*60}")
    if hasattr(ctx, 'result_image') and ctx.result_image:
        print(f"✅ PIPELINE TEST COMPLETED SUCCESSFULLY")
        print(f" Processing time: {processing_time:.2f}s")
        if "final" in ctx.pil_img:
            print(f" Output image size: {ctx.pil_img['final'].size}")
    else:
        print(f"⚠️ PIPELINE TEST COMPLETED WITH WARNINGS")
        if os.getenv("SPACE_ID"):
            print(f" Note: Limited processing expected in Zero GPU environment")
    print(f"{'='*60}\n")
# ----------------------------------------------------------------------
# MAIN EXECUTION
# ----------------------------------------------------------------------
# Allow running this integration test directly (outside pytest).
if __name__ == "__main__":
    test_process_image_through_entire_pipeline()