from PIL import Image, UnidentifiedImageError
import torch
from typing import IO
from model_loader import models

class ImagePreprocessor:
    """Applies the shared CLIP preprocessing pipeline to uploaded images."""

    def __init__(self):
        # Reuse the preprocessing transform and device selected by model_loader
        self.preprocess = models.clip_preprocess
        self.device = models.device

    def process(self, image_file: IO[bytes]) -> torch.Tensor:
        """
        Opens an image file, preprocesses it, and returns it as a tensor.

        Args:
            image_file (IO[bytes]): A binary file-like object (e.g., from a file upload).

        Returns:
            torch.Tensor: The preprocessed image as a tensor, ready for the model.
        """
        try:
            # Open the image from the file-like object
            image = Image.open(image_file).convert("RGB")
        except (UnidentifiedImageError, OSError) as e:
            # Re-raise as a ValueError so callers get a clear, chained error
            raise ValueError("Invalid or corrupted image file.") from e

        # Apply the CLIP preprocessing transformations and move to the correct device
        image_tensor = self.preprocess(image).unsqueeze(0).to(self.device)
        return image_tensor

# Shared instance for other modules to import
preprocessor = ImagePreprocessor()
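

if __name__ == "__main__":
    # Minimal usage sketch. "example.jpg" is a hypothetical local file; in the
    # real application the file-like object would come from an upload, but any
    # binary stream that PIL can read works the same way.
    with open("example.jpg", "rb") as f:
        tensor = preprocessor.process(f)
        # Typically torch.Size([1, 3, 224, 224]) for standard CLIP preprocessing
        print(tensor.shape)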