from typing import Dict
import numpy as np
import torch
from PIL import Image
from torchmetrics.multimodal.clip_score import CLIPScore

class CLIPMetric:
    def __init__(self, model_name_or_path: str = "openai/clip-vit-large-patch14"):
        # Prefer CUDA, then Apple MPS, and fall back to CPU.
        self.device = torch.device(
            "cuda"
            if torch.cuda.is_available()
            else "mps"
            if torch.backends.mps.is_available()
            else "cpu"
        )
        self.metric = CLIPScore(model_name_or_path=model_name_or_path)
        self.metric.to(self.device)

    @property
    def name(self) -> str:
        return "clip"

    def compute_score(self, image: Image.Image, prompt: str) -> Dict[str, float]:
        # CLIPScore expects a (C, H, W) tensor with pixel values in [0, 255].
        image_tensor = torch.from_numpy(np.array(image)).permute(2, 0, 1).float()
        image_tensor = image_tensor.to(self.device)
        score = self.metric(image_tensor, prompt)
        return {"clip": score.item()}