import json
import logging
from functools import wraps
from typing import Generic, Optional, TypeVar

import pydash
from fastapi import HTTPException
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from starlette.responses import JSONResponse

from ocr.core.config import settings

T = TypeVar('T')

logger = logging.getLogger(__name__)


class ErrorOcrResponse(BaseModel):
    message: str


class OcrResponseWrapper(BaseModel, Generic[T]):
    data: Optional[T] = None
    successful: bool = True
    error: Optional[ErrorOcrResponse] = None

    def response(self, status_code: int) -> JSONResponse:
        # `data` may be an arbitrary Pydantic model (the generic T), so the whole
        # payload is passed through jsonable_encoder before JSON serialization.
        return JSONResponse(
            status_code=status_code,
            content=jsonable_encoder({
                "data": self.data,
                "successful": self.successful,
                "error": self.error,
            }),
        )
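
# Illustrative sketch of returning the wrapper from a route handler. `router`,
# `TextResult`, and `run_ocr` are assumed names that do not exist in this module:
#
#   @router.post("/ocr", response_model=OcrResponseWrapper[TextResult])
#   async def ocr_endpoint(file: UploadFile) -> OcrResponseWrapper[TextResult]:
#       try:
#           return OcrResponseWrapper(data=await run_ocr(file))
#       except ValueError as exc:
#           return OcrResponseWrapper[TextResult](
#               successful=False,
#               error=ErrorOcrResponse(message=str(exc)),
#           ).response(status_code=422)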


def exception_wrapper(http_error: int, error_message: str):
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            try:
                return await func(*args, **kwargs)
            except HTTPException:
                # Let HTTP errors raised deliberately inside the endpoint pass through.
                raise
            except Exception as e:
                # Collapse anything unexpected into the configured HTTP error.
                raise HTTPException(status_code=http_error, detail=error_message) from e

        return wrapper

    return decorator
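
# Example of stacking the decorator on a route handler (a sketch; `router` and
# `load_document` are assumed names, not part of this module):
#
#   @router.get("/documents/{document_id}")
#   @exception_wrapper(http_error=500, error_message="Failed to load document")
#   async def get_document(document_id: str):
#       return await load_document(document_id)
#
# Any unexpected exception from `load_document` surfaces as a 500 with the
# configured message; HTTPExceptions raised on purpose are re-raised unchanged.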


def openai_wrapper(
        temperature: float = 0.0, model: str = "gpt-4o-mini", is_json: bool = False, return_: str | None = None
):
    """Decorate a coroutine that builds a chat `messages` list: the decorator sends
    the messages to OpenAI and returns the completion text, the parsed JSON object,
    or a single field extracted from it when `return_` is given."""
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs) -> str | dict:
            messages = await func(*args, **kwargs)
            completion = await settings.OPENAI_CLIENT.chat.completions.create(
                messages=messages,
                temperature=temperature,
                n=1,
                model=model,
                response_format={"type": "json_object"} if is_json else {"type": "text"}
            )
            response = completion.choices[0].message.content
            if is_json:
                response = json.loads(response)
                if return_:
                    # `return_` is a pydash path into the parsed JSON, e.g. "result.text".
                    return pydash.get(response, return_)
            return response

        return wrapper

    return decorator
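
# Sketch of the intended usage: the decorated coroutine only builds the chat
# messages and the decorator performs the API call. The function and parameter
# names below are illustrative assumptions:
#
#   @openai_wrapper(is_json=True, return_="language")
#   async def detect_language(page_text: str):
#       return [
#           {"role": "system", "content": 'Reply with JSON like {"language": "..."}'},
#           {"role": "user", "content": page_text},
#       ]
#
#   # `await detect_language(text)` then evaluates to the "language" field only.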


def background_task():
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Background tasks must not propagate exceptions into the request cycle,
            # so failures are logged instead of being silently discarded.
            try:
                return await func(*args, **kwargs)
            except Exception:
                logger.exception("Background task %s failed", func.__name__)

        return wrapper

    return decorator
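
# Sketch of wiring the decorator into FastAPI's BackgroundTasks. `router`,
# `process_document`, and `document_id` are assumed names for illustration:
#
#   @background_task()
#   async def process_document(document_id: str) -> None:
#       ...  # long-running OCR work
#
#   @router.post("/documents/{document_id}/process")
#   async def enqueue(document_id: str, background_tasks: BackgroundTasks):
#       background_tasks.add_task(process_document, document_id)
#       return OcrResponseWrapper(data="queued")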