# core/gemini_handler.py
import google.generativeai as genai
import json
import re


class GeminiHandler:
    """Thin wrapper around the Gemini API for text and JSON generation calls."""

    def __init__(self, api_key):
        genai.configure(api_key=api_key)
        self.model = genai.GenerativeModel('gemini-1.5-flash-latest')  # Or your preferred Gemini model

    def _clean_json_response(self, text_response):
        """Extract a JSON string from a raw model response.

        Prefers a fenced json code block; otherwise falls back to the first
        '[' or '{' and trims anything after the last closing bracket/brace.
        Returns the original text unchanged if no JSON-like content is found.
        """
        match = re.search(r"```json\s*([\s\S]*?)\s*```", text_response, re.DOTALL)
        if match:
            json_str = match.group(1).strip()
        else:
            # Fall back to the first '[' or '{', whichever comes first.
            json_start_list = text_response.find('[')
            json_start_obj = text_response.find('{')
            if json_start_list != -1 and (json_start_obj == -1 or json_start_list < json_start_obj):
                json_str = text_response[json_start_list:]
            elif json_start_obj != -1:
                json_str = text_response[json_start_obj:]
            else:
                return text_response  # Not clearly JSON; return as-is.

        # Trim anything after the last '}' or ']'; incomplete responses can leave
        # trailing characters that break parsing. This is deliberately aggressive
        # and may need refinement.
        last_bracket = max(json_str.rfind('}'), json_str.rfind(']'))
        if last_bracket != -1:
            json_str = json_str[:last_bracket + 1]
        return json_str.strip()

    def _execute_gemini_call(self, prompt_text, expect_json=False):
        """Send a prompt to Gemini and return either the stripped text or parsed JSON."""
        try:
            response = self.model.generate_content(prompt_text)
            text_content = response.text
            if expect_json:
                cleaned_response = self._clean_json_response(text_content)
                return json.loads(cleaned_response)
            return text_content.strip()
        except json.JSONDecodeError as e:
            print(f"Error decoding JSON from Gemini response: {e}")
            print(f"Problematic Gemini raw response:\n{text_content if 'text_content' in locals() else 'No response object'}")
            print(f"Cleaned attempt was:\n{cleaned_response if 'cleaned_response' in locals() else 'N/A'}")
            raise  # Re-raise so the caller can handle or surface the failure.
        except Exception as e:
            print(f"Error in Gemini call: {e}")
            print(f"Problematic Gemini raw response (if available):\n{response.text if 'response' in locals() else 'No response object'}")
            raise  # Re-raise so the caller can handle or surface the failure.

    def generate_story_breakdown(self, prompt_text):
        """Return the story breakdown as parsed JSON."""
        return self._execute_gemini_call(prompt_text, expect_json=True)

    def generate_image_prompt(self, prompt_text):
        """Return a newly generated image-prompt string."""
        return self._execute_gemini_call(prompt_text, expect_json=False)

    def regenerate_scene_script_details(self, prompt_text):
        """Return regenerated details for a single scene as parsed JSON."""
        return self._execute_gemini_call(prompt_text, expect_json=True)

    def regenerate_image_prompt_from_feedback(self, prompt_text):
        """Return a revised image-prompt string based on user feedback."""
        return self._execute_gemini_call(prompt_text, expect_json=False)