Upload folder using huggingface_hub
Browse files- .env +19 -0
- README.md +3 -9
- __pycache__/cosmosConnector.cpython-311.pyc +0 -0
- chatbot-gradio.py +323 -0
- cosmosConnector.py +219 -0
- models/__pycache__/converterModels.cpython-311.pyc +0 -0
- models/__pycache__/converterVectorStoreModels.cpython-311.pyc +0 -0
- models/converterModels.py +60 -0
- models/converterVectorStoreModels.py +64 -0
- plugins/__pycache__/converterPlugin.cpython-311.pyc +0 -0
- plugins/__pycache__/sqlGenerationPlugin.cpython-311.pyc +0 -0
- plugins/converterPlugin.py +96 -0
- requirements.txt +1 -0
.env
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
AZURE_OPENAI_KEY = 74fO9RE4s4f7HTSd9SM19Adw6rnECwUuBnfY593dPI7xSHa057RHJQQJ99BEACfhMk5XJ3w3AAAAACOGFVJQ
|
2 |
+
|
3 |
+
OPENAI_API_TYPE = azure
|
4 |
+
|
5 |
+
AZURE_OPENAI_API_VERSION = 2024-12-01-preview
|
6 |
+
AZURE_OPENAI_DEPLOYMENT_NAME = gpt-4o-mini
|
7 |
+
AZURE_OPENAI_ENDPOINT = https://tal-chatbot-resource2.cognitiveservices.azure.com/openai/deployments/gpt-4o-mini/chat/completions?api-version=2025-01-01-preview
|
8 |
+
|
9 |
+
OPENAI_EMBEDDINGS_MODEL_NAME = text-embedding-ada-002
|
10 |
+
OPENAI_EMBEDDINGS_MODEL_DEPLOYMENT = text-embedding-ada-002
|
11 |
+
OPENAI_API_ENDPOINT = https://tal-chatbot-resource2.cognitiveservices.azure.com/
|
12 |
+
|
13 |
+
LANGSMITH_API_KEY = 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
|
14 |
+
LANGSMITH_TRACING=true
|
15 |
+
|
16 |
+
|
17 |
+
AZURE_COSMOS_DB_ENDPOINT = https://tal-chatbot.documents.azure.com:443/
|
18 |
+
AZURE_COSMOS_DB_KEY = 6XG3CwRPJeHWAufiMNbWNS2PhBfoSMtPEP5qNGPQJFulXqgJfR9K3xO1sgegOq9vkjwSgmIDqA7hACDbWIzPVA==
|
19 |
+
AZURE_COSMOS_DB_DATABASE = tal-chatbot
|
README.md
CHANGED
@@ -1,12 +1,6 @@
|
|
1 |
---
|
2 |
-
title: TAL
|
3 |
-
|
4 |
-
colorFrom: purple
|
5 |
-
colorTo: pink
|
6 |
sdk: gradio
|
7 |
-
sdk_version: 5.
|
8 |
-
app_file: app.py
|
9 |
-
pinned: false
|
10 |
---
|
11 |
-
|
12 |
-
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
1 |
---
|
2 |
+
title: TAL-RAGandSQLGen-Chatbot
|
3 |
+
app_file: chatbot-gradio.py
|
|
|
|
|
4 |
sdk: gradio
|
5 |
+
sdk_version: 5.31.0
|
|
|
|
|
6 |
---
|
|
|
|
__pycache__/cosmosConnector.cpython-311.pyc
ADDED
Binary file (12.4 kB). View file
|
|
chatbot-gradio.py
ADDED
@@ -0,0 +1,323 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
|
2 |
+
from semantic_kernel import Kernel
|
3 |
+
from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
|
4 |
+
from semantic_kernel.functions import kernel_function
|
5 |
+
from azure.cosmos import CosmosClient
|
6 |
+
from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import (
|
7 |
+
AzureChatPromptExecutionSettings,
|
8 |
+
)
|
9 |
+
from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior
|
10 |
+
from models.converterModels import PowerConverter
|
11 |
+
from plugins.converterPlugin import ConverterPlugin
|
12 |
+
import os
|
13 |
+
import gradio as gr
|
14 |
+
|
15 |
+
from dotenv import load_dotenv
|
16 |
+
load_dotenv()
|
17 |
+
|
18 |
+
# Dedicated logger for Semantic Kernel activity (tool calls, generated SQL,
# model responses); DEBUG level so auto-invocation traces are visible.
logger = logging.getLogger("kernel")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
    "[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s"
))
logger.addHandler(handler)


# Initialize Semantic Kernel
kernel = Kernel()

# Add Azure OpenAI Chat Service; credentials/endpoint come from .env via load_dotenv().
kernel.add_service(AzureChatCompletion(
    service_id="chat",
    deployment_name=os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME"),
    endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    api_key=os.getenv("AZURE_OPENAI_KEY")
))
# Whitelist of queryable document fields. NOTE(review): currently only referenced
# by the commented-out field validation inside NL2SQLPlugin.generate_sql.
allowed_fields = [
    "id","artnr","type", "converter_description","strain_relief","location","dimmability","efficiency_full_load",
    "ip","class","nom_input_voltage_v.min","nom_input_voltage_v.max","output_voltage_v.min","output_voltage_v.max",
    "name","listprice","unit","lifecycle","lamps","size"
]
|
42 |
+
# SQL Generation Plugin
class NL2SQLPlugin:
    """Semantic Kernel plugin that converts a natural-language question into a
    read-only Cosmos DB SQL query using the shared "chat" service."""

    @kernel_function(name="generate_sql", description="Generate Cosmos DB SQL query")
    async def generate_sql(self, question: str) -> str:
        """Generate a sanitized, read-only Cosmos DB SQL query for `question`.

        Returns an empty string when the model produced a data-modifying
        statement (DELETE/UPDATE/INSERT).
        """
        import re  # local import: only needed for the DML keyword check

        sql = self._strip_code_fences(await self._generate_sql_helper(question))

        # Block DML commands first. Word-boundary matching avoids the
        # substring false positives of the previous `command in sql.upper()`
        # check (e.g. a query mentioning a value containing "UPDATE").
        if re.search(r"\b(DELETE|UPDATE|INSERT)\b", sql.upper()):
            return ""

        # # Validate fields before proceeding
        # if not self._validate_fields(sql, allowed_fields):
        #     return "ERROR: Query contains invalid fields"

        # Normalize the model's output to the "SELECT * FROM c" shape the
        # downstream query executor expects.
        if "FROM converters c" in sql:
            sql = sql.replace("FROM converters c", "FROM c")
        if "SELECT *" not in sql and "FROM c" in sql:
            sql = sql.replace("SELECT c.*,", "SELECT *")
            sql = sql.replace("SELECT c.*", "SELECT *")
            sql = sql.replace("SELECT c", "SELECT *")

        logger.info(f"finalSQL {sql}")

        return sql

    @staticmethod
    def _strip_code_fences(sql: str) -> str:
        """Remove Markdown code fences (```sql ... ```) that chat models often
        wrap around generated SQL; raw fences would break the Cosmos query."""
        text = sql.strip()
        if text.startswith("```"):
            text = text[3:]
            if text.lower().startswith("sql"):
                text = text[3:]
        if text.endswith("```"):
            text = text[:-3]
        return text.strip()

    async def _generate_sql_helper(self, question: str) -> str:
        """Ask the chat service to translate `question` into Cosmos DB SQL,
        supplying the converter collection schema in the prompt."""
        from semantic_kernel.contents import ChatHistory

        chat_service = kernel.get_service("chat")
        chat_history = ChatHistory()
        chat_history.add_user_message(f"""Convert to Cosmos DB SQL: {question}
        Collection: converters (alias 'c')
        Fields:
        - type (e.g., '350mA')
        - artnr (numeric (int) article number e.g., 930546)
        - output_voltage_v: dictionary with min/max values for output voltage
            - output_voltage_v.min (e.g., 15)
            - output_voltage_v.max (e.g., 40)
        - nom_input_voltage_v: dictionary with min/max values for input voltage
            - nom_input_voltage_v.min (e.g., 198)
            - nom_input_voltage_v.max (e.g., 264)
        - lamps: dictionary with min/max values for lamp types for this converter
            - lamps["lamp_name"].min (e.g., 1)
            - lamps["lamp_name"].max (e.g., 10)
        - class (safety class)
        - dimmability (e.g. if not dimmable 'NOT DIMMABLE'. if supports dimming, 'DALI/TOUCHDIM','MAINS DIM LC' etc)
        - listprice (e.g., 58)
        - lifecycle (e.g., 'Active')
        - size (e.g., '150x30x30')
        - dimlist_type (e.g., 'DALI')
        - pdf_link (link to product PDF)
        - converter_description (e.g., 'POWERLED CONVERTER REMOTE 180mA 8W IP20 1-10V')
        - ip (Ingress Protection, integer values e.g., 20,67)
        - efficiency_full_load (e.g., 0.9)
        - name (e.g., 'Power Converter 350mA')
        - unit (e.g., 'PC')
        - strain_relief (e.g., "NO", "YES")
        SQL Guidelines (if needed):
        - Always use SELECT * and never individual fields
        - Always refer to fields in WHERE clause using c.<field_name>
        - For exact matches use: WHERE c.[field] = value
        - For ranges use: WHERE c.[field].min = X AND c.[field].max = Y
        - Check for dimmability support by using either !="NOT DIMMABLE" or ="NOT DIMMABLE"
        - Do not use AS and cast key names
        Return ONLY SQL without explanations""")

        response = await chat_service.get_chat_message_content(
            chat_history=chat_history,
            settings=AzureChatPromptExecutionSettings()
        )
        logger.info(f"Response dB schema{response}")

        return str(response)
|
114 |
+
|
115 |
+
|
116 |
+
# Register plugins so the model can auto-invoke their kernel functions
# during handle_query's function-calling loop.
kernel.add_plugin(ConverterPlugin(logger=logger), "CosmosDBPlugin")
kernel.add_plugin(NL2SQLPlugin(), "NL2SQLPlugin")
|
119 |
+
|
120 |
+
# Updated query handler using function calling
async def handle_query(user_input: str):
    """Answer `user_input` by letting the chat model auto-invoke the
    registered plugin functions (SQL generation/execution, lamp lookups,
    RAG search) and return the final answer as a string."""

    # Auto function-calling: the model chooses which plugin function(s) to run.
    settings = AzureChatPromptExecutionSettings(
        function_choice_behavior=FunctionChoiceBehavior.Auto(auto_invoke=True)
    )

    # Routing prompt: teaches the model which tool fits which query shape.
    prompt = f"""
    You are a converter database expert. Process this user query:
    {user_input}

    Available functions:
    - generate_sql: Creates SQL queries (use only for complex queries or schema keywords)
    - query_converters: Executes SQL queries
    - get_compatible_lamps: Simple artnr-based lamp queries
    - get_converters_by_lamp_type: Simple lamp type searches
    - get_lamp_limits: Simple artnr+lamp combinations
    - RAG_search: questions about dimmability or current (if query contains mains c, dali, 350mA, dali drivers, dimming)

    Decision Flow:
    1. Identify synonyms :
        output voltage = voltage forward = forward voltage = Vf
        Driver = ledconverter = converter = power supply = gear
        lamps = luminares

    2. Use simple functions if query matches these patterns:
        - "lamps for [artnr]" → get_compatible_lamps
        - "converters for [lamp type]" → get_converters_by_lamp_type
        - "min/max [lamp] for [artnr]" → get_lamp_limits

    3. Use SQL generation ONLY when:
        - Query contains schema keywords: voltage, price, type, ip, efficiency, size, class
        - Combining multiple conditions (AND/OR/NOT)
        - Needs complex filtering/sorting
        - Requesting technical specifications

    Examples:
    User: "Show IP67 converters under €100" → generate_sql
    User: "What lamps are compatible with 930560?" → get_compatible_lamps
    User: "What converters are compatible with haloled lamps?" → get_converters_by_lamp_type
    User: "Voltage range for 930562" → generate_sql
    """

    result = await kernel.invoke_prompt(
        prompt=prompt,
        settings=settings
    )

    return str(result)
|
169 |
+
|
170 |
+
# Example usage
|
171 |
+
async def main():
    """Console REPL: read a question, print the model's answer; stop on
    'exit'/'quit' or Ctrl-C."""
    while True:
        try:
            user_query = input("User: ")
            if user_query.lower() in ("exit", "quit"):
                break
            print(await handle_query(user_query))
        except KeyboardInterrupt:
            break
|
184 |
+
|
185 |
+
|
186 |
+
|
187 |
+
# --- Gradio UI ---
|
188 |
+
|
189 |
+
custom_css = """
|
190 |
+
#chatbot-toggle-btn {
|
191 |
+
position: fixed;
|
192 |
+
bottom: 30px;
|
193 |
+
right: 30px;
|
194 |
+
z-index: 10001;
|
195 |
+
background-color: #ED1C24;
|
196 |
+
color: white;
|
197 |
+
border: none;
|
198 |
+
border-radius: 50%;
|
199 |
+
width: 56px;
|
200 |
+
height: 56px;
|
201 |
+
font-size: 28px;
|
202 |
+
font-weight: bold;
|
203 |
+
cursor: pointer;
|
204 |
+
box-shadow: 0 4px 12px rgba(0,0,0,0.3);
|
205 |
+
display: flex;
|
206 |
+
align-items: center;
|
207 |
+
justify-content: center;
|
208 |
+
transition: all 0.3s ease;
|
209 |
+
}
|
210 |
+
|
211 |
+
#chatbot-panel {
|
212 |
+
position: fixed;
|
213 |
+
bottom: 100px;
|
214 |
+
right: 30px;
|
215 |
+
z-index: 10000;
|
216 |
+
width: 600px;
|
217 |
+
height: 700px;
|
218 |
+
background-color: #ffffff;
|
219 |
+
border-radius: 20px;
|
220 |
+
box-shadow: 0 4px 24px rgba(0,0,0,0.25);
|
221 |
+
overflow: hidden;
|
222 |
+
display: flex;
|
223 |
+
flex-direction: column;
|
224 |
+
justify-content: space-between; /* keep input box pinned at the bottom */
|
225 |
+
font-family: 'Arial', sans-serif;
|
226 |
+
}
|
227 |
+
|
228 |
+
#chatbot-panel.hide {
|
229 |
+
display: none !important;
|
230 |
+
}
|
231 |
+
|
232 |
+
#chat-header {
|
233 |
+
background-color: #ED1C24;
|
234 |
+
color: white;
|
235 |
+
padding: 16px;
|
236 |
+
font-weight: bold;
|
237 |
+
font-size: 16px;
|
238 |
+
display: flex;
|
239 |
+
align-items: center;
|
240 |
+
gap: 12px;
|
241 |
+
}
|
242 |
+
|
243 |
+
#chat-header img {
|
244 |
+
border-radius: 50%;
|
245 |
+
width: 32px;
|
246 |
+
height: 32px;
|
247 |
+
}
|
248 |
+
|
249 |
+
.gr-chatbot {
|
250 |
+
flex: 1;
|
251 |
+
overflow-y: auto;
|
252 |
+
padding: 12px;
|
253 |
+
background-color: #f8f8f8;
|
254 |
+
border: none;
|
255 |
+
}
|
256 |
+
|
257 |
+
.gr-textbox {
|
258 |
+
border-top: 1px solid #eee;
|
259 |
+
padding: 10px;
|
260 |
+
background-color: #fff;
|
261 |
+
display: flex;
|
262 |
+
align-items: center;
|
263 |
+
justify-content: space-between;
|
264 |
+
gap: 10px;
|
265 |
+
}
|
266 |
+
|
267 |
+
.gr-textbox textarea {
|
268 |
+
flex: 1;
|
269 |
+
resize: none;
|
270 |
+
padding: 10px;
|
271 |
+
background-color: white;
|
272 |
+
border: 1px solid #ccc;
|
273 |
+
border-radius: 8px;
|
274 |
+
font-family: inherit;
|
275 |
+
font-size: 14px;
|
276 |
+
}
|
277 |
+
|
278 |
+
footer {
|
279 |
+
display: none !important;
|
280 |
+
}
|
281 |
+
"""
|
282 |
+
# Module-level flag mirroring whether the floating chat panel is shown.
panel_visible = False

def toggle_panel():
    """Flip the chat panel's visibility and return the updated Column state
    (Gradio applies the returned `visible` value to the panel component)."""
    global panel_visible
    panel_visible = not panel_visible
    return gr.Column(visible=panel_visible)
|
288 |
+
|
289 |
+
# Assemble the floating-chat UI: a toggle button plus a hidden panel that
# contains the header, message history, input box and clear button.
with gr.Blocks(css=custom_css) as demo:
    # Toggle button (floating action button)
    toggle_btn = gr.Button("💬", elem_id="chatbot-toggle-btn")

    # Chat panel (initially hidden)
    chat_panel = gr.Column(visible=panel_visible, elem_id="chatbot-panel")
    with chat_panel:
        # Chat header
        with gr.Row(elem_id="chat-header"):
            gr.HTML("""
                <div id='chat-header'>
                    <img src="https://www.svgrepo.com/download/490283/pixar-lamp.svg" />
                    Lofty the TAL Bot
                </div>
            """)
        # Chatbot and input
        chatbot = gr.Chatbot(elem_id="gr-chatbot", type="messages")
        msg = gr.Textbox(placeholder="Type your question here...", elem_id="gr-textbox")
        clear = gr.ClearButton([msg, chatbot])


    # Function to handle messages: append the user/assistant turn in OpenAI
    # message format and clear the textbox.
    async def respond(message, chat_history):
        response = await handle_query(message)
        # Add new messages
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": response})
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    toggle_btn.click(toggle_panel, outputs=chat_panel)

# share=True exposes a public Gradio link in addition to the local server.
demo.launch(share=True)
|
cosmosConnector.py
ADDED
@@ -0,0 +1,219 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# cosmosConnector.py
|
2 |
+
from jsonschema import ValidationError
|
3 |
+
from langchain_openai import AzureOpenAIEmbeddings
|
4 |
+
from models.converterModels import PowerConverter
|
5 |
+
from models.converterVectorStoreModels import PowerConverterVector
|
6 |
+
import os
|
7 |
+
from azure.cosmos import CosmosClient
|
8 |
+
from typing import List, Optional, Dict
|
9 |
+
from rapidfuzz import fuzz
|
10 |
+
import logging
|
11 |
+
import os
|
12 |
+
from dotenv import load_dotenv
|
13 |
+
from semantic_kernel.functions import kernel_function
|
14 |
+
|
15 |
+
load_dotenv()
|
16 |
+
# Initialize logging
|
17 |
+
logger = logging.getLogger(__name__)
|
18 |
+
|
19 |
+
class CosmosLampHandler:
    """Data-access layer for the TAL converter catalogue in Azure Cosmos DB.

    Provides exact-match, fuzzy (typo-tolerant) and vector-similarity lookups
    over the "Converters_with_embeddings" container of database "TAL_DB".
    """

    def __init__(self, logger: Optional[logging.Logger] = None):
        self.client = CosmosClient(
            os.getenv("AZURE_COSMOS_DB_ENDPOINT"),
            os.getenv("AZURE_COSMOS_DB_KEY")
        )
        self.database = self.client.get_database_client("TAL_DB")
        self.container = self.database.get_container_client("Converters_with_embeddings")
        # Fix: fall back to the module logger — several methods call
        # self.logger.info(...), which raised AttributeError (masking the real
        # error) when the handler was constructed without a logger.
        self.logger = logger or logging.getLogger(__name__)
        self.embedding_model = AzureOpenAIEmbeddings(
            azure_endpoint=os.environ["OPENAI_API_ENDPOINT"],
            azure_deployment=os.environ["OPENAI_EMBEDDINGS_MODEL_DEPLOYMENT"],
            api_key=os.environ["AZURE_OPENAI_KEY"]
        )

    async def _generate_embedding(self, query: str) -> List[float]:
        """Generate embedding for the given query using Azure OpenAI."""
        try:
            return self.embedding_model.embed_query(query)
        except Exception as e:
            logger.error(f"Embedding generation failed: {str(e)}")
            raise

    async def get_compatible_lamps(self, artnr: int) -> List[str]:
        """Return the lamp-type names compatible with converter `artnr`.

        Returns [] when the converter is unknown or the lookup fails.
        """
        try:
            parameters = [{"name": "@artnr", "value": artnr}]
            query = "SELECT * FROM c WHERE c.artnr = @artnr"

            results = list(self.container.query_items(
                query=query,
                parameters=parameters
            ))

            if not results:
                return []

            # Assumes the document carries a "lamps" dict; a missing key is
            # caught below and reported as an empty result.
            return list(results[0]["lamps"].keys())

        except Exception as e:
            logger.error(f"Failed to get compatible lamps: {str(e)}")
            return []

    async def get_converters_by_lamp_type(self, lamp_type: str, threshold: int = 75) -> List[PowerConverter]:
        """Get converters whose lamp dict has a key fuzzy-matching `lamp_type`.

        The fuzzy filter (rapidfuzz ratio >= `threshold`) runs client-side
        because Cosmos SQL cannot do approximate string matching.
        """
        try:
            query = """
            SELECT
                *
            FROM c WHERE IS_DEFINED(c.lamps)"""
            converters = []
            results = list(self.container.query_items(
                query=query,
                enable_cross_partition_query=True))
            for item in results:
                lamp_keys = item.get("lamps", {}).keys()
                matches = [key for key in lamp_keys
                           if fuzz.ratio(key.lower(), lamp_type.lower()) >= threshold]

                if matches:
                    converters.append(PowerConverter(**item))

            return converters

        except Exception as e:
            logger.error(f"Lamp type search failed: {str(e)}")
            return []

    async def get_lamp_limits(self, artnr: int, lamp_type: str) -> Dict[str, int]:
        """Return {"min": ..., "max": ...} lamp counts for `artnr`/`lamp_type`.

        `lamp_type` is matched with typo tolerance; raises when no lamp key
        scores at least 65 or the converter has no lamps.
        """
        try:
            parameters = [{"name": "@artnr", "value": artnr}]
            query = """
            SELECT c.lamps FROM c
            WHERE c.artnr = @artnr
            """
            results = list(self.container.query_items(
                query=query,
                parameters=parameters
            ))

            if not results:
                return {}

            lamps = results[0]["lamps"]

            # Fuzzy match lamp type
            best_match = max(
                lamps.keys(),
                key=lambda x: fuzz.ratio(x.lower(), lamp_type.lower())
            )

            if fuzz.ratio(best_match.lower(), lamp_type.lower()) < 65:
                raise ValueError("No matching lamp type found")

            return {
                "min": int(lamps[best_match]["min"]),
                "max": int(lamps[best_match]["max"])
            }

        except Exception as e:
            logger.error(f"Failed to get lamp limits: {str(e)}")
            raise

    async def query_converters(self, query: str) -> str:
        """Execute a raw Cosmos SQL query; return up to 10 results rendered
        as a string of PowerConverter models, or an error message."""
        try:
            print(f"Executing query: {query}")
            items = list(self.container.query_items(
                query=query,
                enable_cross_partition_query=True
            ))
            print(f"Query returned {len(items)} items")
            items = items[:10]  # cap payload handed back to the LLM
            # self.logger.debug(f"Raw items: {items}")

            items = [PowerConverter(**item) for item in items] if items else []

            self.logger.info(f"Query returned {len(items)} items after conversion")

            return str(items)

        except Exception as e:
            self.logger.info(f"Query failed: {str(e)}")
            return f"Query failed: {str(e)}"

    async def RAG_search(self, query: str, artnr: Optional[int] = None, threshold: int = 75) -> List[PowerConverterVector]:
        """Vector-similarity search over converter embeddings.

        Returns the 5 nearest converters. Fix: previously returned None on
        any failure despite the List annotation — now returns [] so callers'
        truthiness checks and iteration keep working.
        """
        try:
            print(f"Performing hybrid search for query: {query} (ARTNR: {artnr})")
            query_vector = await self._generate_embedding(query)

            sql_query = """
            SELECT TOP 5
                c.id,
                c.converter_description,
                c.ip,
                c.efficiency_full_load,
                c.name,
                c.artnr,
                c.type,
                c.lamps,
                c.pdf_link,
                c.nom_input_voltage_v,
                c.output_voltage_v,
                c.unit,
                c["listprice"],
                c["lifecycle"],
                c.size,
                c.ccr_amplitude,
                c.dimmability,
                c.dimlist_type,
                c.strain_relief,
                c.gross_weight,
                VectorDistance(c.embedding, @vector) AS SimilarityScore
            FROM c
            ORDER BY VectorDistance(c.embedding, @vector)
            """

            parameters = [{"name": "@vector", "value": query_vector}]

            items = list(self.container.query_items(
                query=sql_query,
                parameters=parameters,
                enable_cross_partition_query=True
            ))

            converters = []
            for item in items:
                # Convert float lamp limits to integers before model validation.
                if "lamps" in item:
                    for lamp_key in item["lamps"]:
                        lamp_data = item["lamps"][lamp_key]
                        lamp_data["min"] = int(lamp_data["min"])
                        lamp_data["max"] = int(lamp_data["max"])

                converters.append(PowerConverterVector(**item))

            return converters
        except ValidationError as exc:
            print(exc)
            return []  # fix: was implicit None
        except Exception as e:
            logger.error(f"Hybrid search failed: {str(e)}")
            print(f"Hybrid search failed: {str(e)}")
            return []  # fix: was implicit None
|
218 |
+
|
219 |
+
|
models/__pycache__/converterModels.cpython-311.pyc
ADDED
Binary file (4.32 kB). View file
|
|
models/__pycache__/converterVectorStoreModels.cpython-311.pyc
ADDED
Binary file (5.59 kB). View file
|
|
models/converterModels.py
ADDED
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# models/converterModels.py
|
2 |
+
from pydantic import BaseModel, ConfigDict, Field, field_validator, validator
|
3 |
+
from typing import Dict, Optional
|
4 |
+
|
5 |
+
class LampConnections(BaseModel):
    """Allowed lamp-count range (min/max) for one lamp type on a converter."""
    min: float
    max: float

    @field_validator('min', 'max', mode='before')
    def convert_values(cls, v):
        # Source data sometimes uses a comma decimal separator (e.g. "1,5");
        # normalise it before converting to float.
        return float(str(v).replace(',', '.'))
|
15 |
+
|
16 |
+
|
17 |
+
class VoltageRange(BaseModel):
    """Voltage interval in volts, parsed from documents that may use comma decimals."""
    min: float
    max: float

    @field_validator('min', 'max', mode='before')
    def convert_values(cls, raw):
        # Accept "264", 264, or "26,4" — replace the comma separator, then cast.
        return float(str(raw).replace(',', '.'))
|
27 |
+
|
28 |
+
|
29 |
+
class PowerConverter(BaseModel):
    """Pydantic view of a converter document from Cosmos DB.

    Field aliases match the raw document keys; every field is optional so
    partial documents (and SELECT projections) still validate.
    """
    doc_id: Optional[str] = Field(None, alias="id")
    artnr: Optional[int] = Field(None, alias="artnr")  # article number
    ip_rating: Optional[int] = Field(None, alias="ip")  # ingress protection, e.g. 20, 67
    # Lamp-type name -> allowed min/max lamp counts for this converter.
    lamps: Optional[Dict[str, LampConnections]] = Field(default_factory=dict, alias="lamps")

    type: Optional[str] = Field(None, alias="type")
    name: Optional[str] = Field(None, alias="name")
    efficiency: Optional[float] = Field(None, alias="efficiency_full_load")
    pdf_link: Optional[str] = Field(None, alias="pdf_link")
    converter_description: Optional[str] = Field(None, alias="converter_description")

    nom_input_voltage: Optional[VoltageRange] = Field(None, alias="nom_input_voltage_v")
    output_voltage:Optional[VoltageRange]= Field(None, alias="output_voltage_v")

    unit: Optional[str] = Field(None, alias="unit")
    price: Optional[float] = Field(None, alias="listprice")
    life_cycle: Optional[str] = Field(None, alias="lifecycle")
    size: Optional[str] = Field(None, alias="size")
    ccr_amplitude: Optional[str] = Field(None, alias="ccr_amplitude")
    dimmability: Optional[str] = Field(None, alias="dimmability")
    dim_list_type: Optional[str] = Field(None, alias="dimlist_type")

    strain_relief: Optional[str] = Field(None, alias="strain_relief")
    gross_weight: Optional[float] = Field(None, alias="gross_weight")

    similarity_score: Optional[float] = Field(None, alias="SimilarityScore")  # For hybrid search results
    model_config = ConfigDict(
        populate_by_name=True,  # accept field names as well as aliases on construction
        extra="ignore"          # silently drop unknown Cosmos document keys
    )
|
models/converterVectorStoreModels.py
ADDED
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from dataclasses import dataclass
|
2 |
+
from typing import Dict, List, Optional, Annotated
|
3 |
+
from pydantic import BaseModel, BeforeValidator, Field, field_validator # Use Pydantic's Field
|
4 |
+
from semantic_kernel.data import (
|
5 |
+
DistanceFunction,
|
6 |
+
IndexKind,
|
7 |
+
VectorStoreRecordDataField,
|
8 |
+
VectorStoreRecordKeyField,
|
9 |
+
VectorStoreRecordVectorField,
|
10 |
+
vectorstoremodel
|
11 |
+
)
|
12 |
+
|
13 |
+
class LampConnections(BaseModel):
    """Min/max lamp counts for one lamp type (vector-store model variant)."""
    min: float
    max: float

    @field_validator('min', 'max', mode='before')
    def convert_values(cls, value):
        # Tolerate comma decimal separators in the source data (e.g. "2,5").
        normalised = str(value).replace(',', '.')
        return float(normalised)
|
23 |
+
|
24 |
+
class VoltageRange(BaseModel):
    """Voltage interval in volts (vector-store model variant)."""
    min: float
    max: float

    @field_validator('min', 'max', mode='before')
    def convert_values(cls, value):
        # Normalise comma decimal separators before the float cast.
        normalised = str(value).replace(',', '.')
        return float(normalised)
|
34 |
+
|
35 |
+
@vectorstoremodel
class PowerConverterVector(BaseModel):
    """Vector-store record for a converter, including its embedding vector.

    Aliases match the raw Cosmos document keys; `artnr` and `ip` are the only
    required data fields.
    """
    id: Annotated[str, VectorStoreRecordKeyField]=Field(None, alias="id")
    artnr: Annotated[int, VectorStoreRecordDataField()] = Field(..., alias="artnr")  # required article number
    ip: Annotated[int, VectorStoreRecordDataField()] = Field(..., alias="ip")  # required ingress protection
    type: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="type")
    # Lamp-type name -> allowed min/max lamp counts.
    lamps: Optional[Dict[str, LampConnections]] = None

    name: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="name")
    efficiency_full_load: Optional[Annotated[float, VectorStoreRecordDataField()]] = Field(None, alias="efficiency_full_load")
    pdf_link: Optional[Annotated[str, VectorStoreRecordDataField()]] = None
    converter_description: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="converter_description")

    nom_input_voltage: Optional[VoltageRange] = Field(None, alias="nom_input_voltage_v")
    output_voltage: Optional[VoltageRange] = Field(None, alias="output_voltage_v")

    unit: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="unit")
    price: Optional[Annotated[float, VectorStoreRecordDataField()]] = Field(None, alias="listprice")
    life_cycle: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="lifecycle")
    size: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="size")
    ccr_amplitude: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="ccr_amplitude")
    dimmability: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="dimmability")
    dim_list_type: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="dimlist_type")
    gross_weight: Optional[Annotated[float, VectorStoreRecordDataField()]] = Field(None, alias="gross_weight")
    strain_relief: Optional[Annotated[str, VectorStoreRecordDataField()]] = Field(None, alias="strain_relief")
    # 1536 dimensions matches text-embedding-ada-002; DiskANN index, cosine distance.
    embedding: Annotated[List[float], VectorStoreRecordVectorField(
        dimensions=1536,
        distance_function=DistanceFunction.COSINE_DISTANCE,
        index_kind=IndexKind.DISK_ANN
    )] = Field(default_factory=list)
|
plugins/__pycache__/converterPlugin.cpython-311.pyc
ADDED
Binary file (6.68 kB). View file
|
|
plugins/__pycache__/sqlGenerationPlugin.cpython-311.pyc
ADDED
Binary file (3.78 kB). View file
|
|
plugins/converterPlugin.py
ADDED
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#converterPlugin.py
|
2 |
+
from typing import Annotated, Optional
|
3 |
+
from cosmosConnector import CosmosLampHandler
|
4 |
+
from semantic_kernel.functions import kernel_function
|
5 |
+
|
6 |
+
class ConverterPlugin:
    """Semantic Kernel plugin exposing CosmosLampHandler lookups as callable
    tools; every method returns a string so results feed straight back into
    the model's function-calling loop."""

    def __init__(self, logger):
        self.logger = logger
        self.db = CosmosLampHandler(logger=logger)


    @kernel_function(
        name="query_converters",
        description="Execute SQL query against Cosmos DB converters collection"
    )
    async def query_converters(self, query: str) -> str:
        """Run a raw Cosmos SQL query and return the rendered results."""
        try:
            items = await self.db.query_converters(query)
            self.logger.info(f"Executed query: {query}")
            if not items:
                return "No items found for the given query."
            return str(items)
        except Exception as e:
            return f"Query failed: {str(e)}"


    @kernel_function
    async def get_compatible_lamps(
        self,
        artnr: Annotated[int, "Converter ARTNR (partition key)"]
    ) -> str:
        """Get compatible lamps for a converter by ARTNR."""
        try:
            lamps = await self.db.get_compatible_lamps(artnr)
            self.logger.info(f"Used get_compatible_lamps with ARTNR: {artnr}")
            return f"Compatible lamps: {', '.join(lamps)}" if lamps else "No lamps found"
            # return "\n".join([f"{c.model_dump()})" for c in lamps]) if lamps else "No lamps found"
        except Exception as e:
            return f"Error retrieving compatible lamps: {str(e)}"


    @kernel_function(
        name="get_converters_by_lamp_type",
        description="Find converters compatible with a specific lamp type"
    )
    async def get_converters_by_lamp_type(
        self,
        lamp_type: Annotated[str, "Lamp model (e.g., Haloled, B4)"]
    ) -> str:
        """Find converters compatible with a specific lamp type (fuzzy match)."""
        try:
            converters = await self.db.get_converters_by_lamp_type(lamp_type)
            self.logger.info(f"Used get_converters_by_lamp_type with lamp_type: {lamp_type}")
            if not converters:
                return "No compatible converters found"
            # return "\n".join([f"{c.name} (ARTNR: {c.artnr})\nTYPE: {c.type}\nMANUAL: {c.pdf_link}" for c in converters])
            return "\n".join([f"{c.model_dump()})" for c in converters]) if converters else "No converters found"
        except Exception as e:
            return f"Error retrieving converters: {str(e)}"


    @kernel_function(
        name="get_lamp_limits",
        description="Get min/max lamps for a converter by ARTNR and lamp type"
    )
    async def get_lamp_limits(
        self,
        artnr: Annotated[int, "Converter ARTNR"],
        lamp_type: Annotated[str, "Lamp model (e.g., Haloled)"]
    ) -> str:
        """Get min/max lamp counts for a converter/lamp-type combination."""
        try:
            limits = await self.db.get_lamp_limits(artnr, lamp_type)
            self.logger.info(f"Used get_lamp_limits with ARTNR: {artnr} and lamp_type: {lamp_type}")
            return f"{lamp_type}: Min {limits['min']} - Max {limits['max']} lamps"
        except Exception as e:
            return f"Error retrieving lamp limits: {str(e)}"

    @kernel_function(
        name = "RAG_search",
        description="Get data based on vector similarity scores for queries regarding dimmability or in cases where SQL generation fails"
    )
    async def RAG_search(
        self,
        query: Annotated[str, "User query"]
    ) -> str:
        """Vector-similarity fallback search over converter embeddings."""
        try:
            converters = await self.db.RAG_search(query=query)
            self.logger.info(f"Used RAG search for user query: {query}")
            if not converters:
                return "No converters found"
            # return "\n".join([f"{c.name} (ARTNR: {c.artnr})\nTYPE: {c.type}\nMANUAL: {c.pdf_link}" for c in converters])
            return "\n".join([f"{c.model_dump()})" for c in converters]) if converters else "No converters found"
        except Exception as e:
            return f"Error retrieving converters: {str(e)}"
|
96 |
+
|
requirements.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
azure-cosmos
|