Commit e821a83 · Parent(s): 12cfb7b

setting up the public stun server
examples/voice_agent_webrtc_langgraph/pipeline.py CHANGED

@@ -452,6 +452,30 @@ if UI_DIST_DIR.exists():
     app.mount("/", StaticFiles(directory=str(UI_DIST_DIR), html=True), name="static")
 
 
+@app.get("/rtc-config")
+async def rtc_config():
+    """Expose browser RTC ICE configuration based on environment variables.
+
+    Reads TURN_SERVER_URL, TURN_USERNAME, TURN_PASSWORD and returns a structure
+    consumable by the browser: { "iceServers": [ { urls, username?, credential? } ] }.
+    Always includes a public STUN as a fallback.
+    """
+    ice_servers: list[dict] = []
+    turn_url = os.getenv("TURN_SERVER_URL") or os.getenv("TURN_URL")
+    turn_user = os.getenv("TURN_USERNAME") or os.getenv("TURN_USER")
+    turn_pass = os.getenv("TURN_PASSWORD") or os.getenv("TURN_PASS")
+    if turn_url:
+        server: dict = {"urls": turn_url}
+        if turn_user:
+            server["username"] = turn_user
+        if turn_pass:
+            server["credential"] = turn_pass
+        ice_servers.append(server)
+    # Public STUN fallback to aid connectivity when TURN is not provided
+    ice_servers.append({"urls": "stun:stun.l.google.com:19302"})
+    return {"iceServers": ice_servers}
+
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="WebRTC demo")
     parser.add_argument("--host", default="0.0.0.0", help="Host for HTTP server (default: localhost)")
examples/voice_agent_webrtc_langgraph/ui/src/App.tsx CHANGED

@@ -6,7 +6,7 @@ import { useEffect, useState } from "react";
 import { AudioStream } from "./AudioStream";
 import { AudioWaveForm } from "./AudioWaveForm";
 import { Toaster } from "./components/ui/sonner";
-import {
+import { RTC_OFFER_URL, DYNAMIC_PROMPT, POLL_PROMPT_URL, ASSISTANTS_URL } from "./config";
 import usePipecatWebRTC from "./hooks/use-pipecat-webrtc";
 import { Transcripts } from "./Transcripts";
 import WebRTCButton from "./WebRTCButton";

@@ -19,10 +19,11 @@ function App() {
   const [assistants, setAssistants] = useState<Array<{ assistant_id: string; name?: string | null; graph_id?: string | null; display_name?: string | null }>>([]);
   const [selectedAssistant, setSelectedAssistant] = useState<string | null>(null);
   const [selectedAssistantName, setSelectedAssistantName] = useState<string>("Speech to Speech Demo");
+  const [rtcConfig, setRtcConfig] = useState<ConstructorParameters<typeof RTCPeerConnection>[0]>({});
 
   const webRTC = usePipecatWebRTC({
     url: RTC_OFFER_URL,
-    rtcConfig
+    rtcConfig,
     onError: (e) => toast.error(e.message),
     assistant: selectedAssistant,
   });

@@ -77,6 +78,23 @@ function App() {
     fetchAssistants();
   }, []);
 
+  // Fetch ICE configuration for RTC
+  useEffect(() => {
+    const fetchRtcConfig = async () => {
+      try {
+        const res = await fetch("/rtc-config");
+        if (!res.ok) throw new Error(`HTTP ${res.status}`);
+        const data = await res.json();
+        setRtcConfig(data || {});
+      } catch (e) {
+        console.warn("Failed to fetch rtc-config", e);
+        // Fallback STUN if server not available
+        setRtcConfig({ iceServers: [{ urls: "stun:stun.l.google.com:19302" }] });
+      }
+    };
+    fetchRtcConfig();
+  }, []);
+
   // Send current prompt IMMEDIATELY when WebRTC connection is established
   useEffect(() => {
     if (webRTC.status === "connected" && currentPrompt.trim()) {