

feat: add chat, speech, and transcription dependencies
@dc7bb9d51ff1e10178144b51762f168491886dec
--- src/faster_whisper_server/config.py
+++ src/faster_whisper_server/config.py
@@ -236,3 +236,14 @@
     Controls how many latest seconds of audio are being passed through VAD.
     Should be greater than `max_inactivity_seconds`
     """
+
+    chat_completion_base_url: str = "https://api.openai.com/v1"
+    chat_completion_api_key: str | None = None
+
+    speech_base_url: str | None = None
+    speech_api_key: str | None = None
+    speech_model: str = "piper"
+    speech_extra_body: dict = {"sample_rate": 24000}
+
+    transcription_base_url: str | None = None
+    transcription_api_key: str | None = None
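
For context (not part of the diff): assuming `Config` behaves like a pydantic-settings model, as the surrounding fields suggest, the new options could presumably be supplied programmatically as well as from the environment. A minimal, hypothetical sketch:

    # Hypothetical sketch (not part of this commit): overriding the new fields
    # when constructing Config directly. Assumes Config has no other required
    # fields; the URL and key values below are placeholders.
    from faster_whisper_server.config import Config

    config = Config(
        chat_completion_base_url="http://localhost:11434/v1",  # any OpenAI-compatible endpoint
        chat_completion_api_key="cant-be-empty",
        speech_extra_body={"sample_rate": 24000},
    )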
--- src/faster_whisper_server/dependencies.py
+++ src/faster_whisper_server/dependencies.py
@@ -3,6 +3,10 @@
 
 from fastapi import Depends, HTTPException, status
 from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
+from httpx import ASGITransport, AsyncClient
+from openai import AsyncOpenAI
+from openai.resources.audio import AsyncSpeech, AsyncTranscriptions
+from openai.resources.chat.completions import AsyncCompletions
 
 from faster_whisper_server.config import Config
 from faster_whisper_server.model_manager import PiperModelManager, WhisperModelManager
@@ -45,3 +49,56 @@
 
 
 ApiKeyDependency = Depends(verify_api_key)
+
+
+@lru_cache
+def get_completion_client() -> AsyncCompletions:
+    config = get_config()  # HACK
+    oai_client = AsyncOpenAI(base_url=config.chat_completion_base_url, api_key=config.chat_completion_api_key)
+    return oai_client.chat.completions
+
+
+CompletionClientDependency = Annotated[AsyncCompletions, Depends(get_completion_client)]
+
+
+@lru_cache
+def get_speech_client() -> AsyncSpeech:
+    config = get_config()  # HACK
+    if config.speech_base_url is None:
+        # this might not work as expected if the `speech_router` won't have shared state with the main FastAPI `app`. TODO: verify # noqa: E501
+        from faster_whisper_server.routers.speech import (
+            router as speech_router,
+        )
+
+        http_client = AsyncClient(
+            transport=ASGITransport(speech_router), base_url="http://test/v1"
+        )  # NOTE: "test" can be replaced with any other value
+        oai_client = AsyncOpenAI(http_client=http_client, api_key=config.speech_api_key)
+    else:
+        oai_client = AsyncOpenAI(base_url=config.speech_base_url, api_key=config.speech_api_key)
+    return oai_client.audio.speech
+
+
+SpeechClientDependency = Annotated[AsyncSpeech, Depends(get_speech_client)]
+
+
+@lru_cache
+def get_transcription_client() -> AsyncTranscriptions:
+    config = get_config()
+    if config.transcription_base_url is None:
+        # this might not work as expected if the `transcription_router` won't have shared state with the main FastAPI `app`. TODO: verify # noqa: E501
+        from faster_whisper_server.routers.stt import (
+            router as stt_router,
+        )
+
+        http_client = AsyncClient(
+            transport=ASGITransport(stt_router), base_url="http://test/v1"
+        )  # NOTE: "test" can be replaced with any other value
+
+        oai_client = AsyncOpenAI(http_client=http_client, api_key=config.transcription_api_key)
+    else:
+        oai_client = AsyncOpenAI(base_url=config.transcription_base_url, api_key=config.transcription_api_key)
+    return oai_client.audio.transcriptions
+
+
+TranscriptionClientDependency = Annotated[AsyncTranscriptions, Depends(get_transcription_client)]
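
A rough usage sketch (not part of this commit) of how one of the new dependencies might be injected into a route; the route path, model name, and prompt handling below are illustrative assumptions:

    # Hypothetical route using CompletionClientDependency; everything here is
    # illustrative and not part of the commit.
    from fastapi import APIRouter

    from faster_whisper_server.dependencies import CompletionClientDependency

    router = APIRouter()


    @router.post("/v1/chat/demo")
    async def chat_demo(prompt: str, completion_client: CompletionClientDependency) -> str:
        chat_completion = await completion_client.create(
            model="gpt-4o-mini",  # placeholder model name
            messages=[{"role": "user", "content": prompt}],
        )
        return chat_completion.choices[0].message.content or ""

Worth noting as a design choice: when `speech_base_url` / `transcription_base_url` are unset, the clients fall back to an in-process `ASGITransport` pointed at the server's own routers, so speech and transcription requests loop back without a network hop; the commit's own comments flag that shared router/app state still needs to be verified.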