This commit is contained in:
2026-02-02 07:11:47 -05:00
parent 1f833e0124
commit ad31e878cb
6 changed files with 12 additions and 6 deletions

View File

@@ -27,7 +27,8 @@ logger = logging.getLogger("embeddings-demo")
 # Configuration
 EMBEDDINGS_URL = os.environ.get(
     "EMBEDDINGS_URL",
-    "http://embeddings-predictor.ai-ml.svc.cluster.local"
+    # Default: Ray Serve Embeddings endpoint
+    "http://ai-inference-serve-svc.ai-ml.svc.cluster.local:8000/embeddings"
 )
 MLFLOW_TRACKING_URI = os.environ.get(
     "MLFLOW_TRACKING_URI",

View File

@@ -29,7 +29,8 @@ spec:
           protocol: TCP
         env:
           - name: EMBEDDINGS_URL
-            value: "http://embeddings-predictor.ai-ml.svc.cluster.local"
+            # Ray Serve endpoint - routes to /embeddings prefix
+            value: "http://ai-inference-serve-svc.ai-ml.svc.cluster.local:8000/embeddings"
           - name: MLFLOW_TRACKING_URI
            value: "http://mlflow.mlflow.svc.cluster.local:80"
        resources:

3
stt.py
View File

@@ -29,7 +29,8 @@ logger = logging.getLogger("stt-demo")
 # Configuration
 STT_URL = os.environ.get(
     "STT_URL",
-    "http://whisper-predictor.ai-ml.svc.cluster.local"
+    # Default: Ray Serve whisper endpoint
+    "http://ai-inference-serve-svc.ai-ml.svc.cluster.local:8000/whisper"
 )
 MLFLOW_TRACKING_URI = os.environ.get(
     "MLFLOW_TRACKING_URI",

View File

@@ -29,7 +29,8 @@ spec:
           protocol: TCP
         env:
           - name: WHISPER_URL
-            value: "http://whisper-predictor.ai-ml.svc.cluster.local"
+            # Ray Serve endpoint - routes to /whisper prefix
+            value: "http://ai-inference-serve-svc.ai-ml.svc.cluster.local:8000/whisper"
           - name: MLFLOW_TRACKING_URI
            value: "http://mlflow.mlflow.svc.cluster.local:80"
        resources:

3
tts.py
View File

@@ -29,7 +29,8 @@ logger = logging.getLogger("tts-demo")
 # Configuration
 TTS_URL = os.environ.get(
     "TTS_URL",
-    "http://tts-predictor.ai-ml.svc.cluster.local"
+    # Default: Ray Serve TTS endpoint
+    "http://ai-inference-serve-svc.ai-ml.svc.cluster.local:8000/tts"
 )
 MLFLOW_TRACKING_URI = os.environ.get(
     "MLFLOW_TRACKING_URI",

View File

@@ -29,7 +29,8 @@ spec:
           protocol: TCP
         env:
           - name: TTS_URL
-            value: "http://tts-predictor.ai-ml.svc.cluster.local"
+            # Ray Serve endpoint - routes to /tts prefix
+            value: "http://ai-inference-serve-svc.ai-ml.svc.cluster.local:8000/tts"
           - name: MLFLOW_TRACKING_URI
            value: "http://mlflow.mlflow.svc.cluster.local:80"
        resources: