Files
pipeline-bridge/e2e_test.go
Billy D. de4fa6ea90
Some checks failed
CI / Release (push) Has been cancelled
CI / Docker Build & Push (push) Has been cancelled
CI / Notify (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Lint (push) Has been cancelled
fix: resolve golangci-lint errcheck warnings
- Add error checks for unchecked return values (errcheck)
- Remove unused struct fields (unused)
- Fix gofmt formatting issues
2026-02-20 08:45:25 -05:00

214 lines
6.3 KiB
Go

package main
import (
"encoding/json"
"net/http"
"net/http/httptest"
"sync/atomic"
"testing"
"git.daviestechlabs.io/daviestechlabs/handler-base/messages"
)
// ────────────────────────────────────────────────────────────────────────────
// E2E tests: Argo + Kubeflow pipeline submission integration
// ────────────────────────────────────────────────────────────────────────────
// TestSubmitArgoE2E_FullPayload verifies that submitArgo builds the complete
// Argo Workflow submission payload — namespace, request-id label, template
// reference, and parameter list — and extracts the run ID from the response.
func TestSubmitArgoE2E_FullPayload(t *testing.T) {
	var receivedBody map[string]any
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		_ = json.NewDecoder(r.Body).Decode(&receivedBody)
		_ = json.NewEncoder(w).Encode(map[string]any{
			"metadata": map[string]any{"name": "doc-ingest-xyz"},
		})
	}))
	defer ts.Close()

	ctx := t.Context()
	params := map[string]any{
		"source":     "s3://bucket/docs",
		"collection": "knowledge-base",
		"batch_size": 100,
	}
	runID, err := submitArgo(ctx, ts.Client(), ts.URL, "ai-ml", "document-ingestion", params, "req-e2e-001")
	if err != nil {
		t.Fatal(err)
	}
	if runID != "doc-ingest-xyz" {
		t.Errorf("runID = %q", runID)
	}

	// asObject replaces bare type assertions: a malformed payload now fails
	// the test with a clear message instead of panicking the test binary.
	asObject := func(v any, key string) map[string]any {
		m, ok := v.(map[string]any)
		if !ok {
			t.Fatalf("%s: expected JSON object, got %T", key, v)
		}
		return m
	}

	// Verify workflow structure.
	wf := asObject(receivedBody["workflow"], "workflow")
	meta := asObject(wf["metadata"], "metadata")
	if meta["namespace"] != "ai-ml" {
		t.Errorf("namespace = %v", meta["namespace"])
	}
	labels := asObject(meta["labels"], "labels")
	if labels["request-id"] != "req-e2e-001" {
		t.Errorf("request-id label = %v", labels["request-id"])
	}
	spec := asObject(wf["spec"], "spec")
	templateRef := asObject(spec["workflowTemplateRef"], "workflowTemplateRef")
	if templateRef["name"] != "document-ingestion" {
		t.Errorf("template = %v", templateRef["name"])
	}
	args := asObject(spec["arguments"], "arguments")
	argoParams, ok := args["parameters"].([]any)
	if !ok {
		t.Fatalf("parameters: expected JSON array, got %T", args["parameters"])
	}
	if len(argoParams) != 3 {
		t.Errorf("param count = %d, want 3", len(argoParams))
	}
}
// TestSubmitKubeflowE2E_FullPayload verifies that submitKubeflow posts a run
// request with the expected run name and extracts the run ID from the
// Kubeflow-style response envelope.
func TestSubmitKubeflowE2E_FullPayload(t *testing.T) {
	var receivedBody map[string]any
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		_ = json.NewDecoder(r.Body).Decode(&receivedBody)
		_ = json.NewEncoder(w).Encode(map[string]any{
			"run": map[string]any{"id": "kf-run-e2e-789"},
		})
	}))
	defer ts.Close()

	ctx := t.Context()
	params := map[string]any{
		"query":   "what is kubernetes",
		"top_k":   5,
		"user_id": "test-user",
	}
	runID, err := submitKubeflow(ctx, ts.Client(), ts.URL, "rag-pipeline", params, "req-e2e-002")
	if err != nil {
		t.Fatal(err)
	}
	if runID != "kf-run-e2e-789" {
		t.Errorf("runID = %q", runID)
	}

	// Verify run name format. Checked assertion: a missing or non-string
	// "name" now fails with a message instead of panicking the test binary.
	name, ok := receivedBody["name"].(string)
	if !ok {
		t.Fatalf("name: expected string, got %T", receivedBody["name"])
	}
	// NOTE(review): the expected value ends at "req-e2e-" rather than the
	// full "req-e2e-002" — presumably submitKubeflow truncates the request
	// ID when building the run name; confirm against its implementation.
	if name != "rag-pipeline-req-e2e-" {
		t.Errorf("run name = %q", name)
	}
}
// TestPipelineDispatchE2E_AllEngines checks that every registered pipeline
// definition names a supported engine and carries the field that engine
// requires (template for Argo, pipeline_id for Kubeflow).
func TestPipelineDispatchE2E_AllEngines(t *testing.T) {
	for pipelineName, def := range pipelines {
		t.Run(pipelineName, func(t *testing.T) {
			switch def.Engine {
			case "argo":
				if def.Template == "" {
					t.Errorf("argo pipeline %q missing template", pipelineName)
				}
			case "kubeflow":
				if def.PipelineID == "" {
					t.Errorf("kubeflow pipeline %q missing pipeline_id", pipelineName)
				}
			default:
				t.Errorf("engine = %q, want argo or kubeflow", def.Engine)
			}
		})
	}
}
// TestPipelineDispatchE2E_UnknownPipeline confirms that an unregistered
// pipeline name is absent from the registry and that the error response we
// would send back carries the full list of available pipelines.
func TestPipelineDispatchE2E_UnknownPipeline(t *testing.T) {
	const unknown = "nonexistent-pipeline"
	if _, found := pipelines[unknown]; found {
		t.Error("nonexistent pipeline should not be found")
	}

	// Collect every registered pipeline name for the error payload.
	available := make([]string, 0, len(pipelines))
	for registered := range pipelines {
		available = append(available, registered)
	}

	status := &messages.PipelineStatus{
		RequestID:          "req-bad",
		Status:             "error",
		Error:              "Unknown pipeline: nonexistent-pipeline",
		AvailablePipelines: available,
	}
	if status.Status != "error" {
		t.Error("expected error status")
	}
	if len(status.AvailablePipelines) != len(pipelines) {
		t.Errorf("available_pipelines count mismatch")
	}
}
// TestSubmitArgoE2E_ConcurrentRequests fires ten parallel submissions at a
// shared stub server, then verifies that every call succeeded and that the
// server saw exactly one request per goroutine.
func TestSubmitArgoE2E_ConcurrentRequests(t *testing.T) {
	const workers = 10

	var hits atomic.Int64
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		hits.Add(1)
		_ = json.NewEncoder(w).Encode(map[string]any{
			"metadata": map[string]any{"name": "concurrent-wf"},
		})
	}))
	defer ts.Close()

	ctx := t.Context()
	results := make(chan error, workers)
	for worker := 0; worker < workers; worker++ {
		// Pass the batch number as an explicit argument to the goroutine.
		go func(batch int) {
			_, err := submitArgo(ctx, ts.Client(), ts.URL, "ai-ml", "batch-inference",
				map[string]any{"batch": batch}, "req-concurrent")
			results <- err
		}(worker)
	}

	// Drain one result per worker; report each failure individually.
	for n := 0; n < workers; n++ {
		if err := <-results; err != nil {
			t.Errorf("concurrent request %d: %v", n, err)
		}
	}
	if got := hits.Load(); got != workers {
		t.Errorf("request count = %d, want 10", got)
	}
}
// ────────────────────────────────────────────────────────────────────────────
// Benchmarks
// ────────────────────────────────────────────────────────────────────────────
// BenchmarkSubmitArgo measures a full submitArgo round trip against an
// in-process stub server that returns a fixed workflow name.
func BenchmarkSubmitArgo(b *testing.B) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte(`{"metadata":{"name":"bench-wf"}}`))
	}))
	defer ts.Close()

	ctx := b.Context()
	params := map[string]any{"source": "test"}
	// b.Loop (Go 1.24+) keeps the setup above out of the timed region, so
	// the explicit b.ResetTimer() the original carried was redundant.
	for b.Loop() {
		// Fail fast instead of silently benchmarking a broken endpoint.
		if _, err := submitArgo(ctx, ts.Client(), ts.URL, "ai-ml", "document-ingestion", params, "bench-req"); err != nil {
			b.Fatal(err)
		}
	}
}
// BenchmarkSubmitKubeflow measures a full submitKubeflow round trip against
// an in-process stub server that returns a fixed run ID.
func BenchmarkSubmitKubeflow(b *testing.B) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte(`{"run":{"id":"bench-run"}}`))
	}))
	defer ts.Close()

	ctx := b.Context()
	params := map[string]any{"query": "test"}
	// b.Loop (Go 1.24+) keeps the setup above out of the timed region, so
	// the explicit b.ResetTimer() the original carried was redundant.
	for b.Loop() {
		// Fail fast instead of silently benchmarking a broken endpoint.
		if _, err := submitKubeflow(ctx, ts.Client(), ts.URL, "rag-pipeline", params, "bench-req"); err != nil {
			b.Fatal(err)
		}
	}
}