fix: resolve golangci-lint errcheck warnings
- Add error checks for unchecked return values (errcheck)
- Remove unused struct fields (unused)
- Fix gofmt formatting issues
This commit is contained in:
28
e2e_test.go
28
e2e_test.go
@@ -30,7 +30,7 @@ t.Helper()
|
||||
m := &mockBackends{}
|
||||
|
||||
m.Embeddings = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
json.NewEncoder(w).Encode(map[string]any{
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"data": []map[string]any{
|
||||
{"embedding": []float64{0.1, 0.2, 0.3, 0.4}},
|
||||
},
|
||||
@@ -39,7 +39,7 @@ json.NewEncoder(w).Encode(map[string]any{
|
||||
t.Cleanup(m.Embeddings.Close)
|
||||
|
||||
m.Reranker = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
json.NewEncoder(w).Encode(map[string]any{
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"results": []map[string]any{
|
||||
{"index": 0, "relevance_score": 0.95},
|
||||
},
|
||||
@@ -49,8 +49,8 @@ t.Cleanup(m.Reranker.Close)
|
||||
|
||||
m.LLM = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
var req map[string]any
|
||||
json.NewDecoder(r.Body).Decode(&req)
|
||||
json.NewEncoder(w).Encode(map[string]any{
|
||||
_ = json.NewDecoder(r.Body).Decode(&req)
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"choices": []map[string]any{
|
||||
{"message": map[string]any{
|
||||
"content": "Paris is the capital of France.",
|
||||
@@ -61,7 +61,7 @@ json.NewEncoder(w).Encode(map[string]any{
|
||||
t.Cleanup(m.LLM.Close)
|
||||
|
||||
m.TTS = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte{0xDE, 0xAD, 0xBE, 0xEF})
|
||||
_, _ = w.Write([]byte{0xDE, 0xAD, 0xBE, 0xEF})
|
||||
}))
|
||||
t.Cleanup(m.TTS.Close)
|
||||
|
||||
@@ -148,7 +148,7 @@ func TestChatPipeline_LLMTimeout(t *testing.T) {
|
||||
// Simulate slow LLM.
|
||||
slow := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
time.Sleep(200 * time.Millisecond)
|
||||
json.NewEncoder(w).Encode(map[string]any{
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"choices": []map[string]any{
|
||||
{"message": map[string]any{"content": "late response"}},
|
||||
},
|
||||
@@ -204,7 +204,7 @@ func TestChatPipeline_TypedDecoding(t *testing.T) {
|
||||
|
||||
func BenchmarkChatPipeline_LLMOnly(b *testing.B) {
|
||||
llmSrv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte(`{"choices":[{"message":{"content":"answer"}}]}`))
|
||||
_, _ = w.Write([]byte(`{"choices":[{"message":{"content":"answer"}}]}`))
|
||||
}))
|
||||
defer llmSrv.Close()
|
||||
|
||||
@@ -213,23 +213,23 @@ ctx := context.Background()
|
||||
|
||||
b.ResetTimer()
|
||||
for b.Loop() {
|
||||
llm.Generate(ctx, "question", "", "")
|
||||
_, _ = llm.Generate(ctx, "question", "", "")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkChatPipeline_RAGFlow(b *testing.B) {
|
||||
embedSrv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte(`{"data":[{"embedding":[0.1,0.2]}]}`))
|
||||
_, _ = w.Write([]byte(`{"data":[{"embedding":[0.1,0.2]}]}`))
|
||||
}))
|
||||
defer embedSrv.Close()
|
||||
|
||||
rerankSrv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte(`{"results":[{"index":0,"relevance_score":0.9}]}`))
|
||||
_, _ = w.Write([]byte(`{"results":[{"index":0,"relevance_score":0.9}]}`))
|
||||
}))
|
||||
defer rerankSrv.Close()
|
||||
|
||||
llmSrv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte(`{"choices":[{"message":{"content":"answer"}}]}`))
|
||||
_, _ = w.Write([]byte(`{"choices":[{"message":{"content":"answer"}}]}`))
|
||||
}))
|
||||
defer llmSrv.Close()
|
||||
|
||||
@@ -240,8 +240,8 @@ ctx := context.Background()
|
||||
|
||||
b.ResetTimer()
|
||||
for b.Loop() {
|
||||
embed.EmbedSingle(ctx, "question")
|
||||
rerank.Rerank(ctx, "question", []string{"doc1", "doc2"}, 2)
|
||||
llm.Generate(ctx, "question", "context", "")
|
||||
_, _ = embed.EmbedSingle(ctx, "question")
|
||||
_, _ = rerank.Rerank(ctx, "question", []string{"doc1", "doc2"}, 2)
|
||||
_, _ = llm.Generate(ctx, "question", "context", "")
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user