Commit

ai-proxy: e2e tests add support for DeepSeek, Yi (零一万物), and Zhipu AI (智谱AI)
hanxiantao committed Feb 6, 2025
1 parent 417a07d commit 80f1667
Showing 2 changed files with 267 additions and 0 deletions.
180 changes: 180 additions & 0 deletions test/e2e/conformance/tests/go-wasm-ai-proxy.go
@@ -213,6 +213,66 @@ data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"?"}}
data: [DONE]
`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "deepseek case 1: non-streaming request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "api.deepseek.com",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":false}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"id":"chatcmpl-llm-mock","choices":[{"index":0,"message":{"role":"assistant","content":"你好,你是谁?"},"finish_reason":"stop"}],"created":10,"model":"deepseek-reasoner","object":"chat.completion","usage":{"prompt_tokens":9,"completion_tokens":1,"total_tokens":10}}`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "deepseek case 2: streaming request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "api.deepseek.com",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":true}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeTextEventStream,
Body: []byte(`data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"你"}}],"created":10,"model":"deepseek-reasoner","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"好"}}],"created":10,"model":"deepseek-reasoner","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":","}}],"created":10,"model":"deepseek-reasoner","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"你"}}],"created":10,"model":"deepseek-reasoner","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"是"}}],"created":10,"model":"deepseek-reasoner","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"谁"}}],"created":10,"model":"deepseek-reasoner","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"?"}}],"created":10,"model":"deepseek-reasoner","object":"chat.completion.chunk","usage":{}}
data: [DONE]
`),
},
},
@@ -601,6 +661,126 @@ data: [DONE]
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "yi case 1: non-streaming request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "api.lingyiwanwu.com",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":false}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"id":"chatcmpl-llm-mock","choices":[{"index":0,"message":{"role":"assistant","content":"你好,你是谁?"},"finish_reason":"stop"}],"created":10,"model":"Yi-Medium","object":"chat.completion","usage":{"prompt_tokens":9,"completion_tokens":1,"total_tokens":10}}`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "yi case 2: streaming request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "api.lingyiwanwu.com",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":true}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeTextEventStream,
Body: []byte(`data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"你"}}],"created":10,"model":"Yi-Medium","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"好"}}],"created":10,"model":"Yi-Medium","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":","}}],"created":10,"model":"Yi-Medium","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"你"}}],"created":10,"model":"Yi-Medium","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"是"}}],"created":10,"model":"Yi-Medium","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"谁"}}],"created":10,"model":"Yi-Medium","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"?"}}],"created":10,"model":"Yi-Medium","object":"chat.completion.chunk","usage":{}}
data: [DONE]
`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "zhipuai case 1: non-streaming request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "open.bigmodel.cn",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":false}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"id":"chatcmpl-llm-mock","choices":[{"index":0,"message":{"role":"assistant","content":"你好,你是谁?"},"finish_reason":"stop"}],"created":10,"model":"glm-4-plus","object":"chat.completion","usage":{"prompt_tokens":9,"completion_tokens":1,"total_tokens":10}}`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "zhipuai case 2: streaming request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "open.bigmodel.cn",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":true}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeTextEventStream,
Body: []byte(`data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"你"}}],"created":10,"model":"glm-4-plus","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"好"}}],"created":10,"model":"glm-4-plus","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":","}}],"created":10,"model":"glm-4-plus","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"你"}}],"created":10,"model":"glm-4-plus","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"是"}}],"created":10,"model":"glm-4-plus","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"谁"}}],"created":10,"model":"glm-4-plus","object":"chat.completion.chunk","usage":{}}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"?"}}],"created":10,"model":"glm-4-plus","object":"chat.completion.chunk","usage":{}}
data: [DONE]
`),
},
},
},
}
t.Run("WasmPlugins ai-proxy", func(t *testing.T) {
for _, testcase := range testcases {
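For context on what these cases exercise: each request is routed by its Host header to the shared llm-mock-service, and the ai-proxy plugin rewrites the client-side "gpt-3" alias to the provider model configured in modelMapping, which is why the expected bodies report "deepseek-reasoner", "Yi-Medium", or "glm-4-plus". Below is a minimal standalone sketch of the non-streaming DeepSeek case as a plain Go client; the gateway address, the Host override, and the response struct are assumptions for illustration, not code from the conformance harness.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// chatResponse mirrors only the fields the e2e assertions compare.
type chatResponse struct {
	Model   string `json:"model"`
	Choices []struct {
		Message struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"message"`
		FinishReason string `json:"finish_reason"`
	} `json:"choices"`
}

func main() {
	// The gateway address is an assumption for local experimentation;
	// the conformance suite drives requests through its own harness instead.
	body := []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":false}`)
	req, err := http.NewRequest("POST", "http://127.0.0.1/v1/chat/completions", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Host = "api.deepseek.com" // matches the wasmplugin-ai-proxy-deepseek Ingress rule
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var parsed chatResponse
	if err := json.NewDecoder(resp.Body).Decode(&parsed); err != nil {
		panic(err)
	}
	// With modelMapping 'gpt-3': deepseek-reasoner, the mock echoes the mapped model name.
	fmt.Printf("model=%s content=%q\n", parsed.Model, parsed.Choices[0].Message.Content)
}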
87 changes: 87 additions & 0 deletions test/e2e/conformance/tests/go-wasm-ai-proxy.yaml
@@ -70,6 +70,25 @@ spec:
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: wasmplugin-ai-proxy-deepseek
namespace: higress-conformance-ai-backend
spec:
ingressClassName: higress
rules:
- host: "api.deepseek.com"
http:
paths:
- pathType: Prefix
path: "/"
backend:
service:
name: llm-mock-service
port:
number: 3000
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: wasmplugin-ai-proxy-doubao
namespace: higress-conformance-ai-backend
@@ -201,6 +220,44 @@ spec:
port:
number: 3000
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: wasmplugin-ai-proxy-yi
namespace: higress-conformance-ai-backend
spec:
ingressClassName: higress
rules:
- host: "api.lingyiwanwu.com"
http:
paths:
- pathType: Prefix
path: "/"
backend:
service:
name: llm-mock-service
port:
number: 3000
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: wasmplugin-ai-proxy-zhipuai
namespace: higress-conformance-ai-backend
spec:
ingressClassName: higress
rules:
- host: "open.bigmodel.cn"
http:
paths:
- pathType: Prefix
path: "/"
backend:
service:
name: llm-mock-service
port:
number: 3000
---
apiVersion: extensions.higress.io/v1alpha1
kind: WasmPlugin
metadata:
@@ -241,6 +298,16 @@ spec:
type: baidu
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-baidu
- config:
provider:
apiTokens:
- fake_token
modelMapping:
'gpt-3': deepseek-reasoner
'*': deepseek-chat
type: deepseek
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-deepseek
- config:
provider:
apiTokens:
@@ -319,4 +386,24 @@ spec:
type: qwen
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-qwen
- config:
provider:
apiTokens:
- fake_token
modelMapping:
'gpt-3': Yi-Medium
'*': Yi-Large
type: yi
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-yi
- config:
provider:
apiTokens:
- fake_token
modelMapping:
'gpt-3': glm-4-plus
'*': glm-4-long
type: zhipuai
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-zhipuai
url: oci://higress-registry.cn-hangzhou.cr.aliyuncs.com/plugins/ai-proxy:1.0.0
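The streaming cases use the same routing, but the expected body is a server-sent event stream in which every "data:" line carries one chat.completion.chunk under the mapped model name (glm-4-plus for "gpt-3" on open.bigmodel.cn, per the zhipuai entry above), terminated by a "data: [DONE]" sentinel. A rough client-side sketch for reading such a stream follows; the gateway address and Host override are again assumptions for illustration only.

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// Gateway address and Host override are assumptions for local experimentation.
	body := []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":true}`)
	req, err := http.NewRequest("POST", "http://127.0.0.1/v1/chat/completions", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Host = "open.bigmodel.cn" // matches the wasmplugin-ai-proxy-zhipuai Ingress rule
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if !strings.HasPrefix(line, "data: ") {
			continue // skip blank separator lines between events
		}
		payload := strings.TrimPrefix(line, "data: ")
		if payload == "[DONE]" {
			break // end-of-stream sentinel, as asserted in the test cases
		}
		fmt.Println(payload) // each chunk should report the mapped model, e.g. "glm-4-plus"
	}
	if err := scanner.Err(); err != nil {
		panic(err)
	}
}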
