
Commit 4fcab78

Add test seam to mock out actual HTTPS connection
Add a test seam to mock out the actual HTTPS connection, and add a single test using it per chat backend.
1 parent 0a75d31 commit 4fcab78
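The seam is a Hidden property on each chat class that defaults to the existing llms.internal.sendRequestWrapper, so production behaviour is unchanged; tests overwrite it with a function handle and generate never opens an HTTPS connection. A minimal sketch of exercising the seam without the mocking framework used in the new tests (the API key, the handle, and the canned response shape below are illustrative assumptions, not part of this commit):

    % Sketch only: stub the network call with a plain function handle.
    chat = openAIChat("You are a helpful assistant", APIKey="not-a-real-key");
    chat.sendRequestFcn = @(parameters, apiKey, url, timeOut, streamFun) deal( ...
        struct(StatusCode="OK", ...
               Body=struct(Data=struct(choices=struct( ...
                   message=struct(content="stubbed reply"))))), ...
        "");   % second output is the streamed text, unused for a canned reply
    response = generate(chat,"Hi");   % "stubbed reply", assuming this minimal struct
                                      % covers every field generate() reads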

11 files changed: +135 −13 lines

+llms/+internal/callAzureChatAPI.m

Lines changed: 3 additions & 2 deletions
@@ -58,13 +58,14 @@
     nvp.APIKey
     nvp.TimeOut
     nvp.StreamFun
+    nvp.sendRequestFcn
 end

 URL = endpoint + "openai/deployments/" + deploymentID + "/chat/completions?api-version=" + nvp.APIVersion;

 parameters = buildParametersCall(messages, functions, nvp);

-[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
+[response, streamedText] = nvp.sendRequestFcn(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);

 % For old models like GPT-3.5, we may have to change the request sent a
 % little. Since we cannot detect the model used other than trying to send a
@@ -74,7 +75,7 @@
         isfield(response.Body.Data.error,"message") && ...
         response.Body.Data.error.message == "Unrecognized request argument supplied: max_completion_tokens"
     parameters = renameStructField(parameters,'max_completion_tokens','max_tokens');
-    [response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
+    [response, streamedText] = nvp.sendRequestFcn(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
 end

 % If call errors, "choices" will not be part of response.Body.Data, instead

+llms/+internal/callOllamaChatAPI.m

Lines changed: 2 additions & 1 deletion
@@ -41,6 +41,7 @@
     nvp.TimeOut
     nvp.StreamFun
     nvp.Endpoint
+    nvp.sendRequestFcn
 end

 URL = nvp.Endpoint + "/api/chat";
@@ -56,7 +57,7 @@

 parameters = buildParametersCall(model, messages, functions, nvp);

-[response, streamedText] = llms.internal.sendRequestWrapper(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);
+[response, streamedText] = nvp.sendRequestFcn(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);

 % If call errors, "choices" will not be part of response.Body.Data, instead
 % we get response.Body.Data.error

+llms/+internal/callOpenAIChatAPI.m

Lines changed: 2 additions & 1 deletion
@@ -56,13 +56,14 @@
     nvp.APIKey
     nvp.TimeOut
     nvp.StreamFun
+    nvp.sendRequestFcn
 end

 END_POINT = "https://api.openai.com/v1/chat/completions";

 parameters = buildParametersCall(messages, functions, nvp);

-[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
+[response, streamedText] = nvp.sendRequestFcn(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);

 % If call errors, "choices" will not be part of response.Body.Data, instead
 % we get response.Body.Data.error
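All three backend functions now call the injected nvp.sendRequestFcn with the same argument list previously passed to llms.internal.sendRequestWrapper (the Ollama backend passes [] in place of an API key), so any replacement supplied through the seam has to honour that signature. A hypothetical fake is sketched below; the function name, the canned field values, and the OpenAI-style response shape are illustrative assumptions, not part of this commit:

    % Sketch of a fake request function matching the injected signature.
    % It ignores the request and returns a canned, OpenAI-shaped reply.
    function [response, streamedText] = fakeSendRequest(parameters, apiKey, url, timeOut, streamFun)
        response = struct( ...
            StatusCode="OK", ...
            Body=struct(Data=struct(choices=struct(message=struct(content="stubbed reply")))));
        streamedText = "";   % the real wrapper fills this only when streaming
    end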

azureChat.m

Lines changed: 7 additions & 1 deletion
@@ -99,6 +99,11 @@
     APIVersion (1,1) string
 end

+properties (Hidden)
+    % test seam
+    sendRequestFcn = @llms.internal.sendRequestWrapper
+end
+
 methods
     function this = azureChat(systemPrompt, nvp)
         arguments
@@ -262,7 +267,8 @@
         StopSequences=nvp.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
         PresencePenalty=nvp.PresencePenalty, FrequencyPenalty=nvp.FrequencyPenalty, ...
         ResponseFormat=nvp.ResponseFormat,Seed=nvp.Seed, ...
-        APIKey=nvp.APIKey,TimeOut=nvp.TimeOut,StreamFun=streamFun);
+        APIKey=nvp.APIKey,TimeOut=nvp.TimeOut,StreamFun=streamFun,...
+        sendRequestFcn=this.sendRequestFcn);
 catch ME
     if ismember(ME.identifier,...
             ["MATLAB:webservices:UnknownHost","MATLAB:webservices:Timeout"])

ollamaChat.m

Lines changed: 6 additions & 1 deletion
@@ -78,6 +78,11 @@
     TailFreeSamplingZ (1,1) {mustBeNumeric,mustBeReal} = 1
 end

+properties (Hidden)
+    % test seam
+    sendRequestFcn = @llms.internal.sendRequestWrapper
+end
+
 methods
     function this = ollamaChat(modelName, systemPrompt, nvp)
         arguments
@@ -245,7 +250,7 @@
         StopSequences=nvp.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
         ResponseFormat=nvp.ResponseFormat,Seed=nvp.Seed, ...
         TimeOut=nvp.TimeOut, StreamFun=streamFun, ...
-        Endpoint=nvp.Endpoint);
+        Endpoint=nvp.Endpoint, sendRequestFcn=this.sendRequestFcn);
 catch e
     if e.identifier == "MATLAB:webservices:ConnectionRefused"
         error("llms:noOllamaFound",llms.utils.errorMessageCatalog.getMessage("llms:noOllamaFound",nvp.Endpoint));

openAIChat.m

Lines changed: 6 additions & 1 deletion
@@ -87,6 +87,10 @@
     ModelName
 end

+properties (Hidden)
+    % test seam
+    sendRequestFcn = @llms.internal.sendRequestWrapper
+end

 methods
     function this = openAIChat(systemPrompt, nvp)
@@ -255,7 +259,8 @@
         StopSequences=nvp.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
         PresencePenalty=nvp.PresencePenalty, FrequencyPenalty=nvp.FrequencyPenalty, ...
         ResponseFormat=nvp.ResponseFormat,Seed=nvp.Seed, ...
-        APIKey=nvp.APIKey,TimeOut=nvp.TimeOut, StreamFun=streamFun);
+        APIKey=nvp.APIKey,TimeOut=nvp.TimeOut, StreamFun=streamFun, ...
+        sendRequestFcn=this.sendRequestFcn);
 catch e
     throw(e);
 end

tests/hstructuredOutput.m

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-classdef (Abstract) hstructuredOutput < matlab.unittest.TestCase
+classdef (Abstract) hstructuredOutput < matlab.mock.TestCase
 % Tests for completion APIs providing structured output

 % Copyright 2023-2025 The MathWorks, Inc.

tests/htoolCalls.m

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-classdef (Abstract) htoolCalls < matlab.unittest.TestCase
+classdef (Abstract) htoolCalls < matlab.mock.TestCase
 % Tests for backends with tool calls

 % Copyright 2023-2025 The MathWorks, Inc.

tests/tazureChat.m

Lines changed: 37 additions & 0 deletions
@@ -49,6 +49,32 @@ function doGenerateUsingSystemPrompt(testCase)
         testCase.verifyGreaterThan(strlength(response),0);
     end

+    function sendsSystemPrompt(testCase)
+        import matlab.unittest.constraints.HasField
+        [sendRequestMock,sendRequestBehaviour] = ...
+            createMock(testCase, AddedMethods="sendRequest");
+        testCase.assignOutputsWhen( ...
+            withAnyInputs(sendRequestBehaviour.sendRequest),...
+            iResponseMessage("Hello"),"This output is unused with Stream=false");
+
+        chat = testCase.constructor("You are a helpful assistant");
+        chat.sendRequestFcn = @(varargin) sendRequestMock.sendRequest(varargin{:});
+
+        response = testCase.verifyWarningFree(@() generate(chat,"Hi"));
+
+        calls = testCase.getMockHistory(sendRequestMock);
+
+        testCase.verifySize(calls,[1,1]);
+        sentHistory = calls.Inputs{2};
+        testCase.verifyThat(sentHistory,HasField("messages"));
+        testCase.verifyEqual(sentHistory.messages, ...
+            { ...
+            struct(role="system",content="You are a helpful assistant"),...
+            struct(role="user",content="Hi") ...
+            });
+        testCase.verifyEqual(response,"Hello");
+    end
+
     function generateMultipleResponses(testCase)
         chat = azureChat;
         [~,~,response] = generate(chat,"What is a cat?",NumCompletions=3);
@@ -503,3 +529,14 @@ function deploymentNotFound(testCase)
 function apiVersions = iGetAPIVersions()
     apiVersions = cellstr(llms.azure.apiVersions);
 end
+
+function msg = iResponseMessage(txt)
+% minimal structure replacing the real matlab.net.http.ResponseMessage() in our mocks
+msg = struct(...
+    StatusCode="OK",...
+    Body=struct(...
+        Data=struct(...
+            choices=struct(...
+                message=struct(...
+                    content=txt)))));
+end

tests/tollamaChat.m

Lines changed: 33 additions & 4 deletions
@@ -47,11 +47,30 @@ function doGenerate(testCase,StringInputs)
         testCase.verifyGreaterThan(strlength(response),0);
     end

-    function doGenerateUsingSystemPrompt(testCase)
-        chat = ollamaChat(testCase.defaultModelName,"You are a helpful assistant");
+    function sendsSystemPrompt(testCase)
+        import matlab.unittest.constraints.HasField
+        [sendRequestMock,sendRequestBehaviour] = ...
+            createMock(testCase, AddedMethods="sendRequest");
+        testCase.assignOutputsWhen( ...
+            withAnyInputs(sendRequestBehaviour.sendRequest),...
+            iResponseMessage("Hello"),"This output is unused with Stream=false");
+
+        chat = testCase.constructor("You are a helpful assistant");
+        chat.sendRequestFcn = @(varargin) sendRequestMock.sendRequest(varargin{:});
+
         response = testCase.verifyWarningFree(@() generate(chat,"Hi"));
-        testCase.verifyClass(response,'string');
-        testCase.verifyGreaterThan(strlength(response),0);
+
+        calls = testCase.getMockHistory(sendRequestMock);
+
+        testCase.verifySize(calls,[1,1]);
+        sentHistory = calls.Inputs{2};
+        testCase.verifyThat(sentHistory,HasField("messages"));
+        testCase.verifyEqual(sentHistory.messages, ...
+            { ...
+            struct(role="system",content="You are a helpful assistant"),...
+            struct(role="user",content="Hi") ...
+            });
+        testCase.verifyEqual(response,"Hello");
     end

     function generateOverridesProperties(testCase)
@@ -513,3 +532,13 @@ function queryModels(testCase)
         "Input",{{ validMessages "MaxNumTokens" 0 }},...
         "Error","MATLAB:validators:mustBePositive"));
 end
+
+function msg = iResponseMessage(txt)
+% minimal structure replacing the real matlab.net.http.ResponseMessage() in our mocks
+msg = struct(...
+    StatusCode="OK",...
+    Body=struct(...
+        Data=struct(...
+            message=struct(...
+                content=txt))));
+end
