Skip to content

Commit 1da93c3

Browse files
authored
fix: resolved async issues for llamastack containers (#3468)

* fix: resolved async issues for llamastack containers

  Created a new interface that holds more information on the playground
  container. With this new interface, I updated the llamastack backend to
  handle stopping, starting, and recreating containers based on what state
  they are in.

  Signed-off-by: Brian <[email protected]>

* chore: fixed tests, updated instructions rendering, and aligned with Svelte 5

  Signed-off-by: Brian <[email protected]>

* chore: renamed function createboth and removed extra log

  Signed-off-by: Brian <[email protected]>

---------

Signed-off-by: Brian <[email protected]>
1 parent cef8552 commit 1da93c3

File tree

10 files changed

+716
-317
lines changed

10 files changed

+716
-317
lines changed

packages/backend/src/llama-stack-api-impl.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,20 +20,20 @@ import { navigation } from '@podman-desktop/api';
2020
import type { LlamaStackAPI } from '@shared/LlamaStackAPI';
2121
import type { LlamaStackContainerConfiguration } from '@shared/models/llama-stack/LlamaStackContainerConfiguration';
2222
import type { LlamaStackManager } from './managers/llama-stack/llamaStackManager';
23-
import type { LlamaStackContainerInfo } from '@shared/models/llama-stack/LlamaStackContainerInfo';
23+
import type { LlamaStackContainers } from '@shared/models/llama-stack/LlamaStackContainerInfo';
2424

2525
export class LlamaStackApiImpl implements LlamaStackAPI {
2626
constructor(private llamaStackManager: LlamaStackManager) {}
2727

28-
requestCreateLlamaStackContainer(config: LlamaStackContainerConfiguration): Promise<void> {
29-
return this.llamaStackManager.requestCreateLlamaStackContainer(config);
28+
requestcreateLlamaStackContainerss(config: LlamaStackContainerConfiguration): Promise<void> {
29+
return this.llamaStackManager.requestcreateLlamaStackContainerss(config);
3030
}
3131

3232
routeToLlamaStackContainerTerminal(containerId: string): Promise<void> {
3333
return navigation.navigateToContainerTerminal(containerId);
3434
}
3535

36-
getLlamaStackContainerInfo(): Promise<LlamaStackContainerInfo | undefined> {
37-
return this.llamaStackManager.getLlamaStackContainer();
36+
getLlamaStackContainersInfo(): Promise<LlamaStackContainers | undefined> {
37+
return this.llamaStackManager.getLlamaStackContainers();
3838
}
3939
}

packages/backend/src/managers/application/applicationManager.spec.ts

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ const recipeManager = {
7878
} as unknown as RecipeManager;
7979

8080
const llamaStackManager = {
81-
getLlamaStackContainer: vi.fn(),
81+
getLlamaStackContainers: vi.fn(),
8282
} as unknown as LlamaStackManager;
8383

8484
vi.mock('@podman-desktop/api', () => ({
@@ -145,10 +145,9 @@ beforeEach(() => {
145145
id: 'fake-task',
146146
}));
147147
vi.mocked(modelsManagerMock.uploadModelToPodmanMachine).mockResolvedValue('downloaded-model-path');
148-
vi.mocked(llamaStackManager.getLlamaStackContainer).mockResolvedValue({
149-
containerId: 'container1',
150-
port: 10001,
151-
playgroundPort: 10002,
148+
vi.mocked(llamaStackManager.getLlamaStackContainers).mockResolvedValue({
149+
server: { containerId: 'container1', port: 10001, state: 'running' },
150+
playground: { containerId: 'playground1', port: 10002, state: 'running' },
152151
});
153152
});
154153

packages/backend/src/managers/application/applicationManager.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -331,13 +331,13 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
331331
}
332332
}
333333
} else if (options.dependencies?.llamaStack) {
334-
let stack = await this.llamaStackManager.getLlamaStackContainer();
334+
let stack = await this.llamaStackManager.getLlamaStackContainers();
335335
if (!stack) {
336-
await this.llamaStackManager.createLlamaStackContainer(options.connection, labels ?? {});
337-
stack = await this.llamaStackManager.getLlamaStackContainer();
336+
await this.llamaStackManager.createLlamaStackContainers(options.connection, labels ?? {});
337+
stack = await this.llamaStackManager.getLlamaStackContainers();
338338
}
339339
if (stack) {
340-
envs = [`MODEL_ENDPOINT=http://host.containers.internal:${stack.port}`];
340+
envs = [`MODEL_ENDPOINT=http://host.containers.internal:${stack.server?.port}`];
341341
}
342342
}
343343
if (image.ports.length > 0) {

packages/backend/src/managers/llama-stack/llamaStackManager.spec.ts

Lines changed: 204 additions & 82 deletions
Large diffs are not rendered by default.

0 commit comments

Comments (0)