devfile.yaml
schemaVersion: 2.3.0
metadata:
  generateName: cde-ollama-continue
attributes:
  controller.devfile.io/storage-type: ephemeral
projects:
  - name: cde-ollama-continue
    git:
      remotes:
        origin: 'https://github.com/redhat-developer-demos/cde-ollama-continue'
      checkoutFrom:
        revision: main
components:
  # Universal Developer Image: tooling container where the project sources are mounted
  - name: udi
    container:
      image: quay.io/devfile/universal-developer-image:ubi8-latest
      memoryLimit: 4Gi
      memoryRequest: 2Gi
      cpuLimit: 4000m
      cpuRequest: 1000m
      mountSources: true
      sourceMapping: /projects
  # Ollama model server; resource requests and limits are raised via container-overrides
  - name: ollama
    attributes:
      container-overrides:
        resources:
          limits:
            cpu: 4000m
            memory: 12Gi
            # nvidia.com/gpu: 1 # Uncomment to schedule the pod only on a GPU node
          requests:
            cpu: 1000m
            memory: 8Gi
            # nvidia.com/gpu: 1 # Uncomment to schedule the pod only on a GPU node
    container:
      image: docker.io/ollama/ollama:latest
      mountSources: true
      sourceMapping: /.ollama
commands:
  # Pull the chat model into the Ollama container
  - id: pullmodel
    exec:
      component: ollama
      commandLine: "ollama pull llama3:8b"
  # Pull the code-autocompletion model
  - id: pullautocompletemodel
    exec:
      component: ollama
      commandLine: "ollama pull starcoder2:3b"
  # Copy the Continue extension configuration into the user's home directory
  - id: copyconfig
    exec:
      component: udi
      commandLine: "mkdir /home/user/.continue && cp /projects/cde-ollama-continue/continue-config.json /home/user/.continue/config.json"
events:
  postStart:
    - pullmodel
    - pullautocompletemodel
    - copyconfig