runner

Ansible role that deploys services on my runner machine

llm-stack-compose.yml.j2 (Jinja2 template, 1.4 KB, 77 lines)
services:
  # Ollama: local LLM inference server.
  ollama:
    container_name: ollama
    image: "{{ container_images.ollama }}"
    restart: unless-stopped

    ports:
      # Host port is configurable; 11434 is Ollama's default API port.
      - "{{ llm_stack_ollama_port }}:11434"

    env_file:
      - .env

    volumes:
      - /etc/localtime:/etc/localtime:ro
      # Quoted: a rendered path containing spaces, ": " or " #" would
      # otherwise break YAML parsing of the generated compose file.
      - "{{ llm_stack_ollama_data_dir }}:/root/.ollama"

{# default(false) keeps the template renderable when gpu_enabled is unset #}
{% if gpu_enabled | default(false) %}
    runtime: nvidia
    gpus: all
{% endif %}

    networks:
      llm-stack-net:
        aliases:
          - ollama
27  openwebui:
28    container_name: openwebui
29    image: "{{ container_images.openwebui }}"
30    restart: unless-stopped
31    
32    ports:
33      - "{{ llm_stack_openwebui_port }}:8080"
34    
35    env_file:
36      - .env
37    
38    volumes:
39      - /etc/localtime:/etc/localtime:ro
40      - {{ llm_stack_openwebui_data_dir }}:/app/backend/data
41    
42    depends_on:
43      - ollama
44    
45    networks:
46      llm-stack-net:
47        aliases:
48          - openwebui
49
50  litellm:
51    container_name: litellm
52    image: "{{ container_images.litellm }}"
53    restart: unless-stopped
54    
55    ports:
56      - "{{ llm_stack_litellm_port }}:4000"
57    
58    env_file:
59      - .env
60    
61    volumes:
62      - /etc/localtime:/etc/localtime:ro
63      - {{ llm_stack_litellm_data_dir }}:/app/data
64    
65    depends_on:
66      - ollama
67    
68    networks:
69      llm-stack-net:
70        aliases:
71          - litellm
72
# Dedicated bridge network shared by all llm-stack services; the explicit
# fixed name lets other compose projects attach to it as "llm-stack-net".
networks:
  llm-stack-net:
    driver: bridge
    name: llm-stack-net