Ansible role that deploys services on my runner machine
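A minimal sketch of wiring the role into a playbook; the host group and role name here are illustrative assumptions, not values defined by this repo:

- hosts: runner
  become: true
  roles:
    - role: runner_services
      vars:
        llm_stack_enabled: true
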
---
# Runner Services - Unified LLM Stack Deployment
# Single deployment with Ollama, OpenWebUI, and LiteLLM services

- name: Fail if LLM stack is disabled
  fail:
    msg: "LLM stack is disabled. Set llm_stack_enabled: true to enable."
  when: not llm_stack_enabled
  tags: always

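# The tasks below reference role variables without defining them; a minimal
# sketch of the expected defaults/main.yml (every path and port shown is an
# illustrative assumption, only the variable names come from this file):
#
#   llm_stack_enabled: false
#   llm_stack_config_dir: /opt/llm-stack
#   llm_stack_ollama_data_dir: /opt/llm-stack/ollama
#   llm_stack_openwebui_data_dir: /opt/llm-stack/openwebui
#   llm_stack_litellm_data_dir: /opt/llm-stack/litellm
#   llm_stack_ollama_port: 11434
#   llm_stack_openwebui_port: 3000
#   llm_stack_litellm_port: 4000
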
- name: Create LLM stack configuration directory structure
  file:
    path: "{{ item }}"
    state: directory
    owner: "{{ runner_user }}"
    group: "{{ runner_group }}"
    mode: '0775'
  loop:
    - "{{ llm_stack_config_dir }}"
    - "{{ llm_stack_ollama_data_dir }}"
    - "{{ llm_stack_openwebui_data_dir }}"
    - "{{ llm_stack_litellm_data_dir }}"

- name: Set setgid bit on LLM stack config directory for group inheritance
  file:
    path: "{{ llm_stack_config_dir }}"
    state: directory
    mode: "g+s"

- name: Create unified LLM stack Docker Compose file
  template:
    src: llm-stack-compose.yml.j2
    dest: "{{ llm_stack_config_dir }}/docker-compose.yml"
    owner: "{{ runner_user }}"
    group: "{{ runner_group }}"
    mode: '0664'
  notify: restart llm-stack

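# The llm-stack-compose.yml.j2 template is not shown here; a minimal sketch of
# what it might render to. The images, internal ports, and volume mounts are
# assumptions beyond this file; only the three service roles, the Ollama
# address ollama:11434, and the port variables come from it:
#
#   services:
#     ollama:
#       image: ollama/ollama:latest
#       ports:
#         - "{{ llm_stack_ollama_port }}:11434"
#       volumes:
#         - "{{ llm_stack_ollama_data_dir }}:/root/.ollama"
#     openwebui:
#       image: ghcr.io/open-webui/open-webui:main
#       ports:
#         - "{{ llm_stack_openwebui_port }}:8080"
#       environment:
#         OLLAMA_BASE_URL: http://ollama:11434
#     litellm:
#       image: ghcr.io/berriai/litellm:main-latest
#       ports:
#         - "{{ llm_stack_litellm_port }}:4000"
#       env_file:
#         - litellm.env
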
- name: Create unified LLM stack environment file
  template:
    src: llm-stack.env.j2
    dest: "{{ llm_stack_config_dir }}/.env"
    owner: "{{ runner_user }}"
    group: "{{ runner_group }}"
    mode: '0664'
  notify: restart llm-stack

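# llm-stack.env.j2 is likewise not shown; a hedged sketch of what it might
# carry, reusing only variables that appear elsewhere in this file (the
# environment variable names on the left are assumptions):
#
#   OLLAMA_DEFAULT_MODEL={{ llm_stack_ollama_default_model }}
#   WEBUI_NAME={{ llm_stack_openwebui_name }}
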
- name: Create LiteLLM environment file
  template:
    src: llm-stack-litellm.env.j2
    dest: "{{ llm_stack_config_dir }}/litellm.env"
    owner: "{{ runner_user }}"
    group: "{{ runner_group }}"
    mode: '0664'
  notify: restart llm-stack

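# The notify entries above assume a handler named "restart llm-stack" in
# handlers/main.yml; a minimal sketch of what it could look like (the handler
# body is an assumption, only the handler name comes from this file):
#
#   - name: restart llm-stack
#     community.docker.docker_compose_v2:
#       project_src: "{{ llm_stack_config_dir }}"
#       state: restarted
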
- name: Start unified LLM stack service
  community.docker.docker_compose_v2:
    project_src: "{{ llm_stack_config_dir }}"
    state: present
  register: llm_stack_start_result
  check_mode: no

- name: Wait for Ollama to be healthy
  uri:
    url: "http://localhost:{{ llm_stack_ollama_port }}/api/tags"
    method: GET
    status_code: 200
  register: ollama_health
  until: ollama_health.status == 200
  retries: 30
  delay: 10
  when: llm_stack_start_result is changed
  check_mode: no

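# Nothing in this file pulls llm_stack_ollama_default_model; a hedged sketch
# of fetching it through Ollama's documented /api/pull endpoint once the
# server responds (the task itself is an illustrative addition, not part of
# this role):
#
#   - name: Pull default Ollama model
#     uri:
#       url: "http://localhost:{{ llm_stack_ollama_port }}/api/pull"
#       method: POST
#       body_format: json
#       body:
#         model: "{{ llm_stack_ollama_default_model }}"
#         stream: false
#       timeout: 600
#     when: llm_stack_start_result is changed
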
- name: Wait for OpenWebUI to be healthy
  uri:
    url: "http://localhost:{{ llm_stack_openwebui_port }}/api/health"
    method: GET
    status_code: 200
    timeout: 30
  register: openwebui_health
  until: openwebui_health.status == 200
  retries: 60
  delay: 15
  when: llm_stack_start_result is changed
  check_mode: no

- name: Wait for LiteLLM port to accept TCP connections
  wait_for:
    host: localhost
    port: "{{ llm_stack_litellm_port }}"
    timeout: 30
  when: llm_stack_start_result is changed
  check_mode: no

- name: Wait for LiteLLM to be healthy
  uri:
    url: "http://localhost:{{ llm_stack_litellm_port }}/"
    method: GET
    status_code: 200
  register: litellm_health
  until: litellm_health.status == 200
  retries: 10
  delay: 10
  when: llm_stack_start_result is changed
  check_mode: no

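# LiteLLM exposes an OpenAI-compatible API, so a quick post-deploy check can
# list the configured models; /v1/models is the standard OpenAI-compatible
# endpoint, the task itself is an illustrative addition:
#
#   - name: Verify LiteLLM model list
#     uri:
#       url: "http://localhost:{{ llm_stack_litellm_port }}/v1/models"
#       return_content: true
#     register: litellm_models
#     when: llm_stack_start_result is changed
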
- name: Display unified LLM stack deployment summary
  debug:
    msg: |
      Unified LLM Stack Deployment:
      - Status: {{ 'Started' if llm_stack_start_result is changed else 'Already running' }}

      Ollama (Local LLM Server):
      - API Endpoint: http://{{ ansible_default_ipv4.address }}:{{ llm_stack_ollama_port }}
      - Configuration: {{ llm_stack_config_dir }}
      - Data Storage: {{ llm_stack_ollama_data_dir }}
      - GPU Support: {{ 'Enabled' if gpu_enabled else 'Disabled' }}
      - Default Model: {{ llm_stack_ollama_default_model }}

      OpenWebUI (Web Interface):
      - Web UI: http://{{ ansible_default_ipv4.address }}:{{ llm_stack_openwebui_port }}
      - Connected to Ollama: ollama:11434
      - Name: {{ llm_stack_openwebui_name }}

      LiteLLM (Unified Proxy):
      - API Endpoint: http://{{ ansible_default_ipv4.address }}:{{ llm_stack_litellm_port }}
      - Connected to Ollama: ollama:11434
      - Available Models: {{ llm_stack_litellm_model_list }}

      Management Commands:
      - cd {{ llm_stack_config_dir }} && docker compose logs -f
      - cd {{ llm_stack_config_dir }} && docker compose restart
      - cd {{ llm_stack_config_dir }} && docker compose ps