blob: d13255386c64351e15609047f8e57c712da0d194 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
|
---
# TODO(review): should this fail if we don't have an endpoint for ES to connect to?
- include: determine_version.yaml

# allow passing in a tempdir
- name: Create temp directory for doing work in
  command: mktemp -d /tmp/openshift-logging-ansible-XXXXXX
  register: mktemp
  # creating a scratch dir is not a change to managed state
  changed_when: false

# all generated templates below are written under this directory
- set_fact:
    tempdir: "{{ mktemp.stdout }}"
# This may not be necessary in this role
- name: Create templates subdirectory
  file:
    state: directory
    path: "{{ tempdir }}/templates"
    # quoted so YAML does not parse the leading-zero literal as an octal
    # integer; same effective permissions (rwxr-xr-x), per Ansible file docs
    mode: "0755"
  changed_when: false
# we want to make sure we have all the necessary components here
# create service account
# Two variants of the same task: oc_serviceaccount's image_pull_secrets
# parameter cannot be conditionally omitted here, so the task is duplicated
# with mutually-exclusive when-guards on openshift_logging_image_pull_secret.
# Variant 1: a pull secret was provided -- attach it to the service account.
- name: Create Kibana service account
  oc_serviceaccount:
    state: present
    name: "aggregated-logging-kibana"
    namespace: "{{ openshift_logging_namespace }}"
    image_pull_secrets: "{{ openshift_logging_image_pull_secret }}"
  when: openshift_logging_image_pull_secret != ''
# Variant 2: no pull secret -- create the bare service account.
- name: Create Kibana service account
  oc_serviceaccount:
    state: present
    name: "aggregated-logging-kibana"
    namespace: "{{ openshift_logging_namespace }}"
  when:
    - openshift_logging_image_pull_secret == ''
# Derive the deployment name and component label, appending "-ops" when this
# run is deploying the ops-cluster variant of Kibana.
- set_fact:
    kibana_name: "{{ 'logging-kibana' ~ ( (openshift_logging_kibana_ops_deployment | default(false) | bool) | ternary('-ops', '')) }}"
    kibana_component: "{{ 'kibana' ~ ( (openshift_logging_kibana_ops_deployment | default(false) | bool) | ternary('-ops', '')) }}"
# Slurp each generated cert/key from disk; 'key_pairs' holds one result per
# item and is consumed below via the entry_from_named_pair filter, keyed by
# the 'name' field of each item.
- name: Retrieving the cert to use when generating secrets for the logging components
  slurp:
    src: "{{ generated_certs_dir }}/{{ item.file }}"
  register: key_pairs
  with_items:
    - { name: "ca_file", file: "ca.crt" }
    - { name: "kibana_internal_key", file: "kibana-internal.key"}
    - { name: "kibana_internal_cert", file: "kibana-internal.crt"}
    - { name: "server_tls", file: "server-tls.json"}
# services
# Expose the Kibana pods (selected by component/provider labels) on 443,
# forwarding to the oauth-proxy container's named port.
- name: Set {{ kibana_name }} service
  oc_service:
    state: present
    name: "{{ kibana_name }}"
    namespace: "{{ openshift_logging_kibana_namespace }}"
    selector:
      component: "{{ kibana_component }}"
      provider: openshift
    # pending #4091
    #labels:
    #- logging-infra: 'support'
    ports:
      - port: 443
        targetPort: "oaproxy"
# create routes
# TODO: set up these certs differently?
# NOTE: 'when' already evaluates its value as a Jinja expression; wrapping the
# condition in "{{ ... }}" triggers Ansible's "conditional statements should
# not include jinja2 templating delimiters" warning, so bare expressions are
# used here. Each fact is only set when the corresponding custom cert/key
# variable is non-empty.
- set_fact:
    kibana_key: "{{ lookup('file', openshift_logging_kibana_key) | b64encode }}"
  when: openshift_logging_kibana_key | trim | length > 0
  changed_when: false
- set_fact:
    kibana_cert: "{{ lookup('file', openshift_logging_kibana_cert) | b64encode }}"
  when: openshift_logging_kibana_cert | trim | length > 0
  changed_when: false
- set_fact:
    kibana_ca: "{{ lookup('file', openshift_logging_kibana_ca) | b64encode }}"
  when: openshift_logging_kibana_ca | trim | length > 0
  changed_when: false
# Fall back to the generated CA when no custom CA was provided above.
- set_fact:
    kibana_ca: "{{ key_pairs | entry_from_named_pair('ca_file') }}"
  when: kibana_ca is not defined
  changed_when: false
# Render the re-encrypt route: the edge cert/key (custom if provided, empty
# otherwise) faces clients, while tls_dest_ca_cert (the generated CA) is used
# to validate the pod-side certificate.
- name: Generating Kibana route template
  template:
    src: route_reencrypt.j2
    dest: "{{ tempdir }}/templates/kibana-route.yaml"
  vars:
    obj_name: "{{ kibana_name }}"
    route_host: "{{ openshift_logging_kibana_hostname }}"
    service_name: "{{ kibana_name }}"
    tls_key: "{{ kibana_key | default('') | b64decode }}"
    tls_cert: "{{ kibana_cert | default('') | b64decode }}"
    tls_ca_cert: "{{ kibana_ca | b64decode }}"
    tls_dest_ca_cert: "{{ key_pairs | entry_from_named_pair('ca_file') | b64decode }}"
    edge_term_policy: "{{ openshift_logging_kibana_edge_term_policy | default('') }}"
    labels:
      component: support
      logging-infra: support
      provider: openshift
  # canonical boolean ('no' and 'false' are equivalent to Ansible)
  changed_when: false
# This currently has an issue if the host name changes
# Apply the rendered route from the tempdir.
# NOTE(review): this uses openshift_logging_namespace while the service task
# above uses openshift_logging_kibana_namespace -- confirm these are meant to
# be the same namespace.
- name: Setting Kibana route
  oc_obj:
    state: present
    name: "{{ kibana_name }}"
    namespace: "{{ openshift_logging_namespace }}"
    kind: route
    files:
      - "{{ tempdir }}/templates/kibana-route.yaml"
# gen session_secret -- if necessary
# TODO: make idempotent -- these secrets are regenerated on every run
- name: Generate proxy session
  set_fact:
    session_secret: "{{ 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' | random_word(200) }}"
  # set the fact even under --check so later templating tasks can use it
  check_mode: false
# gen oauth_secret -- if necessary
# TODO: make idempotent
- name: Generate oauth client secret
  set_fact:
    oauth_secret: "{{ 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' | random_word(64) }}"
  check_mode: false
# create oauth client
# Render and apply the OAuthClient object that the kibana-proxy sidecar uses
# to authenticate users against the OpenShift master.
- name: Create oauth-client template
  template:
    src: oauth-client.j2
    dest: "{{ tempdir }}/templates/oauth-client.yml"
  vars:
    kibana_hostname: "{{ openshift_logging_kibana_hostname }}"
    secret: "{{ oauth_secret }}"
- name: Set kibana-proxy oauth-client
  oc_obj:
    state: present
    name: "kibana-proxy"
    namespace: "{{ openshift_logging_namespace }}"
    kind: oauthclient
    files:
      - "{{ tempdir }}/templates/oauth-client.yml"
    # remove the rendered template file after the object is applied
    delete_after: true
# create Kibana secret
# Client cert/key/CA that Kibana uses to talk to Elasticsearch.
- name: Set Kibana secret
  oc_secret:
    state: present
    name: "logging-kibana"
    namespace: "{{ openshift_logging_namespace }}"
    files:
      - name: ca
        path: "{{ generated_certs_dir }}/ca.crt"
      - name: key
        path: "{{ generated_certs_dir }}/system.logging.kibana.key"
      - name: cert
        path: "{{ generated_certs_dir }}/system.logging.kibana.crt"
# create Kibana-proxy secret
# Server-side TLS material plus the generated oauth/session secrets for the
# auth proxy; the cert/key/tls.json contents come from the key_pairs slurped
# earlier (b64decoded back to their raw file contents).
- name: Set Kibana Proxy secret
  oc_secret:
    state: present
    name: "logging-kibana-proxy"
    namespace: "{{ openshift_logging_namespace }}"
    # TODO: when possible to have both files and contents for oc_secret use this
    #files:
    #- name: server-key
    #  path: "{{ generated_certs_dir }}/kibana-internal.key"
    #- name: server-cert
    #  path: "{{ generated_certs_dir }}/kibana-internal.crt"
    #- name: server-tls.json
    #  path: "{{ generated_certs_dir }}/server-tls.json"
    contents:
      - path: oauth-secret
        data: "{{ oauth_secret }}"
      - path: session-secret
        data: "{{ session_secret }}"
      - path: server-key
        data: "{{ key_pairs | entry_from_named_pair('kibana_internal_key') | b64decode }}"
      - path: server-cert
        data: "{{ key_pairs | entry_from_named_pair('kibana_internal_cert') | b64decode }}"
      - path: server-tls.json
        data: "{{ key_pairs | entry_from_named_pair('server_tls') | b64decode }}"
# create Kibana DC
# Render the DeploymentConfig for the Kibana pod (kibana container plus the
# auth-proxy sidecar) and apply it.
- name: Generate Kibana DC template
  template:
    src: kibana.j2
    dest: "{{ tempdir }}/templates/kibana-dc.yaml"
  vars:
    component: "{{ kibana_component }}"
    logging_component: kibana
    deploy_name: "{{ kibana_name }}"
    image: "{{ openshift_logging_image_prefix }}logging-kibana:{{ openshift_logging_image_version }}"
    proxy_image: "{{ openshift_logging_image_prefix }}logging-auth-proxy:{{ openshift_logging_image_version }}"
    es_host: "{{ openshift_logging_kibana_es_host }}"
    es_port: "{{ openshift_logging_kibana_es_port }}"
    kibana_cpu_limit: "{{ openshift_logging_kibana_cpu_limit }}"
    kibana_memory_limit: "{{ openshift_logging_kibana_memory_limit }}"
    kibana_proxy_cpu_limit: "{{ openshift_logging_kibana_proxy_cpu_limit }}"
    kibana_proxy_memory_limit: "{{ openshift_logging_kibana_proxy_memory_limit }}"
    replicas: "{{ openshift_logging_kibana_replicas | default (1) }}"
    kibana_node_selector: "{{ openshift_logging_kibana_nodeselector | default({}) }}"
- name: Set Kibana DC
  oc_obj:
    state: present
    name: "{{ kibana_name }}"
    namespace: "{{ openshift_logging_namespace }}"
    kind: dc
    files:
      - "{{ tempdir }}/templates/kibana-dc.yaml"
    # remove the rendered template file after the object is applied
    delete_after: true
# update master configs?
# Clean up the scratch directory created at the top of this file.
- name: Delete temp directory
  file:
    # 'path' is the canonical parameter name ('name' is an accepted alias)
    path: "{{ tempdir }}"
    state: absent
  changed_when: false
|