-rw-r--r--  .coveragerc | 15
-rw-r--r--  CONTRIBUTING.md | 14
-rw-r--r--  conftest.py | 6
-rw-r--r--  openshift-ansible.spec | 4
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/docker/docker_upgrade.yml | 2
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml | 2
-rw-r--r--  playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml | 2
-rw-r--r--  playbooks/common/openshift-cluster/redeploy-certificates/router.yml | 5
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/post_control_plane.yml | 10
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml | 2
-rw-r--r--  playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml | 2
l---------  playbooks/common/openshift-cluster/upgrades/v3_5/filter_plugins | 1
l---------  playbooks/common/openshift-cluster/upgrades/v3_5/roles | 1
-rw-r--r--  playbooks/common/openshift-node/restart.yml | 20
-rw-r--r--  requirements.txt | 1
-rw-r--r--  roles/lib_openshift/library/oadm_manage_node.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_edit.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_env.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_label.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_obj.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_process.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_route.py | 79
-rw-r--r--  roles/lib_openshift/library/oc_scale.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_secret.py | 81
-rw-r--r--  roles/lib_openshift/library/oc_service.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_serviceaccount.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_serviceaccount_secret.py | 77
-rw-r--r--  roles/lib_openshift/library/oc_version.py | 77
-rw-r--r--  roles/lib_openshift/src/class/oc_route.py | 2
-rw-r--r--  roles/lib_openshift/src/class/oc_secret.py | 2
-rw-r--r--  roles/lib_openshift/src/lib/base.py | 15
-rw-r--r--  roles/lib_openshift/src/lib/import.py | 7
-rw-r--r--  roles/lib_openshift/src/lib/secret.py | 2
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oadm_manage_node.py (renamed from roles/lib_openshift/src/test/unit/oadm_manage_node.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_env.py (renamed from roles/lib_openshift/src/test/unit/oc_env.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_label.py (renamed from roles/lib_openshift/src/test/unit/oc_label.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_process.py (renamed from roles/lib_openshift/src/test/unit/oc_process.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_route.py (renamed from roles/lib_openshift/src/test/unit/oc_route.py) | 18
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_scale.py (renamed from roles/lib_openshift/src/test/unit/oc_scale.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_secret.py (renamed from roles/lib_openshift/src/test/unit/oc_secret.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_service.py (renamed from roles/lib_openshift/src/test/unit/oc_service.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_serviceaccount.py (renamed from roles/lib_openshift/src/test/unit/oc_serviceaccount.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_serviceaccount_secret.py (renamed from roles/lib_openshift/src/test/unit/oc_serviceaccount_secret.py) | 94
-rw-r--r-- [-rwxr-xr-x]  roles/lib_openshift/src/test/unit/test_oc_version.py (renamed from roles/lib_openshift/src/test/unit/oc_version.py) | 0
-rw-r--r--  roles/lib_openshift/tasks/main.yml | 6
-rw-r--r--  roles/lib_utils/library/repoquery.py | 11
-rw-r--r--  roles/lib_utils/library/yedit.py | 64
-rw-r--r--  roles/lib_utils/src/class/repoquery.py | 2
-rw-r--r--  roles/lib_utils/src/class/yedit.py | 55
-rw-r--r--  roles/lib_utils/src/lib/import.py | 9
-rw-r--r-- [-rwxr-xr-x]  roles/lib_utils/src/test/unit/test_repoquery.py (renamed from roles/lib_utils/src/test/unit/repoquery.py) | 0
-rw-r--r-- [-rwxr-xr-x]  roles/lib_utils/src/test/unit/test_yedit.py (renamed from roles/lib_utils/src/test/unit/yedit_test.py) | 2
-rw-r--r--  roles/lib_utils/tasks/main.yml | 6
-rw-r--r--  roles/nuage_node/tasks/iptables.yml | 2
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-ephemeral-template.json | 3
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-persistent-template.json | 3
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/db-templates/mysql-ephemeral-template.json | 3
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-ephemeral-template.json | 3
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-persistent-template.json | 3
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/db-templates/redis-ephemeral-template.json | 3
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/db-templates/redis-persistent-template.json | 3
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/image-streams/image-streams-rhel7.json | 2
-rw-r--r--  roles/openshift_examples/files/examples/v1.5/xpaas-streams/fis-image-streams.json | 24
-rwxr-xr-x  roles/openshift_facts/library/openshift_facts.py | 2
-rw-r--r--  roles/openshift_facts/tasks/main.yml | 12
-rw-r--r--  roles/openshift_hosted/meta/main.yml | 2
-rw-r--r--  roles/openshift_hosted/tasks/registry/storage/s3.yml | 32
-rw-r--r--  roles/openshift_hosted/tasks/router/router.yml | 24
-rw-r--r--  roles/openshift_hosted/templates/registry_config.j2 | 2
-rw-r--r--  roles/openshift_hosted_logging/handlers/main.yml | 21
-rw-r--r--  roles/openshift_logging/README.md | 4
-rw-r--r--  roles/openshift_logging/defaults/main.yml | 4
-rw-r--r--  roles/openshift_logging/handlers/main.yml | 21
-rw-r--r--  roles/openshift_logging/tasks/install_elasticsearch.yaml | 2
-rw-r--r--  roles/openshift_logging/tasks/upgrade_logging.yaml | 6
-rw-r--r--  roles/openshift_logging/templates/curator.j2 | 2
-rw-r--r--  roles/openshift_logging/templates/es.j2 | 2
-rw-r--r--  roles/openshift_logging/templates/kibana.j2 | 2
-rw-r--r--  roles/openshift_master_facts/test/conftest.py | 54
-rw-r--r--  roles/openshift_master_facts/test/openshift_master_facts_bad_input_tests.py | 57
-rw-r--r--  roles/openshift_master_facts/test/openshift_master_facts_default_predicates_tests.py | 264
-rw-r--r--  roles/openshift_master_facts/test/openshift_master_facts_default_priorities_tests.py | 262
-rw-r--r--  roles/openshift_node_upgrade/README.md | 2
-rw-r--r--  roles/openshift_node_upgrade/tasks/main.yml | 37
-rw-r--r--  setup.cfg | 30
-rw-r--r--  setup.py | 2
-rw-r--r--  test-requirements.txt | 4
-rw-r--r--  tox.ini | 6
-rw-r--r--  utils/.coveragerc | 15
-rw-r--r--  utils/README.md | 41
-rw-r--r--  utils/setup.cfg | 27
-rw-r--r--  utils/setup.py | 6
-rw-r--r--  utils/src/ooinstall/cli_installer.py | 20
-rw-r--r--  utils/src/ooinstall/oo_config.py | 17
-rw-r--r--  utils/test-requirements.txt | 3
-rw-r--r--  utils/tox.ini | 5
96 files changed, 1559 insertions, 869 deletions
diff --git a/.coveragerc b/.coveragerc
index e1d918755..1e819e157 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,5 +1,18 @@
[run]
-omit=
+branch = True
+omit =
*/lib/python*/site-packages/*
*/lib/python*/*
/usr/*
+ setup.py
+ # TODO(rhcarvalho): this is used to ignore test files from coverage report.
+ # We can make this less generic when we stick with a single test pattern in
+ # the repo.
+ test_*.py
+ *_tests.py
+
+[report]
+fail_under = 28
+
+[html]
+directory = cover
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index dafa73bad..502ef6aa5 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -152,3 +152,17 @@ are run on a different Jenkins host that is not publicly accessible, however the
test results are posted to S3 buckets when complete.
The test output of each job is also posted to the Pull Request as comments.
+
+---
+
+## Appendix
+
+### Finding unused Python code
+
+If you are contributing with Python code, you can use the tool
+[`vulture`](https://pypi.python.org/pypi/vulture) to verify that you are not
+introducing unused code by accident.
+
+This tool is not used in an automated form in CI nor otherwise because it may
+produce both false positives and false negatives. Still, it can be helpful to
+detect dead code that escapes our eyes.
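
(A minimal way to try the tool described in the added text above, with purely illustrative paths, is:

    $ pip install vulture
    $ vulture roles/ playbooks/ utils/src/

vulture then prints each function, class, or variable that looks unused together with its file and line; because of the false positives noted above, treat the output as hints to review by hand rather than items to delete automatically.)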
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 000000000..ad03fab29
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,6 @@
+"""pytest configuration"""
+
+
+def pytest_ignore_collect(path):
+ """Hook to ignore symlink files and directories."""
+ return path.islink()
diff --git a/openshift-ansible.spec b/openshift-ansible.spec
index e49c54347..c03d4eb8f 100644
--- a/openshift-ansible.spec
+++ b/openshift-ansible.spec
@@ -73,6 +73,10 @@ find -L %{buildroot}%{_datadir}/ansible/%{name}/playbooks -name filter_plugins -
# openshift-ansible-roles install
cp -rp roles %{buildroot}%{_datadir}/ansible/%{name}/
+# openshift_master_facts symlinks filter_plugins/oo_filters.py from ansible_plugins/filter_plugins
+pushd %{buildroot}%{_datadir}/ansible/%{name}/roles/openshift_master_facts/filter_plugins
+ln -sf ../../../../../ansible_plugins/filter_plugins/oo_filters.py oo_filters.py
+popd
# openshift-ansible-filter-plugins install
cp -rp filter_plugins %{buildroot}%{_datadir}/ansible_plugins/
diff --git a/playbooks/byo/openshift-cluster/upgrades/docker/docker_upgrade.yml b/playbooks/byo/openshift-cluster/upgrades/docker/docker_upgrade.yml
index 5d3280328..4ee6afe2a 100644
--- a/playbooks/byo/openshift-cluster/upgrades/docker/docker_upgrade.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/docker/docker_upgrade.yml
@@ -43,7 +43,7 @@
- name: Drain Node for Kubelet upgrade
command: >
- {{ openshift.common.admin_binary }} drain {{ openshift.node.nodename }} --force --delete-local-data
+ {{ openshift.common.admin_binary }} drain {{ openshift.node.nodename }} --force --delete-local-data --ignore-daemonsets
delegate_to: "{{ groups.oo_first_master.0 }}"
when: l_docker_upgrade is defined and l_docker_upgrade | bool and inventory_hostname in groups.oo_nodes_to_upgrade
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
index 6a6f0952b..69cabcd33 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade.yml
@@ -102,4 +102,4 @@
- include: ../../../../common/openshift-cluster/upgrades/post_control_plane.yml
-- inlcude: ../../../../common/openshift-cluster/upgrades/v3_5/storage_upgrade.yml
+- include: ../../../../common/openshift-cluster/upgrades/v3_5/storage_upgrade.yml
diff --git a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
index 2d4e0ff1c..719057d2b 100644
--- a/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
+++ b/playbooks/byo/openshift-cluster/upgrades/v3_5/upgrade_control_plane.yml
@@ -108,4 +108,4 @@
- include: ../../../../common/openshift-cluster/upgrades/post_control_plane.yml
-- inlcude: ../../../../common/openshift-cluster/upgrades/v3_5/storage_upgrade.yml
+- include: ../../../../common/openshift-cluster/upgrades/v3_5/storage_upgrade.yml
diff --git a/playbooks/common/openshift-cluster/redeploy-certificates/router.yml b/playbooks/common/openshift-cluster/redeploy-certificates/router.yml
index 707fb6424..35eedd5ee 100644
--- a/playbooks/common/openshift-cluster/redeploy-certificates/router.yml
+++ b/playbooks/common/openshift-cluster/redeploy-certificates/router.yml
@@ -2,14 +2,13 @@
- name: Update router certificates
hosts: oo_first_master
vars:
+ roles:
+ - lib_openshift
tasks:
- name: Create temp directory for kubeconfig
command: mktemp -d /tmp/openshift-ansible-XXXXXX
register: mktemp
changed_when: false
- roles:
- - lib_openshift
-
- name: Copy admin client config(s)
command: >
cp {{ openshift.common.config_base }}/master//admin.kubeconfig {{ mktemp.stdout }}/admin.kubeconfig
diff --git a/playbooks/common/openshift-cluster/upgrades/post_control_plane.yml b/playbooks/common/openshift-cluster/upgrades/post_control_plane.yml
index f0191e380..01c1e0c15 100644
--- a/playbooks/common/openshift-cluster/upgrades/post_control_plane.yml
+++ b/playbooks/common/openshift-cluster/upgrades/post_control_plane.yml
@@ -14,9 +14,6 @@
- name: Load lib_openshift modules
include_role:
name: lib_openshift
- # TODO: remove temp_skip_router_registry_upgrade variable. This is a short term hack
- # to allow ops to use this control plane upgrade, without triggering router/registry
- # upgrade which has not yet been synced with their process.
- name: Collect all routers
oc_obj:
state: list
@@ -24,22 +21,18 @@
all_namespaces: True
selector: 'router'
register: all_routers
- when: temp_skip_router_registry_upgrade is not defined
- set_fact: haproxy_routers="{{ (all_routers.reults.results[0]['items'] | oo_pods_match_component(openshift_deployment_type, 'haproxy-router') | oo_select_keys_from_list(['metadata']) }}"
when:
- all_routers.results.returncode == 0
- - temp_skip_router_registry_upgrade is not defined
- set_fact: haproxy_routers=[]
when:
- all_routers.results.returncode != 0
- - temp_skip_router_registry_upgrade is not defined
- name: Update router image to current version
when:
- all_routers.results.returncode == 0
- - temp_skip_router_registry_upgrade is not defined
command: >
{{ oc_cmd }} patch dc/{{ item['labels']['deploymentconfig'] }} -n {{ item['namespace'] }} -p
'{"spec":{"template":{"spec":{"containers":[{"name":"router","image":"{{ router_image }}","livenessProbe":{"tcpSocket":null,"httpGet":{"path": "/healthz", "port": 1936, "host": "localhost", "scheme": "HTTP"},"initialDelaySeconds":10,"timeoutSeconds":1}}]}}}}'
@@ -55,12 +48,10 @@
kind: dc
name: docker-registry
register: _default_registry
- when: temp_skip_router_registry_upgrade is not defined
- name: Update registry image to current version
when:
- _default_registry.results.results[0] != {}
- - temp_skip_router_registry_upgrade is not defined
command: >
{{ oc_cmd }} patch dc/docker-registry -n default -p
'{"spec":{"template":{"spec":{"containers":[{"name":"registry","image":"{{ registry_image }}"}]}}}}'
@@ -96,6 +87,7 @@
register: grep_plugin_order_override
when: openshift.common.version_gte_3_3_or_1_3 | bool
changed_when: false
+ failed_when: false
- name: Warn if pluginOrderOverride is in use in master-config.yaml
debug:
diff --git a/playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml b/playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml
index a4aefcdac..fd01a6625 100644
--- a/playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml
+++ b/playbooks/common/openshift-cluster/upgrades/upgrade_control_plane.yml
@@ -257,7 +257,7 @@
- name: Drain Node for Kubelet upgrade
command: >
- {{ hostvars[groups.oo_first_master.0].openshift.common.admin_binary }} drain {{ openshift.node.nodename | lower }} --force --delete-local-data
+ {{ hostvars[groups.oo_first_master.0].openshift.common.admin_binary }} drain {{ openshift.node.nodename | lower }} --force --delete-local-data --ignore-daemonsets
delegate_to: "{{ groups.oo_first_master.0 }}"
roles:
diff --git a/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml b/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
index e3a98fd9b..4e1838c71 100644
--- a/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
+++ b/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
@@ -26,7 +26,7 @@
- name: Drain Node for Kubelet upgrade
command: >
- {{ hostvars[groups.oo_first_master.0].openshift.common.admin_binary }} drain {{ openshift.node.nodename | lower }} --force --delete-local-data
+ {{ hostvars[groups.oo_first_master.0].openshift.common.admin_binary }} drain {{ openshift.node.nodename | lower }} --force --delete-local-data --ignore-daemonsets
delegate_to: "{{ groups.oo_first_master.0 }}"
roles:
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/filter_plugins b/playbooks/common/openshift-cluster/upgrades/v3_5/filter_plugins
new file mode 120000
index 000000000..7de3c1dd7
--- /dev/null
+++ b/playbooks/common/openshift-cluster/upgrades/v3_5/filter_plugins
@@ -0,0 +1 @@
+../../../../../filter_plugins/ \ No newline at end of file
diff --git a/playbooks/common/openshift-cluster/upgrades/v3_5/roles b/playbooks/common/openshift-cluster/upgrades/v3_5/roles
new file mode 120000
index 000000000..415645be6
--- /dev/null
+++ b/playbooks/common/openshift-cluster/upgrades/v3_5/roles
@@ -0,0 +1 @@
+../../../../../roles/ \ No newline at end of file
diff --git a/playbooks/common/openshift-node/restart.yml b/playbooks/common/openshift-node/restart.yml
index 6e9b1cca3..5e1df951c 100644
--- a/playbooks/common/openshift-node/restart.yml
+++ b/playbooks/common/openshift-node/restart.yml
@@ -2,16 +2,24 @@
- name: Restart nodes
hosts: oo_nodes_to_config
serial: "{{ openshift_restart_nodes_serial | default(1) }}"
+
+ roles:
+ - lib_openshift
+
tasks:
- name: Restart docker
- service: name=docker state=restarted
+ service:
+ name: docker
+ state: restarted
- name: Update docker facts
openshift_facts:
role: docker
- name: Restart containerized services
- service: name={{ item }} state=started
+ service:
+ name: "{{ item }}"
+ state: started
with_items:
- etcd_container
- openvswitch
@@ -36,12 +44,14 @@
state: restarted
- name: Wait for node to be ready
- command: >
- {{ hostvars[groups.oo_first_master.0].openshift.common.client_binary }} get node {{ openshift.common.hostname | lower }} --no-headers
+ oc_obj:
+ state: list
+ kind: node
+ name: "{{ openshift.common.hostname | lower }}"
register: node_output
delegate_to: "{{ groups.oo_first_master.0 }}"
when: inventory_hostname in groups.oo_nodes_to_config
- until: "{{ node_output.stdout.split()[1].startswith('Ready')}}"
+ until: node_output.results.results[0].status.conditions | selectattr('type', 'equalto', 'Ready') | map(attribute='status') | join | bool == True
# Give the node two minutes to come back online.
retries: 24
delay: 5
diff --git a/requirements.txt b/requirements.txt
index 8f47033f8..5a6a161cb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,3 +2,4 @@ ansible>=2.2
six
pyOpenSSL
PyYAML
+ruamel.yaml
diff --git a/roles/lib_openshift/library/oadm_manage_node.py b/roles/lib_openshift/library/oadm_manage_node.py
index 6c0ff9b13..0ac233c72 100644
--- a/roles/lib_openshift/library/oadm_manage_node.py
+++ b/roles/lib_openshift/library/oadm_manage_node.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -330,11 +335,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -374,10 +383,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -542,12 +557,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -560,11 +582,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -929,7 +957,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1006,7 +1034,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1088,7 +1121,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_edit.py b/roles/lib_openshift/library/oc_edit.py
index a565b32f2..1b1faf496 100644
--- a/roles/lib_openshift/library/oc_edit.py
+++ b/roles/lib_openshift/library/oc_edit.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -358,11 +363,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -402,10 +411,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -570,12 +585,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -588,11 +610,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -957,7 +985,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1034,7 +1062,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1116,7 +1149,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_env.py b/roles/lib_openshift/library/oc_env.py
index e00f5cdcc..7c2ccb98f 100644
--- a/roles/lib_openshift/library/oc_env.py
+++ b/roles/lib_openshift/library/oc_env.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -325,11 +330,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -369,10 +378,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -537,12 +552,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -555,11 +577,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -924,7 +952,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1001,7 +1029,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1083,7 +1116,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_label.py b/roles/lib_openshift/library/oc_label.py
index e168614bd..0db8585a4 100644
--- a/roles/lib_openshift/library/oc_label.py
+++ b/roles/lib_openshift/library/oc_label.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -334,11 +339,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -378,10 +387,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -546,12 +561,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -564,11 +586,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -933,7 +961,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1010,7 +1038,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1092,7 +1125,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_obj.py b/roles/lib_openshift/library/oc_obj.py
index d73d05472..6d0b391b9 100644
--- a/roles/lib_openshift/library/oc_obj.py
+++ b/roles/lib_openshift/library/oc_obj.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -337,11 +342,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -381,10 +390,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -549,12 +564,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -567,11 +589,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -936,7 +964,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1013,7 +1041,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1095,7 +1128,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_process.py b/roles/lib_openshift/library/oc_process.py
index bcb4d2289..a1ee79f6e 100644
--- a/roles/lib_openshift/library/oc_process.py
+++ b/roles/lib_openshift/library/oc_process.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -326,11 +331,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -370,10 +379,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -538,12 +553,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -556,11 +578,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -925,7 +953,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1002,7 +1030,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1084,7 +1117,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_route.py b/roles/lib_openshift/library/oc_route.py
index d5dc84116..4b5c4460c 100644
--- a/roles/lib_openshift/library/oc_route.py
+++ b/roles/lib_openshift/library/oc_route.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -368,11 +373,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -412,10 +421,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -580,12 +595,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -598,11 +620,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -967,7 +995,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1044,7 +1072,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1126,7 +1159,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
@@ -1511,7 +1548,7 @@ class OCRoute(OpenShiftCLI):
def needs_update(self):
''' verify an update is needed '''
skip = []
- return not Utils.check_def_equal(self.config.data, self.route.yaml_dict, skip_keys=skip, debug=True)
+ return not Utils.check_def_equal(self.config.data, self.route.yaml_dict, skip_keys=skip, debug=self.verbose)
@staticmethod
def get_cert_data(path, content):
diff --git a/roles/lib_openshift/library/oc_scale.py b/roles/lib_openshift/library/oc_scale.py
index be3b7f837..a37b2aba0 100644
--- a/roles/lib_openshift/library/oc_scale.py
+++ b/roles/lib_openshift/library/oc_scale.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -312,11 +317,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -356,10 +365,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -524,12 +539,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -542,11 +564,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -911,7 +939,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -988,7 +1016,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1070,7 +1103,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_secret.py b/roles/lib_openshift/library/oc_secret.py
index 8598cb0ec..c423e9442 100644
--- a/roles/lib_openshift/library/oc_secret.py
+++ b/roles/lib_openshift/library/oc_secret.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -358,11 +363,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -402,10 +411,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -570,12 +585,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -588,11 +610,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -957,7 +985,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1034,7 +1062,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1116,7 +1149,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
@@ -1392,7 +1429,7 @@ class Secret(Yedit):
def update_secret(self, key, value):
''' update a secret'''
# pylint: disable=no-member
- if self.secrets.has_key(key):
+ if key in self.secrets:
self.secrets[key] = value
else:
self.add_secret(key, value)
@@ -1430,7 +1467,7 @@ class OCSecret(OpenShiftCLI):
if results['returncode'] == 0 and results['results'][0]:
results['exists'] = True
if self.decode:
- if results['results'][0].has_key('data'):
+ if 'data' in results['results'][0]:
for sname, value in results['results'][0]['data'].items():
results['decoded'][sname] = base64.b64decode(value)
diff --git a/roles/lib_openshift/library/oc_service.py b/roles/lib_openshift/library/oc_service.py
index a9baef765..319ec4bd7 100644
--- a/roles/lib_openshift/library/oc_service.py
+++ b/roles/lib_openshift/library/oc_service.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -364,11 +369,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -408,10 +417,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -576,12 +591,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -594,11 +616,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -963,7 +991,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -1040,7 +1068,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1122,7 +1155,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_serviceaccount.py b/roles/lib_openshift/library/oc_serviceaccount.py
index fcc5bbfa7..0d1705414 100644
--- a/roles/lib_openshift/library/oc_serviceaccount.py
+++ b/roles/lib_openshift/library/oc_serviceaccount.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -310,11 +315,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -354,10 +363,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -522,12 +537,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -540,11 +562,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -909,7 +937,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -986,7 +1014,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1068,7 +1101,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_serviceaccount_secret.py b/roles/lib_openshift/library/oc_serviceaccount_secret.py
index ef10162c2..5f07528a0 100644
--- a/roles/lib_openshift/library/oc_serviceaccount_secret.py
+++ b/roles/lib_openshift/library/oc_serviceaccount_secret.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -310,11 +315,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -354,10 +363,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -522,12 +537,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -540,11 +562,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -909,7 +937,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -986,7 +1014,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1068,7 +1101,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/library/oc_version.py b/roles/lib_openshift/library/oc_version.py
index e44375ffa..9b660e1d3 100644
--- a/roles/lib_openshift/library/oc_version.py
+++ b/roles/lib_openshift/library/oc_version.py
@@ -33,6 +33,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -40,7 +41,11 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -282,11 +287,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -326,10 +335,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -494,12 +509,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -512,11 +534,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
@@ -881,7 +909,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -958,7 +986,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -1040,7 +1073,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
diff --git a/roles/lib_openshift/src/class/oc_route.py b/roles/lib_openshift/src/class/oc_route.py
index cb743e19d..448457292 100644
--- a/roles/lib_openshift/src/class/oc_route.py
+++ b/roles/lib_openshift/src/class/oc_route.py
@@ -61,7 +61,7 @@ class OCRoute(OpenShiftCLI):
def needs_update(self):
''' verify an update is needed '''
skip = []
- return not Utils.check_def_equal(self.config.data, self.route.yaml_dict, skip_keys=skip, debug=True)
+ return not Utils.check_def_equal(self.config.data, self.route.yaml_dict, skip_keys=skip, debug=self.verbose)
@staticmethod
def get_cert_data(path, content):
diff --git a/roles/lib_openshift/src/class/oc_secret.py b/roles/lib_openshift/src/class/oc_secret.py
index 5eac27572..deb36a9fa 100644
--- a/roles/lib_openshift/src/class/oc_secret.py
+++ b/roles/lib_openshift/src/class/oc_secret.py
@@ -29,7 +29,7 @@ class OCSecret(OpenShiftCLI):
if results['returncode'] == 0 and results['results'][0]:
results['exists'] = True
if self.decode:
- if results['results'][0].has_key('data'):
+ if 'data' in results['results'][0]:
for sname, value in results['results'][0]['data'].items():
results['decoded'][sname] = base64.b64decode(value)
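
dict.has_key() no longer exists on Python 3, and the in operator is the portable spelling with identical semantics on Python 2, which is all this hunk changes. A tiny self-contained example of the membership test in the same shape as the secret-decoding code above (the sample data is made up):

    import base64

    results = {'returncode': 0, 'results': [{'data': {'token': 'c2VjcmV0'}}]}

    # Python 2 only:  results['results'][0].has_key('data')
    # portable on Python 2 and 3:
    if 'data' in results['results'][0]:
        for name, value in results['results'][0]['data'].items():
            print(name, base64.b64decode(value))
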
diff --git a/roles/lib_openshift/src/lib/base.py b/roles/lib_openshift/src/lib/base.py
index a895b40b3..9cad5e667 100644
--- a/roles/lib_openshift/src/lib/base.py
+++ b/roles/lib_openshift/src/lib/base.py
@@ -218,7 +218,7 @@ class OpenShiftCLI(object):
stdout, stderr = proc.communicate(input_data)
- return proc.returncode, stdout, stderr
+ return proc.returncode, stdout.decode(), stderr.decode()
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
@@ -295,7 +295,12 @@ class Utils(object):
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
- Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripDumper'):
+ Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
+ else:
+ Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
+
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
@@ -377,7 +382,11 @@ class Utils(object):
contents = sfd.read()
if sfile_type == 'yaml':
- contents = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ contents = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
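
The other change carried through base.py, and into every module generated from it, is decoding the subprocess output: on Python 3, Popen.communicate() returns bytes, while the callers run string operations and json.loads on the result. A short illustration of the behavior, not code from the role:

    import json
    import subprocess

    proc = subprocess.Popen(['echo', '{"kind": "List"}'],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()

    # On Python 3, stdout is the bytes object b'{"kind": "List"}\n'.
    # Downstream code treats the output as text, so decode before returning.
    result = json.loads(stdout.decode())
    print(proc.returncode, result['kind'])
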
diff --git a/roles/lib_openshift/src/lib/import.py b/roles/lib_openshift/src/lib/import.py
index 6344c1a54..a79297898 100644
--- a/roles/lib_openshift/src/lib/import.py
+++ b/roles/lib_openshift/src/lib/import.py
@@ -7,6 +7,7 @@
from __future__ import print_function
import atexit
+import copy
import json
import os
import re
@@ -14,5 +15,9 @@ import shutil
import subprocess
import tempfile
# pylint: disable=import-error
-import ruamel.yaml as yaml
+try:
+ import ruamel.yaml as yaml
+except ImportError:
+ import yaml
+
from ansible.module_utils.basic import AnsibleModule
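
import.py is the shared fragment that gets concatenated into each generated module, so this is the one place the library preference lives; the rest of the code feature-tests the imported module instead of remembering which import succeeded. A hedged sketch of that idiom (dump_text is an illustrative name, not a function in the role):

    # pylint: disable=import-error
    try:
        import ruamel.yaml as yaml   # preferred: round-trip loader/dumper
    except ImportError:
        import yaml                  # fallback: PyYAML (e.g. on Atomic Host)

    def dump_text(data):
        '''Serialize data with whichever YAML library was importable.'''
        if hasattr(yaml, 'RoundTripDumper'):
            return yaml.dump(data, Dumper=yaml.RoundTripDumper)
        return yaml.safe_dump(data, default_flow_style=False)

    print(dump_text({'kind': 'Secret', 'metadata': {'name': 'example'}}))
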
diff --git a/roles/lib_openshift/src/lib/secret.py b/roles/lib_openshift/src/lib/secret.py
index 39bf3c33a..1ba78ddd5 100644
--- a/roles/lib_openshift/src/lib/secret.py
+++ b/roles/lib_openshift/src/lib/secret.py
@@ -91,7 +91,7 @@ class Secret(Yedit):
def update_secret(self, key, value):
''' update a secret'''
# pylint: disable=no-member
- if self.secrets.has_key(key):
+ if key in self.secrets:
self.secrets[key] = value
else:
self.add_secret(key, value)
diff --git a/roles/lib_openshift/src/test/unit/oadm_manage_node.py b/roles/lib_openshift/src/test/unit/test_oadm_manage_node.py
index b0786dfac..b0786dfac 100755..100644
--- a/roles/lib_openshift/src/test/unit/oadm_manage_node.py
+++ b/roles/lib_openshift/src/test/unit/test_oadm_manage_node.py
diff --git a/roles/lib_openshift/src/test/unit/oc_env.py b/roles/lib_openshift/src/test/unit/test_oc_env.py
index 15bd7e464..15bd7e464 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_env.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_env.py
diff --git a/roles/lib_openshift/src/test/unit/oc_label.py b/roles/lib_openshift/src/test/unit/test_oc_label.py
index 3176987b0..3176987b0 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_label.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_label.py
diff --git a/roles/lib_openshift/src/test/unit/oc_process.py b/roles/lib_openshift/src/test/unit/test_oc_process.py
index 450ff7071..450ff7071 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_process.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_process.py
diff --git a/roles/lib_openshift/src/test/unit/oc_route.py b/roles/lib_openshift/src/test/unit/test_oc_route.py
index fcfa88cbf..361b61f4b 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_route.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_route.py
@@ -35,8 +35,9 @@ class OCRouteTest(unittest.TestCase):
''' setup method will create a file and set to known configuration '''
pass
+ @mock.patch('oc_route.Utils.create_tmpfile_copy')
@mock.patch('oc_route.OCRoute._run')
- def test_list_route(self, mock_cmd):
+ def test_list_route(self, mock_cmd, mock_tmpfile_copy):
''' Testing getting a route '''
# Arrange
@@ -114,6 +115,10 @@ class OCRouteTest(unittest.TestCase):
(0, route_result, ''),
]
+ mock_tmpfile_copy.side_effect = [
+ '/tmp/mock.kubeconfig',
+ ]
+
# Act
results = OCRoute.run_ansible(params, False)
@@ -127,11 +132,11 @@ class OCRouteTest(unittest.TestCase):
mock.call(['oc', '-n', 'default', 'get', 'route', 'test', '-o', 'json'], None),
])
+ @mock.patch('oc_route.Utils.create_tmpfile_copy')
@mock.patch('oc_route.Yedit._write')
@mock.patch('oc_route.OCRoute._run')
- def test_create_route(self, mock_cmd, mock_write):
+ def test_create_route(self, mock_cmd, mock_write, mock_tmpfile_copy):
''' Testing getting a route '''
-
# Arrange
# run_ansible input parameters
@@ -230,6 +235,10 @@ metadata:
(0, route_result, ''),
]
+ mock_tmpfile_copy.side_effect = [
+ '/tmp/mock.kubeconfig',
+ ]
+
mock_write.assert_has_calls = [
# First call to mock
mock.call('/tmp/test', test_route)
@@ -246,7 +255,8 @@ metadata:
# Making sure our mock was called as we expected
mock_cmd.assert_has_calls([
mock.call(['oc', '-n', 'default', 'get', 'route', 'test', '-o', 'json'], None),
- mock.call(['oc', '-n', 'default', 'create', '-f', '/tmp/test'], None),
+ mock.call(['oc', '-n', 'default', 'create', '-f', mock.ANY], None),
+ mock.call(['oc', '-n', 'default', 'get', 'route', 'test', '-o', 'json'], None),
])
def tearDown(self):
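
The new mock_tmpfile_copy patch keeps the test from touching a real kubeconfig, and the create assertion switches to mock.ANY because the -f argument is now a randomly named temporary file. The mechanics are the same as in this self-contained example against standard-library targets (decorator ordering, side_effect, mock.ANY); it only mirrors the pattern, it is not the oc_route test itself:

    import os
    import subprocess
    import unittest
    from unittest import mock

    class FakeCliTest(unittest.TestCase):
        @mock.patch('subprocess.check_output')   # outer decorator -> last mock argument
        @mock.patch('os.path.exists')            # inner decorator -> first mock argument
        def test_create(self, mock_exists, mock_check_output):
            mock_exists.side_effect = [True]                      # one return value per call
            mock_check_output.side_effect = [b'{"kind": "Route"}']

            os.path.exists('/tmp/mock.kubeconfig')                # consumes the side_effect
            subprocess.check_output(['oc', 'create', '-f', '/tmp/tmpXYZ'])

            # mock.ANY matches the unpredictable temp-file name
            mock_check_output.assert_has_calls(
                [mock.call(['oc', 'create', '-f', mock.ANY])])

    if __name__ == '__main__':
        unittest.main()
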
diff --git a/roles/lib_openshift/src/test/unit/oc_scale.py b/roles/lib_openshift/src/test/unit/test_oc_scale.py
index f15eb164d..f15eb164d 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_scale.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_scale.py
diff --git a/roles/lib_openshift/src/test/unit/oc_secret.py b/roles/lib_openshift/src/test/unit/test_oc_secret.py
index 645aac82b..645aac82b 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_secret.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_secret.py
diff --git a/roles/lib_openshift/src/test/unit/oc_service.py b/roles/lib_openshift/src/test/unit/test_oc_service.py
index 4a845e9f3..4a845e9f3 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_service.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_service.py
diff --git a/roles/lib_openshift/src/test/unit/oc_serviceaccount.py b/roles/lib_openshift/src/test/unit/test_oc_serviceaccount.py
index 256b569eb..256b569eb 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_serviceaccount.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_serviceaccount.py
diff --git a/roles/lib_openshift/src/test/unit/oc_serviceaccount_secret.py b/roles/lib_openshift/src/test/unit/test_oc_serviceaccount_secret.py
index 4d555d412..213c581aa 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_serviceaccount_secret.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_serviceaccount_secret.py
@@ -25,6 +25,12 @@ module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]),
sys.path.insert(0, module_path)
from oc_serviceaccount_secret import OCServiceAccountSecret # noqa: E402
+try:
+ import ruamel.yaml as yaml # noqa: F401
+ YAML_TYPE = 'ruamel'
+except ImportError:
+ YAML_TYPE = 'pyyaml'
+
class OCServiceAccountSecretTest(unittest.TestCase):
'''
@@ -54,8 +60,13 @@ class OCServiceAccountSecretTest(unittest.TestCase):
}
oc_get_sa_before = '''{
- "kind": "ServiceAccount",
"apiVersion": "v1",
+ "imagePullSecrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ }
+ ],
+ "kind": "ServiceAccount",
"metadata": {
"name": "builder",
"namespace": "default",
@@ -72,18 +83,18 @@ class OCServiceAccountSecretTest(unittest.TestCase):
"name": "builder-token-akqxi"
}
- ],
- "imagePullSecrets": [
- {
- "name": "builder-dockercfg-rsrua"
- }
]
}
'''
oc_get_sa_after = '''{
- "kind": "ServiceAccount",
"apiVersion": "v1",
+ "imagePullSecrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ }
+ ],
+ "kind": "ServiceAccount",
"metadata": {
"name": "builder",
"namespace": "default",
@@ -103,16 +114,10 @@ class OCServiceAccountSecretTest(unittest.TestCase):
"name": "newsecret"
}
- ],
- "imagePullSecrets": [
- {
- "name": "builder-dockercfg-rsrua"
- }
]
}
'''
-
- builder_yaml_file = '''\
+ builder_ryaml_file = '''\
secrets:
- name: builder-dockercfg-rsrua
- name: builder-token-akqxi
@@ -130,6 +135,24 @@ metadata:
uid: cf47bca7-ebc4-11e6-b041-0ed9df7abc38
'''
+ builder_pyyaml_file = '''\
+apiVersion: v1
+imagePullSecrets:
+- name: builder-dockercfg-rsrua
+kind: ServiceAccount
+metadata:
+ creationTimestamp: '2017-02-05T17:02:00Z'
+ name: builder
+ namespace: default
+ resourceVersion: '302879'
+ selfLink: /api/v1/namespaces/default/serviceaccounts/builder
+ uid: cf47bca7-ebc4-11e6-b041-0ed9df7abc38
+secrets:
+- name: builder-dockercfg-rsrua
+- name: builder-token-akqxi
+- name: newsecret
+'''
+
# Return values of our mocked function call. These get returned once per call.
mock_cmd.side_effect = [
(0, oc_get_sa_before, ''), # First call to the mock
@@ -158,8 +181,12 @@ metadata:
mock.call(['oc', '-n', 'default', 'get', 'sa', 'builder', '-o', 'json'], None)
])
+ yaml_file = builder_pyyaml_file
+
+ if YAML_TYPE == 'ruamel':
+ yaml_file = builder_ryaml_file
mock_write.assert_has_calls([
- mock.call(mock.ANY, builder_yaml_file)
+ mock.call(mock.ANY, yaml_file)
])
@mock.patch('oc_serviceaccount_secret.Utils.create_tmpfile_copy')
@@ -181,8 +208,13 @@ metadata:
}
oc_get_sa_before = '''{
- "kind": "ServiceAccount",
"apiVersion": "v1",
+ "imagePullSecrets": [
+ {
+ "name": "builder-dockercfg-rsrua"
+ }
+ ],
+ "kind": "ServiceAccount",
"metadata": {
"name": "builder",
"namespace": "default",
@@ -202,16 +234,11 @@ metadata:
"name": "newsecret"
}
- ],
- "imagePullSecrets": [
- {
- "name": "builder-dockercfg-rsrua"
- }
]
}
'''
- builder_yaml_file = '''\
+ builder_ryaml_file = '''\
secrets:
- name: builder-dockercfg-rsrua
- name: builder-token-akqxi
@@ -228,6 +255,23 @@ metadata:
uid: cf47bca7-ebc4-11e6-b041-0ed9df7abc38
'''
+ builder_pyyaml_file = '''\
+apiVersion: v1
+imagePullSecrets:
+- name: builder-dockercfg-rsrua
+kind: ServiceAccount
+metadata:
+ creationTimestamp: '2017-02-05T17:02:00Z'
+ name: builder
+ namespace: default
+ resourceVersion: '302879'
+ selfLink: /api/v1/namespaces/default/serviceaccounts/builder
+ uid: cf47bca7-ebc4-11e6-b041-0ed9df7abc38
+secrets:
+- name: builder-dockercfg-rsrua
+- name: builder-token-akqxi
+'''
+
# Return values of our mocked function call. These get returned once per call.
mock_cmd.side_effect = [
(0, oc_get_sa_before, ''), # First call to the mock
@@ -254,8 +298,12 @@ metadata:
mock.call(['oc', '-n', 'default', 'replace', '-f', mock.ANY], None),
])
+ yaml_file = builder_pyyaml_file
+
+ if YAML_TYPE == 'ruamel':
+ yaml_file = builder_ryaml_file
mock_write.assert_has_calls([
- mock.call(mock.ANY, builder_yaml_file)
+ mock.call(mock.ANY, yaml_file)
])
def tearDown(self):
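
Two expected documents are needed because the two dumpers order keys differently: ruamel's round-trip loader/dumper pair preserves the original key order, while PyYAML's safe_dump sorts keys alphabetically by default. A small sketch of the difference (the sample document is made up):

    try:
        import ruamel.yaml as yaml
        YAML_TYPE = 'ruamel'
    except ImportError:
        import yaml
        YAML_TYPE = 'pyyaml'

    source = 'kind: ServiceAccount\napiVersion: v1\n'

    if YAML_TYPE == 'ruamel':
        # RoundTripLoader returns a CommentedMap and RoundTripDumper writes it
        # back in the original key order: kind stays before apiVersion.
        data = yaml.load(source, yaml.RoundTripLoader)
        text = yaml.dump(data, Dumper=yaml.RoundTripDumper)
    else:
        # safe_load gives a plain dict and safe_dump sorts keys alphabetically,
        # so apiVersion ends up before kind.
        data = yaml.safe_load(source)
        text = yaml.safe_dump(data, default_flow_style=False)

    print(YAML_TYPE)
    print(text)

That ordering difference is why the test keeps both builder_ryaml_file and builder_pyyaml_file and picks the expected fixture from YAML_TYPE.
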
diff --git a/roles/lib_openshift/src/test/unit/oc_version.py b/roles/lib_openshift/src/test/unit/test_oc_version.py
index 67dea415b..67dea415b 100755..100644
--- a/roles/lib_openshift/src/test/unit/oc_version.py
+++ b/roles/lib_openshift/src/test/unit/test_oc_version.py
diff --git a/roles/lib_openshift/tasks/main.yml b/roles/lib_openshift/tasks/main.yml
index 2980c8a8d..77366c65e 100644
--- a/roles/lib_openshift/tasks/main.yml
+++ b/roles/lib_openshift/tasks/main.yml
@@ -1,5 +1,11 @@
---
+- name: lib_openshift detect ostree
+ stat:
+ path: /run/ostree-booted
+ register: ostree_booted
+
- name: lib_openshift ensure python-ruamel-yaml package is on target
package:
name: python-ruamel-yaml
state: present
+ when: not ostree_booted.stat.exists
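
/run/ostree-booted exists only on OSTree-based systems such as Atomic Host, where the package module cannot install RPMs; skipping the python-ruamel-yaml install there is exactly why the library modules above learned to fall back to PyYAML. The equivalent check in plain Python, for illustration only:

    import os

    def is_ostree_host():
        '''True on Atomic Host / OSTree systems, where yum/dnf installs are unavailable.'''
        return os.path.exists('/run/ostree-booted')

    if not is_ostree_host():
        print('safe to install python-ruamel-yaml via the package manager')
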
diff --git a/roles/lib_utils/library/repoquery.py b/roles/lib_utils/library/repoquery.py
index 7f0105290..f31c8911b 100644
--- a/roles/lib_utils/library/repoquery.py
+++ b/roles/lib_utils/library/repoquery.py
@@ -29,13 +29,18 @@
# pylint: disable=wrong-import-order,wrong-import-position,unused-import
from __future__ import print_function # noqa: F401
+import copy # noqa: F401
import json # noqa: F401
import os # noqa: F401
import re # noqa: F401
-# pylint: disable=import-error
-import ruamel.yaml as yaml # noqa: F401
import shutil # noqa: F401
+# pylint: disable=import-error
+try:
+ import ruamel.yaml as yaml # noqa: F401
+except ImportError:
+ import yaml # noqa: F401
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -472,7 +477,7 @@ class Repoquery(RepoqueryCLI):
''' Gather and present the versions of each package '''
versions_dict = {}
- versions_dict['available_versions_full'] = formatted_versions.keys()
+ versions_dict['available_versions_full'] = list(formatted_versions.keys())
# set the match version, if called
if self.match_version:
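
On Python 3, dict.keys() returns a view object rather than a list; the view cannot be indexed and does not serialize the way the Python 2 list did, so wrapping it in list() keeps the module's output stable on both interpreters. For instance (with made-up version strings):

    formatted_versions = {'3.4.0.39': '3.4.0.39-1.el7', '3.4.0.40': '3.4.0.40-1.el7'}

    view = formatted_versions.keys()             # Python 3: a dict_keys view, not a list
    versions = list(formatted_versions.keys())   # a real list on both Python 2 and 3

    print(type(view).__name__)   # 'dict_keys' on Python 3
    print(versions[0])           # indexing works; the view would raise TypeError
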
diff --git a/roles/lib_utils/library/yedit.py b/roles/lib_utils/library/yedit.py
index 1c74b4d3f..a358e980e 100644
--- a/roles/lib_utils/library/yedit.py
+++ b/roles/lib_utils/library/yedit.py
@@ -29,13 +29,18 @@
# pylint: disable=wrong-import-order,wrong-import-position,unused-import
from __future__ import print_function # noqa: F401
+import copy # noqa: F401
import json # noqa: F401
import os # noqa: F401
import re # noqa: F401
-# pylint: disable=import-error
-import ruamel.yaml as yaml # noqa: F401
import shutil # noqa: F401
+# pylint: disable=import-error
+try:
+ import ruamel.yaml as yaml # noqa: F401
+except ImportError:
+ import yaml # noqa: F401
+
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
@@ -375,11 +380,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -419,10 +428,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -587,12 +602,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -605,11 +627,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
diff --git a/roles/lib_utils/src/class/repoquery.py b/roles/lib_utils/src/class/repoquery.py
index 2447719e2..82adcada5 100644
--- a/roles/lib_utils/src/class/repoquery.py
+++ b/roles/lib_utils/src/class/repoquery.py
@@ -60,7 +60,7 @@ class Repoquery(RepoqueryCLI):
''' Gather and present the versions of each package '''
versions_dict = {}
- versions_dict['available_versions_full'] = formatted_versions.keys()
+ versions_dict['available_versions_full'] = list(formatted_versions.keys())
# set the match version, if called
if self.match_version:
diff --git a/roles/lib_utils/src/class/yedit.py b/roles/lib_utils/src/class/yedit.py
index 8542fe5c7..5275e84eb 100644
--- a/roles/lib_utils/src/class/yedit.py
+++ b/roles/lib_utils/src/class/yedit.py
@@ -194,11 +194,15 @@ class Yedit(object):
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- self.yaml_dict.fa.set_block_style()
+ if hasattr(yaml, 'RoundTripDumper'):
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ self.yaml_dict.fa.set_block_style()
- Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ # pylint: disable=no-member
+ Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
+ else:
+ Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
@@ -238,10 +242,16 @@ class Yedit(object):
# check if it is yaml
try:
if content_type == 'yaml' and contents:
- self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ # pylint: disable=no-member
+ if hasattr(yaml, 'RoundTripLoader'):
+ self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
+ else:
+ self.yaml_dict = yaml.safe_load(contents)
+
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
+
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
@@ -406,12 +416,19 @@ class Yedit(object):
return (False, self.yaml_dict)
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
- default_flow_style=False),
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
+ default_flow_style=False),
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
@@ -424,11 +441,17 @@ class Yedit(object):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
- tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
- yaml.RoundTripLoader)
- # pylint: disable=no-member
- if hasattr(self.yaml_dict, 'fa'):
- tmp_copy.fa.set_block_style()
+ if hasattr(yaml, 'round_trip_dump'):
+ # pylint: disable=no-member
+ tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), # noqa: E501
+ yaml.RoundTripLoader)
+
+ # pylint: disable=no-member
+ if hasattr(self.yaml_dict, 'fa'):
+ tmp_copy.fa.set_block_style()
+ else:
+ tmp_copy = copy.deepcopy(self.yaml_dict)
+
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
diff --git a/roles/lib_utils/src/lib/import.py b/roles/lib_utils/src/lib/import.py
index d892353a1..ed57a68c9 100644
--- a/roles/lib_utils/src/lib/import.py
+++ b/roles/lib_utils/src/lib/import.py
@@ -4,11 +4,16 @@
# pylint: disable=wrong-import-order,wrong-import-position,unused-import
from __future__ import print_function # noqa: F401
+import copy # noqa: F401
import json # noqa: F401
import os # noqa: F401
import re # noqa: F401
-# pylint: disable=import-error
-import ruamel.yaml as yaml # noqa: F401
import shutil # noqa: F401
+# pylint: disable=import-error
+try:
+ import ruamel.yaml as yaml # noqa: F401
+except ImportError:
+ import yaml # noqa: F401
+
from ansible.module_utils.basic import AnsibleModule
diff --git a/roles/lib_utils/src/test/unit/repoquery.py b/roles/lib_utils/src/test/unit/test_repoquery.py
index c487ab254..c487ab254 100755..100644
--- a/roles/lib_utils/src/test/unit/repoquery.py
+++ b/roles/lib_utils/src/test/unit/test_repoquery.py
diff --git a/roles/lib_utils/src/test/unit/yedit_test.py b/roles/lib_utils/src/test/unit/test_yedit.py
index 2793c5c1a..ed07ac96e 100755..100644
--- a/roles/lib_utils/src/test/unit/yedit_test.py
+++ b/roles/lib_utils/src/test/unit/test_yedit.py
@@ -256,7 +256,7 @@ class YeditTest(unittest.TestCase):
def test_pop_list_item_2(self):
'''test dict value with none value'''
- z = range(10)
+ z = list(range(10))
yed = Yedit(content=z, separator=':')
yed.pop('', 5)
z.pop(5)
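
Same Python 3 porting theme in the yedit test: range() now returns an immutable range object with no pop() method, so the fixture has to be materialized as a list first. A minimal demonstration:

    z = range(10)
    # z.pop(5)          # AttributeError on Python 3: 'range' object has no attribute 'pop'

    z = list(range(10))
    z.pop(5)            # works on both Python 2 and 3
    print(z)            # [0, 1, 2, 3, 4, 6, 7, 8, 9]
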
diff --git a/roles/lib_utils/tasks/main.yml b/roles/lib_utils/tasks/main.yml
index 8a350da88..32ab9e0c6 100644
--- a/roles/lib_utils/tasks/main.yml
+++ b/roles/lib_utils/tasks/main.yml
@@ -1,5 +1,11 @@
---
+- name: lib_utils detect ostree
+ stat:
+ path: /run/ostree-booted
+ register: ostree_booted
+
- name: lib_utils ensure python-ruamel-yaml package is on target
package:
name: python-ruamel-yaml
state: present
+ when: not ostree_booted.stat.exists
diff --git a/roles/nuage_node/tasks/iptables.yml b/roles/nuage_node/tasks/iptables.yml
index 8e2c29620..847c8395d 100644
--- a/roles/nuage_node/tasks/iptables.yml
+++ b/roles/nuage_node/tasks/iptables.yml
@@ -2,7 +2,7 @@
- name: IPtables | Get iptables rules
command: iptables -L --wait
register: iptablesrules
- always_run: yes
+ check_mode: no
- name: Allow traffic from overlay to underlay
command: /sbin/iptables --wait -I FORWARD 1 -s {{ hostvars[groups.oo_first_master.0].openshift.master.sdn_cluster_network_cidr }} -j ACCEPT -m comment --comment "nuage-overlay-underlay"
diff --git a/roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-ephemeral-template.json b/roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-ephemeral-template.json
index 8b8fcb58b..c38d2680b 100644
--- a/roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-ephemeral-template.json
+++ b/roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-ephemeral-template.json
@@ -196,7 +196,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-persistent-template.json b/roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-persistent-template.json
index 72d3a8556..e8853d8ff 100644
--- a/roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-persistent-template.json
+++ b/roles/openshift_examples/files/examples/v1.5/db-templates/mongodb-persistent-template.json
@@ -213,7 +213,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.5/db-templates/mysql-ephemeral-template.json b/roles/openshift_examples/files/examples/v1.5/db-templates/mysql-ephemeral-template.json
index 34dd2ed78..f7bcfe2ed 100644
--- a/roles/openshift_examples/files/examples/v1.5/db-templates/mysql-ephemeral-template.json
+++ b/roles/openshift_examples/files/examples/v1.5/db-templates/mysql-ephemeral-template.json
@@ -196,7 +196,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-ephemeral-template.json b/roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-ephemeral-template.json
index 1025ab056..64d5e2b32 100644
--- a/roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-ephemeral-template.json
+++ b/roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-ephemeral-template.json
@@ -186,7 +186,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-persistent-template.json b/roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-persistent-template.json
index 1968e727a..6c101f9d2 100644
--- a/roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-persistent-template.json
+++ b/roles/openshift_examples/files/examples/v1.5/db-templates/postgresql-persistent-template.json
@@ -203,7 +203,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.5/db-templates/redis-ephemeral-template.json b/roles/openshift_examples/files/examples/v1.5/db-templates/redis-ephemeral-template.json
index c9ae8a539..b97e1fd29 100644
--- a/roles/openshift_examples/files/examples/v1.5/db-templates/redis-ephemeral-template.json
+++ b/roles/openshift_examples/files/examples/v1.5/db-templates/redis-ephemeral-template.json
@@ -157,7 +157,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.5/db-templates/redis-persistent-template.json b/roles/openshift_examples/files/examples/v1.5/db-templates/redis-persistent-template.json
index e9db9ec9d..dc167da41 100644
--- a/roles/openshift_examples/files/examples/v1.5/db-templates/redis-persistent-template.json
+++ b/roles/openshift_examples/files/examples/v1.5/db-templates/redis-persistent-template.json
@@ -174,7 +174,8 @@
"name": "MEMORY_LIMIT",
"displayName": "Memory Limit",
"description": "Maximum amount of memory the container can use.",
- "value": "512Mi"
+ "value": "512Mi",
+ "required": true
},
{
"name": "NAMESPACE",
diff --git a/roles/openshift_examples/files/examples/v1.5/image-streams/image-streams-rhel7.json b/roles/openshift_examples/files/examples/v1.5/image-streams/image-streams-rhel7.json
index 9b19b8bd0..eb94c3bb4 100644
--- a/roles/openshift_examples/files/examples/v1.5/image-streams/image-streams-rhel7.json
+++ b/roles/openshift_examples/files/examples/v1.5/image-streams/image-streams-rhel7.json
@@ -241,7 +241,7 @@
},
"from": {
"kind": "ImageStreamTag",
- "name": "5.6"
+ "name": "7.0"
}
},
{
diff --git a/roles/openshift_examples/files/examples/v1.5/xpaas-streams/fis-image-streams.json b/roles/openshift_examples/files/examples/v1.5/xpaas-streams/fis-image-streams.json
index ed0e94bed..9d99973be 100644
--- a/roles/openshift_examples/files/examples/v1.5/xpaas-streams/fis-image-streams.json
+++ b/roles/openshift_examples/files/examples/v1.5/xpaas-streams/fis-image-streams.json
@@ -20,12 +20,22 @@
{
"name": "1.0",
"annotations": {
- "description": "JBoss Fuse Integration Services 6.2.1 Java S2I images.",
+ "description": "JBoss Fuse Integration Services 1.0 Java S2I images.",
"iconClass": "icon-jboss",
"tags": "builder,jboss-fuse,java,xpaas",
"supports":"jboss-fuse:6.2.1,java:8,xpaas:1.2",
"version": "1.0"
}
+ },
+ {
+ "name": "2.0",
+ "annotations": {
+ "description": "JBoss Fuse Integration Services 2.0 Java S2I images.",
+ "iconClass": "icon-jboss",
+ "tags": "builder,jboss-fuse,java,xpaas",
+ "supports":"jboss-fuse:6.3.0,java:8,xpaas:1.2",
+ "version": "2.0"
+ }
}
]
}
@@ -42,12 +52,22 @@
{
"name": "1.0",
"annotations": {
- "description": "JBoss Fuse Integration Services 6.2.1 Karaf S2I images.",
+ "description": "JBoss Fuse Integration Services 1.0 Karaf S2I images.",
"iconClass": "icon-jboss",
"tags": "builder,jboss-fuse,java,karaf,xpaas",
"supports":"jboss-fuse:6.2.1,java:8,xpaas:1.2",
"version": "1.0"
}
+ },
+ {
+ "name": "2.0",
+ "annotations": {
+ "description": "JBoss Fuse Integration Services 2.0 Karaf S2I images.",
+ "iconClass": "icon-jboss",
+ "tags": "builder,jboss-fuse,java,karaf,xpaas",
+ "supports":"jboss-fuse:6.3.0,java:8,xpaas:1.2",
+ "version": "2.0"
+ }
}
]
}
diff --git a/roles/openshift_facts/library/openshift_facts.py b/roles/openshift_facts/library/openshift_facts.py
index ef7f159c5..2503d6212 100755
--- a/roles/openshift_facts/library/openshift_facts.py
+++ b/roles/openshift_facts/library/openshift_facts.py
@@ -864,7 +864,7 @@ def set_version_facts_if_unset(facts):
if 'common' in facts:
deployment_type = facts['common']['deployment_type']
openshift_version = get_openshift_version(facts)
- if openshift_version:
+ if openshift_version and openshift_version != "latest":
version = LooseVersion(openshift_version)
facts['common']['version'] = openshift_version
facts['common']['short_version'] = '.'.join([str(x) for x in version.version[0:2]])
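
The guard exists because a containerized install can report its version as the literal tag "latest", which is not a numeric version: LooseVersion will parse it, but the derived short_version is meaningless and any later comparison against a real version raises TypeError on Python 3. A small illustration of the failure mode, not code from the role:

    from distutils.version import LooseVersion

    openshift_version = 'latest'

    if openshift_version and openshift_version != 'latest':
        version = LooseVersion(openshift_version)
        short_version = '.'.join(str(x) for x in version.version[0:2])
    else:
        # LooseVersion('latest').version == ['latest']; comparing that against
        # LooseVersion('3.4') mixes str and int and fails on Python 3, so skip parsing.
        short_version = None

    print(short_version)
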
diff --git a/roles/openshift_facts/tasks/main.yml b/roles/openshift_facts/tasks/main.yml
index 9a1982076..0ec294bbc 100644
--- a/roles/openshift_facts/tasks/main.yml
+++ b/roles/openshift_facts/tasks/main.yml
@@ -14,6 +14,18 @@
l_is_master_system_container: "{{ (use_master_system_container | default(use_system_containers) | bool) }}"
l_is_etcd_system_container: "{{ (use_etcd_system_container | default(use_system_containers) | bool) }}"
+- name: Validate python version
+ fail:
+ msg: |
+ openshift-ansible requires Python 3 for {{ ansible_distribution }};
+ For information on enabling Python 3 with Ansible, see https://docs.ansible.com/ansible/python_3_support.html
+ when: ansible_distribution == 'Fedora' and ansible_python['version']['major'] != 3
+
+- name: Validate python version
+ fail:
+ msg: "openshift-ansible requires Python 2 for {{ ansible_distribution }}"
+ when: ansible_distribution != 'Fedora' and ansible_python['version']['major'] != 2
+
- name: Ensure various deps are installed
package: name={{ item }} state=present
with_items: "{{ required_packages }}"
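
The two fail tasks encode the interpreter matrix supported at this point: Fedora hosts are expected to run the modules under Python 3, every other distribution under Python 2. Expressed as a plain Python check, purely for illustration:

    import sys

    def validate_python(distribution):
        major = sys.version_info[0]
        if distribution == 'Fedora' and major != 3:
            raise RuntimeError('openshift-ansible requires Python 3 for Fedora')
        if distribution != 'Fedora' and major != 2:
            raise RuntimeError('openshift-ansible requires Python 2 for %s' % distribution)

    # validate_python('CentOS')  # raises under Python 3, passes under Python 2
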
diff --git a/roles/openshift_hosted/meta/main.yml b/roles/openshift_hosted/meta/main.yml
index ced71bb41..afea0ac59 100644
--- a/roles/openshift_hosted/meta/main.yml
+++ b/roles/openshift_hosted/meta/main.yml
@@ -4,7 +4,7 @@ galaxy_info:
description: OpenShift Embedded Router
company: Red Hat, Inc.
license: Apache License, Version 2.0
- min_ansible_version: 1.9
+ min_ansible_version: 2.1
platforms:
- name: EL
versions:
diff --git a/roles/openshift_hosted/tasks/registry/storage/s3.yml b/roles/openshift_hosted/tasks/registry/storage/s3.yml
index f73d9f0ae..7d51594bd 100644
--- a/roles/openshift_hosted/tasks/registry/storage/s3.yml
+++ b/roles/openshift_hosted/tasks/registry/storage/s3.yml
@@ -21,13 +21,27 @@
openshift_hosted_registry_storage_s3_cloudfront_keypairid and
openshift_hosted_registry_storage_s3_cloudfront_privatekeyfile are required
-# Copy the cloudfront.pem to the host if the baseurl is given
-- name: Copy cloudfront.pem to the registry
- copy:
- src: "{{ openshift_hosted_registry_storage_s3_cloudfront_privatekeyfile }}"
- dest: /etc/s3-cloudfront/cloudfront.pem
- backup: true
- owner: root
- group: root
- mode: 0600
+
+# Inject the cloudfront private key as a secret when required
+- block:
+
+ - name: Create registry secret for cloudfront
+ oc_secret:
+ state: present
+ namespace: "{{ openshift.hosted.registry.namespace | default('default') }}"
+ name: docker-registry-s3-cloudfront
+ contents:
+ path: cloudfront.pem
+ data: "{{ lookup('file', openshift_hosted_registry_storage_s3_cloudfront_privatekeyfile) }}"
+
+ - name: Add cloudfront secret to the registry deployment config
+ command: >
+ oc volume dc/docker-registry --add --name=cloudfront-vol
+ --namespace="{{ openshift.hosted.registry.namespace | default('default') }}"
+ -m /etc/origin --type=secret --secret-name=docker-registry-s3-cloudfront
+ register: cloudfront_vol_attach
+ failed_when:
+ - "'already exists' not in cloudfront_vol_attach.stderr"
+ - "cloudfront_vol_attach.rc != 0"
+
when: openshift_hosted_registry_storage_s3_cloudfront_baseurl | default(none) is not none
diff --git a/roles/openshift_hosted/tasks/router/router.yml b/roles/openshift_hosted/tasks/router/router.yml
index b944fa522..3d5713d6b 100644
--- a/roles/openshift_hosted/tasks/router/router.yml
+++ b/roles/openshift_hosted/tasks/router/router.yml
@@ -78,3 +78,27 @@
changed_when: "'service exists' not in openshift_hosted_router_results.stdout"
failed_when: "openshift_hosted_router_results.rc != 0 and 'service exists' not in openshift_hosted_router_results.stdout and 'deployment_config' not in openshift_hosted_router_results.stderr and 'service' not in openshift_hosted_router_results.stderr"
when: replicas | int > 0
+
+- command: >
+ {{ openshift.common.client_binary }}
+ {% if openshift.hosted.router.name | default(none) is not none -%}
+ get dc/{{ openshift.hosted.router.name }}
+ {% else %}
+ get dc/router
+    {% endif %}
+ --template=\\{\\{.spec.replicas\\}\\}
+ --namespace={{ openshift.hosted.router.namespace | default('default') }}
+ register: current_replicas
+ when: replicas | int > 0
+
+- name: Ensure router replica count matches desired
+ command: >
+ {{ openshift.common.client_binary }}
+ scale --replicas={{ replicas }}
+ {% if openshift.hosted.router.name | default(none) is not none -%}
+ dc/{{ openshift.hosted.router.name }}
+ {% else %}
+ dc/router
+    {% endif %}
+ --namespace={{ openshift.hosted.router.namespace | default('default') }}
+ when: replicas | int > 0 and replicas | int != current_replicas.stdout | int
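
Editor's note: the two new tasks read the current replica count from the router deployment config and only issue `oc scale` when a positive desired count differs from it. A plain-Python sketch of that guard, with example values assumed for illustration:

```
# Mirror of the scale guard above: scale only when a positive replica count
# is requested and it differs from what the dc currently reports.
def should_scale(desired, current):
    return desired > 0 and desired != current

assert not should_scale(0, 1)   # replicas == 0: both tasks are skipped
assert not should_scale(2, 2)   # already at the desired count
assert should_scale(3, 2)       # triggers the "oc scale --replicas=3" command
```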
diff --git a/roles/openshift_hosted/templates/registry_config.j2 b/roles/openshift_hosted/templates/registry_config.j2
index 557fd03af..f3336334a 100644
--- a/roles/openshift_hosted/templates/registry_config.j2
+++ b/roles/openshift_hosted/templates/registry_config.j2
@@ -78,7 +78,7 @@ middleware:
- name: cloudfront
options:
baseurl: {{ openshift_hosted_registry_storage_s3_cloudfront_baseurl }}
- privatekey: {{ openshift_hosted_registry_storage_s3_cloudfront_privatekeyfile }}
+ privatekey: /etc/origin/cloudfront.pem
keypairid: {{ openshift_hosted_registry_storage_s3_cloudfront_keypairid }}
{% elif openshift.common.version_gte_3_3_or_1_3 | bool %}
storage:
diff --git a/roles/openshift_hosted_logging/handlers/main.yml b/roles/openshift_hosted_logging/handlers/main.yml
index ad79e62ae..ffb812271 100644
--- a/roles/openshift_hosted_logging/handlers/main.yml
+++ b/roles/openshift_hosted_logging/handlers/main.yml
@@ -3,3 +3,24 @@
systemd: name={{ openshift.common.service_type }}-master state=restarted
when: (openshift.master.ha is not defined or not openshift.master.ha | bool) and (not (master_service_status_changed | default(false) | bool))
notify: Verify API Server
+
+- name: Verify API Server
+ # Using curl here since the uri module requires python-httplib2 and
+ # wait_for port doesn't provide health information.
+ command: >
+ curl --silent --tlsv1.2
+ {% if openshift.common.version_gte_3_2_or_1_2 | bool %}
+ --cacert {{ openshift.common.config_base }}/master/ca-bundle.crt
+ {% else %}
+ --cacert {{ openshift.common.config_base }}/master/ca.crt
+ {% endif %}
+ {{ openshift.master.api_url }}/healthz/ready
+ args:
+ # Disables the following warning:
+ # Consider using get_url or uri module rather than running curl
+ warn: no
+ register: api_available_output
+ until: api_available_output.stdout == 'ok'
+ retries: 120
+ delay: 1
+ changed_when: false
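
Editor's note: the new handler polls the master's `/healthz/ready` endpoint up to 120 times, one second apart, until the response body is exactly `ok`. A rough Python equivalent of that retry loop (the URL and CA bundle path are placeholders, not values from the patch):

```
import subprocess
import time

# Rough equivalent of the "Verify API Server" handler above: retry curl until
# the readiness endpoint returns the literal string "ok".
def wait_for_api(url, cacert, retries=120, delay=1):
    for _ in range(retries):
        try:
            out = subprocess.check_output(
                ['curl', '--silent', '--tlsv1.2', '--cacert', cacert, url])
        except subprocess.CalledProcessError:
            out = b''
        if out.strip() == b'ok':
            return True
        time.sleep(delay)
    return False
```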
diff --git a/roles/openshift_logging/README.md b/roles/openshift_logging/README.md
index f7b2f7743..9189bc2f0 100644
--- a/roles/openshift_logging/README.md
+++ b/roles/openshift_logging/README.md
@@ -63,7 +63,7 @@ When both `openshift_logging_install_logging` and `openshift_logging_upgrade_log
- `openshift_logging_es_cluster_size`: The number of ES cluster members. Defaults to '1'.
- `openshift_logging_es_cpu_limit`: The amount of CPU limit for the ES cluster. Unused if not set
-- `openshift_logging_es_memory_limit`: The amount of RAM that should be assigned to ES. Defaults to '1024Mi'.
+- `openshift_logging_es_memory_limit`: The amount of RAM that should be assigned to ES. Defaults to '8Gi'.
- `openshift_logging_es_pv_selector`: A key/value map added to a PVC in order to select specific PVs. Defaults to 'None'.
- `openshift_logging_es_pvc_dynamic`: Whether or not to add the dynamic PVC annotation for any generated PVCs. Defaults to 'False'.
- `openshift_logging_es_pvc_size`: The requested size for the ES PVCs, when not provided the role will not generate any PVCs. Defaults to '""'.
@@ -81,7 +81,7 @@ same as above for their non-ops counterparts, but apply to the OPS cluster insta
- `openshift_logging_es_ops_client_key`: /etc/fluent/keys/key
- `openshift_logging_es_ops_cluster_size`: 1
- `openshift_logging_es_ops_cpu_limit`: The amount of CPU limit for the ES cluster. Unused if not set
-- `openshift_logging_es_ops_memory_limit`: 1024Mi
+- `openshift_logging_es_ops_memory_limit`: 8Gi
- `openshift_logging_es_ops_pvc_dynamic`: False
- `openshift_logging_es_ops_pvc_size`: ""
- `openshift_logging_es_ops_pvc_prefix`: logging-es-ops
diff --git a/roles/openshift_logging/defaults/main.yml b/roles/openshift_logging/defaults/main.yml
index dea1a889a..87fc7068f 100644
--- a/roles/openshift_logging/defaults/main.yml
+++ b/roles/openshift_logging/defaults/main.yml
@@ -62,7 +62,7 @@ openshift_logging_es_client_cert: /etc/fluent/keys/cert
openshift_logging_es_client_key: /etc/fluent/keys/key
openshift_logging_es_cluster_size: "{{ openshift_hosted_logging_elasticsearch_cluster_size | default(1) }}"
openshift_logging_es_cpu_limit: null
-openshift_logging_es_memory_limit: 1024Mi
+openshift_logging_es_memory_limit: 8Gi
openshift_logging_es_pv_selector: null
openshift_logging_es_pvc_dynamic: "{{ openshift_hosted_logging_elasticsearch_pvc_dynamic | default(False) }}"
openshift_logging_es_pvc_size: "{{ openshift_hosted_logging_elasticsearch_pvc_size | default('') }}"
@@ -80,7 +80,7 @@ openshift_logging_es_ops_client_cert: /etc/fluent/keys/cert
openshift_logging_es_ops_client_key: /etc/fluent/keys/key
openshift_logging_es_ops_cluster_size: "{{ openshift_hosted_logging_elasticsearch_ops_cluster_size | default(1) }}"
openshift_logging_es_ops_cpu_limit: null
-openshift_logging_es_ops_memory_limit: 1024Mi
+openshift_logging_es_ops_memory_limit: 8Gi
openshift_logging_es_ops_pv_selector: None
openshift_logging_es_ops_pvc_dynamic: "{{ openshift_hosted_logging_elasticsearch_ops_pvc_dynamic | default(False) }}"
openshift_logging_es_ops_pvc_size: "{{ openshift_hosted_logging_elasticsearch_ops_pvc_size | default('') }}"
diff --git a/roles/openshift_logging/handlers/main.yml b/roles/openshift_logging/handlers/main.yml
index ad79e62ae..ffb812271 100644
--- a/roles/openshift_logging/handlers/main.yml
+++ b/roles/openshift_logging/handlers/main.yml
@@ -3,3 +3,24 @@
systemd: name={{ openshift.common.service_type }}-master state=restarted
when: (openshift.master.ha is not defined or not openshift.master.ha | bool) and (not (master_service_status_changed | default(false) | bool))
notify: Verify API Server
+
+- name: Verify API Server
+ # Using curl here since the uri module requires python-httplib2 and
+ # wait_for port doesn't provide health information.
+ command: >
+ curl --silent --tlsv1.2
+ {% if openshift.common.version_gte_3_2_or_1_2 | bool %}
+ --cacert {{ openshift.common.config_base }}/master/ca-bundle.crt
+ {% else %}
+ --cacert {{ openshift.common.config_base }}/master/ca.crt
+ {% endif %}
+ {{ openshift.master.api_url }}/healthz/ready
+ args:
+ # Disables the following warning:
+ # Consider using get_url or uri module rather than running curl
+ warn: no
+ register: api_available_output
+ until: api_available_output.stdout == 'ok'
+ retries: 120
+ delay: 1
+ changed_when: false
diff --git a/roles/openshift_logging/tasks/install_elasticsearch.yaml b/roles/openshift_logging/tasks/install_elasticsearch.yaml
index f9c2c81fb..244949505 100644
--- a/roles/openshift_logging/tasks/install_elasticsearch.yaml
+++ b/roles/openshift_logging/tasks/install_elasticsearch.yaml
@@ -5,6 +5,7 @@
- name: Generate PersistentVolumeClaims
include: "{{ role_path}}/tasks/generate_pvcs.yaml"
vars:
+ es_pvc_pool: []
es_pvc_names: "{{openshift_logging_facts.elasticsearch.pvcs.keys()}}"
es_dc_names: "{{openshift_logging_facts.elasticsearch.deploymentconfigs.keys()}}"
@@ -63,6 +64,7 @@
- name: Generate PersistentVolumeClaims for Ops
include: "{{ role_path}}/tasks/generate_pvcs.yaml"
vars:
+ es_pvc_pool: []
es_pvc_names: "{{openshift_logging_facts.elasticsearch_ops.pvcs.keys()}}"
es_dc_names: "{{openshift_logging_facts.elasticsearch_ops.deploymentconfigs.keys()}}"
openshift_logging_es_pvc_prefix: "{{openshift_logging_es_ops_pvc_prefix}}"
diff --git a/roles/openshift_logging/tasks/upgrade_logging.yaml b/roles/openshift_logging/tasks/upgrade_logging.yaml
index 0421cdf58..83867d361 100644
--- a/roles/openshift_logging/tasks/upgrade_logging.yaml
+++ b/roles/openshift_logging/tasks/upgrade_logging.yaml
@@ -26,14 +26,14 @@
loop_control:
loop_var: object
-- name: Wait for pods to stop
+- name: Wait for pods to start
oc_obj:
state: list
- kind: dc
+ kind: pods
selector: "component=es"
namespace: "{{openshift_logging_namespace}}"
register: running_pod
- until: running_pod.results.results.items[?(@.status.phase == "Running")].metadata.name != ''
+ until: running_pod.results.results[0]['items'] | selectattr('status.phase', 'equalto', 'Running') | map(attribute='metadata.name') | list | length != 0
retries: 30
delay: 10
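
Editor's note: the rewritten `until` expression filters the listed pods down to those whose `status.phase` is `Running` and succeeds once at least one name survives the filter. A plain-Python restatement of that Jinja `selectattr`/`map` chain, run against a made-up pod listing:

```
# Plain-Python restatement of the Jinja filter chain in the `until` above,
# using a fabricated pod list purely for illustration.
pods = [
    {'metadata': {'name': 'logging-es-1'}, 'status': {'phase': 'Running'}},
    {'metadata': {'name': 'logging-es-2'}, 'status': {'phase': 'Pending'}},
]
running_names = [p['metadata']['name'] for p in pods
                 if p['status']['phase'] == 'Running']
assert len(running_names) != 0   # the condition the task keeps retrying for
```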
diff --git a/roles/openshift_logging/templates/curator.j2 b/roles/openshift_logging/templates/curator.j2
index b7bc15b62..55f4976ec 100644
--- a/roles/openshift_logging/templates/curator.j2
+++ b/roles/openshift_logging/templates/curator.j2
@@ -31,7 +31,7 @@ spec:
{% if curator_node_selector is iterable and curator_node_selector | length > 0 %}
nodeSelector:
{% for key, value in curator_node_selector.iteritems() %}
- {{key}}: {{value}}
+ {{key}}: "{{value}}"
{% endfor %}
{% endif %}
containers:
diff --git a/roles/openshift_logging/templates/es.j2 b/roles/openshift_logging/templates/es.j2
index ec84c6b76..81ae070be 100644
--- a/roles/openshift_logging/templates/es.j2
+++ b/roles/openshift_logging/templates/es.j2
@@ -33,7 +33,7 @@ spec:
{% if es_node_selector is iterable and es_node_selector | length > 0 %}
nodeSelector:
{% for key, value in es_node_selector.iteritems() %}
- {{key}}: {{value}}
+ {{key}}: "{{value}}"
{% endfor %}
{% endif %}
containers:
diff --git a/roles/openshift_logging/templates/kibana.j2 b/roles/openshift_logging/templates/kibana.j2
index be9b45ab4..e6ecf82ff 100644
--- a/roles/openshift_logging/templates/kibana.j2
+++ b/roles/openshift_logging/templates/kibana.j2
@@ -30,7 +30,7 @@ spec:
{% if kibana_node_selector is iterable and kibana_node_selector | length > 0 %}
nodeSelector:
{% for key, value in kibana_node_selector.iteritems() %}
- {{key}}: {{value}}
+ {{key}}: "{{value}}"
{% endfor %}
{% endif %}
containers:
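
Editor's note: the quoting change in the three templates (curator, es, kibana) presumably guards against node-selector values such as `true` or bare numbers being rendered as YAML booleans/ints, since label selector values must be strings. A quick PyYAML check of the difference, under that assumption:

```
import yaml

# Presumed motivation for quoting the rendered value: an unquoted "true"
# parses as a YAML boolean, while the quoted form stays a string.
print(type(yaml.safe_load('zone: true')['zone']))    # <class 'bool'>
print(type(yaml.safe_load('zone: "true"')['zone']))  # <class 'str'>
```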
diff --git a/roles/openshift_master_facts/test/conftest.py b/roles/openshift_master_facts/test/conftest.py
new file mode 100644
index 000000000..e67d24f04
--- /dev/null
+++ b/roles/openshift_master_facts/test/conftest.py
@@ -0,0 +1,54 @@
+import os
+import sys
+
+import pytest
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.pardir, "lookup_plugins"))
+
+from openshift_master_facts_default_predicates import LookupModule as PredicatesLookupModule # noqa: E402
+from openshift_master_facts_default_priorities import LookupModule as PrioritiesLookupModule # noqa: E402
+
+
+@pytest.fixture()
+def predicates_lookup():
+ return PredicatesLookupModule()
+
+
+@pytest.fixture()
+def priorities_lookup():
+ return PrioritiesLookupModule()
+
+
+@pytest.fixture()
+def facts(request):
+ return {
+ 'openshift': {
+ 'common': {}
+ }
+ }
+
+
+@pytest.fixture(params=[True, False])
+def regions_enabled(request):
+ return request.param
+
+
+@pytest.fixture(params=[True, False])
+def zones_enabled(request):
+ return request.param
+
+
+def v_prefix(release):
+ """Prefix a release number with 'v'."""
+ return "v" + release
+
+
+def minor(release):
+ """Add a suffix to release, making 'X.Y' become 'X.Y.Z'."""
+ return release + ".1"
+
+
+@pytest.fixture(params=[str, v_prefix, minor])
+def release_mod(request):
+ """Modifies a release string to alternative valid values."""
+ return request.param
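
Editor's note: the `release_mod` fixture parametrizes each release-based test over three renderings of the same version string: identity, `v`-prefixed, and with a `.1` minor suffix. For an assumed release of `3.4` (value chosen purely for illustration) the expansion looks like this:

```
# Illustration of the three release_mod params applied to "3.4".
def v_prefix(release):
    return "v" + release

def minor(release):
    return release + ".1"

print([mod("3.4") for mod in (str, v_prefix, minor)])  # ['3.4', 'v3.4', '3.4.1']
```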
diff --git a/roles/openshift_master_facts/test/openshift_master_facts_bad_input_tests.py b/roles/openshift_master_facts/test/openshift_master_facts_bad_input_tests.py
new file mode 100644
index 000000000..e8da1e04a
--- /dev/null
+++ b/roles/openshift_master_facts/test/openshift_master_facts_bad_input_tests.py
@@ -0,0 +1,57 @@
+import copy
+import os
+import sys
+
+from ansible.errors import AnsibleError
+import pytest
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.pardir, "lookup_plugins"))
+
+from openshift_master_facts_default_predicates import LookupModule # noqa: E402
+
+
+class TestOpenShiftMasterFactsBadInput(object):
+ lookup = LookupModule()
+ default_facts = {
+ 'openshift': {
+ 'common': {}
+ }
+ }
+
+ def test_missing_openshift_facts(self):
+ with pytest.raises(AnsibleError):
+ facts = {}
+ self.lookup.run(None, variables=facts)
+
+ def test_missing_deployment_type(self):
+ with pytest.raises(AnsibleError):
+ facts = copy.deepcopy(self.default_facts)
+ facts['openshift']['common']['short_version'] = '10.10'
+ self.lookup.run(None, variables=facts)
+
+ def test_missing_short_version_and_missing_openshift_release(self):
+ with pytest.raises(AnsibleError):
+ facts = copy.deepcopy(self.default_facts)
+ facts['openshift']['common']['deployment_type'] = 'origin'
+ self.lookup.run(None, variables=facts)
+
+ def test_unknown_deployment_types(self):
+ with pytest.raises(AnsibleError):
+ facts = copy.deepcopy(self.default_facts)
+ facts['openshift']['common']['short_version'] = '1.1'
+ facts['openshift']['common']['deployment_type'] = 'bogus'
+ self.lookup.run(None, variables=facts)
+
+ def test_unknown_origin_version(self):
+ with pytest.raises(AnsibleError):
+ facts = copy.deepcopy(self.default_facts)
+ facts['openshift']['common']['short_version'] = '0.1'
+ facts['openshift']['common']['deployment_type'] = 'origin'
+ self.lookup.run(None, variables=facts)
+
+ def test_unknown_ocp_version(self):
+ with pytest.raises(AnsibleError):
+ facts = copy.deepcopy(self.default_facts)
+ facts['openshift']['common']['short_version'] = '0.1'
+ facts['openshift']['common']['deployment_type'] = 'openshift-enterprise'
+ self.lookup.run(None, variables=facts)
diff --git a/roles/openshift_master_facts/test/openshift_master_facts_default_predicates_tests.py b/roles/openshift_master_facts/test/openshift_master_facts_default_predicates_tests.py
index 68b6deb88..25294d91a 100644
--- a/roles/openshift_master_facts/test/openshift_master_facts_default_predicates_tests.py
+++ b/roles/openshift_master_facts/test/openshift_master_facts_default_predicates_tests.py
@@ -1,13 +1,5 @@
-import copy
-import os
-import sys
+import pytest
-from ansible.errors import AnsibleError
-from nose.tools import raises, assert_equal
-
-sys.path = [os.path.abspath(os.path.dirname(__file__) + "/../lookup_plugins/")] + sys.path
-
-from openshift_master_facts_default_predicates import LookupModule # noqa: E402
# Predicates ordered according to OpenShift Origin source:
# origin/vendor/k8s.io/kubernetes/plugin/pkg/scheduler/algorithmprovider/defaults/defaults.go
@@ -88,179 +80,83 @@ TEST_VARS = [
]
-class TestOpenShiftMasterFactsDefaultPredicates(object):
- def setUp(self):
- self.lookup = LookupModule()
- self.default_facts = {
- 'openshift': {
- 'common': {}
- }
- }
+def assert_ok(predicates_lookup, default_predicates, regions_enabled, **kwargs):
+ results = predicates_lookup.run(None, regions_enabled=regions_enabled, **kwargs)
+ if regions_enabled:
+ assert results == default_predicates + [REGION_PREDICATE]
+ else:
+ assert results == default_predicates
+
+
+def test_openshift_version(predicates_lookup, openshift_version_fixture, regions_enabled):
+ facts, default_predicates = openshift_version_fixture
+ assert_ok(predicates_lookup, default_predicates, variables=facts, regions_enabled=regions_enabled)
+
+
+@pytest.fixture(params=TEST_VARS)
+def openshift_version_fixture(request, facts):
+ version, deployment_type, default_predicates = request.param
+ version += '.1'
+ facts['openshift_version'] = version
+ facts['openshift']['common']['deployment_type'] = deployment_type
+ return facts, default_predicates
+
+
+def test_openshift_release(predicates_lookup, openshift_release_fixture, regions_enabled):
+ facts, default_predicates = openshift_release_fixture
+ assert_ok(predicates_lookup, default_predicates, variables=facts, regions_enabled=regions_enabled)
+
+
+@pytest.fixture(params=TEST_VARS)
+def openshift_release_fixture(request, facts, release_mod):
+ release, deployment_type, default_predicates = request.param
+ facts['openshift_release'] = release_mod(release)
+ facts['openshift']['common']['deployment_type'] = deployment_type
+ return facts, default_predicates
+
+
+def test_short_version(predicates_lookup, short_version_fixture, regions_enabled):
+ facts, default_predicates = short_version_fixture
+ assert_ok(predicates_lookup, default_predicates, variables=facts, regions_enabled=regions_enabled)
+
+
+@pytest.fixture(params=TEST_VARS)
+def short_version_fixture(request, facts):
+ short_version, deployment_type, default_predicates = request.param
+ facts['openshift']['common']['short_version'] = short_version
+ facts['openshift']['common']['deployment_type'] = deployment_type
+ return facts, default_predicates
+
+
+def test_short_version_kwarg(predicates_lookup, short_version_kwarg_fixture, regions_enabled):
+ facts, short_version, default_predicates = short_version_kwarg_fixture
+ assert_ok(predicates_lookup, default_predicates, variables=facts, regions_enabled=regions_enabled, short_version=short_version)
+
+
+@pytest.fixture(params=TEST_VARS)
+def short_version_kwarg_fixture(request, facts):
+ short_version, deployment_type, default_predicates = request.param
+ facts['openshift']['common']['deployment_type'] = deployment_type
+ return facts, short_version, default_predicates
+
+
+def test_deployment_type_kwarg(predicates_lookup, deployment_type_kwarg_fixture, regions_enabled):
+ facts, deployment_type, default_predicates = deployment_type_kwarg_fixture
+ assert_ok(predicates_lookup, default_predicates, variables=facts, regions_enabled=regions_enabled, deployment_type=deployment_type)
+
+
+@pytest.fixture(params=TEST_VARS)
+def deployment_type_kwarg_fixture(request, facts):
+ short_version, deployment_type, default_predicates = request.param
+ facts['openshift']['common']['short_version'] = short_version
+ return facts, deployment_type, default_predicates
+
+
+def test_short_version_deployment_type_kwargs(predicates_lookup, short_version_deployment_type_kwargs_fixture, regions_enabled):
+ short_version, deployment_type, default_predicates = short_version_deployment_type_kwargs_fixture
+ assert_ok(predicates_lookup, default_predicates, regions_enabled=regions_enabled, short_version=short_version, deployment_type=deployment_type)
+
- @raises(AnsibleError)
- def test_missing_short_version_and_missing_openshift_release(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['deployment_type'] = 'origin'
- self.lookup.run(None, variables=facts)
-
- def check_defaults_short_version(self, short_version, deployment_type, default_predicates,
- regions_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = short_version
- facts['openshift']['common']['deployment_type'] = deployment_type
- results = self.lookup.run(None, variables=facts,
- regions_enabled=regions_enabled)
- if regions_enabled:
- assert_equal(results, default_predicates + [REGION_PREDICATE])
- else:
- assert_equal(results, default_predicates)
-
- def check_defaults_short_version_kwarg(self, short_version, deployment_type, default_predicates,
- regions_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['deployment_type'] = deployment_type
- results = self.lookup.run(None, variables=facts,
- regions_enabled=regions_enabled,
- short_version=short_version)
- if regions_enabled:
- assert_equal(results, default_predicates + [REGION_PREDICATE])
- else:
- assert_equal(results, default_predicates)
-
- def check_defaults_deployment_type_kwarg(self, short_version, deployment_type,
- default_predicates, regions_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = short_version
- results = self.lookup.run(None, variables=facts,
- regions_enabled=regions_enabled,
- deployment_type=deployment_type)
- if regions_enabled:
- assert_equal(results, default_predicates + [REGION_PREDICATE])
- else:
- assert_equal(results, default_predicates)
-
- def check_defaults_only_kwargs(self, short_version, deployment_type,
- default_predicates, regions_enabled):
- facts = copy.deepcopy(self.default_facts)
- results = self.lookup.run(None, variables=facts,
- regions_enabled=regions_enabled,
- short_version=short_version,
- deployment_type=deployment_type)
- if regions_enabled:
- assert_equal(results, default_predicates + [REGION_PREDICATE])
- else:
- assert_equal(results, default_predicates)
-
- def check_defaults_release(self, release, deployment_type, default_predicates,
- regions_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift_release'] = release
- facts['openshift']['common']['deployment_type'] = deployment_type
- results = self.lookup.run(None, variables=facts,
- regions_enabled=regions_enabled)
- if regions_enabled:
- assert_equal(results, default_predicates + [REGION_PREDICATE])
- else:
- assert_equal(results, default_predicates)
-
- def check_defaults_version(self, version, deployment_type, default_predicates,
- regions_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift_version'] = version
- facts['openshift']['common']['deployment_type'] = deployment_type
- results = self.lookup.run(None, variables=facts,
- regions_enabled=regions_enabled)
- if regions_enabled:
- assert_equal(results, default_predicates + [REGION_PREDICATE])
- else:
- assert_equal(results, default_predicates)
-
- def check_defaults_override_vars(self, release, deployment_type,
- default_predicates, regions_enabled,
- extra_facts=None):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = release
- facts['openshift']['common']['deployment_type'] = deployment_type
- if extra_facts is not None:
- for fact in extra_facts:
- facts[fact] = extra_facts[fact]
- results = self.lookup.run(None, variables=facts,
- regions_enabled=regions_enabled,
- return_set_vars=False)
- if regions_enabled:
- assert_equal(results, default_predicates + [REGION_PREDICATE])
- else:
- assert_equal(results, default_predicates)
-
- def test_openshift_version(self):
- for regions_enabled in (True, False):
- for release, deployment_type, default_predicates in TEST_VARS:
- release = release + '.1'
- yield self.check_defaults_version, release, deployment_type, default_predicates, regions_enabled
-
- def test_v_release_defaults(self):
- for regions_enabled in (True, False):
- for release, deployment_type, default_predicates in TEST_VARS:
- yield self.check_defaults_release, 'v' + release, deployment_type, default_predicates, regions_enabled
-
- def test_release_defaults(self):
- for regions_enabled in (True, False):
- for release, deployment_type, default_predicates in TEST_VARS:
- yield self.check_defaults_release, release, deployment_type, default_predicates, regions_enabled
-
- def test_short_version_defaults(self):
- for regions_enabled in (True, False):
- for release, deployment_type, default_predicates in TEST_VARS:
- yield self.check_defaults_short_version, release, deployment_type, default_predicates, regions_enabled
-
- def test_short_version_kwarg(self):
- for regions_enabled in (True, False):
- for release, deployment_type, default_predicates in TEST_VARS:
- yield self.check_defaults_short_version_kwarg, release, deployment_type, default_predicates, regions_enabled
-
- def test_only_kwargs(self):
- for regions_enabled in (True, False):
- for release, deployment_type, default_predicates in TEST_VARS:
- yield self.check_defaults_only_kwargs, release, deployment_type, default_predicates, regions_enabled
-
- def test_deployment_type_kwarg(self):
- for regions_enabled in (True, False):
- for release, deployment_type, default_predicates in TEST_VARS:
- yield self.check_defaults_deployment_type_kwarg, release, deployment_type, default_predicates, regions_enabled
-
- def test_trunc_openshift_release(self):
- for release, deployment_type, default_predicates in TEST_VARS:
- release = release + '.1'
- yield self.check_defaults_release, release, deployment_type, default_predicates, False
-
- @raises(AnsibleError)
- def test_unknown_deployment_types(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = '1.1'
- facts['openshift']['common']['deployment_type'] = 'bogus'
- self.lookup.run(None, variables=facts)
-
- @raises(AnsibleError)
- def test_unknown_origin_version(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = '0.1'
- facts['openshift']['common']['deployment_type'] = 'origin'
- self.lookup.run(None, variables=facts)
-
- @raises(AnsibleError)
- def test_unknown_ocp_version(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = '0.1'
- facts['openshift']['common']['deployment_type'] = 'openshift-enterprise'
- self.lookup.run(None, variables=facts)
-
- @raises(AnsibleError)
- def test_missing_deployment_type(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = '10.10'
- self.lookup.run(None, variables=facts)
-
- @raises(AnsibleError)
- def testMissingOpenShiftFacts(self):
- facts = {}
- self.lookup.run(None, variables=facts)
+@pytest.fixture(params=TEST_VARS)
+def short_version_deployment_type_kwargs_fixture(request):
+ return request.param
diff --git a/roles/openshift_master_facts/test/openshift_master_facts_default_priorities_tests.py b/roles/openshift_master_facts/test/openshift_master_facts_default_priorities_tests.py
index 4e44a2b3d..cec44faa4 100644
--- a/roles/openshift_master_facts/test/openshift_master_facts_default_priorities_tests.py
+++ b/roles/openshift_master_facts/test/openshift_master_facts_default_priorities_tests.py
@@ -1,13 +1,5 @@
-import copy
-import os
-import sys
+import pytest
-from ansible.errors import AnsibleError
-from nose.tools import raises, assert_equal
-
-sys.path = [os.path.abspath(os.path.dirname(__file__) + "/../lookup_plugins/")] + sys.path
-
-from openshift_master_facts_default_priorities import LookupModule # noqa: E402
DEFAULT_PRIORITIES_1_1 = [
{'name': 'LeastRequestedPriority', 'weight': 1},
@@ -76,177 +68,83 @@ TEST_VARS = [
]
-class TestOpenShiftMasterFactsDefaultPredicates(object):
- def setUp(self):
- self.lookup = LookupModule()
- self.default_facts = {
- 'openshift': {
- 'common': {}
- }
- }
+def assert_ok(priorities_lookup, default_priorities, zones_enabled, **kwargs):
+ results = priorities_lookup.run(None, zones_enabled=zones_enabled, **kwargs)
+ if zones_enabled:
+ assert results == default_priorities + [ZONE_PRIORITY]
+ else:
+ assert results == default_priorities
+
+
+def test_openshift_version(priorities_lookup, openshift_version_fixture, zones_enabled):
+ facts, default_priorities = openshift_version_fixture
+ assert_ok(priorities_lookup, default_priorities, variables=facts, zones_enabled=zones_enabled)
+
+
+@pytest.fixture(params=TEST_VARS)
+def openshift_version_fixture(request, facts):
+ version, deployment_type, default_priorities = request.param
+ version += '.1'
+ facts['openshift_version'] = version
+ facts['openshift']['common']['deployment_type'] = deployment_type
+ return facts, default_priorities
+
+
+def test_openshift_release(priorities_lookup, openshift_release_fixture, zones_enabled):
+ facts, default_priorities = openshift_release_fixture
+ assert_ok(priorities_lookup, default_priorities, variables=facts, zones_enabled=zones_enabled)
+
+
+@pytest.fixture(params=TEST_VARS)
+def openshift_release_fixture(request, facts, release_mod):
+ release, deployment_type, default_priorities = request.param
+ facts['openshift_release'] = release_mod(release)
+ facts['openshift']['common']['deployment_type'] = deployment_type
+ return facts, default_priorities
+
+
+def test_short_version(priorities_lookup, short_version_fixture, zones_enabled):
+ facts, default_priorities = short_version_fixture
+ assert_ok(priorities_lookup, default_priorities, variables=facts, zones_enabled=zones_enabled)
+
+
+@pytest.fixture(params=TEST_VARS)
+def short_version_fixture(request, facts):
+ short_version, deployment_type, default_priorities = request.param
+ facts['openshift']['common']['short_version'] = short_version
+ facts['openshift']['common']['deployment_type'] = deployment_type
+ return facts, default_priorities
+
+
+def test_short_version_kwarg(priorities_lookup, short_version_kwarg_fixture, zones_enabled):
+ facts, short_version, default_priorities = short_version_kwarg_fixture
+ assert_ok(priorities_lookup, default_priorities, variables=facts, zones_enabled=zones_enabled, short_version=short_version)
+
+
+@pytest.fixture(params=TEST_VARS)
+def short_version_kwarg_fixture(request, facts):
+ short_version, deployment_type, default_priorities = request.param
+ facts['openshift']['common']['deployment_type'] = deployment_type
+ return facts, short_version, default_priorities
+
+
+def test_deployment_type_kwarg(priorities_lookup, deployment_type_kwarg_fixture, zones_enabled):
+ facts, deployment_type, default_priorities = deployment_type_kwarg_fixture
+ assert_ok(priorities_lookup, default_priorities, variables=facts, zones_enabled=zones_enabled, deployment_type=deployment_type)
+
+
+@pytest.fixture(params=TEST_VARS)
+def deployment_type_kwarg_fixture(request, facts):
+ short_version, deployment_type, default_priorities = request.param
+ facts['openshift']['common']['short_version'] = short_version
+ return facts, deployment_type, default_priorities
+
+
+def test_short_version_deployment_type_kwargs(priorities_lookup, short_version_deployment_type_kwargs_fixture, zones_enabled):
+ short_version, deployment_type, default_priorities = short_version_deployment_type_kwargs_fixture
+ assert_ok(priorities_lookup, default_priorities, zones_enabled=zones_enabled, short_version=short_version, deployment_type=deployment_type)
+
- @raises(AnsibleError)
- def test_missing_short_version_and_missing_openshift_release(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['deployment_type'] = 'origin'
- self.lookup.run(None, variables=facts)
-
- def check_defaults_short_version(self, release, deployment_type,
- default_priorities, zones_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = release
- facts['openshift']['common']['deployment_type'] = deployment_type
- results = self.lookup.run(None, variables=facts, zones_enabled=zones_enabled)
- if zones_enabled:
- assert_equal(results, default_priorities + [ZONE_PRIORITY])
- else:
- assert_equal(results, default_priorities)
-
- def check_defaults_short_version_kwarg(self, release, deployment_type,
- default_priorities, zones_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['deployment_type'] = deployment_type
- results = self.lookup.run(None, variables=facts,
- zones_enabled=zones_enabled,
- short_version=release)
- if zones_enabled:
- assert_equal(results, default_priorities + [ZONE_PRIORITY])
- else:
- assert_equal(results, default_priorities)
-
- def check_defaults_deployment_type_kwarg(self, release, deployment_type,
- default_priorities, zones_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = release
- results = self.lookup.run(None, variables=facts,
- zones_enabled=zones_enabled,
- deployment_type=deployment_type)
- if zones_enabled:
- assert_equal(results, default_priorities + [ZONE_PRIORITY])
- else:
- assert_equal(results, default_priorities)
-
- def check_defaults_only_kwargs(self, release, deployment_type,
- default_priorities, zones_enabled):
- facts = copy.deepcopy(self.default_facts)
- results = self.lookup.run(None, variables=facts,
- zones_enabled=zones_enabled,
- short_version=release,
- deployment_type=deployment_type)
- if zones_enabled:
- assert_equal(results, default_priorities + [ZONE_PRIORITY])
- else:
- assert_equal(results, default_priorities)
-
- def check_defaults_release(self, release, deployment_type, default_priorities,
- zones_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift_release'] = release
- facts['openshift']['common']['deployment_type'] = deployment_type
- results = self.lookup.run(None, variables=facts, zones_enabled=zones_enabled)
- if zones_enabled:
- assert_equal(results, default_priorities + [ZONE_PRIORITY])
- else:
- assert_equal(results, default_priorities)
-
- def check_defaults_version(self, release, deployment_type, default_priorities,
- zones_enabled):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift_version'] = release
- facts['openshift']['common']['deployment_type'] = deployment_type
- results = self.lookup.run(None, variables=facts, zones_enabled=zones_enabled)
- if zones_enabled:
- assert_equal(results, default_priorities + [ZONE_PRIORITY])
- else:
- assert_equal(results, default_priorities)
-
- def check_defaults_override_vars(self, release, deployment_type,
- default_priorities, zones_enabled,
- extra_facts=None):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = release
- facts['openshift']['common']['deployment_type'] = deployment_type
- if extra_facts is not None:
- for fact in extra_facts:
- facts[fact] = extra_facts[fact]
- results = self.lookup.run(None, variables=facts,
- zones_enabled=zones_enabled,
- return_set_vars=False)
- if zones_enabled:
- assert_equal(results, default_priorities + [ZONE_PRIORITY])
- else:
- assert_equal(results, default_priorities)
-
- def test_openshift_version(self):
- for zones_enabled in (True, False):
- for release, deployment_type, default_priorities in TEST_VARS:
- release = release + '.1'
- yield self.check_defaults_version, release, deployment_type, default_priorities, zones_enabled
-
- def test_v_release_defaults(self):
- for zones_enabled in (True, False):
- for release, deployment_type, default_priorities in TEST_VARS:
- release = 'v' + release
- yield self.check_defaults_release, release, deployment_type, default_priorities, zones_enabled
-
- def test_release_defaults(self):
- for zones_enabled in (True, False):
- for release, deployment_type, default_priorities in TEST_VARS:
- yield self.check_defaults_release, release, deployment_type, default_priorities, zones_enabled
-
- def test_short_version_defaults(self):
- for zones_enabled in (True, False):
- for short_version, deployment_type, default_priorities in TEST_VARS:
- yield self.check_defaults_short_version, short_version, deployment_type, default_priorities, zones_enabled
-
- def test_only_kwargs(self):
- for zones_enabled in (True, False):
- for short_version, deployment_type, default_priorities in TEST_VARS:
- yield self.check_defaults_only_kwargs, short_version, deployment_type, default_priorities, zones_enabled
-
- def test_deployment_type_kwarg(self):
- for zones_enabled in (True, False):
- for short_version, deployment_type, default_priorities in TEST_VARS:
- yield self.check_defaults_deployment_type_kwarg, short_version, deployment_type, default_priorities, zones_enabled
-
- def test_release_kwarg(self):
- for zones_enabled in (True, False):
- for short_version, deployment_type, default_priorities in TEST_VARS:
- yield self.check_defaults_short_version_kwarg, short_version, deployment_type, default_priorities, zones_enabled
-
- def test_trunc_openshift_release(self):
- for release, deployment_type, default_priorities in TEST_VARS:
- release = release + '.1'
- yield self.check_defaults_release, release, deployment_type, default_priorities, False
-
- @raises(AnsibleError)
- def test_unknown_origin_version(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = '0.1'
- facts['openshift']['common']['deployment_type'] = 'origin'
- self.lookup.run(None, variables=facts)
-
- @raises(AnsibleError)
- def test_unknown_ocp_version(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = '0.1'
- facts['openshift']['common']['deployment_type'] = 'openshift-enterprise'
- self.lookup.run(None, variables=facts)
-
- @raises(AnsibleError)
- def test_unknown_deployment_types(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = '1.1'
- facts['openshift']['common']['deployment_type'] = 'bogus'
- self.lookup.run(None, variables=facts)
-
- @raises(AnsibleError)
- def test_missing_deployment_type(self):
- facts = copy.deepcopy(self.default_facts)
- facts['openshift']['common']['short_version'] = '10.10'
- self.lookup.run(None, variables=facts)
-
- @raises(AnsibleError)
- def test_missing_openshift_facts(self):
- facts = {}
- self.lookup.run(None, variables=facts)
+@pytest.fixture(params=TEST_VARS)
+def short_version_deployment_type_kwargs_fixture(request):
+ return request.param
diff --git a/roles/openshift_node_upgrade/README.md b/roles/openshift_node_upgrade/README.md
index e21bee412..66bce38ec 100644
--- a/roles/openshift_node_upgrade/README.md
+++ b/roles/openshift_node_upgrade/README.md
@@ -82,7 +82,7 @@ Including an example of how to use your role (for instance, with variables passe
- name: Drain Node for Kubelet upgrade
command: >
- {{ hostvars[groups.oo_first_master.0].openshift.common.admin_binary }} drain {{ openshift.node.nodename | lower }} --force --delete-local-data
+ {{ hostvars[groups.oo_first_master.0].openshift.common.admin_binary }} drain {{ openshift.node.nodename | lower }} --force --delete-local-data --ignore-daemonsets
delegate_to: "{{ groups.oo_first_master.0 }}"
roles:
diff --git a/roles/openshift_node_upgrade/tasks/main.yml b/roles/openshift_node_upgrade/tasks/main.yml
index 609ca2a6e..96e296a3b 100644
--- a/roles/openshift_node_upgrade/tasks/main.yml
+++ b/roles/openshift_node_upgrade/tasks/main.yml
@@ -13,7 +13,10 @@
vars:
# We will restart Docker ourselves after everything is ready:
skip_docker_restart: True
- when: l_docker_upgrade is defined and l_docker_upgrade | bool and not openshift.common.is_atomic | bool
+ when:
+ - l_docker_upgrade is defined
+ - l_docker_upgrade | bool
+ - not openshift.common.is_containerized | bool
- include: "{{ node_config_hook }}"
when: node_config_hook is defined
@@ -25,14 +28,19 @@
when: not openshift.common.is_containerized | bool
- name: Remove obsolete docker-sdn-ovs.conf
- file: path=/etc/systemd/system/docker.service.d/docker-sdn-ovs.conf state=absent
- when: (deployment_type == 'openshift-enterprise' and openshift_release | version_compare('3.4', '>=')) or (deployment_type == 'origin' and openshift_release | version_compare('1.4', '>='))
+ file:
+ path: "/etc/systemd/system/docker.service.d/docker-sdn-ovs.conf"
+ state: absent
+ when: (deployment_type == 'openshift-enterprise' and openshift_release | version_compare('3.4', '>='))
+ or (deployment_type == 'origin' and openshift_release | version_compare('1.4', '>='))
- include: containerized_node_upgrade.yml
when: openshift.common.is_containerized | bool
- name: Ensure containerized services stopped before Docker restart
- service: name={{ item }} state=stopped
+ service:
+ name: "{{ item }}"
+ state: stopped
with_items:
- etcd_container
- openvswitch
@@ -62,22 +70,19 @@
- include: docker/restart.yml
- name: Restart rpm node service
- service: name="{{ openshift.common.service_type }}-node" state=restarted
+ service:
+ name: "{{ openshift.common.service_type }}-node"
+ state: restarted
when: not openshift.common.is_containerized | bool
- name: Wait for node to be ready
- command: >
- {{ hostvars[groups.oo_first_master.0].openshift.common.client_binary }} get node {{ openshift.common.hostname | lower }} --no-headers
+ oc_obj:
+ state: list
+ kind: node
+ name: "{{ openshift.common.hostname | lower }}"
register: node_output
delegate_to: "{{ groups.oo_first_master.0 }}"
- until: "{{ node_output.stdout.split()[1].startswith('Ready')}}"
- # Give the node two minutes to come back online. Note that we pre-pull images now
- # so containerized services should restart quickly as well.
+ until: node_output.results.results[0].status.conditions | selectattr('type', 'equalto', 'Ready') | map(attribute='status') | join | bool == True
+ # Give the node two minutes to come back online.
retries: 24
delay: 5
- # AUDIT:changed_when: `false` because we are only inspecting the
- # state of the node, we aren't changing anything (we changed node
- # service state in the previous task). You could say we shouldn't
- # override this because something will be changing (the state of a
- # service), but that should be part of the last task.
- changed_when: false
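
Editor's note: the rewritten wait inspects the node object returned by `oc_obj` instead of parsing `oc get` output: it selects the `Ready` condition and treats its `status` string as a boolean. A plain-Python restatement against a made-up conditions list:

```
# Plain-Python restatement of the readiness check in the `until` above,
# using a fabricated conditions list for illustration.
conditions = [
    {'type': 'OutOfDisk', 'status': 'False'},
    {'type': 'Ready', 'status': 'True'},
]
ready = ''.join(c['status'] for c in conditions if c['type'] == 'Ready')
assert ready == 'True'   # "True" | bool evaluates truthy, so the task stops retrying
```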
diff --git a/setup.cfg b/setup.cfg
index 06346852c..e6bf2c5d1 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,17 +4,6 @@
# will need to generate wheels for each Python version that you support.
universal=1
-[nosetests]
-tests=roles/openshift_master_facts/test/, test/
-verbosity=2
-with-coverage=1
-cover-html=1
-cover-inclusive=1
-cover-min-percentage=70
-cover-erase=1
-detailed-errors=1
-cover-branches=1
-
[yamllint]
excludes=.tox,utils,files
@@ -25,3 +14,22 @@ lint_disable=fixme,locally-disabled,file-ignored,duplicate-code
exclude=.tox/*,utils/*,inventory/*
max_line_length = 120
ignore = E501,T003
+
+[tool:pytest]
+norecursedirs =
+ .*
+ __pycache__
+ cover
+ docs
+    # utils has its own config
+ utils
+python_files =
+ # TODO(rhcarvalho): rename test files to follow a single pattern. "test*.py"
+ # is Python unittest's default, while pytest discovers both "test_*.py" and
+ # "*_test.py" by default.
+ test_*.py
+ *_tests.py
+addopts =
+ --cov=.
+ --cov-report=term
+ --cov-report=html
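
Editor's note: the `python_files` setting keeps both naming styles discoverable while the rename mentioned in the TODO is pending. A small check of which filenames the two patterns match (the first name below is a made-up example):

```
import fnmatch

# Which filenames the two python_files patterns above would discover.
patterns = ('test_*.py', '*_tests.py')
for name in ('test_example.py',
             'openshift_master_facts_bad_input_tests.py',
             'conftest.py'):
    print(name, any(fnmatch.fnmatch(name, p) for p in patterns))
```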
diff --git a/setup.py b/setup.py
index 372aca9ff..2ad26110b 100644
--- a/setup.py
+++ b/setup.py
@@ -13,6 +13,7 @@ import yaml
from setuptools import setup, Command
from setuptools_lint.setuptools_command import PylintCommand
from six import string_types
+from six.moves import reload_module
from yamllint.config import YamlLintConfig
from yamllint.cli import Format
from yamllint import linter
@@ -185,6 +186,7 @@ class OpenShiftAnsibleGenerateValidation(Command):
# the python path.
# pylint: disable=import-error
import generate
+ reload_module(generate)
generate.verify()
except generate.GenerateAnsibleException as gae:
print(gae.args)
diff --git a/test-requirements.txt b/test-requirements.txt
index 2ee1e657d..9bb6e058c 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -7,5 +7,7 @@ pylint
setuptools-lint
PyYAML
yamllint
-nose
coverage
+mock
+pytest
+pytest-cov
diff --git a/tox.ini b/tox.ini
index 4d3594023..13c87f5c4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,8 +12,10 @@ deps =
ansible22: ansible~=2.2
commands =
- flake8: flake8
+ unit: pytest {posargs}
+ flake8: flake8 {posargs}
pylint: python setup.py lint
yamllint: python setup.py yamllint
- unit: nosetests
generate_validation: python setup.py generate_validation
+
+
diff --git a/utils/.coveragerc b/utils/.coveragerc
index e1d918755..551e13192 100644
--- a/utils/.coveragerc
+++ b/utils/.coveragerc
@@ -1,5 +1,18 @@
[run]
-omit=
+branch = True
+omit =
*/lib/python*/site-packages/*
*/lib/python*/*
/usr/*
+ setup.py
+ # TODO(rhcarvalho): this is used to ignore test files from coverage report.
+ # We can make this less generic when we stick with a single test pattern in
+ # the repo.
+ test_*.py
+ *_tests.py
+
+[report]
+fail_under = 73
+
+[html]
+directory = cover
diff --git a/utils/README.md b/utils/README.md
index c37ab41e6..7aa045ae4 100644
--- a/utils/README.md
+++ b/utils/README.md
@@ -1,56 +1,61 @@
-# Running Tests (NEW)
+# Running Tests
Run the command:
make ci
-to run an array of unittests locally.
+to run tests and linting tools.
Underneath the covers, we use [tox](http://readthedocs.org/docs/tox/) to manage virtualenvs and run
tests. Alternatively, tests can be run using [detox](https://pypi.python.org/pypi/detox/) which allows
-for running tests in parallel
-
+for running tests in parallel.
```
pip install tox detox
```
List the test environments available:
+
```
tox -l
```
Run all of the tests with:
+
```
tox
```
Run all of the tests in parallel with detox:
+
```
detox
```
-Running a particular test environment (python 2.7 flake8 tests in this case):
+Run a particular test environment:
+
```
-tox -e py27-ansible22-flake8
+tox -e py27-flake8
```
-Running a particular test environment in a clean virtualenv (python 3.5 pylint
-tests in this case):
+Run a particular test environment in a clean virtualenv:
+
```
-tox -r -e py35-ansible22-pylint
+tox -r -e py35-pylint
```
If you want to enter the virtualenv created by tox to do additional
-testing/debugging (py27-flake8 env in this case):
+testing/debugging:
+
```
-source .tox/py27-ansible22-flake8/bin/activate
+source .tox/py27-flake8/bin/activate
```
You will get errors if the log files already exist and can not be
written to by the current user (`/tmp/ansible.log` and
`/tmp/installer.txt`). *We're working on it.*
+
# Running From Source
You will need to setup a **virtualenv** to run from source:
@@ -66,17 +71,3 @@ The virtualenv `bin` directory should now be at the start of your
You can exit the virtualenv with:
$ deactivate
-
-# Testing (OLD)
-
-*This section is deprecated, but still works*
-
-First, run the **virtualenv setup steps** described above.
-
-Install some testing libraries: (we cannot do this via setuptools due to the version virtualenv bundles)
-
-$ pip install mock nose
-
-Then run the tests with:
-
-$ oo-install/bin/nosetests
diff --git a/utils/setup.cfg b/utils/setup.cfg
index 862dffd7b..d730cd3b4 100644
--- a/utils/setup.cfg
+++ b/utils/setup.cfg
@@ -4,15 +4,8 @@
# will need to generate wheels for each Python version that you support.
universal=1
-[nosetests]
-verbosity=2
-with-coverage=1
-cover-html=1
-cover-inclusive=1
-cover-min-percentage=70
-cover-erase=1
-detailed-errors=1
-cover-branches=1
+[aliases]
+test=pytest
[flake8]
max-line-length=120
@@ -21,3 +14,19 @@ ignore=E501
[lint]
lint_disable=fixme,locally-disabled,file-ignored,duplicate-code
+
+[tool:pytest]
+testpaths = test
+norecursedirs =
+ .*
+ __pycache__
+python_files =
+ # TODO(rhcarvalho): rename test files to follow a single pattern. "test*.py"
+ # is Python unittest's default, while pytest discovers both "test_*.py" and
+ # "*_test.py" by default.
+ test_*.py
+ *_tests.py
+addopts =
+ --cov=.
+ --cov-report=term
+ --cov-report=html
diff --git a/utils/setup.py b/utils/setup.py
index 3518581e7..238c64c19 100644
--- a/utils/setup.py
+++ b/utils/setup.py
@@ -40,7 +40,7 @@ setup(
# simple. Or you can use find_packages().
#packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
packages=['ooinstall'],
- package_dir={'ooinstall': 'src/ooinstall'},
+ package_dir={'': 'src'},
# List run-time dependencies here. These will be installed by pip when
@@ -65,10 +65,6 @@ setup(
'ooinstall': ['ansible.cfg', 'ansible-quiet.cfg', 'ansible_plugins/*'],
},
- tests_require=['nose'],
-
- test_suite='nose.collector',
-
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
diff --git a/utils/src/ooinstall/cli_installer.py b/utils/src/ooinstall/cli_installer.py
index b787741d7..a6d784dea 100644
--- a/utils/src/ooinstall/cli_installer.py
+++ b/utils/src/ooinstall/cli_installer.py
@@ -72,12 +72,6 @@ You might want to override the default subdomain used for exposed routes. If you
return click.prompt('New default subdomain (ENTER for none)', default='')
-def list_hosts(hosts):
- hosts_idx = range(len(hosts))
- for idx in hosts_idx:
- click.echo(' {}: {}'.format(idx, hosts[idx]))
-
-
def collect_hosts(oo_cfg, existing_env=False, masters_set=False, print_summary=True):
"""
Collect host information from user. This will later be filled in using
@@ -656,20 +650,6 @@ https://docs.openshift.com/enterprise/latest/admin_guide/install/prerequisites.h
return oo_cfg
-def get_role_variable(oo_cfg, role_name, variable_name):
- try:
- target_role = next(role for role in oo_cfg.deployment.roles if role.name is role_name)
- target_variable = target_role.variables[variable_name]
- return target_variable
- except (StopIteration, KeyError):
- return None
-
-
-def set_role_variable(oo_cfg, role_name, variable_name, variable_value):
- target_role = next(role for role in oo_cfg.deployment.roles if role.name is role_name)
- target_role[variable_name] = variable_value
-
-
def collect_new_nodes(oo_cfg):
click.clear()
click.echo('*** New Node Configuration ***')
diff --git a/utils/src/ooinstall/oo_config.py b/utils/src/ooinstall/oo_config.py
index cf14105af..c3501c018 100644
--- a/utils/src/ooinstall/oo_config.py
+++ b/utils/src/ooinstall/oo_config.py
@@ -126,15 +126,6 @@ class Host(object):
""" Does this host have the etcd role """
return 'etcd' in self.roles
- def is_etcd_member(self, all_hosts):
- """ Will this host be a member of a standalone etcd cluster. """
- if not self.is_master():
- return False
- masters = [host for host in all_hosts if host.is_master()]
- if len(masters) > 1:
- return True
- return False
-
def is_dedicated_node(self):
""" Will this host be a dedicated node. (not a master) """
return self.is_node() and not self.is_master()
@@ -185,7 +176,7 @@ class Deployment(object):
class OOConfig(object):
default_dir = os.path.normpath(
os.environ.get('XDG_CONFIG_HOME',
- os.environ['HOME'] + '/.config/') + '/openshift/')
+ os.environ.get('HOME', '') + '/.config/') + '/openshift/')
default_file = '/installer.cfg.yml'
def __init__(self, config_path):
@@ -436,12 +427,6 @@ class OOConfig(object):
def __str__(self):
return self.yaml()
- def get_host(self, name):
- for host in self.deployment.hosts:
- if host.connect_to == name:
- return host
- return None
-
def get_host_roles_set(self):
roles_set = set()
for host in self.deployment.hosts:
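
Editor's note: the `default_dir` change swaps `os.environ['HOME']` for `os.environ.get('HOME', '')`, so config-path resolution no longer raises `KeyError` when neither `XDG_CONFIG_HOME` nor `HOME` is set. A quick sketch of the resulting fallback behaviour (using a local dict as a stand-in for the environment):

```
import os

# Sketch of the new default_dir fallback: with neither XDG_CONFIG_HOME nor
# HOME set, the path degrades to "/.config/openshift" instead of raising.
env = {}  # stand-in for os.environ with both variables unset
default_dir = os.path.normpath(
    env.get('XDG_CONFIG_HOME',
            env.get('HOME', '') + '/.config/') + '/openshift/')
print(default_dir)  # -> "/.config/openshift"
```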
diff --git a/utils/test-requirements.txt b/utils/test-requirements.txt
index 699afc26a..b26e22a7e 100644
--- a/utils/test-requirements.txt
+++ b/utils/test-requirements.txt
@@ -2,7 +2,6 @@ ansible
# flake8 moved to before setuptools-lint to satisfy mccabe dependency issue
flake8
setuptools-lint
-nose
coverage
mock
PyYAML
@@ -12,3 +11,5 @@ pyOpenSSL
yamllint
tox
detox
+pytest
+pytest-cov
diff --git a/utils/tox.ini b/utils/tox.ini
index 1308f7505..2524923cb 100644
--- a/utils/tox.ini
+++ b/utils/tox.ini
@@ -11,6 +11,9 @@ deps =
-rtest-requirements.txt
py35-flake8: flake8-bugbear
commands =
+ # Needed to make detox work, since it ignores usedevelop
+ # https://github.com/tox-dev/tox/issues/180
+ unit: pip install -e .
+ unit: pytest {posargs}
flake8: python setup.py flake8
- unit: python setup.py nosetests
pylint: python setup.py lint