---
# vim: set ft=ansible:
#
# Logging
# Running the playbook automatically creates a test result file in the
# playbook directory named admin-unlock-result.log.
#
# To disable result logging, run this playbook with the following option:
# -e log_results=false
#
# To change the log file name and location, run with the following option:
# -e result_file=/your/file.log
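#
# Example invocations (the inventory path is an assumption):
#   ansible-playbook -i inventory main.yml
#   ansible-playbook -i inventory main.yml -e log_results=false
#   ansible-playbook -i inventory main.yml -e result_file=/tmp/unlock.log
#
# The result file is the tests list rendered with to_nice_yaml, e.g.:
#   - name: Add/Remove Package - Non Persistence
#     result: Passed
#     result_details: ''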
#
# Core Functionality
# Verify rpms can be installed and uninstalled after ostree admin unlock
# Verify overlayfs is created after running ostree admin unlock
# (use the mount command to verify the presence of the overlayfs mount)
# Verify ostree admin status indicates "Unlocked: development" after running
# ostree admin unlock
# Verify that changes to the deployment after ostree admin unlock do not
# persist through a reboot.
# Verify overlayfs is removed after reboot (use the mount command to verify
# the absence of the overlayfs mount)
# Verify ostree admin status no longer shows an unlocked deployment
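#
# For reference, a manual walkthrough of the development unlock flow looks
# roughly like this (the package name is a placeholder; status wording may
# vary between ostree versions):
#   ostree admin unlock
#   mount | grep overlay       # overlay mount over /usr is present
#   ostree admin status        # current deployment shows "Unlocked: development"
#   rpm -Uvh foo.rpm           # package installs successfully
#   systemctl reboot           # afterwards the overlay and foo.rpm are gone
#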
# Verify rpms can be installed and uninstalled after ostree admin unlock
# --hotfix and changes persist through reboot.
# Verify ostree admin status indicates "Unlocked: hotfix"
# Verify a clone of the current deployment is added to the deployment list
# Use rpm-ostree status to verify there are two deployments with the same
# commit ID
# Verify overlayfs is created (use the mount command to verify the presence
# of the overlayfs mount)
# Verify rpms can still be installed and uninstalled after reboot
# Verify ostree admin unlock --hotfix overwrites any other deployment already
# on the system
# Verify rollback between hotfixed deployment and regular deployment
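#
# The corresponding hotfix flow, with the same caveats:
#   ostree admin unlock --hotfix
#   ostree admin status        # shows "Unlocked: hotfix"
#   rpm-ostree status          # two deployments with the same commit ID
#   rpm -Uvh foo.rpm
#   systemctl reboot           # foo.rpm is still installed after reboot
#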
# NOT TESTED:
# Verify upgrade from hotfixed deployment
# Upgrade should be successful
# Packages installed in previous unlocked deployment should not be present
# in the upgraded deployment
#
# Negative Testing
# Verify rpms cannot be installed without ostree admin unlock
# Run ostree admin unlock twice. Verify that an error message is displayed
# specifying that the deployment is already in an unlocked state: development
# Run ostree admin unlock, then ostree admin unlock --hotfix. Verify that
# an error message is displayed specifying that the deployment is already
# in an unlocked state: development
# Run ostree admin unlock --hotfix twice. Verify that an error message is
# displayed specifying that the deployment is already in an unlocked state:
# hotfix
# Run ostree admin unlock --hotfix then ostree admin unlock. Verify that an
# error message is displayed specifying that the deployment is already in an
# unlocked state: hotfix
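#
# Sketch of the expected failure when unlocking twice (the exact message
# text is an assumption based on the checks above and may differ between
# ostree releases):
#   $ ostree admin unlock
#   $ ostree admin unlock
#   error: Deployment is already in unlocked state: development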
- name: Ostree Admin Unlock - Test Suite
hosts: all
become: true
vars_files:
- vars.yml
vars:
tests: []
tasks:
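# The set_fact values below are defaults: extra vars passed with -e take
# the highest precedence in Ansible, so they override these facts.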
- name: Set logging
set_fact:
log_results: true
result_file: "{{ playbook_dir }}/admin-unlock-result.log"
tags: setup
- include_tasks: 'setup.yml'
tags: setup
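# Each test below runs inside a block: on failure, the rescue section
# records the failure details and the play continues, so one failing test
# does not abort the rest. The aggregated results are evaluated at the end
# of the play. A subset of tests can be selected by tag, e.g. (sketch):
#   ansible-playbook -i inventory main.yml --tags "setup,hotfix_rollback,cleanup"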
# TEST
# Verify installed/removed packages do not persist through reboot with unlock
- block:
- include_tasks: 'add_remove_package_non_persistence.yml'
- set_fact:
tests: "{{ tests + [ { 'name':'Add/Remove Package - Non Persistence', 'result':'Passed', 'result_details': '' } ] }}"
rescue:
- set_fact:
tests: "{{ tests + [ { 'name':'Add/Remove Package - Non Persistence', 'result':'Failed', 'result_details': ansible_failed_result } ] }}"
tags: add_remove_package_non_persistence
# TEST
# Verify installed/removed packages persist through reboot with hotfix unlock
- block:
- include_tasks: 'add_remove_package_persistence.yml'
- set_fact:
tests: "{{ tests + [ { 'name':'Add/Remove Package - Persistence', 'result':'Passed', 'result_details': '' } ] }}"
rescue:
- set_fact:
tests: "{{ tests + [ { 'name':'Add/Remove Package - Persistence', 'result':'Failed', 'result_details': ansible_failed_result } ] }}"
tags: add_remove_package_persistence
# TEST
# Verify rollback to original deployment after hotfix unlock
- block:
- include_tasks: 'hotfix_rollback.yml'
- set_fact:
tests: "{{ tests + [ { 'name':'Hotfix rollback', 'result':'Passed', 'result_details': '' } ] }}"
rescue:
- set_fact:
tests: "{{ tests + [ { 'name':'Hotfix rollback', 'result':'Failed', 'result_details': ansible_failed_result } ] }}"
tags: hotfix_rollback
# TEST
# Verify no packages can be installed without unlock
- block:
- include_tasks: 'no_install_without_unlock.yml'
- set_fact:
tests: "{{ tests + [ { 'name':'No install without unlock', 'result':'Passed', 'result_details': '' } ] }}"
rescue:
- set_fact:
tests: "{{ tests + [ { 'name':'No install without unlock', 'result':'Failed', 'result_details': ansible_failed_result } ] }}"
tags: no_install_without_unlock
# TEST
# Verify unlocking twice causes error
- block:
- include_tasks: 'unlock_twice_error.yml'
- set_fact:
tests: "{{ tests + [ { 'name':'Unlock twice error', 'result':'Passed', 'result_details': '' } ] }}"
rescue:
- set_fact:
tests: "{{ tests + [ { 'name':'Unlock twice error', 'result':'Failed', 'result_details': ansible_failed_result } ] }}"
tags: unlock_twice_error
# TEST
# Verify hotfix unlocking twice causes error
- block:
- include_tasks: 'unlock_twice_hotfix_error.yml'
- set_fact:
tests: "{{ tests + [ { 'name':'Unlock twice hotfix error', 'result':'Passed', 'result_details': '' } ] }}"
rescue:
- set_fact:
tests: "{{ tests + [ { 'name':'Unlock twice hotfix error', 'result':'Failed', 'result_details': ansible_failed_result } ] }}"
tags: unlock_twice_hotfix_error
# CLEANUP
- block:
- include_tasks: 'cleanup.yml'
- set_fact:
tests: "{{ tests + [ { 'name': 'Cleanup', 'result':'Passed', 'result_details': '' } ] }}"
rescue:
- set_fact:
tests: "{{ tests + [ { 'name':'Cleanup', 'result':'Failed', 'result_details': ansible_failed_result } ] }}"
always:
# WRITE RESULTS TO FILE
- name: Remove existing log files
  file:
    path: "{{ result_file }}"
    state: absent
  delegate_to: localhost
  become: false
- name: Save result to file
  when: log_results
  copy:
    content: "{{ tests | to_nice_yaml(indent=2) }}"
    dest: "{{ result_file }}"
  delegate_to: localhost
  become: false
tags: cleanup
# Rescued failures show up as failures in the Ansible output, but the
# playbook itself still exits 0, so explicitly fail the play by checking
# the recorded test results
- name: Explicitly fail based on test results
when: item['result']|lower == "failed"
fail:
msg: "Failure found in test: {{ item['name'] }}"
with_items: "{{ tests }}"