# docker_storage_test.py
  1. import pytest
  2. from openshift_checks import OpenShiftCheckException
  3. from openshift_checks.docker_storage import DockerStorage
  4. def dummy_check(execute_module=None):
  5. def dummy_exec(self, status, task_vars):
  6. raise Exception("dummy executor called")
  7. return DockerStorage(execute_module=execute_module or dummy_exec)
  8. @pytest.mark.parametrize('is_containerized, group_names, is_active', [
  9. (False, ["masters", "etcd"], False),
  10. (False, ["masters", "nodes"], True),
  11. (True, ["etcd"], True),
  12. ])
  13. def test_is_active(is_containerized, group_names, is_active):
  14. task_vars = dict(
  15. openshift=dict(common=dict(is_containerized=is_containerized)),
  16. group_names=group_names,
  17. )
  18. assert DockerStorage.is_active(task_vars=task_vars) == is_active
  19. def non_atomic_task_vars():
  20. return {"openshift": {"common": {"is_atomic": False}}}
  21. @pytest.mark.parametrize('docker_info, failed, expect_msg', [
  22. (
  23. dict(failed=True, msg="Error connecting: Error while fetching server API version"),
  24. True,
  25. ["Is docker running on this host?"],
  26. ),
  27. (
  28. dict(msg="I have no info"),
  29. True,
  30. ["missing info"],
  31. ),
  32. (
  33. dict(info={
  34. "Driver": "devicemapper",
  35. "DriverStatus": [("Pool Name", "docker-docker--pool")],
  36. }),
  37. False,
  38. [],
  39. ),
  40. (
  41. dict(info={
  42. "Driver": "devicemapper",
  43. "DriverStatus": [("Data loop file", "true")],
  44. }),
  45. True,
  46. ["loopback devices with the Docker devicemapper storage driver"],
  47. ),
  48. (
  49. dict(info={
  50. "Driver": "overlay2",
  51. "DriverStatus": [("Backing Filesystem", "xfs")],
  52. }),
  53. False,
  54. [],
  55. ),
  56. (
  57. dict(info={
  58. "Driver": "overlay",
  59. "DriverStatus": [("Backing Filesystem", "btrfs")],
  60. }),
  61. True,
  62. ["storage is type 'btrfs'", "only supported with\n'xfs'"],
  63. ),
  64. (
  65. dict(info={
  66. "Driver": "overlay2",
  67. "DriverStatus": [("Backing Filesystem", "xfs")],
  68. "OperatingSystem": "Red Hat Enterprise Linux Server release 7.2 (Maipo)",
  69. "KernelVersion": "3.10.0-327.22.2.el7.x86_64",
  70. }),
  71. True,
  72. ["Docker reports kernel version 3.10.0-327"],
  73. ),
  74. (
  75. dict(info={
  76. "Driver": "overlay",
  77. "DriverStatus": [("Backing Filesystem", "xfs")],
  78. "OperatingSystem": "CentOS",
  79. "KernelVersion": "3.10.0-514",
  80. }),
  81. False,
  82. [],
  83. ),
  84. (
  85. dict(info={
  86. "Driver": "unsupported",
  87. }),
  88. True,
  89. ["unsupported Docker storage driver"],
  90. ),
  91. ])
  92. def test_check_storage_driver(docker_info, failed, expect_msg):
  93. def execute_module(module_name, module_args, tmp=None, task_vars=None):
  94. if module_name == "yum":
  95. return {}
  96. if module_name != "docker_info":
  97. raise ValueError("not expecting module " + module_name)
  98. return docker_info
  99. check = dummy_check(execute_module=execute_module)
  100. check.check_dm_usage = lambda status, task_vars: dict() # stub out for this test
  101. check.check_overlay_usage = lambda info, task_vars: dict() # stub out for this test
  102. result = check.run(tmp=None, task_vars=non_atomic_task_vars())
  103. if failed:
  104. assert result["failed"]
  105. else:
  106. assert not result.get("failed", False)
  107. for word in expect_msg:
  108. assert word in result["msg"]
  109. enough_space = {
  110. "Pool Name": "docker--vg-docker--pool",
  111. "Data Space Used": "19.92 MB",
  112. "Data Space Total": "8.535 GB",
  113. "Metadata Space Used": "40.96 kB",
  114. "Metadata Space Total": "25.17 MB",
  115. }
  116. not_enough_space = {
  117. "Pool Name": "docker--vg-docker--pool",
  118. "Data Space Used": "10 GB",
  119. "Data Space Total": "10 GB",
  120. "Metadata Space Used": "42 kB",
  121. "Metadata Space Total": "43 kB",
  122. }
  123. @pytest.mark.parametrize('task_vars, driver_status, vg_free, success, expect_msg', [
  124. (
  125. {"max_thinpool_data_usage_percent": "not a float"},
  126. enough_space,
  127. "12g",
  128. False,
  129. ["is not a percentage"],
  130. ),
  131. (
  132. {},
  133. {}, # empty values from driver status
  134. "bogus", # also does not parse as bytes
  135. False,
  136. ["Could not interpret", "as bytes"],
  137. ),
  138. (
  139. {},
  140. enough_space,
  141. "12.00g",
  142. True,
  143. [],
  144. ),
  145. (
  146. {},
  147. not_enough_space,
  148. "0.00",
  149. False,
  150. ["data usage", "metadata usage", "higher than threshold"],
  151. ),
  152. ])
  153. def test_dm_usage(task_vars, driver_status, vg_free, success, expect_msg):
  154. check = dummy_check()
  155. check.get_vg_free = lambda pool, task_vars: vg_free
  156. result = check.check_dm_usage(driver_status, task_vars)
  157. result_success = not result.get("failed")
  158. assert result_success is success
  159. for msg in expect_msg:
  160. assert msg in result["msg"]
  161. @pytest.mark.parametrize('pool, command_returns, raises, returns', [
  162. (
  163. "foo-bar",
  164. { # vgs missing
  165. "msg": "[Errno 2] No such file or directory",
  166. "failed": True,
  167. "cmd": "/sbin/vgs",
  168. "rc": 2,
  169. },
  170. "Failed to run /sbin/vgs",
  171. None,
  172. ),
  173. (
  174. "foo", # no hyphen in name - should not happen
  175. {},
  176. "name does not have the expected format",
  177. None,
  178. ),
  179. (
  180. "foo-bar",
  181. dict(stdout=" 4.00g\n"),
  182. None,
  183. "4.00g",
  184. ),
  185. (
  186. "foo-bar",
  187. dict(stdout="\n"), # no matching VG
  188. "vgs did not find this VG",
  189. None,
  190. )
  191. ])
  192. def test_vg_free(pool, command_returns, raises, returns):
  193. def execute_module(module_name, module_args, tmp=None, task_vars=None):
  194. if module_name != "command":
  195. raise ValueError("not expecting module " + module_name)
  196. return command_returns
  197. check = dummy_check(execute_module=execute_module)
  198. if raises:
  199. with pytest.raises(OpenShiftCheckException) as err:
  200. check.get_vg_free(pool, {})
  201. assert raises in str(err.value)
  202. else:
  203. ret = check.get_vg_free(pool, {})
  204. assert ret == returns
  205. @pytest.mark.parametrize('string, expect_bytes', [
  206. ("12", 12.0),
  207. ("12 k", 12.0 * 1024),
  208. ("42.42 MB", 42.42 * 1024**2),
  209. ("12g", 12.0 * 1024**3),
  210. ])
  211. def test_convert_to_bytes(string, expect_bytes):
  212. got = DockerStorage.convert_to_bytes(string)
  213. assert got == expect_bytes
  214. @pytest.mark.parametrize('string', [
  215. "bork",
  216. "42 Qs",
  217. ])
  218. def test_convert_to_bytes_error(string):
  219. with pytest.raises(ValueError) as err:
  220. DockerStorage.convert_to_bytes(string)
  221. assert "Cannot convert" in str(err.value)
  222. assert string in str(err.value)
  223. ansible_mounts_enough = [{
  224. 'mount': '/var/lib/docker',
  225. 'size_available': 50 * 10**9,
  226. 'size_total': 50 * 10**9,
  227. }]
  228. ansible_mounts_not_enough = [{
  229. 'mount': '/var/lib/docker',
  230. 'size_available': 0,
  231. 'size_total': 50 * 10**9,
  232. }]
  233. ansible_mounts_missing_fields = [dict(mount='/var/lib/docker')]
  234. ansible_mounts_zero_size = [{
  235. 'mount': '/var/lib/docker',
  236. 'size_available': 0,
  237. 'size_total': 0,
  238. }]
  239. @pytest.mark.parametrize('ansible_mounts, threshold, expect_fail, expect_msg', [
  240. (
  241. ansible_mounts_enough,
  242. None,
  243. False,
  244. [],
  245. ),
  246. (
  247. ansible_mounts_not_enough,
  248. None,
  249. True,
  250. ["usage percentage", "higher than threshold"],
  251. ),
  252. (
  253. ansible_mounts_not_enough,
  254. "bogus percent",
  255. True,
  256. ["is not a percentage"],
  257. ),
  258. (
  259. ansible_mounts_missing_fields,
  260. None,
  261. True,
  262. ["Ansible bug"],
  263. ),
  264. (
  265. ansible_mounts_zero_size,
  266. None,
  267. True,
  268. ["Ansible bug"],
  269. ),
  270. ])
  271. def test_overlay_usage(ansible_mounts, threshold, expect_fail, expect_msg):
  272. check = dummy_check()
  273. task_vars = non_atomic_task_vars()
  274. task_vars["ansible_mounts"] = ansible_mounts
  275. if threshold is not None:
  276. task_vars["max_overlay_usage_percent"] = threshold
  277. docker_info = dict(DockerRootDir="/var/lib/docker", Driver="overlay")
  278. result = check.check_overlay_usage(docker_info, task_vars)
  279. assert expect_fail == bool(result.get("failed"))
  280. for msg in expect_msg:
  281. assert msg in result["msg"]