fluentd_config_test.py

import pytest

from openshift_checks.logging.fluentd_config import FluentdConfig, OpenShiftCheckException
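

# Builds a minimal fluentd pod manifest that reports Running/Ready, wrapping the
# given container list so individual test cases only vary the container env vars.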
def canned_fluentd_pod(containers):
    return {
        "metadata": {
            "labels": {"component": "fluentd", "deploymentconfig": "logging-fluentd"},
            "name": "logging-fluentd-1",
        },
        "spec": {
            "host": "node1",
            "nodeName": "node1",
            "containers": containers,
        },
        "status": {
            "phase": "Running",
            "containerStatuses": [{"ready": True}],
            "conditions": [{"status": "True", "type": "Ready"}],
        }
    }
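

# Canned pod whose single container sets USE_JOURNAL=true; used by the master-side
# failure case where docker is configured with the "json-file" driver.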
fluentd_pod = {
    "metadata": {
        "labels": {"component": "fluentd", "deploymentconfig": "logging-fluentd"},
        "name": "logging-fluentd-1",
    },
    "spec": {
        "host": "node1",
        "nodeName": "node1",
        "containers": [
            {
                "name": "container1",
                "env": [
                    {
                        "name": "USE_JOURNAL",
                        "value": "true",
                    }
                ],
            }
        ],
    },
    "status": {
        "phase": "Running",
        "containerStatuses": [{"ready": True}],
        "conditions": [{"status": "True", "type": "Ready"}],
    }
}
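
# Pod reporting phase "Unknown" with one container not ready.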
not_running_fluentd_pod = {
    "metadata": {
        "labels": {"component": "fluentd", "deploymentconfig": "logging-fluentd"},
        "name": "logging-fluentd-2",
    },
    "status": {
        "phase": "Unknown",
        "containerStatuses": [{"ready": True}, {"ready": False}],
        "conditions": [{"status": "True", "type": "Ready"}],
    }
}
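

# On non-master hosts the journald preference comes from the
# openshift_logging_fluentd_use_journal task variable and is compared against the
# docker "LoggingDriver" returned by the stubbed docker_info module call.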
@pytest.mark.parametrize('name, use_journald, logging_driver, extra_words', [
    (
        'test success with use_journald=false, and docker config set to use "json-file"',
        False,
        "json-file",
        [],
    ),
], ids=lambda argvals: argvals[0])
def test_check_logging_config_non_master(name, use_journald, logging_driver, extra_words):
    def execute_module(module_name, args):
        if module_name == "docker_info":
            return {
                "info": {
                    "LoggingDriver": logging_driver,
                }
            }
        return {}

    task_vars = dict(
        group_names=["nodes", "etcd"],
        openshift_logging_fluentd_use_journal=use_journald,
        openshift=dict(
            common=dict(config_base=""),
        ),
    )

    check = FluentdConfig(execute_module, task_vars)
    check.execute_module = execute_module
    error = check.check_logging_config()

    assert error is None
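

# A mismatch between the journald preference and the configured docker logging
# driver should return an error message that mentions both sides of the conflict.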
@pytest.mark.parametrize('name, use_journald, logging_driver, words', [
    (
        'test failure with use_journald=false, but docker config set to use "journald"',
        False,
        "journald",
        ['json log files', 'has been set to use "journald"'],
    ),
    (
        'test failure with use_journald=false, but docker config set to use an "unsupported" driver',
        False,
        "unsupported",
        ["json log files", 'has been set to use "unsupported"'],
    ),
    (
        'test failure with use_journald=true, but docker config set to use "json-file"',
        True,
        "json-file",
        ['logs from "journald"', 'has been set to use "json-file"'],
    ),
], ids=lambda argvals: argvals[0])
def test_check_logging_config_non_master_failed(name, use_journald, logging_driver, words):
    def execute_module(module_name, args):
        if module_name == "docker_info":
            return {
                "info": {
                    "LoggingDriver": logging_driver,
                }
            }
        return {}

    task_vars = dict(
        group_names=["nodes", "etcd"],
        openshift_logging_fluentd_use_journal=use_journald,
        openshift=dict(
            common=dict(config_base=""),
        ),
    )

    check = FluentdConfig(execute_module, task_vars)
    check.execute_module = execute_module
    error = check.check_logging_config()

    assert error is not None
    for word in words:
        assert word in error
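

# On masters the check reads the USE_JOURNAL env var from the running fluentd
# pod's containers (supplied here via a stubbed get_pods_for_component) rather
# than from task_vars.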
@pytest.mark.parametrize('name, pods, logging_driver, extra_words', [
    # use_journald returns false (not using journald), but check succeeds
    # since docker is set to use json-file
    (
        'test success with use_journald=false, and docker config set to use default driver "json-file"',
        [canned_fluentd_pod(
            [
                {
                    "name": "container1",
                    "env": [{
                        "name": "USE_JOURNAL",
                        "value": "false",
                    }],
                },
            ]
        )],
        "json-file",
        [],
    ),
    (
        'test success with USE_JOURNAL env var missing and docker config set to use default driver "json-file"',
        [canned_fluentd_pod(
            [
                {
                    "name": "container1",
                    "env": [{
                        "name": "RANDOM",
                        "value": "value",
                    }],
                },
            ]
        )],
        "json-file",
        [],
    ),
], ids=lambda argvals: argvals[0])
def test_check_logging_config_master(name, pods, logging_driver, extra_words):
    def execute_module(module_name, args):
        if module_name == "docker_info":
            return {
                "info": {
                    "LoggingDriver": logging_driver,
                }
            }
        return {}

    task_vars = dict(
        group_names=["masters"],
        openshift=dict(
            common=dict(config_base=""),
        ),
    )

    check = FluentdConfig(execute_module, task_vars)
    check.execute_module = execute_module
    check.get_pods_for_component = lambda _: pods
    error = check.check_logging_config()

    assert error is None
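

# Master-side failure cases: what the pod's USE_JOURNAL value (or its absence)
# implies conflicts with the docker logging driver, so an error is returned.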
@pytest.mark.parametrize('name, pods, logging_driver, words', [
    (
        'test failure with use_journald=false, but docker config set to use "journald"',
        [canned_fluentd_pod(
            [
                {
                    "name": "container1",
                    "env": [{
                        "name": "USE_JOURNAL",
                        "value": "false",
                    }],
                },
            ]
        )],
        "journald",
        ['json log files', 'has been set to use "journald"'],
    ),
    (
        'test failure with use_journald=true, but docker config set to use "json-file"',
        [fluentd_pod],
        "json-file",
        ['logs from "journald"', 'has been set to use "json-file"'],
    ),
    (
        'test failure with use_journald=false, but docker set to use an "unsupported" driver',
        [canned_fluentd_pod(
            [
                {
                    "name": "container1",
                    "env": [{
                        "name": "USE_JOURNAL",
                        "value": "false",
                    }],
                },
            ]
        )],
        "unsupported",
        ["json log files", 'has been set to use "unsupported"'],
    ),
    (
        'test failure with USE_JOURNAL env var missing and docker config set to use "journald"',
        [canned_fluentd_pod(
            [
                {
                    "name": "container1",
                    "env": [{
                        "name": "RANDOM",
                        "value": "value",
                    }],
                },
            ]
        )],
        "journald",
        ["configuration is set to", "json log files"],
    ),
], ids=lambda argvals: argvals[0])
def test_check_logging_config_master_failed(name, pods, logging_driver, words):
    def execute_module(module_name, args):
        if module_name == "docker_info":
            return {
                "info": {
                    "LoggingDriver": logging_driver,
                }
            }
        return {}

    task_vars = dict(
        group_names=["masters"],
        openshift=dict(
            common=dict(config_base=""),
        ),
    )

    check = FluentdConfig(execute_module, task_vars)
    check.execute_module = execute_module
    check.get_pods_for_component = lambda _: pods
    error = check.check_logging_config()

    assert error is not None
    for word in words:
        assert word in error
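

# A fluentd pod with no running containers, or a container with no env vars at
# all, is unexpected; the check raises OpenShiftCheckException instead of
# returning an error string.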
@pytest.mark.parametrize('name, pods, response, logging_driver, extra_words', [
    (
        'test OpenShiftCheckException with no running containers',
        [canned_fluentd_pod([])],
        {
            "failed": True,
            "result": "unexpected",
        },
        "json-file",
        ['no running containers'],
    ),
    (
        'test OpenShiftCheckException one container and no env vars set',
        [canned_fluentd_pod(
            [
                {
                    "name": "container1",
                    "env": [],
                },
            ]
        )],
        {
            "failed": True,
            "result": "unexpected",
        },
        "json-file",
        ['no environment variables'],
    ),
], ids=lambda argvals: argvals[0])
def test_check_logging_config_master_fails_on_unscheduled_deployment(name, pods, response, logging_driver, extra_words):
    def execute_module(module_name, args):
        if module_name == "docker_info":
            return {
                "info": {
                    "LoggingDriver": logging_driver,
                }
            }
        return {}

    task_vars = dict(
        group_names=["masters"],
        openshift=dict(
            common=dict(config_base=""),
        ),
    )

    check = FluentdConfig(execute_module, task_vars)
    check.get_pods_for_component = lambda _: pods

    with pytest.raises(OpenShiftCheckException) as error:
        check.check_logging_config()

    # assertions run after the with block so they execute once the exception is caught
    assert error is not None
    for word in extra_words:
        assert word in str(error)