test_db_stats_fetcher.py

# Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
# This source code is licensed under both the GPLv2 (found in the
# COPYING file in the root directory) and Apache 2.0 License
# (found in the LICENSE.Apache file in the root directory).
from advisor.db_stats_fetcher import LogStatsParser, DatabasePerfContext
from advisor.db_timeseries_parser import NO_ENTITY
from advisor.rule_parser import Condition, TimeSeriesCondition
import os
import time
import unittest
from unittest.mock import MagicMock
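
# These tests cover two stats fetchers from the RocksDB advisor:
# LogStatsParser, which builds per-statistic timeseries of the shape
# {entity: {stat_name: {timestamp: value}}} out of a LOG file, and
# DatabasePerfContext, which does the same for perf-context counters.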


class TestLogStatsParser(unittest.TestCase):
    def setUp(self):
        this_path = os.path.abspath(os.path.dirname(__file__))
        stats_file = os.path.join(
            this_path, 'input_files/log_stats_parser_keys_ts'
        )
        # populate the keys_ts dictionary of LogStatsParser
        self.stats_dict = {NO_ENTITY: {}}
        with open(stats_file, 'r') as fp:
            for line in fp:
                stat_name = line.split(':')[0].strip()
                self.stats_dict[NO_ENTITY][stat_name] = {}
                token_list = line.split(':')[1].strip().split(',')
                for token in token_list:
                    timestamp = int(token.split()[0])
                    value = float(token.split()[1])
                    self.stats_dict[NO_ENTITY][stat_name][timestamp] = value
        self.log_stats_parser = LogStatsParser('dummy_log_file', 20)
        self.log_stats_parser.keys_ts = self.stats_dict
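
    # per the parsing above, each line of that input file has the form
    #   <stat_name>: <ts1> <val1>, <ts2> <val2>, ...
    # e.g. (illustrative, with values consistent with the triggers below):
    #   rocksdb.db.get.micros.p50: 1530896414 16.31508, 1530896440 16.346602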

    def test_check_and_trigger_conditions_bursty(self):
        # mock fetch_timeseries() because 'keys_ts' has been pre-populated
        self.log_stats_parser.fetch_timeseries = MagicMock()
        # condition: bursty
        cond1 = Condition('cond-1')
        cond1 = TimeSeriesCondition.create(cond1)
        cond1.set_parameter('keys', 'rocksdb.db.get.micros.p50')
        cond1.set_parameter('behavior', 'bursty')
        cond1.set_parameter('window_sec', 40)
        cond1.set_parameter('rate_threshold', 0)
        self.log_stats_parser.check_and_trigger_conditions([cond1])
        expected_cond_trigger = {
            NO_ENTITY: {1530896440: 0.9767546362322214}
        }
        self.assertDictEqual(expected_cond_trigger, cond1.get_trigger())
        # ensure that fetch_timeseries() was called once
        self.log_stats_parser.fetch_timeseries.assert_called_once()
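
    # (the trigger above maps NO_ENTITY to {timestamp: rate}; the rate value
    # is presumably the rate of change that exceeded 'rate_threshold' within
    # a 'window_sec'-wide window)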

    def test_check_and_trigger_conditions_eval_agg(self):
        # mock fetch_timeseries() because 'keys_ts' has been pre-populated
        self.log_stats_parser.fetch_timeseries = MagicMock()
        # condition: evaluate_expression
        cond1 = Condition('cond-1')
        cond1 = TimeSeriesCondition.create(cond1)
        cond1.set_parameter('keys', 'rocksdb.db.get.micros.p50')
        cond1.set_parameter('behavior', 'evaluate_expression')
        keys = [
            'rocksdb.manifest.file.sync.micros.p99',
            'rocksdb.db.get.micros.p50'
        ]
        cond1.set_parameter('keys', keys)
        cond1.set_parameter('aggregation_op', 'latest')
        # condition evaluates to FALSE, so no trigger is set
        cond1.set_parameter('evaluate', 'keys[0]-(keys[1]*100)>200')
        self.log_stats_parser.check_and_trigger_conditions([cond1])
        self.assertIsNone(cond1.get_trigger())
        # condition evaluates to TRUE
        cond1.set_parameter('evaluate', 'keys[0]-(keys[1]*100)<200')
        self.log_stats_parser.check_and_trigger_conditions([cond1])
        expected_cond_trigger = {NO_ENTITY: [1792.0, 15.9638]}
        self.assertDictEqual(expected_cond_trigger, cond1.get_trigger())
        # ensure that fetch_timeseries() was called
        self.log_stats_parser.fetch_timeseries.assert_called()
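
    # (with aggregation_op 'latest', each series in 'keys' is first collapsed
    # to its most recent value, so the trigger is a single aggregated list per
    # entity rather than per-timestamp matches as in the test below)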

    def test_check_and_trigger_conditions_eval(self):
        # mock fetch_timeseries() because 'keys_ts' has been pre-populated
        self.log_stats_parser.fetch_timeseries = MagicMock()
        # condition: evaluate_expression
        cond1 = Condition('cond-1')
        cond1 = TimeSeriesCondition.create(cond1)
        cond1.set_parameter('keys', 'rocksdb.db.get.micros.p50')
        cond1.set_parameter('behavior', 'evaluate_expression')
        keys = [
            'rocksdb.manifest.file.sync.micros.p99',
            'rocksdb.db.get.micros.p50'
        ]
        cond1.set_parameter('keys', keys)
        cond1.set_parameter('evaluate', 'keys[0]-(keys[1]*100)>500')
        self.log_stats_parser.check_and_trigger_conditions([cond1])
        expected_trigger = {NO_ENTITY: {
            1530896414: [9938.0, 16.31508],
            1530896440: [9938.0, 16.346602],
            1530896466: [9938.0, 16.284669],
            1530896492: [9938.0, 16.16005]
        }}
        self.assertDictEqual(expected_trigger, cond1.get_trigger())
        self.log_stats_parser.fetch_timeseries.assert_called_once()
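
    # (without an aggregation_op, the expression is evaluated at every
    # timestamp; the trigger records the keys' values at each timestamp
    # where the expression held)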


class TestDatabasePerfContext(unittest.TestCase):
    def test_unaccumulate_metrics(self):
        perf_dict = {
            "user_key_comparison_count": 675903942,
            "block_cache_hit_count": 830086,
        }
        timestamp = int(time.time())
        perf_ts = {}
        # build cumulative series: start_val + 0, 2, 8, 18, 32 at 10s intervals
        for key in perf_dict:
            perf_ts[key] = {}
            start_val = perf_dict[key]
            for ix in range(5):
                perf_ts[key][timestamp + (ix * 10)] = start_val + (2 * ix * ix)
        # the final True flag marks the counters as cumulative, so the
        # constructor should unaccumulate them into per-interval deltas
        db_perf_context = DatabasePerfContext(perf_ts, 10, True)
        # expected deltas: 2, 6, 10, 14 (the first point is dropped)
        timestamps = [timestamp + (ix * 10) for ix in range(1, 5)]
        values = list(range(2, 15, 4))
        inner_dict = {timestamps[ix]: values[ix] for ix in range(4)}
        expected_keys_ts = {NO_ENTITY: {
            'user_key_comparison_count': inner_dict,
            'block_cache_hit_count': inner_dict
        }}
        self.assertDictEqual(expected_keys_ts, db_perf_context.keys_ts)
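

# entry point so the file can be run directly ("python test_db_stats_fetcher.py");
# unittest's test discovery does not need this guard
if __name__ == '__main__':
    unittest.main()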