From a4b4a11f904d7f70b53c7959e489d7aab72a9fa4 Mon Sep 17 00:00:00 2001
From: Eduardo Otubo <otubo@redhat.com>
Date: Wed, 15 May 2019 12:15:27 +0200
Subject: [PATCH 3/5] Azure: Changes to the Hyper-V KVP Reporter
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

RH-Author: Eduardo Otubo <otubo@redhat.com>
Message-id: <20190515121529.11191-4-otubo@redhat.com>
Patchwork-id: 87885
O-Subject: [rhel-7 cloud-init PATCHv2 3/5] Azure: Changes to the Hyper-V KVP Reporter
Bugzilla: 1687565
RH-Acked-by: Vitaly Kuznetsov <vkuznets@redhat.com>
RH-Acked-by: Mohammed Gamal <mgamal@redhat.com>

From: Anh Vo <anhvo@microsoft.com>

BZ: 1687565
BRANCH: rhel7/master-18.5
UPSTREAM: 86674f01
BREW: 21696239

commit 86674f013dfcea3c075ab41373ffb475881066f6
Author: Anh Vo <anhvo@microsoft.com>
Date:   Mon Apr 29 20:22:16 2019 +0000

    Azure: Changes to the Hyper-V KVP Reporter

     + Truncate KVP Pool file to prevent stale entries from
       being processed by the Hyper-V KVP reporter.
     + Drop filtering of KVPs as it is no longer needed.
     + Batch appending of existing KVP entries.

Signed-off-by: Eduardo Otubo <otubo@redhat.com>
Signed-off-by: Miroslav Rezanina <mrezanin@redhat.com>
---
 cloudinit/reporting/handlers.py          | 117 +++++++++++++++----------------
 tests/unittests/test_reporting_hyperv.py | 104 +++++++++++++--------------
 2 files changed, 106 insertions(+), 115 deletions(-)
 mode change 100644 => 100755 cloudinit/reporting/handlers.py
 mode change 100644 => 100755 tests/unittests/test_reporting_hyperv.py

diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py
old mode 100644
new mode 100755
index 6d23558..10165ae
--- a/cloudinit/reporting/handlers.py
+++ b/cloudinit/reporting/handlers.py
@@ -5,7 +5,6 @@ import fcntl
 import json
 import six
 import os
-import re
 import struct
 import threading
 import time
@@ -14,6 +13,7 @@ from cloudinit import log as logging
 from cloudinit.registry import DictRegistry
 from cloudinit import (url_helper, util)
 from datetime import datetime
+from six.moves.queue import Empty as QueueEmptyError
 
 if six.PY2:
     from multiprocessing.queues import JoinableQueue as JQueue
@@ -129,24 +129,50 @@ class HyperVKvpReportingHandler(ReportingHandler):
     DESC_IDX_KEY = 'msg_i'
     JSON_SEPARATORS = (',', ':')
     KVP_POOL_FILE_GUEST = '/var/lib/hyperv/.kvp_pool_1'
+    _already_truncated_pool_file = False
 
     def __init__(self,
                  kvp_file_path=KVP_POOL_FILE_GUEST,
                  event_types=None):
         super(HyperVKvpReportingHandler, self).__init__()
         self._kvp_file_path = kvp_file_path
+        HyperVKvpReportingHandler._truncate_guest_pool_file(
+            self._kvp_file_path)
+
         self._event_types = event_types
         self.q = JQueue()
-        self.kvp_file = None
         self.incarnation_no = self._get_incarnation_no()
         self.event_key_prefix = u"{0}|{1}".format(self.EVENT_PREFIX,
                                                   self.incarnation_no)
-        self._current_offset = 0
         self.publish_thread = threading.Thread(
                 target=self._publish_event_routine)
         self.publish_thread.daemon = True
         self.publish_thread.start()
 
+    @classmethod
+    def _truncate_guest_pool_file(cls, kvp_file):
+        """
+        Truncate the pool file if it has not been truncated since boot.
+        This should be done exactly once for the file indicated by
+        KVP_POOL_FILE_GUEST constant above. This method takes a filename
+        so that we can use an arbitrary file during unit testing.
+        Since KVP is a best-effort telemetry channel we only attempt to
+        truncate the file once and only if the file has not been modified
+        since boot. Additional truncation can lead to loss of existing
+        KVPs.
+        """
+        if cls._already_truncated_pool_file:
+            return
+        boot_time = time.time() - float(util.uptime())
+        try:
+            if os.path.getmtime(kvp_file) < boot_time:
+                with open(kvp_file, "w"):
+                    pass
+        except (OSError, IOError) as e:
+            LOG.warning("failed to truncate kvp pool file, %s", e)
+        finally:
+            cls._already_truncated_pool_file = True
+
     def _get_incarnation_no(self):
         """
         use the time passed as the incarnation number.
@@ -162,20 +188,15 @@ class HyperVKvpReportingHandler(ReportingHandler):
 
     def _iterate_kvps(self, offset):
         """iterate the kvp file from the current offset."""
-        try:
-            with open(self._kvp_file_path, 'rb+') as f:
-                self.kvp_file = f
-                fcntl.flock(f, fcntl.LOCK_EX)
-                f.seek(offset)
+        with open(self._kvp_file_path, 'rb') as f:
+            fcntl.flock(f, fcntl.LOCK_EX)
+            f.seek(offset)
+            record_data = f.read(self.HV_KVP_RECORD_SIZE)
+            while len(record_data) == self.HV_KVP_RECORD_SIZE:
+                kvp_item = self._decode_kvp_item(record_data)
+                yield kvp_item
                 record_data = f.read(self.HV_KVP_RECORD_SIZE)
-                while len(record_data) == self.HV_KVP_RECORD_SIZE:
-                    self._current_offset += self.HV_KVP_RECORD_SIZE
-                    kvp_item = self._decode_kvp_item(record_data)
-                    yield kvp_item
-                    record_data = f.read(self.HV_KVP_RECORD_SIZE)
-                fcntl.flock(f, fcntl.LOCK_UN)
-        finally:
-            self.kvp_file = None
+            fcntl.flock(f, fcntl.LOCK_UN)
 
     def _event_key(self, event):
         """
@@ -207,23 +228,13 @@ class HyperVKvpReportingHandler(ReportingHandler):
 
         return {'key': k, 'value': v}
 
-    def _update_kvp_item(self, record_data):
-        if self.kvp_file is None:
-            raise ReportException(
-                "kvp file '{0}' not opened."
-                .format(self._kvp_file_path))
-        self.kvp_file.seek(-self.HV_KVP_RECORD_SIZE, 1)
-        self.kvp_file.write(record_data)
-
     def _append_kvp_item(self, record_data):
-        with open(self._kvp_file_path, 'rb+') as f:
+        with open(self._kvp_file_path, 'ab') as f:
             fcntl.flock(f, fcntl.LOCK_EX)
-            # seek to end of the file
-            f.seek(0, 2)
-            f.write(record_data)
+            for data in record_data:
+                f.write(data)
             f.flush()
             fcntl.flock(f, fcntl.LOCK_UN)
-            self._current_offset = f.tell()
 
     def _break_down(self, key, meta_data, description):
         del meta_data[self.MSG_KEY]
@@ -279,40 +290,26 @@ class HyperVKvpReportingHandler(ReportingHandler):
 
     def _publish_event_routine(self):
         while True:
+            items_from_queue = 0
             try:
                 event = self.q.get(block=True)
-                need_append = True
+                items_from_queue += 1
+                encoded_data = []
+                while event is not None:
+                    encoded_data += self._encode_event(event)
+                    try:
+                        # get all the rest of the events in the queue
+                        event = self.q.get(block=False)
+                        items_from_queue += 1
+                    except QueueEmptyError:
+                        event = None
                 try:
-                    if not os.path.exists(self._kvp_file_path):
-                        LOG.warning(
-                            "skip writing events %s to %s. file not present.",
-                            event.as_string(),
-                            self._kvp_file_path)
-                    encoded_event = self._encode_event(event)
-                    # for each encoded_event
-                    for encoded_data in (encoded_event):
-                        for kvp in self._iterate_kvps(self._current_offset):
-                            match = (
-                                re.match(
-                                    r"^{0}\|(\d+)\|.+"
-                                    .format(self.EVENT_PREFIX),
-                                    kvp['key']
-                                ))
-                            if match:
-                                match_groups = match.groups(0)
-                                if int(match_groups[0]) < self.incarnation_no:
-                                    need_append = False
-                                    self._update_kvp_item(encoded_data)
-                                    continue
-                        if need_append:
-                            self._append_kvp_item(encoded_data)
-                except IOError as e:
-                    LOG.warning(
-                        "failed posting event to kvp: %s e:%s",
-                        event.as_string(), e)
+                    self._append_kvp_item(encoded_data)
+                except (OSError, IOError) as e:
+                    LOG.warning("failed posting events to kvp, %s", e)
                 finally:
-                    self.q.task_done()
-
+                    for _ in range(items_from_queue):
+                        self.q.task_done()
             # when main process exits, q.get() will through EOFError
             # indicating we should exit this thread.
             except EOFError:
@@ -322,7 +319,7 @@ class HyperVKvpReportingHandler(ReportingHandler):
     # if the kvp pool already contains a chunk of data,
     # so defer it to another thread.
     def publish_event(self, event):
-        if (not self._event_types or event.event_type in self._event_types):
+        if not self._event_types or event.event_type in self._event_types:
             self.q.put(event)
 
     def flush(self):
diff --git a/tests/unittests/test_reporting_hyperv.py b/tests/unittests/test_reporting_hyperv.py
old mode 100644
new mode 100755
index 2e64c6c..d01ed5b
--- a/tests/unittests/test_reporting_hyperv.py
+++ b/tests/unittests/test_reporting_hyperv.py
@@ -1,10 +1,12 @@
 # This file is part of cloud-init. See LICENSE file for license information.
 
 from cloudinit.reporting import events
-from cloudinit.reporting import handlers
+from cloudinit.reporting.handlers import HyperVKvpReportingHandler
 
 import json
 import os
+import struct
+import time
 
 from cloudinit import util
 from cloudinit.tests.helpers import CiTestCase
@@ -13,7 +15,7 @@ from cloudinit.tests.helpers import CiTestCase
 class TestKvpEncoding(CiTestCase):
     def test_encode_decode(self):
         kvp = {'key': 'key1', 'value': 'value1'}
-        kvp_reporting = handlers.HyperVKvpReportingHandler()
+        kvp_reporting = HyperVKvpReportingHandler()
         data = kvp_reporting._encode_kvp_item(kvp['key'], kvp['value'])
         self.assertEqual(len(data), kvp_reporting.HV_KVP_RECORD_SIZE)
         decoded_kvp = kvp_reporting._decode_kvp_item(data)
@@ -26,57 +28,9 @@ class TextKvpReporter(CiTestCase):
         self.tmp_file_path = self.tmp_path('kvp_pool_file')
         util.ensure_file(self.tmp_file_path)
 
-    def test_event_type_can_be_filtered(self):
-        reporter = handlers.HyperVKvpReportingHandler(
-            kvp_file_path=self.tmp_file_path,
-            event_types=['foo', 'bar'])
-
-        reporter.publish_event(
-            events.ReportingEvent('foo', 'name', 'description'))
-        reporter.publish_event(
-            events.ReportingEvent('some_other', 'name', 'description3'))
-        reporter.q.join()
-
-        kvps = list(reporter._iterate_kvps(0))
-        self.assertEqual(1, len(kvps))
-
-        reporter.publish_event(
-            events.ReportingEvent('bar', 'name', 'description2'))
-        reporter.q.join()
-        kvps = list(reporter._iterate_kvps(0))
-        self.assertEqual(2, len(kvps))
-
-        self.assertIn('foo', kvps[0]['key'])
-        self.assertIn('bar', kvps[1]['key'])
-        self.assertNotIn('some_other', kvps[0]['key'])
-        self.assertNotIn('some_other', kvps[1]['key'])
-
-    def test_events_are_over_written(self):
-        reporter = handlers.HyperVKvpReportingHandler(
-            kvp_file_path=self.tmp_file_path)
-
-        self.assertEqual(0, len(list(reporter._iterate_kvps(0))))
-
-        reporter.publish_event(
-            events.ReportingEvent('foo', 'name1', 'description'))
-        reporter.publish_event(
-            events.ReportingEvent('foo', 'name2', 'description'))
-        reporter.q.join()
-        self.assertEqual(2, len(list(reporter._iterate_kvps(0))))
-
-        reporter2 = handlers.HyperVKvpReportingHandler(
-            kvp_file_path=self.tmp_file_path)
-        reporter2.incarnation_no = reporter.incarnation_no + 1
-        reporter2.publish_event(
-            events.ReportingEvent('foo', 'name3', 'description'))
-        reporter2.q.join()
-
-        self.assertEqual(2, len(list(reporter2._iterate_kvps(0))))
-
     def test_events_with_higher_incarnation_not_over_written(self):
-        reporter = handlers.HyperVKvpReportingHandler(
+        reporter = HyperVKvpReportingHandler(
             kvp_file_path=self.tmp_file_path)
-
         self.assertEqual(0, len(list(reporter._iterate_kvps(0))))
 
         reporter.publish_event(
@@ -86,7 +40,7 @@ class TextKvpReporter(CiTestCase):
         reporter.q.join()
         self.assertEqual(2, len(list(reporter._iterate_kvps(0))))
 
-        reporter3 = handlers.HyperVKvpReportingHandler(
+        reporter3 = HyperVKvpReportingHandler(
             kvp_file_path=self.tmp_file_path)
         reporter3.incarnation_no = reporter.incarnation_no - 1
         reporter3.publish_event(
@@ -95,7 +49,7 @@ class TextKvpReporter(CiTestCase):
         self.assertEqual(3, len(list(reporter3._iterate_kvps(0))))
 
     def test_finish_event_result_is_logged(self):
-        reporter = handlers.HyperVKvpReportingHandler(
+        reporter = HyperVKvpReportingHandler(
             kvp_file_path=self.tmp_file_path)
         reporter.publish_event(
             events.FinishReportingEvent('name2', 'description1',
@@ -105,7 +59,7 @@ class TextKvpReporter(CiTestCase):
 
     def test_file_operation_issue(self):
         os.remove(self.tmp_file_path)
-        reporter = handlers.HyperVKvpReportingHandler(
+        reporter = HyperVKvpReportingHandler(
             kvp_file_path=self.tmp_file_path)
         reporter.publish_event(
             events.FinishReportingEvent('name2', 'description1',
@@ -113,7 +67,7 @@ class TextKvpReporter(CiTestCase):
         reporter.q.join()
 
     def test_event_very_long(self):
-        reporter = handlers.HyperVKvpReportingHandler(
+        reporter = HyperVKvpReportingHandler(
             kvp_file_path=self.tmp_file_path)
         description = 'ab' * reporter.HV_KVP_EXCHANGE_MAX_VALUE_SIZE
         long_event = events.FinishReportingEvent(
@@ -132,3 +86,43 @@ class TextKvpReporter(CiTestCase):
             self.assertEqual(msg_slice['msg_i'], i)
             full_description += msg_slice['msg']
         self.assertEqual(description, full_description)
+
+    def test_not_truncate_kvp_file_modified_after_boot(self):
+        with open(self.tmp_file_path, "wb+") as f:
+            kvp = {'key': 'key1', 'value': 'value1'}
+            data = (struct.pack("%ds%ds" % (
+                    HyperVKvpReportingHandler.HV_KVP_EXCHANGE_MAX_KEY_SIZE,
+                    HyperVKvpReportingHandler.HV_KVP_EXCHANGE_MAX_VALUE_SIZE),
+                    kvp['key'].encode('utf-8'), kvp['value'].encode('utf-8')))
+            f.write(data)
+        cur_time = time.time()
+        os.utime(self.tmp_file_path, (cur_time, cur_time))
+
+        # reset this because the unit test framework
+        # has already polluted the class variable
+        HyperVKvpReportingHandler._already_truncated_pool_file = False
+
+        reporter = HyperVKvpReportingHandler(kvp_file_path=self.tmp_file_path)
+        kvps = list(reporter._iterate_kvps(0))
+        self.assertEqual(1, len(kvps))
+
+    def test_truncate_stale_kvp_file(self):
+        with open(self.tmp_file_path, "wb+") as f:
+            kvp = {'key': 'key1', 'value': 'value1'}
+            data = (struct.pack("%ds%ds" % (
+                HyperVKvpReportingHandler.HV_KVP_EXCHANGE_MAX_KEY_SIZE,
+                HyperVKvpReportingHandler.HV_KVP_EXCHANGE_MAX_VALUE_SIZE),
+                kvp['key'].encode('utf-8'), kvp['value'].encode('utf-8')))
+            f.write(data)
+
+        # set the time ways back to make it look like
+        # we had an old kvp file
+        os.utime(self.tmp_file_path, (1000000, 1000000))
+
+        # reset this because the unit test framework
+        # has already polluted the class variable
+        HyperVKvpReportingHandler._already_truncated_pool_file = False
+
+        reporter = HyperVKvpReportingHandler(kvp_file_path=self.tmp_file_path)
+        kvps = list(reporter._iterate_kvps(0))
+        self.assertEqual(0, len(kvps))
-- 
1.8.3.1