From 7765776d538e61639d1ea920919211f780b75d13 Mon Sep 17 00:00:00 2001
From: Eduardo Otubo <otubo@redhat.com>
Date: Wed, 15 May 2019 12:15:26 +0200
Subject: [PATCH 2/5] DatasourceAzure: add additional logging for azure
 datasource

RH-Author: Eduardo Otubo <otubo@redhat.com>
Message-id: <20190515121529.11191-3-otubo@redhat.com>
Patchwork-id: 87882
O-Subject: [rhel-7 cloud-init PATCHv2 2/5] DatasourceAzure: add additional logging for azure datasource
Bugzilla: 1687565
RH-Acked-by: Vitaly Kuznetsov <vkuznets@redhat.com>
RH-Acked-by: Mohammed Gamal <mgamal@redhat.com>

From: Anh Vo <anhvo@microsoft.com>

BZ: 1687565
BRANCH: rhel7/master-18.5
UPSTREAM: 0d8c8839
BREW: 21696239

commit 0d8c88393b51db6454491a379dcc2e691551217a
Author: Anh Vo <anhvo@microsoft.com>
Date:   Wed Apr 3 18:23:18 2019 +0000

    DatasourceAzure: add additional logging for azure datasource

    Create an Azure logging decorator and use additional ReportEventStack
    context managers to provide additional logging details.
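
    The pattern is simple: a module-level ReportEventStack serves as the
    parent event, a decorator wraps each instrumented function in a child
    event named after it, and longer code paths open explicit
    ReportEventStack context managers for finer-grained steps. A minimal,
    self-contained sketch of that pattern follows; the ReportEventStack
    class below is a simplified stand-in used only for illustration, not
    the real cloudinit.reporting.events implementation shown in the diff.

        import functools

        class ReportEventStack:
            """Stand-in: emit start/finish messages for a named step."""
            def __init__(self, name, description, parent=None,
                         reporting_enabled=True):
                self.name = name
                self.description = description

            def __enter__(self):
                print("start: %s (%s)" % (self.name, self.description))
                return self

            def __exit__(self, exc_type, exc_value, traceback):
                print("finish: %s (%s)" % (self.name, self.description))
                return False  # never swallow exceptions

        azure_ds_reporter = ReportEventStack(
            name="azure-ds", description="initialize reporter for azure ds")

        def azure_ds_telemetry_reporter(func):
            """Report each call to func as a child of azure_ds_reporter."""
            @functools.wraps(func)
            def impl(*args, **kwargs):
                with ReportEventStack(name=func.__name__,
                                      description=func.__name__,
                                      parent=azure_ds_reporter):
                    return func(*args, **kwargs)
            return impl

        @azure_ds_telemetry_reporter
        def crawl_metadata():
            # finer-grained sub-steps get their own named event stacks
            with ReportEventStack(name="example-sub-step",
                                  description="illustrative nested event",
                                  parent=azure_ds_reporter):
                return {}

        crawl_metadata()

    Applied to the datasource, each decorated function then shows up as a
    named event in cloud-init's reporting output without changing its
    behavior.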

Signed-off-by: Eduardo Otubo <otubo@redhat.com>
Signed-off-by: Miroslav Rezanina <mrezanin@redhat.com>
---
 cloudinit/sources/DataSourceAzure.py | 231 ++++++++++++++++++++++-------------
 cloudinit/sources/helpers/azure.py   |  31 +++++
 2 files changed, 179 insertions(+), 83 deletions(-)
 mode change 100644 => 100755 cloudinit/sources/DataSourceAzure.py
 mode change 100644 => 100755 cloudinit/sources/helpers/azure.py

diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
old mode 100644
new mode 100755
index a768b2c..c827816
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -21,10 +21,14 @@ from cloudinit import net
 from cloudinit.event import EventType
 from cloudinit.net.dhcp import EphemeralDHCPv4
 from cloudinit import sources
-from cloudinit.sources.helpers.azure import get_metadata_from_fabric
 from cloudinit.sources.helpers import netlink
 from cloudinit.url_helper import UrlError, readurl, retry_on_url_exc
 from cloudinit import util
+from cloudinit.reporting import events
+
+from cloudinit.sources.helpers.azure import (azure_ds_reporter,
+                                             azure_ds_telemetry_reporter,
+                                             get_metadata_from_fabric)
 
 LOG = logging.getLogger(__name__)
 
@@ -244,6 +248,7 @@ def set_hostname(hostname, hostname_command='hostname'):
     util.subp(['hostnamectl', 'set-hostname', str(hostname)])
 
 
+@azure_ds_telemetry_reporter
 @contextlib.contextmanager
 def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'):
     """
@@ -290,6 +295,7 @@ class DataSourceAzure(sources.DataSource):
         root = sources.DataSource.__str__(self)
         return "%s [seed=%s]" % (root, self.seed)
 
+    @azure_ds_telemetry_reporter
     def bounce_network_with_azure_hostname(self):
         # When using cloud-init to provision, we have to set the hostname from
         # the metadata and "bounce" the network to force DDNS to update via
@@ -315,6 +321,7 @@ class DataSourceAzure(sources.DataSource):
                     util.logexc(LOG, "handling set_hostname failed")
         return False
 
+    @azure_ds_telemetry_reporter
     def get_metadata_from_agent(self):
         temp_hostname = self.metadata.get('local-hostname')
         agent_cmd = self.ds_cfg['agent_command']
@@ -344,15 +351,18 @@ class DataSourceAzure(sources.DataSource):
                 LOG.debug("ssh authentication: "
                           "using fingerprint from fabirc")
 
-        # wait very long for public SSH keys to arrive
-        # https://bugs.launchpad.net/cloud-init/+bug/1717611
-        missing = util.log_time(logfunc=LOG.debug,
-                                msg="waiting for SSH public key files",
-                                func=util.wait_for_files,
-                                args=(fp_files, 900))
-
-        if len(missing):
-            LOG.warning("Did not find files, but going on: %s", missing)
+        with events.ReportEventStack(
+                name="waiting-for-ssh-public-key",
+                description="wait for agents to retrieve ssh keys",
+                parent=azure_ds_reporter):
+            # wait very long for public SSH keys to arrive
+            # https://bugs.launchpad.net/cloud-init/+bug/1717611
+            missing = util.log_time(logfunc=LOG.debug,
+                                    msg="waiting for SSH public key files",
+                                    func=util.wait_for_files,
+                                    args=(fp_files, 900))
+            if len(missing):
+                LOG.warning("Did not find files, but going on: %s", missing)
 
         metadata = {}
         metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files)
@@ -366,6 +376,7 @@ class DataSourceAzure(sources.DataSource):
             subplatform_type = 'seed-dir'
         return '%s (%s)' % (subplatform_type, self.seed)
 
+    @azure_ds_telemetry_reporter
     def crawl_metadata(self):
         """Walk all instance metadata sources returning a dict on success.
 
@@ -467,6 +478,7 @@ class DataSourceAzure(sources.DataSource):
         super(DataSourceAzure, self).clear_cached_attrs(attr_defaults)
         self._metadata_imds = sources.UNSET
 
+    @azure_ds_telemetry_reporter
     def _get_data(self):
         """Crawl and process datasource metadata caching metadata as attrs.
 
@@ -513,6 +525,7 @@ class DataSourceAzure(sources.DataSource):
         # quickly (local check only) if self.instance_id is still valid
         return sources.instance_id_matches_system_uuid(self.get_instance_id())
 
+    @azure_ds_telemetry_reporter
     def setup(self, is_new_instance):
         if self._negotiated is False:
             LOG.debug("negotiating for %s (new_instance=%s)",
@@ -580,6 +593,7 @@ class DataSourceAzure(sources.DataSource):
                 if nl_sock:
                     nl_sock.close()
 
+    @azure_ds_telemetry_reporter
     def _report_ready(self, lease):
         """Tells the fabric provisioning has completed """
         try:
@@ -617,9 +631,14 @@ class DataSourceAzure(sources.DataSource):
     def _reprovision(self):
         """Initiate the reprovisioning workflow."""
         contents = self._poll_imds()
-        md, ud, cfg = read_azure_ovf(contents)
-        return (md, ud, cfg, {'ovf-env.xml': contents})
-
+        with events.ReportEventStack(
+                name="reprovisioning-read-azure-ovf",
+                description="read azure ovf during reprovisioning",
+                parent=azure_ds_reporter):
+            md, ud, cfg = read_azure_ovf(contents)
+            return (md, ud, cfg, {'ovf-env.xml': contents})
+
+    @azure_ds_telemetry_reporter
     def _negotiate(self):
         """Negotiate with fabric and return data from it.
 
@@ -652,6 +671,7 @@ class DataSourceAzure(sources.DataSource):
         util.del_file(REPROVISION_MARKER_FILE)
         return fabric_data
 
+    @azure_ds_telemetry_reporter
     def activate(self, cfg, is_new_instance):
         address_ephemeral_resize(is_new_instance=is_new_instance,
                                  preserve_ntfs=self.ds_cfg.get(
@@ -690,12 +710,14 @@ def _partitions_on_device(devpath, maxnum=16):
     return []
 
 
+@azure_ds_telemetry_reporter
 def _has_ntfs_filesystem(devpath):
     ntfs_devices = util.find_devs_with("TYPE=ntfs", no_cache=True)
     LOG.debug('ntfs_devices found = %s', ntfs_devices)
     return os.path.realpath(devpath) in ntfs_devices
 
 
+@azure_ds_telemetry_reporter
 def can_dev_be_reformatted(devpath, preserve_ntfs):
     """Determine if the ephemeral drive at devpath should be reformatted.
 
@@ -744,43 +766,59 @@ def can_dev_be_reformatted(devpath, preserve_ntfs):
                (cand_part, cand_path, devpath))
         return False, msg
 
+    @azure_ds_telemetry_reporter
     def count_files(mp):
         ignored = set(['dataloss_warning_readme.txt'])
         return len([f for f in os.listdir(mp) if f.lower() not in ignored])
 
     bmsg = ('partition %s (%s) on device %s was ntfs formatted' %
             (cand_part, cand_path, devpath))
-    try:
-        file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",
-                                   update_env_for_mount={'LANG': 'C'})
-    except util.MountFailedError as e:
-        if "unknown filesystem type 'ntfs'" in str(e):
-            return True, (bmsg + ' but this system cannot mount NTFS,'
-                          ' assuming there are no important files.'
-                          ' Formatting allowed.')
-        return False, bmsg + ' but mount of %s failed: %s' % (cand_part, e)
-
-    if file_count != 0:
-        LOG.warning("it looks like you're using NTFS on the ephemeral disk, "
-                    'to ensure that filesystem does not get wiped, set '
-                    '%s.%s in config', '.'.join(DS_CFG_PATH),
-                    DS_CFG_KEY_PRESERVE_NTFS)
-        return False, bmsg + ' but had %d files on it.' % file_count
+
+    with events.ReportEventStack(
+                name="mount-ntfs-and-count",
+                description="mount-ntfs-and-count",
+                parent=azure_ds_reporter) as evt:
+        try:
+            file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",
+                                       update_env_for_mount={'LANG': 'C'})
+        except util.MountFailedError as e:
+            evt.description = "cannot mount ntfs"
+            if "unknown filesystem type 'ntfs'" in str(e):
+                return True, (bmsg + ' but this system cannot mount NTFS,'
+                              ' assuming there are no important files.'
+                              ' Formatting allowed.')
+            return False, bmsg + ' but mount of %s failed: %s' % (cand_part, e)
+
+        if file_count != 0:
+            evt.description = "mounted and counted %d files" % file_count
+            LOG.warning("it looks like you're using NTFS on the ephemeral"
+                        " disk, to ensure that filesystem does not get wiped,"
+                        " set %s.%s in config", '.'.join(DS_CFG_PATH),
+                        DS_CFG_KEY_PRESERVE_NTFS)
+            return False, bmsg + ' but had %d files on it.' % file_count
 
     return True, bmsg + ' and had no important files. Safe for reformatting.'
 
 
+@azure_ds_telemetry_reporter
 def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
                              is_new_instance=False, preserve_ntfs=False):
     # wait for ephemeral disk to come up
     naplen = .2
-    missing = util.wait_for_files([devpath], maxwait=maxwait, naplen=naplen,
-                                  log_pre="Azure ephemeral disk: ")
-
-    if missing:
-        LOG.warning("ephemeral device '%s' did not appear after %d seconds.",
-                    devpath, maxwait)
-        return
+    with events.ReportEventStack(
+                name="wait-for-ephemeral-disk",
+                description="wait for ephemeral disk",
+                parent=azure_ds_reporter):
+        missing = util.wait_for_files([devpath],
+                                      maxwait=maxwait,
+                                      naplen=naplen,
+                                      log_pre="Azure ephemeral disk: ")
+
+        if missing:
+            LOG.warning("ephemeral device '%s' did"
+                        " not appear after %d seconds.",
+                        devpath, maxwait)
+            return
 
     result = False
     msg = None
@@ -808,6 +846,7 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
     return
 
 
+@azure_ds_telemetry_reporter
 def perform_hostname_bounce(hostname, cfg, prev_hostname):
     # set the hostname to 'hostname' if it is not already set to that.
     # then, if policy is not off, bounce the interface using command
@@ -843,6 +882,7 @@ def perform_hostname_bounce(hostname, cfg, prev_hostname):
     return True
 
 
+@azure_ds_telemetry_reporter
 def crtfile_to_pubkey(fname, data=None):
     pipeline = ('openssl x509 -noout -pubkey < "$0" |'
                 'ssh-keygen -i -m PKCS8 -f /dev/stdin')
@@ -851,6 +891,7 @@ def crtfile_to_pubkey(fname, data=None):
     return out.rstrip()
 
 
+@azure_ds_telemetry_reporter
 def pubkeys_from_crt_files(flist):
     pubkeys = []
     errors = []
@@ -866,6 +907,7 @@ def pubkeys_from_crt_files(flist):
     return pubkeys
 
 
+@azure_ds_telemetry_reporter
 def write_files(datadir, files, dirmode=None):
 
     def _redact_password(cnt, fname):
@@ -893,6 +935,7 @@ def write_files(datadir, files, dirmode=None):
         util.write_file(filename=fname, content=content, mode=0o600)
 
 
+@azure_ds_telemetry_reporter
 def invoke_agent(cmd):
     # this is a function itself to simplify patching it for test
     if cmd:
@@ -912,6 +955,7 @@ def find_child(node, filter_func):
     return ret
 
 
+@azure_ds_telemetry_reporter
 def load_azure_ovf_pubkeys(sshnode):
     # This parses a 'SSH' node formatted like below, and returns
     # an array of dicts.
@@ -964,6 +1008,7 @@ def load_azure_ovf_pubkeys(sshnode):
     return found
 
 
+@azure_ds_telemetry_reporter
 def read_azure_ovf(contents):
     try:
         dom = minidom.parseString(contents)
@@ -1064,6 +1109,7 @@ def read_azure_ovf(contents):
     return (md, ud, cfg)
 
 
+@azure_ds_telemetry_reporter
 def _extract_preprovisioned_vm_setting(dom):
     """Read the preprovision flag from the ovf. It should not
        exist unless true."""
@@ -1092,6 +1138,7 @@ def encrypt_pass(password, salt_id="$6$"):
     return crypt.crypt(password, salt_id + util.rand_str(strlen=16))
 
 
+@azure_ds_telemetry_reporter
 def _check_freebsd_cdrom(cdrom_dev):
     """Return boolean indicating path to cdrom device has content."""
     try:
@@ -1103,6 +1150,7 @@ def _check_freebsd_cdrom(cdrom_dev):
     return False
 
 
+@azure_ds_telemetry_reporter
 def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):
     """Return content random seed file if available, otherwise,
        return None."""
@@ -1126,6 +1174,7 @@ def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):
     return seed
 
 
+@azure_ds_telemetry_reporter
 def list_possible_azure_ds_devs():
     devlist = []
     if util.is_FreeBSD():
@@ -1140,6 +1189,7 @@ def list_possible_azure_ds_devs():
     return devlist
 
 
+@azure_ds_telemetry_reporter
 def load_azure_ds_dir(source_dir):
     ovf_file = os.path.join(source_dir, "ovf-env.xml")
 
@@ -1162,47 +1212,54 @@ def parse_network_config(imds_metadata):
     @param: imds_metadata: Dict of content read from IMDS network service.
     @return: Dictionary containing network version 2 standard configuration.
     """
-    if imds_metadata != sources.UNSET and imds_metadata:
-        netconfig = {'version': 2, 'ethernets': {}}
-        LOG.debug('Azure: generating network configuration from IMDS')
-        network_metadata = imds_metadata['network']
-        for idx, intf in enumerate(network_metadata['interface']):
-            nicname = 'eth{idx}'.format(idx=idx)
-            dev_config = {}
-            for addr4 in intf['ipv4']['ipAddress']:
-                privateIpv4 = addr4['privateIpAddress']
-                if privateIpv4:
-                    if dev_config.get('dhcp4', False):
-                        # Append static address config for nic > 1
-                        netPrefix = intf['ipv4']['subnet'][0].get(
-                            'prefix', '24')
-                        if not dev_config.get('addresses'):
-                            dev_config['addresses'] = []
-                        dev_config['addresses'].append(
-                            '{ip}/{prefix}'.format(
-                                ip=privateIpv4, prefix=netPrefix))
-                    else:
-                        dev_config['dhcp4'] = True
-            for addr6 in intf['ipv6']['ipAddress']:
-                privateIpv6 = addr6['privateIpAddress']
-                if privateIpv6:
-                    dev_config['dhcp6'] = True
-                    break
-            if dev_config:
-                mac = ':'.join(re.findall(r'..', intf['macAddress']))
-                dev_config.update(
-                    {'match': {'macaddress': mac.lower()},
-                     'set-name': nicname})
-                netconfig['ethernets'][nicname] = dev_config
-    else:
-        blacklist = ['mlx4_core']
-        LOG.debug('Azure: generating fallback configuration')
-        # generate a network config, blacklist picking mlx4_core devs
-        netconfig = net.generate_fallback_config(
-            blacklist_drivers=blacklist, config_driver=True)
-    return netconfig
+    with events.ReportEventStack(
+                name="parse_network_config",
+                description="",
+                parent=azure_ds_reporter) as evt:
+        if imds_metadata != sources.UNSET and imds_metadata:
+            netconfig = {'version': 2, 'ethernets': {}}
+            LOG.debug('Azure: generating network configuration from IMDS')
+            network_metadata = imds_metadata['network']
+            for idx, intf in enumerate(network_metadata['interface']):
+                nicname = 'eth{idx}'.format(idx=idx)
+                dev_config = {}
+                for addr4 in intf['ipv4']['ipAddress']:
+                    privateIpv4 = addr4['privateIpAddress']
+                    if privateIpv4:
+                        if dev_config.get('dhcp4', False):
+                            # Append static address config for nic > 1
+                            netPrefix = intf['ipv4']['subnet'][0].get(
+                                'prefix', '24')
+                            if not dev_config.get('addresses'):
+                                dev_config['addresses'] = []
+                            dev_config['addresses'].append(
+                                '{ip}/{prefix}'.format(
+                                    ip=privateIpv4, prefix=netPrefix))
+                        else:
+                            dev_config['dhcp4'] = True
+                for addr6 in intf['ipv6']['ipAddress']:
+                    privateIpv6 = addr6['privateIpAddress']
+                    if privateIpv6:
+                        dev_config['dhcp6'] = True
+                        break
+                if dev_config:
+                    mac = ':'.join(re.findall(r'..', intf['macAddress']))
+                    dev_config.update(
+                        {'match': {'macaddress': mac.lower()},
+                         'set-name': nicname})
+                    netconfig['ethernets'][nicname] = dev_config
+            evt.description = "network config from imds"
+        else:
+            blacklist = ['mlx4_core']
+            LOG.debug('Azure: generating fallback configuration')
+            # generate a network config, blacklist picking mlx4_core devs
+            netconfig = net.generate_fallback_config(
+                blacklist_drivers=blacklist, config_driver=True)
+            evt.description = "network config from fallback"
+        return netconfig
 
 
+@azure_ds_telemetry_reporter
 def get_metadata_from_imds(fallback_nic, retries):
     """Query Azure's network metadata service, returning a dictionary.
 
@@ -1227,6 +1284,7 @@ def get_metadata_from_imds(fallback_nic, retries):
             return util.log_time(**kwargs)
 
 
+@azure_ds_telemetry_reporter
 def _get_metadata_from_imds(retries):
 
     url = IMDS_URL + "instance?api-version=2017-12-01"
@@ -1246,6 +1304,7 @@ def _get_metadata_from_imds(retries):
     return {}
 
 
+@azure_ds_telemetry_reporter
 def maybe_remove_ubuntu_network_config_scripts(paths=None):
     """Remove Azure-specific ubuntu network config for non-primary nics.
 
@@ -1283,14 +1342,20 @@ def maybe_remove_ubuntu_network_config_scripts(paths=None):
 
 
 def _is_platform_viable(seed_dir):
-    """Check platform environment to report if this datasource may run."""
-    asset_tag = util.read_dmi_data('chassis-asset-tag')
-    if asset_tag == AZURE_CHASSIS_ASSET_TAG:
-        return True
-    LOG.debug("Non-Azure DMI asset tag '%s' discovered.", asset_tag)
-    if os.path.exists(os.path.join(seed_dir, 'ovf-env.xml')):
-        return True
-    return False
+    with events.ReportEventStack(
+                name="check-platform-viability",
+                description="found azure asset tag",
+                parent=azure_ds_reporter) as evt:
+
+        """Check platform environment to report if this datasource may run."""
+        asset_tag = util.read_dmi_data('chassis-asset-tag')
+        if asset_tag == AZURE_CHASSIS_ASSET_TAG:
+            return True
+        LOG.debug("Non-Azure DMI asset tag '%s' discovered.", asset_tag)
+        evt.description = "Non-Azure DMI asset tag '%s' discovered.", asset_tag
+        if os.path.exists(os.path.join(seed_dir, 'ovf-env.xml')):
+            return True
+        return False
 
 
 class BrokenAzureDataSource(Exception):
diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py
old mode 100644
new mode 100755
index 2829dd2..d3af05e
--- a/cloudinit/sources/helpers/azure.py
+++ b/cloudinit/sources/helpers/azure.py
@@ -16,10 +16,27 @@ from xml.etree import ElementTree
 
 from cloudinit import url_helper
 from cloudinit import util
+from cloudinit.reporting import events
 
 LOG = logging.getLogger(__name__)
 
 
+azure_ds_reporter = events.ReportEventStack(
+    name="azure-ds",
+    description="initialize reporter for azure ds",
+    reporting_enabled=True)
+
+
+def azure_ds_telemetry_reporter(func):
+    def impl(*args, **kwargs):
+        with events.ReportEventStack(
+                name=func.__name__,
+                description=func.__name__,
+                parent=azure_ds_reporter):
+            return func(*args, **kwargs)
+    return impl
+
+
 @contextmanager
 def cd(newdir):
     prevdir = os.getcwd()
@@ -119,6 +136,7 @@ class OpenSSLManager(object):
     def clean_up(self):
         util.del_dir(self.tmpdir)
 
+    @azure_ds_telemetry_reporter
     def generate_certificate(self):
         LOG.debug('Generating certificate for communication with fabric...')
         if self.certificate is not None:
@@ -139,17 +157,20 @@ class OpenSSLManager(object):
         LOG.debug('New certificate generated.')
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _run_x509_action(action, cert):
         cmd = ['openssl', 'x509', '-noout', action]
         result, _ = util.subp(cmd, data=cert)
         return result
 
+    @azure_ds_telemetry_reporter
     def _get_ssh_key_from_cert(self, certificate):
         pub_key = self._run_x509_action('-pubkey', certificate)
         keygen_cmd = ['ssh-keygen', '-i', '-m', 'PKCS8', '-f', '/dev/stdin']
         ssh_key, _ = util.subp(keygen_cmd, data=pub_key)
         return ssh_key
 
+    @azure_ds_telemetry_reporter
     def _get_fingerprint_from_cert(self, certificate):
         """openssl x509 formats fingerprints as so:
         'SHA1 Fingerprint=07:3E:19:D1:4D:1C:79:92:24:C6:A0:FD:8D:DA:\
@@ -163,6 +184,7 @@ class OpenSSLManager(object):
         octets = raw_fp[eq+1:-1].split(':')
         return ''.join(octets)
 
+    @azure_ds_telemetry_reporter
     def _decrypt_certs_from_xml(self, certificates_xml):
         """Decrypt the certificates XML document using the our private key;
            return the list of certs and private keys contained in the doc.
@@ -185,6 +207,7 @@ class OpenSSLManager(object):
                 shell=True, data=b'\n'.join(lines))
         return out
 
+    @azure_ds_telemetry_reporter
     def parse_certificates(self, certificates_xml):
         """Given the Certificates XML document, return a dictionary of
            fingerprints and associated SSH keys derived from the certs."""
@@ -265,11 +288,13 @@ class WALinuxAgentShim(object):
         return socket.inet_ntoa(packed_bytes)
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _networkd_get_value_from_leases(leases_d=None):
         return dhcp.networkd_get_option_from_leases(
             'OPTION_245', leases_d=leases_d)
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _get_value_from_leases_file(fallback_lease_file):
         leases = []
         content = util.load_file(fallback_lease_file)
@@ -287,6 +312,7 @@ class WALinuxAgentShim(object):
             return leases[-1]
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _load_dhclient_json():
         dhcp_options = {}
         hooks_dir = WALinuxAgentShim._get_hooks_dir()
@@ -305,6 +331,7 @@ class WALinuxAgentShim(object):
         return dhcp_options
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _get_value_from_dhcpoptions(dhcp_options):
         if dhcp_options is None:
             return None
@@ -318,6 +345,7 @@ class WALinuxAgentShim(object):
         return _value
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def find_endpoint(fallback_lease_file=None, dhcp245=None):
         value = None
         if dhcp245 is not None:
@@ -352,6 +380,7 @@ class WALinuxAgentShim(object):
         LOG.debug('Azure endpoint found at %s', endpoint_ip_address)
         return endpoint_ip_address
 
+    @azure_ds_telemetry_reporter
     def register_with_azure_and_fetch_data(self, pubkey_info=None):
         if self.openssl_manager is None:
             self.openssl_manager = OpenSSLManager()
@@ -404,6 +433,7 @@ class WALinuxAgentShim(object):
 
         return keys
 
+    @azure_ds_telemetry_reporter
     def _report_ready(self, goal_state, http_client):
         LOG.debug('Reporting ready to Azure fabric.')
         document = self.REPORT_READY_XML_TEMPLATE.format(
@@ -419,6 +449,7 @@ class WALinuxAgentShim(object):
         LOG.info('Reported ready to Azure fabric.')
 
 
+@azure_ds_telemetry_reporter
 def get_metadata_from_fabric(fallback_lease_file=None, dhcp_opts=None,
                              pubkey_info=None):
     shim = WALinuxAgentShim(fallback_lease_file=fallback_lease_file,
-- 
1.8.3.1