From f6054bcebaca3fd731f4547ce0ad4ddf3bbcbd23 Mon Sep 17 00:00:00 2001
From: Eduardo Otubo <otubo@redhat.com>
Date: Wed, 29 May 2019 13:41:46 +0200
Subject: [PATCH 2/5] DatasourceAzure: add additional logging for azure
 datasource

RH-Author: Eduardo Otubo <otubo@redhat.com>
Message-id: <20190529134149.842-3-otubo@redhat.com>
Patchwork-id: 88268
O-Subject: [RHEL-8.0.1/RHEL-8.1.0 cloud-init PATCHv2 2/5] DatasourceAzure: add additional logging for azure datasource
Bugzilla: 1648375
RH-Acked-by: Vitaly Kuznetsov <vkuznets@redhat.com>
RH-Acked-by: Cathy Avery <cavery@redhat.com>

From: Anh Vo <anhvo@microsoft.com>
commit 0d8c88393b51db6454491a379dcc2e691551217a
Author: Anh Vo <anhvo@microsoft.com>
Date: Wed Apr 3 18:23:18 2019 +0000

    DatasourceAzure: add additional logging for azure datasource

    Create an Azure logging decorator and use additional ReportEventStack
    context managers to provide additional logging details.
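    A rough usage sketch (illustrative only, not part of the upstream
    change itself; the imports and keyword arguments below are the
    helpers this patch adds in cloudinit/sources/helpers/azure.py, while
    the function and event names are made up for the example):

        from cloudinit.reporting import events
        from cloudinit.sources.helpers.azure import (
            azure_ds_reporter, azure_ds_telemetry_reporter)

        @azure_ds_telemetry_reporter
        def example_step():
            # the decorator reports an event named after the function
            with events.ReportEventStack(
                    name="example-sub-step",
                    description="finer-grained step inside the function",
                    parent=azure_ds_reporter):
                pass  # work being timed and reported goes here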

Signed-off-by: Eduardo Otubo <otubo@redhat.com>
Signed-off-by: Miroslav Rezanina <mrezanin@redhat.com>
---
 cloudinit/sources/DataSourceAzure.py | 231 ++++++++++++++++++++++-------------
 cloudinit/sources/helpers/azure.py   |  31 +++++
 2 files changed, 179 insertions(+), 83 deletions(-)
 mode change 100644 => 100755 cloudinit/sources/DataSourceAzure.py
 mode change 100644 => 100755 cloudinit/sources/helpers/azure.py

diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
old mode 100644
new mode 100755
index a768b2c..c827816
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -21,10 +21,14 @@ from cloudinit import net
 from cloudinit.event import EventType
 from cloudinit.net.dhcp import EphemeralDHCPv4
 from cloudinit import sources
-from cloudinit.sources.helpers.azure import get_metadata_from_fabric
 from cloudinit.sources.helpers import netlink
 from cloudinit.url_helper import UrlError, readurl, retry_on_url_exc
 from cloudinit import util
+from cloudinit.reporting import events
+
+from cloudinit.sources.helpers.azure import (azure_ds_reporter,
+                                             azure_ds_telemetry_reporter,
+                                             get_metadata_from_fabric)
 
 LOG = logging.getLogger(__name__)
 
@@ -244,6 +248,7 @@ def set_hostname(hostname, hostname_command='hostname'):
     util.subp(['hostnamectl', 'set-hostname', str(hostname)])
 
 
+@azure_ds_telemetry_reporter
 @contextlib.contextmanager
 def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'):
     """
@@ -290,6 +295,7 @@ class DataSourceAzure(sources.DataSource):
         root = sources.DataSource.__str__(self)
         return "%s [seed=%s]" % (root, self.seed)
 
+    @azure_ds_telemetry_reporter
     def bounce_network_with_azure_hostname(self):
         # When using cloud-init to provision, we have to set the hostname from
         # the metadata and "bounce" the network to force DDNS to update via
@@ -315,6 +321,7 @@ class DataSourceAzure(sources.DataSource):
             util.logexc(LOG, "handling set_hostname failed")
         return False
 
+    @azure_ds_telemetry_reporter
     def get_metadata_from_agent(self):
         temp_hostname = self.metadata.get('local-hostname')
         agent_cmd = self.ds_cfg['agent_command']
@@ -344,15 +351,18 @@ class DataSourceAzure(sources.DataSource):
                 LOG.debug("ssh authentication: "
                           "using fingerprint from fabirc")
 
-        # wait very long for public SSH keys to arrive
-        # https://bugs.launchpad.net/cloud-init/+bug/1717611
-        missing = util.log_time(logfunc=LOG.debug,
-                                msg="waiting for SSH public key files",
-                                func=util.wait_for_files,
-                                args=(fp_files, 900))
-
-        if len(missing):
-            LOG.warning("Did not find files, but going on: %s", missing)
+        with events.ReportEventStack(
+                name="waiting-for-ssh-public-key",
+                description="wait for agents to retrieve ssh keys",
+                parent=azure_ds_reporter):
+            # wait very long for public SSH keys to arrive
+            # https://bugs.launchpad.net/cloud-init/+bug/1717611
+            missing = util.log_time(logfunc=LOG.debug,
+                                    msg="waiting for SSH public key files",
+                                    func=util.wait_for_files,
+                                    args=(fp_files, 900))
+            if len(missing):
+                LOG.warning("Did not find files, but going on: %s", missing)
 
         metadata = {}
         metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files)
@@ -366,6 +376,7 @@ class DataSourceAzure(sources.DataSource):
             subplatform_type = 'seed-dir'
         return '%s (%s)' % (subplatform_type, self.seed)
 
+    @azure_ds_telemetry_reporter
     def crawl_metadata(self):
         """Walk all instance metadata sources returning a dict on success.
 
@@ -467,6 +478,7 @@ class DataSourceAzure(sources.DataSource):
         super(DataSourceAzure, self).clear_cached_attrs(attr_defaults)
         self._metadata_imds = sources.UNSET
 
+    @azure_ds_telemetry_reporter
     def _get_data(self):
         """Crawl and process datasource metadata caching metadata as attrs.
 
@@ -513,6 +525,7 @@ class DataSourceAzure(sources.DataSource):
         # quickly (local check only) if self.instance_id is still valid
         return sources.instance_id_matches_system_uuid(self.get_instance_id())
 
+    @azure_ds_telemetry_reporter
     def setup(self, is_new_instance):
         if self._negotiated is False:
             LOG.debug("negotiating for %s (new_instance=%s)",
@@ -580,6 +593,7 @@ class DataSourceAzure(sources.DataSource):
             if nl_sock:
                 nl_sock.close()
 
+    @azure_ds_telemetry_reporter
     def _report_ready(self, lease):
         """Tells the fabric provisioning has completed """
         try:
@@ -617,9 +631,14 @@ class DataSourceAzure(sources.DataSource):
     def _reprovision(self):
         """Initiate the reprovisioning workflow."""
         contents = self._poll_imds()
-        md, ud, cfg = read_azure_ovf(contents)
-        return (md, ud, cfg, {'ovf-env.xml': contents})
-
+        with events.ReportEventStack(
+                name="reprovisioning-read-azure-ovf",
+                description="read azure ovf during reprovisioning",
+                parent=azure_ds_reporter):
+            md, ud, cfg = read_azure_ovf(contents)
+            return (md, ud, cfg, {'ovf-env.xml': contents})
+
+    @azure_ds_telemetry_reporter
     def _negotiate(self):
         """Negotiate with fabric and return data from it.
 
@@ -652,6 +671,7 @@ class DataSourceAzure(sources.DataSource):
         util.del_file(REPROVISION_MARKER_FILE)
         return fabric_data
 
+    @azure_ds_telemetry_reporter
     def activate(self, cfg, is_new_instance):
         address_ephemeral_resize(is_new_instance=is_new_instance,
                                  preserve_ntfs=self.ds_cfg.get(
@@ -690,12 +710,14 @@ def _partitions_on_device(devpath, maxnum=16):
     return []
 
 
+@azure_ds_telemetry_reporter
 def _has_ntfs_filesystem(devpath):
     ntfs_devices = util.find_devs_with("TYPE=ntfs", no_cache=True)
     LOG.debug('ntfs_devices found = %s', ntfs_devices)
     return os.path.realpath(devpath) in ntfs_devices
 
 
+@azure_ds_telemetry_reporter
 def can_dev_be_reformatted(devpath, preserve_ntfs):
     """Determine if the ephemeral drive at devpath should be reformatted.
 
@@ -744,43 +766,59 @@ def can_dev_be_reformatted(devpath, preserve_ntfs):
                (cand_part, cand_path, devpath))
         return False, msg
 
+    @azure_ds_telemetry_reporter
     def count_files(mp):
         ignored = set(['dataloss_warning_readme.txt'])
         return len([f for f in os.listdir(mp) if f.lower() not in ignored])
 
     bmsg = ('partition %s (%s) on device %s was ntfs formatted' %
             (cand_part, cand_path, devpath))
-    try:
-        file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",
-                                   update_env_for_mount={'LANG': 'C'})
-    except util.MountFailedError as e:
-        if "unknown filesystem type 'ntfs'" in str(e):
-            return True, (bmsg + ' but this system cannot mount NTFS,'
-                          ' assuming there are no important files.'
-                          ' Formatting allowed.')
-        return False, bmsg + ' but mount of %s failed: %s' % (cand_part, e)
-
-    if file_count != 0:
-        LOG.warning("it looks like you're using NTFS on the ephemeral disk, "
-                    'to ensure that filesystem does not get wiped, set '
-                    '%s.%s in config', '.'.join(DS_CFG_PATH),
-                    DS_CFG_KEY_PRESERVE_NTFS)
-        return False, bmsg + ' but had %d files on it.' % file_count
+
+    with events.ReportEventStack(
+                name="mount-ntfs-and-count",
+                description="mount-ntfs-and-count",
+                parent=azure_ds_reporter) as evt:
+        try:
+            file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",
+                                       update_env_for_mount={'LANG': 'C'})
+        except util.MountFailedError as e:
+            evt.description = "cannot mount ntfs"
+            if "unknown filesystem type 'ntfs'" in str(e):
+                return True, (bmsg + ' but this system cannot mount NTFS,'
+                              ' assuming there are no important files.'
+                              ' Formatting allowed.')
+            return False, bmsg + ' but mount of %s failed: %s' % (cand_part, e)
+
+        if file_count != 0:
+            evt.description = "mounted and counted %d files" % file_count
+            LOG.warning("it looks like you're using NTFS on the ephemeral"
+                        " disk, to ensure that filesystem does not get wiped,"
+                        " set %s.%s in config", '.'.join(DS_CFG_PATH),
+                        DS_CFG_KEY_PRESERVE_NTFS)
+            return False, bmsg + ' but had %d files on it.' % file_count
 
     return True, bmsg + ' and had no important files. Safe for reformatting.'
 
 
+@azure_ds_telemetry_reporter
 def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
                              is_new_instance=False, preserve_ntfs=False):
     # wait for ephemeral disk to come up
     naplen = .2
-    missing = util.wait_for_files([devpath], maxwait=maxwait, naplen=naplen,
-                                  log_pre="Azure ephemeral disk: ")
-
-    if missing:
-        LOG.warning("ephemeral device '%s' did not appear after %d seconds.",
-                    devpath, maxwait)
-        return
+    with events.ReportEventStack(
+            name="wait-for-ephemeral-disk",
+            description="wait for ephemeral disk",
+            parent=azure_ds_reporter):
+        missing = util.wait_for_files([devpath],
+                                      maxwait=maxwait,
+                                      naplen=naplen,
+                                      log_pre="Azure ephemeral disk: ")
+
+        if missing:
+            LOG.warning("ephemeral device '%s' did"
+                        " not appear after %d seconds.",
+                        devpath, maxwait)
+            return
 
     result = False
     msg = None
@@ -808,6 +846,7 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
         return
 
 
+@azure_ds_telemetry_reporter
 def perform_hostname_bounce(hostname, cfg, prev_hostname):
     # set the hostname to 'hostname' if it is not already set to that.
     # then, if policy is not off, bounce the interface using command
@@ -843,6 +882,7 @@ def perform_hostname_bounce(hostname, cfg, prev_hostname):
     return True
 
 
+@azure_ds_telemetry_reporter
 def crtfile_to_pubkey(fname, data=None):
     pipeline = ('openssl x509 -noout -pubkey < "$0" |'
                 'ssh-keygen -i -m PKCS8 -f /dev/stdin')
@@ -851,6 +891,7 @@ def crtfile_to_pubkey(fname, data=None):
     return out.rstrip()
 
 
+@azure_ds_telemetry_reporter
 def pubkeys_from_crt_files(flist):
     pubkeys = []
     errors = []
@@ -866,6 +907,7 @@ def pubkeys_from_crt_files(flist):
     return pubkeys
 
 
+@azure_ds_telemetry_reporter
 def write_files(datadir, files, dirmode=None):
 
     def _redact_password(cnt, fname):
@@ -893,6 +935,7 @@ def write_files(datadir, files, dirmode=None):
         util.write_file(filename=fname, content=content, mode=0o600)
 
 
+@azure_ds_telemetry_reporter
 def invoke_agent(cmd):
     # this is a function itself to simplify patching it for test
     if cmd:
@@ -912,6 +955,7 @@ def find_child(node, filter_func):
     return ret
 
 
+@azure_ds_telemetry_reporter
 def load_azure_ovf_pubkeys(sshnode):
     # This parses a 'SSH' node formatted like below, and returns
     #   an array of dicts.
@@ -964,6 +1008,7 @@ def load_azure_ovf_pubkeys(sshnode):
     return found
 
 
+@azure_ds_telemetry_reporter
 def read_azure_ovf(contents):
     try:
         dom = minidom.parseString(contents)
@@ -1064,6 +1109,7 @@ def read_azure_ovf(contents):
     return (md, ud, cfg)
 
 
+@azure_ds_telemetry_reporter
 def _extract_preprovisioned_vm_setting(dom):
     """Read the preprovision flag from the ovf. It should not
        exist unless true."""
@@ -1092,6 +1138,7 @@ def encrypt_pass(password, salt_id="$6$"):
     return crypt.crypt(password, salt_id + util.rand_str(strlen=16))
 
 
+@azure_ds_telemetry_reporter
 def _check_freebsd_cdrom(cdrom_dev):
     """Return boolean indicating path to cdrom device has content."""
     try:
@@ -1103,6 +1150,7 @@ def _check_freebsd_cdrom(cdrom_dev):
     return False
 
 
+@azure_ds_telemetry_reporter
 def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):
     """Return content random seed file if available, otherwise,
        return None."""
@@ -1126,6 +1174,7 @@ def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):
     return seed
 
 
+@azure_ds_telemetry_reporter
 def list_possible_azure_ds_devs():
     devlist = []
     if util.is_FreeBSD():
@@ -1140,6 +1189,7 @@ def list_possible_azure_ds_devs():
     return devlist
 
 
+@azure_ds_telemetry_reporter
 def load_azure_ds_dir(source_dir):
     ovf_file = os.path.join(source_dir, "ovf-env.xml")
 
@@ -1162,47 +1212,54 @@ def parse_network_config(imds_metadata):
     @param: imds_metadata: Dict of content read from IMDS network service.
     @return: Dictionary containing network version 2 standard configuration.
     """
-    if imds_metadata != sources.UNSET and imds_metadata:
-        netconfig = {'version': 2, 'ethernets': {}}
-        LOG.debug('Azure: generating network configuration from IMDS')
-        network_metadata = imds_metadata['network']
-        for idx, intf in enumerate(network_metadata['interface']):
-            nicname = 'eth{idx}'.format(idx=idx)
-            dev_config = {}
-            for addr4 in intf['ipv4']['ipAddress']:
-                privateIpv4 = addr4['privateIpAddress']
-                if privateIpv4:
-                    if dev_config.get('dhcp4', False):
-                        # Append static address config for nic > 1
-                        netPrefix = intf['ipv4']['subnet'][0].get(
-                            'prefix', '24')
-                        if not dev_config.get('addresses'):
-                            dev_config['addresses'] = []
-                        dev_config['addresses'].append(
-                            '{ip}/{prefix}'.format(
-                                ip=privateIpv4, prefix=netPrefix))
-                    else:
-                        dev_config['dhcp4'] = True
-            for addr6 in intf['ipv6']['ipAddress']:
-                privateIpv6 = addr6['privateIpAddress']
-                if privateIpv6:
-                    dev_config['dhcp6'] = True
-                    break
-            if dev_config:
-                mac = ':'.join(re.findall(r'..', intf['macAddress']))
-                dev_config.update(
-                    {'match': {'macaddress': mac.lower()},
-                     'set-name': nicname})
-                netconfig['ethernets'][nicname] = dev_config
-    else:
-        blacklist = ['mlx4_core']
-        LOG.debug('Azure: generating fallback configuration')
-        # generate a network config, blacklist picking mlx4_core devs
-        netconfig = net.generate_fallback_config(
-            blacklist_drivers=blacklist, config_driver=True)
-    return netconfig
+    with events.ReportEventStack(
+                name="parse_network_config",
+                description="",
+                parent=azure_ds_reporter) as evt:
+        if imds_metadata != sources.UNSET and imds_metadata:
+            netconfig = {'version': 2, 'ethernets': {}}
+            LOG.debug('Azure: generating network configuration from IMDS')
+            network_metadata = imds_metadata['network']
+            for idx, intf in enumerate(network_metadata['interface']):
+                nicname = 'eth{idx}'.format(idx=idx)
+                dev_config = {}
+                for addr4 in intf['ipv4']['ipAddress']:
+                    privateIpv4 = addr4['privateIpAddress']
+                    if privateIpv4:
+                        if dev_config.get('dhcp4', False):
+                            # Append static address config for nic > 1
+                            netPrefix = intf['ipv4']['subnet'][0].get(
+                                'prefix', '24')
+                            if not dev_config.get('addresses'):
+                                dev_config['addresses'] = []
+                            dev_config['addresses'].append(
+                                '{ip}/{prefix}'.format(
+                                    ip=privateIpv4, prefix=netPrefix))
+                        else:
+                            dev_config['dhcp4'] = True
+                for addr6 in intf['ipv6']['ipAddress']:
+                    privateIpv6 = addr6['privateIpAddress']
+                    if privateIpv6:
+                        dev_config['dhcp6'] = True
+                        break
+                if dev_config:
+                    mac = ':'.join(re.findall(r'..', intf['macAddress']))
+                    dev_config.update(
+                        {'match': {'macaddress': mac.lower()},
+                         'set-name': nicname})
+                    netconfig['ethernets'][nicname] = dev_config
+            evt.description = "network config from imds"
+        else:
+            blacklist = ['mlx4_core']
+            LOG.debug('Azure: generating fallback configuration')
+            # generate a network config, blacklist picking mlx4_core devs
+            netconfig = net.generate_fallback_config(
+                blacklist_drivers=blacklist, config_driver=True)
+            evt.description = "network config from fallback"
+        return netconfig
 
 
+@azure_ds_telemetry_reporter
 def get_metadata_from_imds(fallback_nic, retries):
     """Query Azure's network metadata service, returning a dictionary.
 
@@ -1227,6 +1284,7 @@ def get_metadata_from_imds(fallback_nic, retries):
     return util.log_time(**kwargs)
 
 
+@azure_ds_telemetry_reporter
 def _get_metadata_from_imds(retries):
 
     url = IMDS_URL + "instance?api-version=2017-12-01"
@@ -1246,6 +1304,7 @@ def _get_metadata_from_imds(retries):
     return {}
 
 
+@azure_ds_telemetry_reporter
 def maybe_remove_ubuntu_network_config_scripts(paths=None):
     """Remove Azure-specific ubuntu network config for non-primary nics.
 
@@ -1283,14 +1342,20 @@ def maybe_remove_ubuntu_network_config_scripts(paths=None):
 
 
 def _is_platform_viable(seed_dir):
-    """Check platform environment to report if this datasource may run."""
-    asset_tag = util.read_dmi_data('chassis-asset-tag')
-    if asset_tag == AZURE_CHASSIS_ASSET_TAG:
-        return True
-    LOG.debug("Non-Azure DMI asset tag '%s' discovered.", asset_tag)
-    if os.path.exists(os.path.join(seed_dir, 'ovf-env.xml')):
-        return True
-    return False
+    with events.ReportEventStack(
+                name="check-platform-viability",
+                description="found azure asset tag",
+                parent=azure_ds_reporter) as evt:
+
+        """Check platform environment to report if this datasource may run."""
+        asset_tag = util.read_dmi_data('chassis-asset-tag')
+        if asset_tag == AZURE_CHASSIS_ASSET_TAG:
+            return True
+        LOG.debug("Non-Azure DMI asset tag '%s' discovered.", asset_tag)
+        evt.description = "Non-Azure DMI asset tag '%s' discovered.", asset_tag
+        if os.path.exists(os.path.join(seed_dir, 'ovf-env.xml')):
+            return True
+        return False
 
 
 class BrokenAzureDataSource(Exception):
diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py
old mode 100644
new mode 100755
index 2829dd2..d3af05e
--- a/cloudinit/sources/helpers/azure.py
+++ b/cloudinit/sources/helpers/azure.py
@@ -16,10 +16,27 @@ from xml.etree import ElementTree
 
 from cloudinit import url_helper
 from cloudinit import util
+from cloudinit.reporting import events
 
 LOG = logging.getLogger(__name__)
 
 
+azure_ds_reporter = events.ReportEventStack(
+    name="azure-ds",
+    description="initialize reporter for azure ds",
+    reporting_enabled=True)
+
+
+def azure_ds_telemetry_reporter(func):
+    def impl(*args, **kwargs):
+        with events.ReportEventStack(
+                name=func.__name__,
+                description=func.__name__,
+                parent=azure_ds_reporter):
+            return func(*args, **kwargs)
+    return impl
+
+
 @contextmanager
 def cd(newdir):
     prevdir = os.getcwd()
@@ -119,6 +136,7 @@ class OpenSSLManager(object):
     def clean_up(self):
         util.del_dir(self.tmpdir)
 
+    @azure_ds_telemetry_reporter
     def generate_certificate(self):
         LOG.debug('Generating certificate for communication with fabric...')
         if self.certificate is not None:
@@ -139,17 +157,20 @@ class OpenSSLManager(object):
         LOG.debug('New certificate generated.')
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _run_x509_action(action, cert):
         cmd = ['openssl', 'x509', '-noout', action]
         result, _ = util.subp(cmd, data=cert)
         return result
 
+    @azure_ds_telemetry_reporter
     def _get_ssh_key_from_cert(self, certificate):
         pub_key = self._run_x509_action('-pubkey', certificate)
         keygen_cmd = ['ssh-keygen', '-i', '-m', 'PKCS8', '-f', '/dev/stdin']
         ssh_key, _ = util.subp(keygen_cmd, data=pub_key)
         return ssh_key
 
+    @azure_ds_telemetry_reporter
     def _get_fingerprint_from_cert(self, certificate):
         """openssl x509 formats fingerprints as so:
         'SHA1 Fingerprint=07:3E:19:D1:4D:1C:79:92:24:C6:A0:FD:8D:DA:\
@@ -163,6 +184,7 @@ class OpenSSLManager(object):
         octets = raw_fp[eq+1:-1].split(':')
         return ''.join(octets)
 
+    @azure_ds_telemetry_reporter
     def _decrypt_certs_from_xml(self, certificates_xml):
         """Decrypt the certificates XML document using the our private key;
            return the list of certs and private keys contained in the doc.
@@ -185,6 +207,7 @@ class OpenSSLManager(object):
                        shell=True, data=b'\n'.join(lines))
         return out
 
+    @azure_ds_telemetry_reporter
     def parse_certificates(self, certificates_xml):
         """Given the Certificates XML document, return a dictionary of
         fingerprints and associated SSH keys derived from the certs."""
@@ -265,11 +288,13 @@ class WALinuxAgentShim(object):
         return socket.inet_ntoa(packed_bytes)
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _networkd_get_value_from_leases(leases_d=None):
         return dhcp.networkd_get_option_from_leases(
             'OPTION_245', leases_d=leases_d)
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _get_value_from_leases_file(fallback_lease_file):
         leases = []
         content = util.load_file(fallback_lease_file)
@@ -287,6 +312,7 @@ class WALinuxAgentShim(object):
         return leases[-1]
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _load_dhclient_json():
         dhcp_options = {}
         hooks_dir = WALinuxAgentShim._get_hooks_dir()
@@ -305,6 +331,7 @@ class WALinuxAgentShim(object):
         return dhcp_options
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def _get_value_from_dhcpoptions(dhcp_options):
         if dhcp_options is None:
             return None
@@ -318,6 +345,7 @@ class WALinuxAgentShim(object):
         return _value
 
     @staticmethod
+    @azure_ds_telemetry_reporter
     def find_endpoint(fallback_lease_file=None, dhcp245=None):
         value = None
         if dhcp245 is not None:
@@ -352,6 +380,7 @@ class WALinuxAgentShim(object):
         LOG.debug('Azure endpoint found at %s', endpoint_ip_address)
         return endpoint_ip_address
 
+    @azure_ds_telemetry_reporter
     def register_with_azure_and_fetch_data(self, pubkey_info=None):
         if self.openssl_manager is None:
             self.openssl_manager = OpenSSLManager()
@@ -404,6 +433,7 @@ class WALinuxAgentShim(object):
 
         return keys
 
+    @azure_ds_telemetry_reporter
     def _report_ready(self, goal_state, http_client):
         LOG.debug('Reporting ready to Azure fabric.')
         document = self.REPORT_READY_XML_TEMPLATE.format(
@@ -419,6 +449,7 @@ class WALinuxAgentShim(object):
         LOG.info('Reported ready to Azure fabric.')
 
 
+@azure_ds_telemetry_reporter
 def get_metadata_from_fabric(fallback_lease_file=None, dhcp_opts=None,
                              pubkey_info=None):
     shim = WALinuxAgentShim(fallback_lease_file=fallback_lease_file,
-- 
1.8.3.1
