From a87421042f5030e6dd7823cd80d7632b91296519 Mon Sep 17 00:00:00 2001
From: Ken Gaillot <kgaillot@redhat.com>
Date: Fri, 1 Dec 2017 11:02:54 -0600
Subject: [PATCH 1/5] Refactor: pengine: functionize checking whether node was
 unfenced

Reduces code duplication and enhances readability.
---
 pengine/native.c | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/pengine/native.c b/pengine/native.c
index e72dec4..c998e4b 100644
--- a/pengine/native.c
+++ b/pengine/native.c
@@ -429,6 +429,14 @@ rsc_merge_weights(resource_t * rsc, const char *rhs, GHashTable * nodes, const c
     return work;
 }
 
+static inline bool
+node_has_been_unfenced(node_t *node)
+{
+    const char *unfenced = pe_node_attribute_raw(node, CRM_ATTR_UNFENCED);
+
+    return unfenced && strcmp("0", unfenced);
+}
+
 node_t *
 native_color(resource_t * rsc, node_t * prefer, pe_working_set_t * data_set)
 {
@@ -2524,10 +2532,9 @@ StopRsc(resource_t * rsc, node_t * next, gboolean optional, pe_working_set_t * d
 
         if(is_set(rsc->flags, pe_rsc_needs_unfencing)) {
             action_t *unfence = pe_fence_op(current, "on", TRUE, NULL, data_set);
-            const char *unfenced = pe_node_attribute_raw(current, CRM_ATTR_UNFENCED);
 
             order_actions(stop, unfence, pe_order_implies_first);
-            if (unfenced == NULL || safe_str_eq("0", unfenced)) {
+            if (!node_has_been_unfenced(current)) {
                 pe_proc_err("Stopping %s until %s can be unfenced", rsc->id, current->details->uname);
             }
         }
@@ -2547,11 +2554,9 @@ StartRsc(resource_t * rsc, node_t * next, gboolean optional, pe_working_set_t *
 
     if(is_set(rsc->flags, pe_rsc_needs_unfencing)) {
         action_t *unfence = pe_fence_op(next, "on", TRUE, NULL, data_set);
-        const char *unfenced = pe_node_attribute_raw(next, CRM_ATTR_UNFENCED);
 
         order_actions(unfence, start, pe_order_implies_then);
-
-        if (unfenced == NULL || safe_str_eq("0", unfenced)) {
+        if (!node_has_been_unfenced(next)) {
             char *reason = crm_strdup_printf("Required by %s", rsc->id);
             trigger_unfencing(NULL, next, reason, NULL, data_set);
             free(reason);
-- 
1.8.3.1


From b6b3fb9e8c6c6b34fb39c9d7f0b89ef41e9486fa Mon Sep 17 00:00:00 2001
From: Ken Gaillot <kgaillot@redhat.com>
Date: Fri, 1 Dec 2017 11:45:31 -0600
Subject: [PATCH 2/5] Refactor: pengine: functionize checking for unfence
 device

Reduces code duplication and enhances readability. This also comments out some
dead code from when probe_complete was still used.
---
 pengine/native.c | 24 ++++++++++++++----------
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git a/pengine/native.c b/pengine/native.c
index c998e4b..e57fbc7 100644
--- a/pengine/native.c
+++ b/pengine/native.c
@@ -437,6 +437,13 @@ node_has_been_unfenced(node_t *node)
     return unfenced && strcmp("0", unfenced);
 }
 
+static inline bool
+is_unfence_device(resource_t *rsc, pe_working_set_t *data_set)
+{
+    return is_set(rsc->flags, pe_rsc_fence_device)
+           && is_set(data_set->flags, pe_flag_enable_unfencing);
+}
+
 node_t *
 native_color(resource_t * rsc, node_t * prefer, pe_working_set_t * data_set)
 {
@@ -3015,12 +3022,8 @@ native_create_probe(resource_t * rsc, node_t * node, action_t * complete,
     crm_debug("Probing %s on %s (%s) %d %p", rsc->id, node->details->uname, role2text(rsc->role),
               is_set(probe->flags, pe_action_runnable), rsc->running_on);
 
-    if(is_set(rsc->flags, pe_rsc_fence_device) && is_set(data_set->flags, pe_flag_enable_unfencing)) {
+    if (is_unfence_device(rsc, data_set) || !pe_rsc_is_clone(top)) {
         top = rsc;
-
-    } else if (pe_rsc_is_clone(top) == FALSE) {
-        top = rsc;
-
     } else {
         crm_trace("Probing %s on %s (%s) as %s", rsc->id, node->details->uname, role2text(rsc->role), top->id);
     }
@@ -3041,17 +3044,18 @@ native_create_probe(resource_t * rsc, node_t * node, action_t * complete,
                         top, reload_key(rsc), NULL,
                         pe_order_optional, data_set);
 
-    if(is_set(rsc->flags, pe_rsc_fence_device) && is_set(data_set->flags, pe_flag_enable_unfencing)) {
+#if 0
+    // complete is always null currently
+    if (!is_unfence_device(rsc, data_set)) {
         /* Normally rsc.start depends on probe complete which depends
-         * on rsc.probe. But this can't be the case in this scenario as
-         * it would create graph loops.
+         * on rsc.probe. But this can't be the case for fence devices
+         * with unfencing, as it would create graph loops.
          *
          * So instead we explicitly order 'rsc.probe then rsc.start'
          */
-
-    } else {
         order_actions(probe, complete, pe_order_implies_then);
     }
+#endif
     return TRUE;
 }
 
-- 
1.8.3.1


From 63431baae2e544dc3b21d51b035942dfeeca5561 Mon Sep 17 00:00:00 2001
From: Ken Gaillot <kgaillot@redhat.com>
Date: Fri, 1 Dec 2017 12:06:16 -0600
Subject: [PATCH 3/5] Fix: pengine: unfence before probing or starting fence
 devices

Regression since 7f8ba307
---
 pengine/native.c | 62 ++++++++++++++++++++++++++++++++------------------------
 1 file changed, 35 insertions(+), 27 deletions(-)

diff --git a/pengine/native.c b/pengine/native.c
index e57fbc7..0013e33 100644
--- a/pengine/native.c
+++ b/pengine/native.c
@@ -2550,6 +2550,39 @@ StopRsc(resource_t * rsc, node_t * next, gboolean optional, pe_working_set_t * d
     return TRUE;
 }
 
+static void
+order_after_unfencing(resource_t *rsc, pe_node_t *node, action_t *action,
+                      enum pe_ordering order, pe_working_set_t *data_set)
+{
+    /* When unfencing is in use, we order unfence actions before any probe or
+     * start of resources that require unfencing, and also of fence devices.
+     *
+     * This might seem to violate the principle that fence devices require
+     * only quorum. However, fence agents that unfence often don't have enough
+     * information to even probe or start unless the node is first unfenced.
+     */
+    if (is_unfence_device(rsc, data_set)
+        || is_set(rsc->flags, pe_rsc_needs_unfencing)) {
+
+        /* Start with an optional ordering. Requiring unfencing would result in
+         * the node being unfenced, and all its resources being stopped,
+         * whenever a new resource is added -- which would be highly suboptimal.
+         */
+        action_t *unfence = pe_fence_op(node, "on", TRUE, NULL, data_set);
+
+        order_actions(unfence, action, order);
+
+        if (!node_has_been_unfenced(node)) {
+            // But unfencing is required if it has never been done
+            char *reason = crm_strdup_printf("required by %s %s",
+                                             rsc->id, action->task);
+
+            trigger_unfencing(NULL, node, reason, NULL, data_set);
+            free(reason);
+        }
+    }
+}
+
 gboolean
 StartRsc(resource_t * rsc, node_t * next, gboolean optional, pe_working_set_t * data_set)
 {
@@ -2559,16 +2592,7 @@ StartRsc(resource_t * rsc, node_t * next, gboolean optional, pe_working_set_t *
     pe_rsc_trace(rsc, "%s on %s %d %d", rsc->id, next ? next->details->uname : "N/A", optional, next ? next->weight : 0);
     start = start_action(rsc, next, TRUE);
 
-    if(is_set(rsc->flags, pe_rsc_needs_unfencing)) {
-        action_t *unfence = pe_fence_op(next, "on", TRUE, NULL, data_set);
-
-        order_actions(unfence, start, pe_order_implies_then);
-        if (!node_has_been_unfenced(next)) {
-            char *reason = crm_strdup_printf("Required by %s", rsc->id);
-            trigger_unfencing(NULL, next, reason, NULL, data_set);
-            free(reason);
-        }
-    }
+    order_after_unfencing(rsc, next, start, pe_order_implies_then, data_set);
 
     if (is_set(start->flags, pe_action_runnable) && optional == FALSE) {
         update_action_flags(start, pe_action_optional | pe_action_clear, __FUNCTION__, __LINE__);
@@ -2989,23 +3013,7 @@ native_create_probe(resource_t * rsc, node_t * node, action_t * complete,
     probe = custom_action(rsc, key, RSC_STATUS, node, FALSE, TRUE, data_set);
     update_action_flags(probe, pe_action_optional | pe_action_clear, __FUNCTION__, __LINE__);
 
-    /* If enabled, require unfencing before probing any fence devices
-     * but ensure it happens after any resources that require
-     * unfencing have been probed.
-     *
-     * Doing it the other way (requiring unfencing after probing
-     * resources that need it) would result in the node being
-     * unfenced, and all its resources being stopped, whenever a new
-     * resource is added.  Which would be highly suboptimal.
-     *
-     * So essentially, at the point the fencing device(s) have been
-     * probed, we know the state of all resources that require
-     * unfencing and that unfencing occurred.
-     */
-    if(is_set(rsc->flags, pe_rsc_needs_unfencing)) {
-        action_t *unfence = pe_fence_op(node, "on", TRUE, NULL, data_set);
-        order_actions(unfence, probe, pe_order_optional);
-    }
+    order_after_unfencing(rsc, node, probe, pe_order_optional, data_set);
 
     /*
      * We need to know if it's running_on (not just known_on) this node
-- 
1.8.3.1


From 9d3840f374122f6258ddfe44bf85ff43d394d209 Mon Sep 17 00:00:00 2001
From: Ken Gaillot <kgaillot@redhat.com>
Date: Fri, 1 Dec 2017 12:24:55 -0600
Subject: [PATCH 4/5] Test: PE: update regression tests for unfencing change

---
 pengine/test10/start-then-stop-with-unfence.dot     |  3 +++
 pengine/test10/start-then-stop-with-unfence.exp     | 15 +++++++++++++--
 pengine/test10/start-then-stop-with-unfence.summary | 10 +++++-----
 pengine/test10/unfence-definition.dot               |  2 ++
 pengine/test10/unfence-definition.exp               |  9 ++++++++-
 pengine/test10/unfence-definition.summary           |  4 ++--
 pengine/test10/unfence-parameters.dot               |  2 ++
 pengine/test10/unfence-parameters.exp               |  9 ++++++++-
 pengine/test10/unfence-parameters.summary           |  4 ++--
 pengine/test10/unfence-startup.dot                  |  1 +
 pengine/test10/unfence-startup.exp                  |  6 +++++-
 pengine/test10/unfence-startup.summary              |  4 ++--
 12 files changed, 53 insertions(+), 16 deletions(-)

diff --git a/pengine/test10/start-then-stop-with-unfence.dot b/pengine/test10/start-then-stop-with-unfence.dot
index 6e9569b..b324339 100644
--- a/pengine/test10/start-then-stop-with-unfence.dot
+++ b/pengine/test10/start-then-stop-with-unfence.dot
@@ -23,5 +23,8 @@ digraph "g" {
 "mpath-node2_monitor_0 rhel7-node1.example.com" [ style=bold color="green" fontcolor="black"]
 "stonith 'on' rhel7-node1.example.com" -> "ip1_start_0 rhel7-node1.example.com" [ style = bold]
 "stonith 'on' rhel7-node1.example.com" -> "jrummy_start_0 rhel7-node1.example.com" [ style = bold]
+"stonith 'on' rhel7-node1.example.com" -> "mpath-node1_monitor_0 rhel7-node1.example.com" [ style = bold]
+"stonith 'on' rhel7-node1.example.com" -> "mpath-node1_start_0 rhel7-node1.example.com" [ style = bold]
+"stonith 'on' rhel7-node1.example.com" -> "mpath-node2_monitor_0 rhel7-node1.example.com" [ style = bold]
 "stonith 'on' rhel7-node1.example.com" [ style=bold color="green" fontcolor="black"]
 }
diff --git a/pengine/test10/start-then-stop-with-unfence.exp b/pengine/test10/start-then-stop-with-unfence.exp
index 75cb356..715ba40 100644
--- a/pengine/test10/start-then-stop-with-unfence.exp
+++ b/pengine/test10/start-then-stop-with-unfence.exp
@@ -6,7 +6,11 @@
         <attributes CRM_meta_on_node="rhel7-node1.example.com" CRM_meta_on_node_uuid="1" CRM_meta_op_target_rc="7" CRM_meta_timeout="20000"  devices="/dev/mapper/clustPVa" key="1234" pcmk_host_list="rhel7-node2.example.com"/>
       </rsc_op>
     </action_set>
-    <inputs/>
+    <inputs>
+      <trigger>
+        <crm_event id="6" operation="stonith" operation_key="stonith-rhel7-node1.example.com-on" on_node="rhel7-node1.example.com" on_node_uuid="1"/>
+      </trigger>
+    </inputs>
   </synapse>
   <synapse id="1">
     <action_set>
@@ -30,6 +34,9 @@
     </action_set>
     <inputs>
       <trigger>
+        <crm_event id="6" operation="stonith" operation_key="stonith-rhel7-node1.example.com-on" on_node="rhel7-node1.example.com" on_node_uuid="1"/>
+      </trigger>
+      <trigger>
         <rsc_op id="9" operation="monitor" operation_key="mpath-node1_monitor_0" on_node="rhel7-node1.example.com" on_node_uuid="1"/>
       </trigger>
     </inputs>
@@ -41,7 +48,11 @@
         <attributes CRM_meta_on_node="rhel7-node1.example.com" CRM_meta_on_node_uuid="1" CRM_meta_op_target_rc="7" CRM_meta_timeout="20000"  devices="/dev/mapper/clustPVa" key="1233" pcmk_host_list="rhel7-node1.example.com"/>
       </rsc_op>
     </action_set>
-    <inputs/>
+    <inputs>
+      <trigger>
+        <crm_event id="6" operation="stonith" operation_key="stonith-rhel7-node1.example.com-on" on_node="rhel7-node1.example.com" on_node_uuid="1"/>
+      </trigger>
+    </inputs>
   </synapse>
   <synapse id="4">
     <action_set>
diff --git a/pengine/test10/start-then-stop-with-unfence.summary b/pengine/test10/start-then-stop-with-unfence.summary
index 2e02a21..b2114d7 100644
--- a/pengine/test10/start-then-stop-with-unfence.summary
+++ b/pengine/test10/start-then-stop-with-unfence.summary
@@ -11,23 +11,23 @@ Online: [ rhel7-node1.example.com rhel7-node2.example.com ]
      Stopped: [ rhel7-node1.example.com ]
 
 Transition Summary:
- * Fence (on) rhel7-node1.example.com 'Required by ip1'
+ * Fence (on) rhel7-node1.example.com 'required by mpath-node2 monitor'
  * Start   mpath-node1	(rhel7-node1.example.com)
  * Move       ip1             ( rhel7-node2.example.com -> rhel7-node1.example.com )  
  * Start   jrummy:1	(rhel7-node1.example.com)
 
 Executing cluster transition:
- * Resource action: mpath-node2     monitor on rhel7-node1.example.com
- * Resource action: mpath-node1     monitor on rhel7-node1.example.com
  * Pseudo action:   jrummy-clone_start_0
  * Fencing rhel7-node1.example.com (on)
- * Resource action: mpath-node1     start on rhel7-node1.example.com
+ * Resource action: mpath-node2     monitor on rhel7-node1.example.com
+ * Resource action: mpath-node1     monitor on rhel7-node1.example.com
  * Resource action: jrummy          start on rhel7-node1.example.com
  * Pseudo action:   jrummy-clone_running_0
- * Resource action: mpath-node1     monitor=60000 on rhel7-node1.example.com
+ * Resource action: mpath-node1     start on rhel7-node1.example.com
  * Resource action: ip1             stop on rhel7-node2.example.com
  * Resource action: jrummy          monitor=10000 on rhel7-node1.example.com
  * Pseudo action:   all_stopped
+ * Resource action: mpath-node1     monitor=60000 on rhel7-node1.example.com
  * Resource action: ip1             start on rhel7-node1.example.com
  * Resource action: ip1             monitor=10000 on rhel7-node1.example.com
 
diff --git a/pengine/test10/unfence-definition.dot b/pengine/test10/unfence-definition.dot
index 3bc29d3..c42391a 100644
--- a/pengine/test10/unfence-definition.dot
+++ b/pengine/test10/unfence-definition.dot
@@ -66,11 +66,13 @@ digraph "g" {
 "fencing_stop_0 virt-1" [ style=bold color="green" fontcolor="black"]
 "stonith 'on' virt-1" -> "clvmd_start_0 virt-1" [ style = bold]
 "stonith 'on' virt-1" -> "dlm_start_0 virt-1" [ style = bold]
+"stonith 'on' virt-1" -> "fencing_start_0 virt-1" [ style = bold]
 "stonith 'on' virt-1" [ style=bold color="green" fontcolor="black"]
 "stonith 'on' virt-3" -> "clvmd:2_monitor_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" -> "clvmd:2_start_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" -> "dlm:2_monitor_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" -> "dlm:2_start_0 virt-3" [ style = bold]
+"stonith 'on' virt-3" -> "fencing_monitor_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" [ style=bold color="green" fontcolor="black"]
 "stonith 'reboot' virt-4" -> "stonith_complete" [ style = bold]
 "stonith 'reboot' virt-4" [ style=bold color="green" fontcolor="black"]
diff --git a/pengine/test10/unfence-definition.exp b/pengine/test10/unfence-definition.exp
index b1e241a..25c5674 100644
--- a/pengine/test10/unfence-definition.exp
+++ b/pengine/test10/unfence-definition.exp
@@ -11,6 +11,9 @@
         <pseudo_event id="1" operation="all_stopped" operation_key="all_stopped"/>
       </trigger>
       <trigger>
+        <crm_event id="2" operation="stonith" operation_key="stonith-virt-1-on" on_node="virt-1" on_node_uuid="1"/>
+      </trigger>
+      <trigger>
         <rsc_op id="6" operation="stop" operation_key="fencing_stop_0" on_node="virt-1" on_node_uuid="1"/>
       </trigger>
       <trigger>
@@ -28,7 +31,11 @@
         <attributes CRM_meta_on_node="virt-3" CRM_meta_on_node_uuid="3" CRM_meta_op_target_rc="7" CRM_meta_timeout="20000" />
       </rsc_op>
     </action_set>
-    <inputs/>
+    <inputs>
+      <trigger>
+        <crm_event id="4" operation="stonith" operation_key="stonith-virt-3-on" on_node="virt-3" on_node_uuid="3"/>
+      </trigger>
+    </inputs>
   </synapse>
   <synapse id="2">
     <action_set>
diff --git a/pengine/test10/unfence-definition.summary b/pengine/test10/unfence-definition.summary
index 4ca9344..2051c51 100644
--- a/pengine/test10/unfence-definition.summary
+++ b/pengine/test10/unfence-definition.summary
@@ -13,7 +13,7 @@ Online: [ virt-1 virt-2 virt-3 ]
 
 Transition Summary:
  * Fence (reboot) virt-4 'node is unclean'
- * Fence (on) virt-3 'Required by dlm:2'
+ * Fence (on) virt-3 'required by fencing monitor'
  * Fence (on) virt-1 'Device definition changed'
  * Restart    fencing     ( virt-1 )  
  * Restart    dlm:0       ( virt-1 )   due to required stonith
@@ -23,13 +23,13 @@ Transition Summary:
  * Start   clvmd:2	(virt-3)
 
 Executing cluster transition:
- * Resource action: fencing         monitor on virt-3
  * Resource action: fencing         stop on virt-1
  * Resource action: clvmd           monitor on virt-2
  * Pseudo action:   clvmd-clone_stop_0
  * Fencing virt-4 (reboot)
  * Pseudo action:   stonith_complete
  * Fencing virt-3 (on)
+ * Resource action: fencing         monitor on virt-3
  * Resource action: fencing         delete on virt-1
  * Resource action: dlm             monitor on virt-3
  * Resource action: clvmd           stop on virt-1
diff --git a/pengine/test10/unfence-parameters.dot b/pengine/test10/unfence-parameters.dot
index ce006c4..3c27b22 100644
--- a/pengine/test10/unfence-parameters.dot
+++ b/pengine/test10/unfence-parameters.dot
@@ -63,11 +63,13 @@ digraph "g" {
 "fencing_stop_0 virt-1" [ style=bold color="green" fontcolor="black"]
 "stonith 'on' virt-1" -> "clvmd_start_0 virt-1" [ style = bold]
 "stonith 'on' virt-1" -> "dlm_start_0 virt-1" [ style = bold]
+"stonith 'on' virt-1" -> "fencing_start_0 virt-1" [ style = bold]
 "stonith 'on' virt-1" [ style=bold color="green" fontcolor="black"]
 "stonith 'on' virt-3" -> "clvmd:2_monitor_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" -> "clvmd:2_start_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" -> "dlm:2_monitor_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" -> "dlm:2_start_0 virt-3" [ style = bold]
+"stonith 'on' virt-3" -> "fencing_monitor_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" [ style=bold color="green" fontcolor="black"]
 "stonith 'reboot' virt-4" -> "stonith_complete" [ style = bold]
 "stonith 'reboot' virt-4" [ style=bold color="green" fontcolor="black"]
diff --git a/pengine/test10/unfence-parameters.exp b/pengine/test10/unfence-parameters.exp
index b8053c7..3b73fc7 100644
--- a/pengine/test10/unfence-parameters.exp
+++ b/pengine/test10/unfence-parameters.exp
@@ -15,7 +15,11 @@
         <attributes CRM_meta_on_node="virt-3" CRM_meta_on_node_uuid="3" CRM_meta_op_target_rc="7" CRM_meta_timeout="20000" />
       </rsc_op>
     </action_set>
-    <inputs/>
+    <inputs>
+      <trigger>
+        <crm_event id="4" operation="stonith" operation_key="stonith-virt-3-on" on_node="virt-3" on_node_uuid="3"/>
+      </trigger>
+    </inputs>
   </synapse>
   <synapse id="2">
     <action_set>
@@ -29,6 +33,9 @@
         <pseudo_event id="1" operation="all_stopped" operation_key="all_stopped"/>
       </trigger>
       <trigger>
+        <crm_event id="2" operation="stonith" operation_key="stonith-virt-1-on" on_node="virt-1" on_node_uuid="1"/>
+      </trigger>
+      <trigger>
         <rsc_op id="8" operation="monitor" operation_key="fencing_monitor_0" on_node="virt-3" on_node_uuid="3"/>
       </trigger>
       <trigger>
diff --git a/pengine/test10/unfence-parameters.summary b/pengine/test10/unfence-parameters.summary
index 5b582d9..2cc9e27 100644
--- a/pengine/test10/unfence-parameters.summary
+++ b/pengine/test10/unfence-parameters.summary
@@ -13,7 +13,7 @@ Online: [ virt-1 virt-2 virt-3 ]
 
 Transition Summary:
  * Fence (reboot) virt-4 'node is unclean'
- * Fence (on) virt-3 'Required by dlm:2'
+ * Fence (on) virt-3 'required by fencing monitor'
  * Fence (on) virt-1 'Device parameters changed (reload)'
  * Restart    fencing     ( virt-1 )   due to resource definition change
  * Restart    dlm:0       ( virt-1 )   due to required stonith
@@ -24,12 +24,12 @@ Transition Summary:
 
 Executing cluster transition:
  * Resource action: fencing         stop on virt-1
- * Resource action: fencing         monitor on virt-3
  * Resource action: clvmd           monitor on virt-2
  * Pseudo action:   clvmd-clone_stop_0
  * Fencing virt-4 (reboot)
  * Pseudo action:   stonith_complete
  * Fencing virt-3 (on)
+ * Resource action: fencing         monitor on virt-3
  * Resource action: dlm             monitor on virt-3
  * Resource action: clvmd           stop on virt-1
  * Resource action: clvmd           monitor on virt-3
diff --git a/pengine/test10/unfence-startup.dot b/pengine/test10/unfence-startup.dot
index d496956..642f795 100644
--- a/pengine/test10/unfence-startup.dot
+++ b/pengine/test10/unfence-startup.dot
@@ -29,6 +29,7 @@ digraph "g" {
 "stonith 'on' virt-3" -> "clvmd:2_start_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" -> "dlm:2_monitor_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" -> "dlm:2_start_0 virt-3" [ style = bold]
+"stonith 'on' virt-3" -> "fencing_monitor_0 virt-3" [ style = bold]
 "stonith 'on' virt-3" [ style=bold color="green" fontcolor="black"]
 "stonith 'reboot' virt-4" -> "stonith_complete" [ style = bold]
 "stonith 'reboot' virt-4" [ style=bold color="green" fontcolor="black"]
diff --git a/pengine/test10/unfence-startup.exp b/pengine/test10/unfence-startup.exp
index 70c1686..bfd24c8 100644
--- a/pengine/test10/unfence-startup.exp
+++ b/pengine/test10/unfence-startup.exp
@@ -6,7 +6,11 @@
         <attributes CRM_meta_on_node="virt-3" CRM_meta_on_node_uuid="3" CRM_meta_op_target_rc="7" CRM_meta_timeout="20000" />
       </rsc_op>
     </action_set>
-    <inputs/>
+    <inputs>
+      <trigger>
+        <crm_event id="4" operation="stonith" operation_key="stonith-virt-3-on" on_node="virt-3" on_node_uuid="3"/>
+      </trigger>
+    </inputs>
   </synapse>
   <synapse id="1">
     <action_set>
diff --git a/pengine/test10/unfence-startup.summary b/pengine/test10/unfence-startup.summary
index 276358c..4601f31 100644
--- a/pengine/test10/unfence-startup.summary
+++ b/pengine/test10/unfence-startup.summary
@@ -13,18 +13,18 @@ Online: [ virt-1 virt-2 virt-3 ]
 
 Transition Summary:
  * Fence (reboot) virt-4 'node is unclean'
- * Fence (on) virt-3 'Required by dlm:2'
+ * Fence (on) virt-3 'required by fencing monitor'
  * Start   dlm:2	(virt-3)
  * Start   clvmd:1	(virt-2)
  * Start   clvmd:2	(virt-3)
 
 Executing cluster transition:
- * Resource action: fencing         monitor on virt-3
  * Resource action: clvmd           monitor on virt-2
  * Fencing virt-4 (reboot)
  * Pseudo action:   stonith_complete
  * Fencing virt-3 (on)
  * Pseudo action:   all_stopped
+ * Resource action: fencing         monitor on virt-3
  * Resource action: dlm             monitor on virt-3
  * Pseudo action:   dlm-clone_start_0
  * Resource action: clvmd           monitor on virt-3
-- 
1.8.3.1


From c11d10ef4f04bbdb2e6b7e6251b88e50faccaaca Mon Sep 17 00:00:00 2001
From: Ken Gaillot <kgaillot@redhat.com>
Date: Fri, 1 Dec 2017 14:36:03 -0600
Subject: [PATCH 5/5] Test: PE: add regression test for unfencing with only
 fence devices

---
 pengine/regression.sh                 |   1 +
 pengine/test10/unfence-device.dot     |  18 ++++++
 pengine/test10/unfence-device.exp     | 100 ++++++++++++++++++++++++++++++++++
 pengine/test10/unfence-device.scores  |   5 ++
 pengine/test10/unfence-device.summary |  29 ++++++++++
 pengine/test10/unfence-device.xml     |  66 ++++++++++++++++++++++
 6 files changed, 219 insertions(+)
 create mode 100644 pengine/test10/unfence-device.dot
 create mode 100644 pengine/test10/unfence-device.exp
 create mode 100644 pengine/test10/unfence-device.scores
 create mode 100644 pengine/test10/unfence-device.summary
 create mode 100644 pengine/test10/unfence-device.xml

diff --git a/pengine/regression.sh b/pengine/regression.sh
index db101e7..47cf0ba 100755
--- a/pengine/regression.sh
+++ b/pengine/regression.sh
@@ -393,6 +393,7 @@ echo ""
 do_test unfence-startup "Clean unfencing"
 do_test unfence-definition "Unfencing when the agent changes"
 do_test unfence-parameters "Unfencing when the agent parameters changes"
+do_test unfence-device "Unfencing when a cluster has only fence devices"
 
 echo ""
 do_test master-0 "Stopped -> Slave"
diff --git a/pengine/test10/unfence-device.dot b/pengine/test10/unfence-device.dot
new file mode 100644
index 0000000..e383fd2
--- /dev/null
+++ b/pengine/test10/unfence-device.dot
@@ -0,0 +1,18 @@
+digraph "g" {
+"fence_scsi_monitor_0 virt-008" -> "fence_scsi_start_0 virt-008" [ style = bold]
+"fence_scsi_monitor_0 virt-008" [ style=bold color="green" fontcolor="black"]
+"fence_scsi_monitor_0 virt-009" -> "fence_scsi_start_0 virt-008" [ style = bold]
+"fence_scsi_monitor_0 virt-009" [ style=bold color="green" fontcolor="black"]
+"fence_scsi_monitor_0 virt-013" -> "fence_scsi_start_0 virt-008" [ style = bold]
+"fence_scsi_monitor_0 virt-013" [ style=bold color="green" fontcolor="black"]
+"fence_scsi_monitor_60000 virt-008" [ style=bold color="green" fontcolor="black"]
+"fence_scsi_start_0 virt-008" -> "fence_scsi_monitor_60000 virt-008" [ style = bold]
+"fence_scsi_start_0 virt-008" [ style=bold color="green" fontcolor="black"]
+"stonith 'on' virt-008" -> "fence_scsi_monitor_0 virt-008" [ style = bold]
+"stonith 'on' virt-008" -> "fence_scsi_start_0 virt-008" [ style = bold]
+"stonith 'on' virt-008" [ style=bold color="green" fontcolor="black"]
+"stonith 'on' virt-009" -> "fence_scsi_monitor_0 virt-009" [ style = bold]
+"stonith 'on' virt-009" [ style=bold color="green" fontcolor="black"]
+"stonith 'on' virt-013" -> "fence_scsi_monitor_0 virt-013" [ style = bold]
+"stonith 'on' virt-013" [ style=bold color="green" fontcolor="black"]
+}
diff --git a/pengine/test10/unfence-device.exp b/pengine/test10/unfence-device.exp
new file mode 100644
index 0000000..98cb548
--- /dev/null
+++ b/pengine/test10/unfence-device.exp
@@ -0,0 +1,100 @@
+<transition_graph cluster-delay="60s" stonith-timeout="60s" failed-stop-offset="INFINITY" failed-start-offset="INFINITY"  transition_id="0">
+  <synapse id="0">
+    <action_set>
+      <rsc_op id="9" operation="monitor" operation_key="fence_scsi_monitor_60000" on_node="virt-008" on_node_uuid="1">
+        <primitive id="fence_scsi" class="stonith" type="fence_scsi"/>
+        <attributes CRM_meta_interval="60000" CRM_meta_name="monitor" CRM_meta_on_node="virt-008" CRM_meta_on_node_uuid="1" CRM_meta_timeout="20000"  devices="/dev/disk/by-id/scsi-36001405a853dfde18e94ae885bda3b65,/dev/disk/by-id/scsi-360014059f350cbdba7243278a4b5da87" pcmk_host_check="static-list" pcmk_host_list="virt-008,virt-009,virt-013" pcmk_reboot_action="off"/>
+      </rsc_op>
+    </action_set>
+    <inputs>
+      <trigger>
+        <rsc_op id="8" operation="start" operation_key="fence_scsi_start_0" on_node="virt-008" on_node_uuid="1"/>
+      </trigger>
+    </inputs>
+  </synapse>
+  <synapse id="1">
+    <action_set>
+      <rsc_op id="8" operation="start" operation_key="fence_scsi_start_0" on_node="virt-008" on_node_uuid="1">
+        <primitive id="fence_scsi" class="stonith" type="fence_scsi"/>
+        <attributes CRM_meta_on_node="virt-008" CRM_meta_on_node_uuid="1" CRM_meta_timeout="20000"  devices="/dev/disk/by-id/scsi-36001405a853dfde18e94ae885bda3b65,/dev/disk/by-id/scsi-360014059f350cbdba7243278a4b5da87" pcmk_host_check="static-list" pcmk_host_list="virt-008,virt-009,virt-013" pcmk_reboot_action="off"/>
+      </rsc_op>
+    </action_set>
+    <inputs>
+      <trigger>
+        <rsc_op id="2" operation="monitor" operation_key="fence_scsi_monitor_0" on_node="virt-008" on_node_uuid="1"/>
+      </trigger>
+      <trigger>
+        <crm_event id="3" operation="stonith" operation_key="stonith-virt-008-on" on_node="virt-008" on_node_uuid="1"/>
+      </trigger>
+      <trigger>
+        <rsc_op id="4" operation="monitor" operation_key="fence_scsi_monitor_0" on_node="virt-009" on_node_uuid="2"/>
+      </trigger>
+      <trigger>
+        <rsc_op id="6" operation="monitor" operation_key="fence_scsi_monitor_0" on_node="virt-013" on_node_uuid="3"/>
+      </trigger>
+    </inputs>
+  </synapse>
+  <synapse id="2">
+    <action_set>
+      <rsc_op id="6" operation="monitor" operation_key="fence_scsi_monitor_0" on_node="virt-013" on_node_uuid="3">
+        <primitive id="fence_scsi" class="stonith" type="fence_scsi"/>
+        <attributes CRM_meta_on_node="virt-013" CRM_meta_on_node_uuid="3" CRM_meta_op_target_rc="7" CRM_meta_timeout="20000"  devices="/dev/disk/by-id/scsi-36001405a853dfde18e94ae885bda3b65,/dev/disk/by-id/scsi-360014059f350cbdba7243278a4b5da87" pcmk_host_check="static-list" pcmk_host_list="virt-008,virt-009,virt-013" pcmk_reboot_action="off"/>
+      </rsc_op>
+    </action_set>
+    <inputs>
+      <trigger>
+        <crm_event id="7" operation="stonith" operation_key="stonith-virt-013-on" on_node="virt-013" on_node_uuid="3"/>
+      </trigger>
+    </inputs>
+  </synapse>
+  <synapse id="3">
+    <action_set>
+      <rsc_op id="4" operation="monitor" operation_key="fence_scsi_monitor_0" on_node="virt-009" on_node_uuid="2">
+        <primitive id="fence_scsi" class="stonith" type="fence_scsi"/>
+        <attributes CRM_meta_on_node="virt-009" CRM_meta_on_node_uuid="2" CRM_meta_op_target_rc="7" CRM_meta_timeout="20000"  devices="/dev/disk/by-id/scsi-36001405a853dfde18e94ae885bda3b65,/dev/disk/by-id/scsi-360014059f350cbdba7243278a4b5da87" pcmk_host_check="static-list" pcmk_host_list="virt-008,virt-009,virt-013" pcmk_reboot_action="off"/>
+      </rsc_op>
+    </action_set>
+    <inputs>
+      <trigger>
+        <crm_event id="5" operation="stonith" operation_key="stonith-virt-009-on" on_node="virt-009" on_node_uuid="2"/>
+      </trigger>
+    </inputs>
+  </synapse>
+  <synapse id="4">
+    <action_set>
+      <rsc_op id="2" operation="monitor" operation_key="fence_scsi_monitor_0" on_node="virt-008" on_node_uuid="1">
+        <primitive id="fence_scsi" class="stonith" type="fence_scsi"/>
+        <attributes CRM_meta_on_node="virt-008" CRM_meta_on_node_uuid="1" CRM_meta_op_target_rc="7" CRM_meta_timeout="20000"  devices="/dev/disk/by-id/scsi-36001405a853dfde18e94ae885bda3b65,/dev/disk/by-id/scsi-360014059f350cbdba7243278a4b5da87" pcmk_host_check="static-list" pcmk_host_list="virt-008,virt-009,virt-013" pcmk_reboot_action="off"/>
+      </rsc_op>
+    </action_set>
+    <inputs>
+      <trigger>
+        <crm_event id="3" operation="stonith" operation_key="stonith-virt-008-on" on_node="virt-008" on_node_uuid="1"/>
+      </trigger>
+    </inputs>
+  </synapse>
+  <synapse id="5">
+    <action_set>
+      <crm_event id="7" operation="stonith" operation_key="stonith-virt-013-on" on_node="virt-013" on_node_uuid="3">
+        <attributes CRM_meta_on_node="virt-013" CRM_meta_on_node_uuid="3" CRM_meta_shutdown="0" CRM_meta_stonith_action="on" />
+      </crm_event>
+    </action_set>
+    <inputs/>
+  </synapse>
+  <synapse id="6">
+    <action_set>
+      <crm_event id="5" operation="stonith" operation_key="stonith-virt-009-on" on_node="virt-009" on_node_uuid="2">
+        <attributes CRM_meta_on_node="virt-009" CRM_meta_on_node_uuid="2" CRM_meta_shutdown="0" CRM_meta_stonith_action="on" />
+      </crm_event>
+    </action_set>
+    <inputs/>
+  </synapse>
+  <synapse id="7">
+    <action_set>
+      <crm_event id="3" operation="stonith" operation_key="stonith-virt-008-on" on_node="virt-008" on_node_uuid="1">
+        <attributes CRM_meta_on_node="virt-008" CRM_meta_on_node_uuid="1" CRM_meta_shutdown="0" CRM_meta_stonith_action="on" />
+      </crm_event>
+    </action_set>
+    <inputs/>
+  </synapse>
+</transition_graph>
diff --git a/pengine/test10/unfence-device.scores b/pengine/test10/unfence-device.scores
new file mode 100644
index 0000000..8ea5036
--- /dev/null
+++ b/pengine/test10/unfence-device.scores
@@ -0,0 +1,5 @@
+Allocation scores:
+Using the original execution date of: 2017-11-30 10:44:29Z
+native_color: fence_scsi allocation score on virt-008: 0
+native_color: fence_scsi allocation score on virt-009: 0
+native_color: fence_scsi allocation score on virt-013: 0
diff --git a/pengine/test10/unfence-device.summary b/pengine/test10/unfence-device.summary
new file mode 100644
index 0000000..181724b
--- /dev/null
+++ b/pengine/test10/unfence-device.summary
@@ -0,0 +1,29 @@
+Using the original execution date of: 2017-11-30 10:44:29Z
+
+Current cluster status:
+Online: [ virt-008 virt-009 virt-013 ]
+
+ fence_scsi	(stonith:fence_scsi):	Stopped
+
+Transition Summary:
+ * Fence (on) virt-013 'required by fence_scsi monitor'
+ * Fence (on) virt-009 'required by fence_scsi monitor'
+ * Fence (on) virt-008 'required by fence_scsi monitor'
+ * Start      fence_scsi     ( virt-008 )  
+
+Executing cluster transition:
+ * Fencing virt-013 (on)
+ * Fencing virt-009 (on)
+ * Fencing virt-008 (on)
+ * Resource action: fence_scsi      monitor on virt-013
+ * Resource action: fence_scsi      monitor on virt-009
+ * Resource action: fence_scsi      monitor on virt-008
+ * Resource action: fence_scsi      start on virt-008
+ * Resource action: fence_scsi      monitor=60000 on virt-008
+Using the original execution date of: 2017-11-30 10:44:29Z
+
+Revised cluster status:
+Online: [ virt-008 virt-009 virt-013 ]
+
+ fence_scsi	(stonith:fence_scsi):	Started virt-008
+
diff --git a/pengine/test10/unfence-device.xml b/pengine/test10/unfence-device.xml
new file mode 100644
index 0000000..e977d9b
--- /dev/null
+++ b/pengine/test10/unfence-device.xml
@@ -0,0 +1,66 @@
+<cib crm_feature_set="3.0.12" validate-with="pacemaker-2.8" epoch="6" num_updates="0" admin_epoch="0" cib-last-written="Thu Nov 30 11:44:29 2017" update-origin="virt-008" update-client="cibadmin" update-user="root" have-quorum="1" dc-uuid="2" execution-date="1512038669">
+  <configuration>
+    <crm_config>
+      <cluster_property_set id="cib-bootstrap-options">
+        <nvpair id="cib-bootstrap-options-have-watchdog" name="have-watchdog" value="false"/>
+        <nvpair id="cib-bootstrap-options-dc-version" name="dc-version" value="1.1.16-12.el7_4.4-94ff4df"/>
+        <nvpair id="cib-bootstrap-options-cluster-infrastructure" name="cluster-infrastructure" value="corosync"/>
+        <nvpair id="cib-bootstrap-options-cluster-name" name="cluster-name" value="STSRHTS22676"/>
+      </cluster_property_set>
+    </crm_config>
+    <nodes>
+      <node id="1" uname="virt-008"/>
+      <node id="2" uname="virt-009"/>
+      <node id="3" uname="virt-013"/>
+    </nodes>
+    <resources>
+      <primitive class="stonith" id="fence_scsi" type="fence_scsi">
+        <instance_attributes id="fence_scsi-instance_attributes">
+          <nvpair id="fence_scsi-instance_attributes-devices" name="devices" value="/dev/disk/by-id/scsi-36001405a853dfde18e94ae885bda3b65,/dev/disk/by-id/scsi-360014059f350cbdba7243278a4b5da87"/>
+          <nvpair id="fence_scsi-instance_attributes-pcmk_host_check" name="pcmk_host_check" value="static-list"/>
+          <nvpair id="fence_scsi-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="virt-008,virt-009,virt-013"/>
+          <nvpair id="fence_scsi-instance_attributes-pcmk_reboot_action" name="pcmk_reboot_action" value="off"/>
+        </instance_attributes>
+        <meta_attributes id="fence_scsi-meta_attributes">
+          <nvpair id="fence_scsi-meta_attributes-provides" name="provides" value="unfencing"/>
+        </meta_attributes>
+        <operations>
+          <op id="fence_scsi-monitor-interval-60s" interval="60s" name="monitor"/>
+        </operations>
+      </primitive>
+    </resources>
+    <constraints/>
+  </configuration>
+  <status>
+    <node_state id="3" uname="virt-013" in_ccm="true" crmd="online" crm-debug-origin="do_state_transition" join="member" expected="member">
+      <lrm id="3">
+        <lrm_resources/>
+      </lrm>
+      <transient_attributes id="3">
+        <instance_attributes id="status-3">
+          <nvpair id="status-3-shutdown" name="shutdown" value="0"/>
+        </instance_attributes>
+      </transient_attributes>
+    </node_state>
+    <node_state id="1" uname="virt-008" in_ccm="true" crmd="online" crm-debug-origin="do_state_transition" join="member" expected="member">
+      <lrm id="1">
+        <lrm_resources/>
+      </lrm>
+      <transient_attributes id="1">
+        <instance_attributes id="status-1">
+          <nvpair id="status-1-shutdown" name="shutdown" value="0"/>
+        </instance_attributes>
+      </transient_attributes>
+    </node_state>
+    <node_state id="2" uname="virt-009" in_ccm="true" crmd="online" crm-debug-origin="do_state_transition" join="member" expected="member">
+      <transient_attributes id="2">
+        <instance_attributes id="status-2">
+          <nvpair id="status-2-shutdown" name="shutdown" value="0"/>
+        </instance_attributes>
+      </transient_attributes>
+      <lrm id="2">
+        <lrm_resources/>
+      </lrm>
+    </node_state>
+  </status>
+</cib>
-- 
1.8.3.1