<?xml version="1.0" encoding="UTF-8"?>
<!--
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 03:15:48 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-15140] recovery-random-scale: No sub tests failed in this test set, FAIL: remove sub-test dirs failed</title>
                <link>https://jira.whamcloud.com/browse/LU-15140</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for Andreas Dilger &amp;lt;adilger@whamcloud.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run:&lt;br/&gt;
&lt;a href=&quot;https://testing.whamcloud.com/test_sets/dfbec373-e4f6-416f-83b9-2265475a3b80&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/dfbec373-e4f6-416f-83b9-2265475a3b80&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.whamcloud.com/test_sets/0d2d3b02-4871-4160-a995-a51a74e4cd3b&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/0d2d3b02-4871-4160-a995-a51a74e4cd3b&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.whamcloud.com/test_sets/f060ce64-9e1e-4546-b4bd-2f740787c589&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/f060ce64-9e1e-4546-b4bd-2f740787c589&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.whamcloud.com/test_sets/2640a2ac-2fff-48d5-82a6-cab054add322&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/2640a2ac-2fff-48d5-82a6-cab054add322&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.whamcloud.com/test_sets/530acf01-f304-4dfa-94aa-16b5c1280cc5&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/530acf01-f304-4dfa-94aa-16b5c1280cc5&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.whamcloud.com/test_sets/902b1bb9-ed8b-400b-895a-baa328d4b7c5&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/902b1bb9-ed8b-400b-895a-baa328d4b7c5&lt;/a&gt;&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;== recovery-random-scale test complete, duration 85944 sec =========================================== 19:34:45 (1634672085)
rm: cannot remove &apos;/mnt/lustre/d0.tar-trevis-68vm4.trevis.whamcloud.com/etc/selinux/targeted/active/modules/100&apos;: Directory not empty
 recovery-random-scale : @@@@@@ FAIL: remove sub-test dirs failed 
Stopping clients: trevis-68vm1.trevis.whamcloud.com,trevis-68vm3,trevis-68vm4 /mnt/lustre (opts:)
while umount  /mnt/lustre 2&amp;gt;&amp;amp;1 | grep -q busy; do
    echo /mnt/lustre is still busy, wait one second &amp;amp;&amp;amp; sleep 1;
done;
fi
Stopping client trevis-68vm1.trevis.whamcloud.com /mnt/lustre opts:
Stopping client trevis-68vm3.trevis.whamcloud.com /mnt/lustre opts:
Stopping client trevis-68vm4.trevis.whamcloud.com /mnt/lustre opts:
COMMAND    PID USER   FD   TYPE      DEVICE SIZE/OFF               NODE NAME
run_tar.s 2934 root  cwd    DIR 1273,181606    11264 144116446786489609 /mnt/lustre/d0.tar-trevis-68vm4.trevis.whamcloud.com
tar       3222 root  cwd    DIR 1273,181606    11264 144116446786489609 /mnt/lustre/d0.tar-trevis-68vm4.trevis.whamcloud.com
tar       3223 root  cwd    DIR 1273,181606    11264 144116446786489609 /mnt/lustre/d0.tar-trevis-68vm4.trevis.whamcloud.com
tar       3223 root    3w   REG 1273,181606     5156 144117587637177621 /mnt/lustre/d0.tar-trevis-68vm4.trevis.whamcloud.com/etc/selinux/targeted/active/modules/100/gpg/cil
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
COMMAND    PID USER   FD      TYPE      DEVICE   SIZE/OFF               NODE NAME
run_dd.sh 2747 root  cwd   unknown 1273,181606                               /mnt/lustre/d0.dd-trevis-68vm3.trevis.whamcloud.com
dd        2772 root  cwd   unknown 1273,181606                               /mnt/lustre/d0.dd-trevis-68vm3.trevis.whamcloud.com
dd        2772 root    1w      REG 1273,181606 1160388608 144117486973878288 /mnt/lustre/d0.dd-trevis-68vm3.trevis.whamcloud.com/dd-file
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
:
:
Stopping clients: trevis-68vm1.trevis.whamcloud.com,trevis-68vm3,trevis-68vm4 /mnt/lustre2 (opts:)
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;so it looks like the client filesystem is eventually unmounted correctly, after the running jobs complete.  It appears from the jobs that are still running that &quot;&lt;tt&gt;tar&lt;/tt&gt;&quot; may still be writing into that directory tree at the time that &quot;&lt;tt&gt;rm -r&lt;/tt&gt;&quot; is called, causing the directory not to be empty.&lt;/p&gt;

&lt;p&gt;It would make sense to ensure that the running jobs are stopped before trying to delete the directory tree.&lt;/p&gt;</description>
                <environment></environment>
        <key id="66770">LU-15140</key>
            <summary>recovery-random-scale: No sub tests failed in this test set, FAIL: remove sub-test dirs failed</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="egryaznova">Elena Gryaznova</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Thu, 21 Oct 2021 02:31:33 +0000</created>
                <updated>Tue, 9 Jan 2024 17:45:07 +0000</updated>
                            <resolved>Thu, 6 Jan 2022 23:22:20 +0000</resolved>
                                    <version>Lustre 2.12.8</version>
                    <version>Lustre 2.15.0</version>
                                    <fixVersion>Lustre 2.15.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>3</watches>
                                                                            <comments>
                            <comment id="320016" author="JIRAUSER17102" created="Fri, 3 Dec 2021 18:35:31 +0000"  >&lt;p&gt;Very similar issues can be observed in recovery-double-scale, recovery-mds-scale, recovery-double-scale test sets on 2.12.8:&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://testing.whamcloud.com/test_sets/700f5bee-22e3-4ea7-b49d-b42ce30895f5&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/700f5bee-22e3-4ea7-b49d-b42ce30895f5&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://testing.whamcloud.com/test_sets/66753f7e-69b3-43a7-bd3e-2d4b415a204d&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/66753f7e-69b3-43a7-bd3e-2d4b415a204d&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://testing.whamcloud.com/test_sets/226e5c40-191f-4929-b0d8-a41775b12ecd&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/226e5c40-191f-4929-b0d8-a41775b12ecd&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Subtests are either skipped or passed, but the test set is marked as failed. Very similar logs can be found in these test runs, for example:&lt;/p&gt;
&lt;div class=&quot;code panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;codeContent panelContent&quot;&gt;
&lt;pre class=&quot;code-java&quot;&gt;
== recovery-&lt;span class=&quot;code-object&quot;&gt;double&lt;/span&gt;-scale test complete, duration 1846 sec ============================================ 19:24:04 (1637436244)
rm: cannot remove &lt;span class=&quot;code-quote&quot;&gt;&apos;/mnt/lustre/d0.tar-onyx-64vm4.onyx.whamcloud.com/etc/selinux/targeted/active/modules/100&apos;&lt;/span&gt;: Directory not empty
 recovery-&lt;span class=&quot;code-object&quot;&gt;double&lt;/span&gt;-scale : @@@@@@ FAIL: remove sub-test dirs failed 
  Trace dump:
  = /usr/lib64/lustre/tests/test-framework.sh:5919:error()
  = /usr/lib64/lustre/tests/test-framework.sh:5404:check_and_cleanup_lustre()
  = /usr/lib64/lustre/tests/recovery-&lt;span class=&quot;code-object&quot;&gt;double&lt;/span&gt;-scale.sh:309:main()
Dumping lctl log to /autotest/autotest-1/2021-11-20/lustre-b2_12_failover-part-1_150_1_40_13db9919-f21e-4132-8be9-3d11b4f5908e&lt;span class=&quot;code-comment&quot;&gt;//recovery-&lt;span class=&quot;code-object&quot;&gt;double&lt;/span&gt;-scale..*.1637436272.log
&lt;/span&gt;CMD: onyx-109vm9,onyx-24vm8,onyx-64vm1.onyx.whamcloud.com,onyx-64vm3,onyx-64vm4 /usr/sbin/lctl dk &amp;gt; /autotest/autotest-1/2021-11-20/lustre-b2_12_failover-part-1_150_1_40_13db9919-f21e-4132-8be9-3d11b4f5908e&lt;span class=&quot;code-comment&quot;&gt;//recovery-&lt;span class=&quot;code-object&quot;&gt;double&lt;/span&gt;-scale..debug_log.\$(hostname -s).1637436272.log;
&lt;/span&gt;         dmesg &amp;gt; /autotest/autotest-1/2021-11-20/lustre-b2_12_failover-part-1_150_1_40_13db9919-f21e-4132-8be9-3d11b4f5908e&lt;span class=&quot;code-comment&quot;&gt;//recovery-&lt;span class=&quot;code-object&quot;&gt;double&lt;/span&gt;-scale..dmesg.\$(hostname -s).1637436272.log
&lt;/span&gt;CMD: onyx-109vm9,onyx-24vm8,onyx-64vm1.onyx.whamcloud.com,onyx-64vm3,onyx-64vm4 rsync -az /autotest/autotest-1/2021-11-20/lustre-b2_12_failover-part-1_150_1_40_13db9919-f21e-4132-8be9-3d11b4f5908e&lt;span class=&quot;code-comment&quot;&gt;//recovery-&lt;span class=&quot;code-object&quot;&gt;double&lt;/span&gt;-scale..*.1637436272.log onyx-64vm1.onyx.whamcloud.com:/autotest/autotest-1/2021-11-20/lustre-b2_12_failover-part-1_150_1_40_13db9919-f21e-4132-8be9-3d11b4f5908e/
&lt;/span&gt;Resetting fail_loc on all nodes...CMD: onyx-109vm9,onyx-24vm8,onyx-64vm1.onyx.whamcloud.com,onyx-64vm3,onyx-64vm4 lctl set_param -n fail_loc=0 	    fail_val=0 2&amp;gt;/dev/&lt;span class=&quot;code-keyword&quot;&gt;null&lt;/span&gt;
done.
Stopping clients: onyx-64vm1.onyx.whamcloud.com,onyx-64vm3,onyx-64vm4 /mnt/lustre (opts:)
CMD: onyx-64vm1.onyx.whamcloud.com,onyx-64vm3,onyx-64vm4 running=\$(grep -c /mnt/lustre&lt;span class=&quot;code-quote&quot;&gt;&apos; &apos;&lt;/span&gt; /proc/mounts);
&lt;span class=&quot;code-keyword&quot;&gt;if&lt;/span&gt; [ \$running -ne 0 ] ; then
echo Stopping client \$(hostname) /mnt/lustre opts:;
lsof /mnt/lustre || need_kill=no;
&lt;span class=&quot;code-keyword&quot;&gt;if&lt;/span&gt; [ x != x -a x\$need_kill != xno ]; then
    pids=\$(lsof -t /mnt/lustre | sort -u);
    &lt;span class=&quot;code-keyword&quot;&gt;if&lt;/span&gt; [ -n \&lt;span class=&quot;code-quote&quot;&gt;&quot;\$pids\&quot;&lt;/span&gt; ]; then
             kill -9 \$pids;
    fi
fi;
&lt;span class=&quot;code-keyword&quot;&gt;while&lt;/span&gt; umount  /mnt/lustre 2&amp;gt;&amp;amp;1 | grep -q busy; &lt;span class=&quot;code-keyword&quot;&gt;do&lt;/span&gt;
    echo /mnt/lustre is still busy, wait one second &amp;amp;&amp;amp; sleep 1;
done;
fi
Stopping client onyx-64vm4.onyx.whamcloud.com /mnt/lustre opts:
Stopping client onyx-64vm3.onyx.whamcloud.com /mnt/lustre opts:
Stopping client onyx-64vm1.onyx.whamcloud.com /mnt/lustre opts:
COMMAND    PID USER   FD   TYPE      DEVICE SIZE/OFF               NODE NAME
run_tar.s 2826 root  cwd    DIR 1273,181606     4096 144116614575426332 /mnt/lustre/d0.tar-onyx-64vm4.onyx.whamcloud.com
tar       2938 root  cwd    DIR 1273,181606     4096 144116614575426332 /mnt/lustre/d0.tar-onyx-64vm4.onyx.whamcloud.com
tar       2939 root  cwd    DIR 1273,181606     4096 144116614575426332 /mnt/lustre/d0.tar-onyx-64vm4.onyx.whamcloud.com
COMMAND    PID USER   FD      TYPE      DEVICE   SIZE/OFF               NODE NAME
run_dd.sh 2920 root  cwd   unknown 1273,181606                               /mnt/lustre/d0.dd-onyx-64vm3.onyx.whamcloud.com
dd        3353 root  cwd   unknown 1273,181606                               /mnt/lustre/d0.dd-onyx-64vm3.onyx.whamcloud.com
dd        3353 root    1w      REG 1273,181606 2920480768 144116681667510276 /mnt/lustre/d0.dd-onyx-64vm3.onyx.whamcloud.com/dd-file
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
/mnt/lustre is still busy, wait one second
... 
Stopping clients: onyx-64vm1.onyx.whamcloud.com,onyx-64vm3,onyx-64vm4 /mnt/lustre2 (opts:)&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="320508" author="gerrit" created="Fri, 10 Dec 2021 16:07:13 +0000"  >&lt;p&gt;&quot;Elena Gryaznova &amp;lt;elena.gryaznova@hpe.com&amp;gt;&quot; uploaded a new patch: &lt;a href=&quot;https://review.whamcloud.com/45824&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/45824&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-15140&quot; title=&quot;recovery-random-scale: No sub tests failed in this test set, FAIL: remove sub-test dirs failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-15140&quot;&gt;&lt;del&gt;LU-15140&lt;/del&gt;&lt;/a&gt; tests: cleanup of recovery-*-scale tests fails&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: c53265963c38501247c1d5063490164838f967dd&lt;/p&gt;</comment>
                            <comment id="321966" author="gerrit" created="Thu, 6 Jan 2022 22:04:06 +0000"  >&lt;p&gt;&quot;Oleg Drokin &amp;lt;green@whamcloud.com&amp;gt;&quot; merged in patch &lt;a href=&quot;https://review.whamcloud.com/45824/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/45824/&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-15140&quot; title=&quot;recovery-random-scale: No sub tests failed in this test set, FAIL: remove sub-test dirs failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-15140&quot;&gt;&lt;del&gt;LU-15140&lt;/del&gt;&lt;/a&gt; tests: cleanup of recovery-*-scale tests fails&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: &lt;br/&gt;
Commit: f252abc6690247ee9608dbde80238add0ecaed8c&lt;/p&gt;</comment>
                            <comment id="321994" author="pjones" created="Thu, 6 Jan 2022 23:22:20 +0000"  >&lt;p&gt;Landed for 2.15&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="46524">LU-9602</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i027zb:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>