<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:04:23 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-6915] sanity-lfsck test 31h fail: &#8220;(3) unexpected status&#8221;</title>
                <link>https://jira.whamcloud.com/browse/LU-6915</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;sanity-lfsck test 31h fails with &#8220;(3) unexpected status&#8221;. Logs are at: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/ef98233e-3293-11e5-8214-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/ef98233e-3293-11e5-8214-5254006e85c2&lt;/a&gt; &lt;/p&gt;

&lt;p&gt;From the LFSCK namespace output, we see:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;20:24:57:status: partial
20:24:57:flags: incomplete
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>review-dne-part-2 in autotest</environment>
        <key id="31244">LU-6915</key>
            <summary>sanity-lfsck test 31h fail: &#8220;(3) unexpected status&#8221;</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="3">Duplicate</resolution>
                                        <assignee username="yong.fan">nasf</assignee>
                                    <reporter username="jamesanunez">James Nunez</reporter>
                        <labels>
                            <label>lfsck</label>
                    </labels>
                <created>Mon, 27 Jul 2015 22:55:14 +0000</created>
                <updated>Thu, 11 Feb 2016 05:28:50 +0000</updated>
                            <resolved>Thu, 11 Feb 2016 05:28:50 +0000</resolved>
                                    <version>Lustre 2.8.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>3</watches>
                                                                            <comments>
                            <comment id="132382" author="jamesanunez" created="Mon, 2 Nov 2015 17:56:22 +0000"  >&lt;p&gt;Another failure on master:&lt;br/&gt;
2015-10-31 04:03:03 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/0248405c-7fbc-11e5-bf12-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/0248405c-7fbc-11e5-bf12-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Another failure on master for sanity-lfsck test_31g:&lt;br/&gt;
2015-11-02 19:16:01 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/fdb229ae-81cd-11e5-af7b-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/fdb229ae-81cd-11e5-af7b-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="134948" author="yujian" created="Wed, 2 Dec 2015 01:13:59 +0000"  >&lt;p&gt;More instance on master:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/79ea4116-9784-11e5-b72a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/79ea4116-9784-11e5-b72a-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="141746" author="yong.fan" created="Wed, 10 Feb 2016 09:25:07 +0000"  >&lt;p&gt;Another failure instance:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/fdb5d7b8-cb18-11e5-be8d-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/fdb5d7b8-cb18-11e5-be8d-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="141950" author="yong.fan" created="Thu, 11 Feb 2016 05:28:14 +0000"  >&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;00000020:00000080:0.0:1448881260.165609:0:29625:0:(class_obd.c:229:class_handle_ioctl()) cmd = c00866e6
00000004:00000080:0.0:1448881260.165616:0:29625:0:(mdt_handler.c:5587:mdt_iocontrol()) handling ioctl cmd 0xc00866e6
00100000:10000000:0.0:1448881260.166859:0:29625:0:(lfsck_namespace.c:3798:lfsck_namespace_reset()) lustre-MDT0000-osd: namespace LFSCK reset: rc = 0
00100000:10000000:1.0:1448881260.167039:0:29627:0:(osd_scrub.c:652:osd_scrub_prep()) lustre-MDT0000: OI scrub prep, flags = 0x46
00100000:10000000:1.0:1448881260.167043:0:29627:0:(osd_scrub.c:278:osd_scrub_file_reset()) lustre-MDT0000: reset OI scrub file, old flags = 0x0, add flags = 0x0
00100000:10000000:1.0:1448881260.167157:0:29628:0:(lfsck_engine.c:1562:lfsck_assistant_engine()) lustre-MDT0000-osd: lfsck_namespace LFSCK assistant thread start
00100000:10000000:1.0:1448881260.167179:0:29626:0:(lfsck_namespace.c:4041:lfsck_namespace_prep()) lustre-MDT0000-osd: namespace LFSCK prep done, start pos [1, [0x0:0x0:0x0], 0x0]: rc = 0
00100000:10000000:1.0:1448881260.167185:0:29627:0:(osd_scrub.c:1498:osd_scrub_main()) lustre-MDT0000: OI scrub start, flags = 0x46, pos = 12
00100000:10000000:1.0:1448881260.167673:0:29626:0:(lfsck_namespace.c:3940:lfsck_namespace_checkpoint()) lustre-MDT0000-osd: namespace LFSCK checkpoint at the pos [12, [0x0:0x0:0x0], 0x0]: rc = 0
00100000:10000000:1.0:1448881260.167676:0:29626:0:(lfsck_engine.c:1046:lfsck_master_engine()) LFSCK entry: oit_flags = 0x60000, dir_flags = 0x8006, oit_cookie = 12, dir_cookie = 0x0, parent = [0x0:0x0:0x0], pid = 29626
00000100:00100000:0.0:1448881260.167737:0:29625:0:(client.c:1530:ptlrpc_send_new_req()) Sending RPC pname:cluuid:pid:xid:nid:opc lctl:lustre-MDT0000-mdtlov_UUID:29625:1519247244731748:10.2.4.167@tcp:1101
00000100:00100000:0.0:1448881260.167775:0:29625:0:(client.c:1530:ptlrpc_send_new_req()) Sending RPC pname:cluuid:pid:xid:nid:opc lctl:lustre-MDT0000-mdtlov_UUID:29625:1519247244731752:10.2.4.167@tcp:1101
00000100:00100000:0.0:1448881260.167784:0:29625:0:(client.c:1530:ptlrpc_send_new_req()) Sending RPC pname:cluuid:pid:xid:nid:opc lctl:lustre-MDT0000-mdtlov_UUID:29625:1519247244731756:10.2.4.167@tcp:1101
00000100:00100000:0.0:1448881260.167790:0:29625:0:(client.c:2210:ptlrpc_set_wait()) set ffff880059e146c0 going to sleep for 6 seconds
00100000:10000000:0.0:1448881260.170033:0:29625:0:(lfsck_lib.c:2031:lfsck_async_interpret_common()) lustre-MDT0000-osd: fail to notify MDT 3 for lfsck_namespace start: rc = -114
...
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;The logs show that some former LFSCK instance had not finished yet when the new LFSCK started, which caused only part of the MDTs to join the current LFSCK run, so the final LFSCK status was &quot;partial&quot;, not &quot;completed&quot;.&lt;/p&gt;

&lt;p&gt;We should ensure that all the LFSCK instances are completed before the next LFSCK run. We already have the solution with the patch &lt;a href=&quot;http://review.whamcloud.com/#/c/17406/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/#/c/17406/&lt;/a&gt;&lt;/p&gt;
                            <comment id="141951" author="yong.fan" created="Thu, 11 Feb 2016 05:28:50 +0000"  >&lt;p&gt;It is another failure instance of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-7256&quot; title=&quot;sanity-lfsck TIMEOUT on umount /mnt/mds4&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-7256&quot;&gt;&lt;del&gt;LU-7256&lt;/del&gt;&lt;/a&gt;.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                            <outwardlinks description="duplicates">
                                        <issuelink>
            <issuekey id="27566">LU-5911</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzxj53:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>