<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 03:35:01 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-17385] sanity-lfsck test_26a: only 3 of 4 MDTs are in completed </title>
                <link>https://jira.whamcloud.com/browse/LU-17385</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for Andreas Dilger &amp;lt;adilger@whamcloud.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.whamcloud.com/test_sets/e80cc085-ac08-4f47-b354-22551a7da132&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/e80cc085-ac08-4f47-b354-22551a7da132&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;test_26a failed with the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;(7) only 3 of 4 MDTs are in completed
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Test session details:&lt;br/&gt;
clients: &lt;a href=&quot;https://build.whamcloud.com/job/lustre-master-patchless/840&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://build.whamcloud.com/job/lustre-master-patchless/840&lt;/a&gt; - 4.18.0-425.10.1.el8_7.x86_64&lt;br/&gt;
servers: &lt;a href=&quot;https://build.whamcloud.com/job/lustre-master-patchless/840&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://build.whamcloud.com/job/lustre-master-patchless/840&lt;/a&gt; - 4.18.0-425.10.1.el8_7.x86_64&lt;/p&gt;

&lt;p&gt;&amp;lt;&amp;lt;Please provide additional information about the failure here&amp;gt;&amp;gt;&lt;/p&gt;


&lt;p&gt;First started on 2023-12-20 for full runs, may be related to recent patch landing. &lt;/p&gt;





&lt;p&gt;VVVVVVV DO NOT REMOVE LINES BELOW, Added by Maloo for auto-association VVVVVVV&lt;br/&gt;
sanity-lfsck test_26a - (7) only 3 of 4 MDTs are in completed&lt;/p&gt;</description>
                <environment></environment>
        <key id="79700">LU-17385</key>
            <summary>sanity-lfsck test_26a: only 3 of 4 MDTs are in completed </summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="3" iconUrl="https://jira.whamcloud.com/images/icons/priorities/major.svg">Major</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="zam">Alexander Zarochentsev</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                            <label>always_except</label>
                    </labels>
                <created>Thu, 21 Dec 2023 18:56:17 +0000</created>
                <updated>Sun, 4 Feb 2024 14:24:50 +0000</updated>
                            <resolved>Sun, 4 Feb 2024 14:24:50 +0000</resolved>
                                    <version>Lustre 2.16.0</version>
                                    <fixVersion>Lustre 2.16.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>7</watches>
                                                                            <comments>
                            <comment id="397850" author="pjones" created="Fri, 22 Dec 2023 01:24:33 +0000"  >&lt;p&gt;Hongchao&lt;/p&gt;

&lt;p&gt;This seems to have started failing only very recently. Can you identify which change introduced this issue?&lt;/p&gt;

&lt;p&gt;Thanks&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="397866" author="hongchao.zhang" created="Fri, 22 Dec 2023 07:58:50 +0000"  >&lt;p&gt;The LFSCK_NOTIFY request is failed to be processed by MDT, but it is strange that other LFSCK_NOTIFY request succeed&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;00000100:00100000:1.0:1703075647.451663:0:1223156:0:(service.c:2333:ptlrpc_server_handle_request()) Handling RPC req@00000000a373e95a pname:cluuid+ref:pid:xid:nid:opc:job mdt_out00_003:lustre-MDT0000-mdtlov_UUID+5:1744723:x1785766154025664:12345-10.240.26.106@tcp:1101:lctl.0
00000100:00100000:0.0:1703075647.451664:0:1215944:0:(nrs_fifo.c:179:nrs_fifo_req_get()) NRS start fifo request from 12345-10.240.26.106@tcp, seq: 1159
00000100:00100000:0.0:1703075647.451667:0:1215944:0:(service.c:2333:ptlrpc_server_handle_request()) Handling RPC req@00000000617d7544 pname:cluuid+ref:pid:xid:nid:opc:job mdt_out00_001:lustre-MDT0000-mdtlov_UUID+5:1744723:x1785766154025792:12345-10.240.26.106@tcp:1101:lctl.0
00000100:00100000:1.0:1703075647.451692:0:1223156:0:(service.c:2382:ptlrpc_server_handle_request()) Handled RPC req@00000000a373e95a pname:cluuid+ref:pid:xid:nid:opc:job mdt_out00_003:lustre-MDT0000-mdtlov_UUID+5:1744723:x1785766154025664:12345-10.240.26.106@tcp:1101:lctl.0 Request processed in 29us (98us total) trans 0 rc -95/-95
00000100:00100000:1.0:1703075647.451695:0:1223156:0:(nrs_fifo.c:241:nrs_fifo_req_stop()) NRS stop fifo request from 12345-10.240.26.106@tcp, seq: 1158
00100000:10000000:0.0:1703075647.451784:0:1215944:0:(lfsck_lib.c:2707:lfsck_load_one_trace_file()) lustre-MDT0003-osd: unlink lfsck sub trace file lfsck_namespace_01: rc = 0
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;the request (xid= x1785766154025664) failed with -95 immediately, but the similar request (xid = x1785766154025792) succeeded&lt;/p&gt;

&lt;p&gt;will create a debug patch to collect the logs&lt;/p&gt;</comment>
                            <comment id="397867" author="gerrit" created="Fri, 22 Dec 2023 08:07:03 +0000"  >&lt;p&gt;&quot;Hongchao Zhang &amp;lt;hongchao@whamcloud.com&amp;gt;&quot; uploaded a new patch: &lt;a href=&quot;https://review.whamcloud.com/c/fs/lustre-release/+/53530&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/c/fs/lustre-release/+/53530&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-17385&quot; title=&quot;sanity-lfsck test_26a: only 3 of 4 MDTs are in completed &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-17385&quot;&gt;&lt;del&gt;EX-8860&lt;/del&gt;&lt;/a&gt; lfsck: debug patch&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: 17251801b1cf5516132edebd6677e2f34fcbc61c&lt;/p&gt;</comment>
                            <comment id="397970" author="adilger" created="Sat, 23 Dec 2023 01:10:58 +0000"  >&lt;p&gt;It looks like this test failure was introduced by patch &lt;a href=&quot;https://review.whamcloud.com/50998&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/50998&lt;/a&gt; &quot;&lt;tt&gt;&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-16826&quot; title=&quot;MDS nodes panicked running lfsck repair create lost objects: (osd_handler.c:6260:osd_index_declare_ea_insert()) ASSERTION( fid != ((void *)0) ) failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-16826&quot;&gt;&lt;del&gt;LU-16826&lt;/del&gt;&lt;/a&gt; tests: lfsck to repair a dangling remote entry&lt;/tt&gt;&quot; landing on 2023-12-20 which added sanity-lfsck.sh test_23d, but used:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Test-Parameters: trivial testlist=sanity-lfsck ... env=ONLY=23d
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;so it is likely leaving the filesystem in a bad state after test_23d finished and this causes test_24 and test_26a to also fail.&lt;/p&gt;</comment>
                            <comment id="397980" author="gerrit" created="Sat, 23 Dec 2023 07:19:42 +0000"  >&lt;p&gt;&quot;Andreas Dilger &amp;lt;adilger@whamcloud.com&amp;gt;&quot; uploaded a new patch: &lt;a href=&quot;https://review.whamcloud.com/c/fs/lustre-release/+/53544&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/c/fs/lustre-release/+/53544&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-17385&quot; title=&quot;sanity-lfsck test_26a: only 3 of 4 MDTs are in completed &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-17385&quot;&gt;&lt;del&gt;LU-17385&lt;/del&gt;&lt;/a&gt; tests: add sanity-lfsck/24 debugging&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: 94f62d0d5bea764b3b0287662384a524283dd419&lt;/p&gt;</comment>
                            <comment id="397981" author="adilger" created="Sat, 23 Dec 2023 07:22:03 +0000"  >&lt;p&gt;This is failing 22/62 runs since the &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-16826&quot; title=&quot;MDS nodes panicked running lfsck repair create lost objects: (osd_handler.c:6260:osd_index_declare_ea_insert()) ASSERTION( fid != ((void *)0) ) failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-16826&quot;&gt;&lt;del&gt;LU-16826&lt;/del&gt;&lt;/a&gt; test case landed.&#160; I don&apos;t see anything obvious in the test logs, like an MDT reconnecting in test_26/test_27 after it was stopped/started in test_23d, so I added some more debugging to see why this is failing.&lt;/p&gt;

&lt;p&gt;I&apos;ll also push a revert of the patch that added test_23d and confirm that this stops the problem from being hit, and we&apos;ll have it ready if there is no quick solution.&lt;/p&gt;</comment>
                            <comment id="397982" author="gerrit" created="Sat, 23 Dec 2023 07:24:41 +0000"  >&lt;p&gt;&lt;del&gt;&quot;Andreas Dilger &amp;lt;adilger@whamcloud.com&amp;gt;&quot; uploaded a new patch:&lt;/del&gt; &lt;a href=&quot;https://review.whamcloud.com/c/fs/lustre-release/+/53545&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/c/fs/lustre-release/+/53545&lt;/a&gt;&lt;br/&gt;
&lt;del&gt;Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-17385&quot; title=&quot;sanity-lfsck test_26a: only 3 of 4 MDTs are in completed &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-17385&quot;&gt;&lt;del&gt;LU-17385&lt;/del&gt;&lt;/a&gt; revert: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-16826&quot; title=&quot;MDS nodes panicked running lfsck repair create lost objects: (osd_handler.c:6260:osd_index_declare_ea_insert()) ASSERTION( fid != ((void *)0) ) failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-16826&quot;&gt;&lt;del&gt;LU-16826&lt;/del&gt;&lt;/a&gt; tests: lfsck to repair a dangling remote entry&lt;/del&gt;&lt;br/&gt;
&lt;del&gt;Project: fs/lustre-release&lt;/del&gt;&lt;br/&gt;
&lt;del&gt;Branch: master&lt;/del&gt;&lt;br/&gt;
&lt;del&gt;Current Patch Set: 1&lt;/del&gt;&lt;br/&gt;
&lt;del&gt;Commit: fb6c848ef816ecb17f02ac461c2352ced320c593&lt;/del&gt;&lt;/p&gt;</comment>
                            <comment id="397986" author="zam" created="Sat, 23 Dec 2023 08:31:47 +0000"  >&lt;p&gt;&amp;gt; It looks like this test failure was introduced by patch &lt;a href=&quot;https://review.whamcloud.com/50998&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/50998&lt;/a&gt; &quot;&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-16826&quot; title=&quot;MDS nodes panicked running lfsck repair create lost objects: (osd_handler.c:6260:osd_index_declare_ea_insert()) ASSERTION( fid != ((void *)0) ) failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-16826&quot;&gt;&lt;del&gt;LU-16826&lt;/del&gt;&lt;/a&gt; tests: lfsck to repair a dangling remote entry&quot; landing on 2023-12-20 which added sanity-lfsck.sh test_23c, but used:&lt;/p&gt;

&lt;p&gt;no, the patch adds test_23d, test_23c is an old one with similar name:&lt;/p&gt;

&lt;p&gt;run_test 23c &quot;LFSCK can repair dangling name entry (3)&lt;br/&gt;
run_test 23d &quot;LFSCK can repair a dangling name entry to a remote object&lt;/p&gt;</comment>
                            <comment id="397990" author="adilger" created="Sat, 23 Dec 2023 18:56:43 +0000"  >&lt;p&gt;Sorry, a typo on my part, and fixed in my comment. It is the new test that landed which caused the problem.&#160;&lt;/p&gt;</comment>
                            <comment id="398008" author="adilger" created="Mon, 25 Dec 2023 12:07:57 +0000"  >&lt;p&gt;It looks like MDT0000 has &quot;finished&quot; the LFSCK run, but with an error:&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;status: partial
flags: incomplete&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;It isn&apos;t clear from the test output why it is &quot;partial&quot;. It doesn&apos;t look like waiting longer (600s) in &lt;tt&gt;wait_all_targets_blocked()&lt;/tt&gt; is better than using &quot;&lt;tt&gt;&amp;#45;w&lt;/tt&gt;&quot; in this case, because the LFSCK threads are all finished, but with an error. &lt;/p&gt;</comment>
                            <comment id="398130" author="gerrit" created="Wed, 27 Dec 2023 19:25:19 +0000"  >&lt;p&gt;&quot;Oleg Drokin &amp;lt;green@whamcloud.com&amp;gt;&quot; merged in patch &lt;a href=&quot;https://review.whamcloud.com/c/fs/lustre-release/+/53544/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/c/fs/lustre-release/+/53544/&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-17385&quot; title=&quot;sanity-lfsck test_26a: only 3 of 4 MDTs are in completed &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-17385&quot;&gt;&lt;del&gt;LU-17385&lt;/del&gt;&lt;/a&gt; tests: always_except sanity-lfsck/24&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: &lt;br/&gt;
Commit: 955e38051765609fe3a661035c0fab2cfca733ce&lt;/p&gt;</comment>
                            <comment id="398147" author="hongchao.zhang" created="Thu, 28 Dec 2023 08:08:25 +0000"  >&lt;p&gt;The LFSCK command used in test_23d to start layout LFSCK on MDT0000 uses option &quot;-o&quot;, which will broadcast LFSCK to all MDTs&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;int jt_lfsck_start(int argc, char **argv)
{
                ...
                case &apos;o&apos;:
                        start.ls_flags |= LPF_ALL_TGT | LPF_BROADCAST |         
                                          LPF_OST_ORPHAN;
                        break;
                ...
}
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;the LFSCK command used in test_24 to start namespace LFSCK is also sent to all MDTs, but some MDT could not complete&lt;br/&gt;
the previous layout LFSCK yet, then it will find the two LFSCKs are different and return -EOPNOTSUPP&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;int lfsck_start(const struct lu_env *env, struct dt_device *key,
                struct lfsck_start_param *lsp)
{
        ...
        if (!thread_is_init(thread) &amp;amp;&amp;amp; !thread_is_stopped(thread)) {
                rc = -EALREADY;
                if (unlikely(start == NULL)) {
                        spin_unlock(&amp;amp;lfsck-&amp;gt;li_lock);
                        GOTO(out, rc);
                }

                while (start-&amp;gt;ls_active != 0) {
                        if (!(type &amp;amp; start-&amp;gt;ls_active)) {
                                type &amp;lt;&amp;lt;= 1;
                                continue;
                        }

                        com = __lfsck_component_find(lfsck, type,
                                                     &amp;amp;lfsck-&amp;gt;li_list_scan);
                        if (com == NULL)
                                com = __lfsck_component_find(lfsck, type,
                                                &amp;amp;lfsck-&amp;gt;li_list_double_scan);
                        if (com == NULL) {
                                rc = -EOPNOTSUPP;      &amp;lt;--------- return with error -EOPNOTSUPP
                                break;
                        }
                        ...
                }
                ...
        }
        ...
}
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;the corresponding logs&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;00000020:00000001:1.0:1703243105.771867:0:217993:0:(tgt_handler.c:1621:tgt_handle_lfsck_notify()) Process entered
00100000:00000001:0.0:1703243105.771867:0:228721:0:(lfsck_lib.c:3489:lfsck_in_notify()) Process entered
00100000:00000001:1.0:1703243105.771868:0:217993:0:(lfsck_lib.c:3489:lfsck_in_notify()) Process entered
00100000:00000001:0.0:1703243105.771868:0:228721:0:(lfsck_lib.c:3104:lfsck_start()) Process entered
00100000:00000001:1.0:1703243105.771869:0:217993:0:(lfsck_lib.c:3104:lfsck_start()) Process entered
00100000:00000001:1.0:1703243105.771873:0:217993:0:(lfsck_bookmark.c:107:lfsck_bookmark_store()) Process entered
00080000:00000001:1.0:1703243105.771874:0:217993:0:(osd_handler.c:1912:osd_trans_create()) Process entered
00080000:00000010:1.0:1703243105.771877:0:217993:0:(osd_handler.c:1927:osd_trans_create()) kmalloced &apos;(oh)&apos;: 288 at 00000000a80bb82f.
00100000:00000001:0.0:1703243105.771880:0:228721:0:(lfsck_lib.c:3174:lfsck_start()) Process leaving via out (rc=18446744073709551521 : -95 : 0xffffffffffffffa1)
00100000:00000001:0.0:1703243105.771886:0:228721:0:(lfsck_lib.c:3546:lfsck_in_notify()) Process leaving (rc=18446744073709551521 : -95 : ffffffffffffffa1)
00000020:00000001:0.0:1703243105.771887:0:228721:0:(tgt_handler.c:1629:tgt_handle_lfsck_notify()) Process leaving (rc=18446744073709551521 : -95 : ffffffffffffffa1)
00080000:00000001:1.0:1703243105.771888:0:217993:0:(osd_handler.c:1955:osd_trans_create()) Process leaving (rc=18446619811807463424 : -124261902088192 : ffff8efc05778800)
00010000:00000040:0.0:1703243105.771889:0:228721:0:(ldlm_lib.c:3238:target_committed_to_req()) last_committed 0, transno 0, xid 1785973155960256
00010000:00000001:0.0:1703243105.771890:0:228721:0:(ldlm_lib.c:3307:target_send_reply()) Process entered
00010000:00000200:0.0:1703243105.771892:0:228721:0:(ldlm_lib.c:3295:target_send_reply_msg()) @@@ sending reply  req@00000000c24bd5b0 x1785973155960256/t0(0) o1101-&amp;gt;lustre-MDT0000-mdtlov_UUID@10.240.38.25@tcp:111/0 lens 320/224 e 0 to 0 dl 1703243116 ref 1 fl Interpret:/200/0 rc -95/0 job:&apos;lctl.0&apos; uid:0 gid:0
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="398194" author="adilger" created="Thu, 28 Dec 2023 18:22:10 +0000"  >&lt;p&gt;The sanity-lfsck test_23d is now skipped, so any patch that is fixing it needs to remove the &lt;tt&gt;always_except&lt;/tt&gt; line. &lt;/p&gt;</comment>
                            <comment id="398596" author="zam" created="Thu, 4 Jan 2024 19:15:32 +0000"  >&lt;p&gt;&lt;a href=&quot;https://jira.whamcloud.com/secure/ViewProfile.jspa?name=hongchao.zhang&quot; class=&quot;user-hover&quot; rel=&quot;hongchao.zhang&quot;&gt;hongchao.zhang&lt;/a&gt;, thanks for the analysis, I am submitting a patch to test the idea.&lt;/p&gt;</comment>
                            <comment id="398597" author="gerrit" created="Thu, 4 Jan 2024 19:15:52 +0000"  >&lt;p&gt;&quot;Alexander Zarochentsev &amp;lt;alexander.zarochentsev@hpe.com&amp;gt;&quot; uploaded a new patch: &lt;a href=&quot;https://review.whamcloud.com/c/fs/lustre-release/+/53591&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/c/fs/lustre-release/+/53591&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-17385&quot; title=&quot;sanity-lfsck test_26a: only 3 of 4 MDTs are in completed &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-17385&quot;&gt;&lt;del&gt;LU-17385&lt;/del&gt;&lt;/a&gt; tests: sanity-lfsck 23d fix and enable&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: 2f4f656947703d8b44a7ea49a8b2c84020591307&lt;/p&gt;</comment>
                            <comment id="402536" author="gerrit" created="Sun, 4 Feb 2024 08:25:56 +0000"  >&lt;p&gt;&quot;Oleg Drokin &amp;lt;green@whamcloud.com&amp;gt;&quot; merged in patch &lt;a href=&quot;https://review.whamcloud.com/c/fs/lustre-release/+/53591/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/c/fs/lustre-release/+/53591/&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-17385&quot; title=&quot;sanity-lfsck test_26a: only 3 of 4 MDTs are in completed &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-17385&quot;&gt;&lt;del&gt;LU-17385&lt;/del&gt;&lt;/a&gt; tests: sanity-lfsck 23d fix and enable&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: &lt;br/&gt;
Commit: 028ed64d90cfdeb908fb5574aacf2f71c259e2c2&lt;/p&gt;</comment>
                            <comment id="402571" author="pjones" created="Sun, 4 Feb 2024 14:24:50 +0000"  >&lt;p&gt;Merged for 2.16&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="76035">LU-16826</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i045mv:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>