<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:46:31 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-11741] MDS hit ASSERTION( ((o)-&gt;lo_header-&gt;loh_attr &amp; LOHA_EXISTS) != 0 ) failed</title>
                <link>https://jira.whamcloud.com/browse/LU-11741</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;2.10.6-RC3 EL7.6 mlx build #91&lt;br/&gt;
Hit an LBUG on one MDS (soak-9) after a system reboot, while in recovery.&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[2018-12-07T07:44:37+00:00] INFO: Report handlers complete
[  330.682671] LNet: HW NUMA nodes: 2, HW CPU cores: 32, npartitions: 2
[  330.693009] alg: No test for adler32 (adler32-zlib)
[  331.546478] Lustre: Lustre: Build Version: 2.10.6_RC3
[  331.781927] LNet: Added LNI 192.168.1.109@o2ib [8/256/0/180]
[  332.277308] LDISKFS-fs (dm-2): mounted filesystem with ordered data mode. Opts: user_xattr,errors=remount-ro,user_xattr,no_mbcache,nodelalloc
[  332.597960] LustreError: 137-5: soaked-MDT0001_UUID: not available for connect from 192.168.1.105@o2ib (no target). If you are running an HA pair check that the target is mounted on the other server.
[  332.617898] LustreError: Skipped 1 previous similar message
[  332.685778] Lustre: soaked-MDT0001: Not available for connect from 192.168.1.110@o2ib (not set up)
[  332.796813] Lustre: soaked-MDT0001: Imperative Recovery enabled, recovery window shrunk from 300-900 down to 150-900
[  332.828088] LustreError: 12736:0:(llog_osd.c:978:llog_osd_next_block()) soaked-MDT0003-osp-MDT0001: missed desired record? 3 &amp;gt; 1
[  332.841078] LustreError: 12736:0:(lod_dev.c:419:lod_sub_recovery_thread()) soaked-MDT0003-osp-MDT0001 getting update log failed: rc = -2
[  337.090525] Lustre: soaked-MDT0001: Connection restored to 192.168.1.105@o2ib (at 192.168.1.105@o2ib)
[  337.100943] Lustre: Skipped 1 previous similar message
[  338.169752] Lustre: soaked-MDT0001: Connection restored to b2e346f6-066a-02d8-6774-b6710264a342 (at 192.168.1.123@o2ib)
[  338.184884] Lustre: 12737:0:(ldlm_lib.c:2059:target_recovery_overseer()) recovery is aborted, evict exports in recovery
[  338.197356] Lustre: soaked-MDT0001: disconnecting 27 stale clients
[  339.169010] LustreError: 12748:0:(lu_object.h:862:lu_object_attr()) ASSERTION( ((o)-&amp;gt;lo_header-&amp;gt;loh_attr &amp;amp; LOHA_EXISTS) != 0 ) failed: 
[  339.182782] LustreError: 12748:0:(lu_object.h:862:lu_object_attr()) LBUG
[  339.190342] Pid: 12748, comm: mdt00_005 3.10.0-957.el7_lustre.x86_64 #1 SMP Fri Nov 30 18:46:05 UTC 2018
[  339.201005] Call Trace:
[  339.203791]  [&amp;lt;ffffffffc0c687cc&amp;gt;] libcfs_call_trace+0x8c/0xc0 [libcfs]
[  339.211192]  [&amp;lt;ffffffffc0c6887c&amp;gt;] lbug_with_loc+0x4c/0xa0 [libcfs]
[  339.218193]  [&amp;lt;ffffffffc14959f7&amp;gt;] mo_invalidate.part.29+0x0/0x36 [mdt]
[  339.225606]  [&amp;lt;ffffffffc1455d5a&amp;gt;] mdt_intent_layout+0xfca/0xfe0 [mdt]
[  339.232927]  [&amp;lt;ffffffffc1459681&amp;gt;] mdt_intent_policy+0x441/0xc70 [mdt]
[  339.240246]  [&amp;lt;ffffffffc0fa12ba&amp;gt;] ldlm_lock_enqueue+0x38a/0x980 [ptlrpc]
[  339.247934]  [&amp;lt;ffffffffc0fcab53&amp;gt;] ldlm_handle_enqueue0+0x9d3/0x16a0 [ptlrpc]
[  339.255982]  [&amp;lt;ffffffffc10504f2&amp;gt;] tgt_enqueue+0x62/0x210 [ptlrpc]
[  339.262969]  [&amp;lt;ffffffffc105442a&amp;gt;] tgt_request_handle+0x92a/0x1370 [ptlrpc]
[  339.270812]  [&amp;lt;ffffffffc0ffce5b&amp;gt;] ptlrpc_server_handle_request+0x23b/0xaa0 [ptlrpc]
[  339.279529]  [&amp;lt;ffffffffc10005a2&amp;gt;] ptlrpc_main+0xa92/0x1e40 [ptlrpc]
[  339.286697]  [&amp;lt;ffffffff8f8c1c31&amp;gt;] kthread+0xd1/0xe0
[  339.292269]  [&amp;lt;ffffffff8ff74c37&amp;gt;] ret_from_fork_nospec_end+0x0/0x39
[  339.299373]  [&amp;lt;ffffffffffffffff&amp;gt;] 0xffffffffffffffff
[  339.305038] Kernel panic - not syncing: LBUG
[  339.309856] CPU: 7 PID: 12748 Comm: mdt00_005 Kdump: loaded Tainted: G           OE  ------------   3.10.0-957.el7_lustre.x86_64 #1
[  339.323162] Hardware name: Intel Corporation S2600GZ ........../S2600GZ, BIOS SE5C600.86B.01.08.0003.022620131521 02/26/2013
[  339.338236] Call Trace:
[  339.343554]  [&amp;lt;ffffffff8ff61dc1&amp;gt;] dump_stack+0x19/0x1b
[  339.351863]  [&amp;lt;ffffffff8ff5b4d0&amp;gt;] panic+0xe8/0x21f
[  339.359694]  [&amp;lt;ffffffffc0c688cb&amp;gt;] lbug_with_loc+0x9b/0xa0 [libcfs]
[  339.369077]  [&amp;lt;ffffffffc14959f7&amp;gt;] lu_object_attr.isra.26.part.27+0x36/0x36 [mdt]
[  339.379806]  [&amp;lt;ffffffffc1455d5a&amp;gt;] mdt_intent_layout+0xfca/0xfe0 [mdt]
[  339.389463]  [&amp;lt;ffffffffc1459681&amp;gt;] mdt_intent_policy+0x441/0xc70 [mdt]
[  339.399140]  [&amp;lt;ffffffffc0fa81db&amp;gt;] ? ldlm_resource_get+0xab/0xa60 [ptlrpc]
[  339.409196]  [&amp;lt;ffffffffc0fa12ba&amp;gt;] ldlm_lock_enqueue+0x38a/0x980 [ptlrpc]
[  339.413469] Lustre: soaked-MDT0001: Connection restored to 0f40ef87-b54f-1f70-d2cc-cb9f522aad77 (at 192.168.1.119@o2ib)
[  339.413472] Lustre: Skipped 6 previous similar messages
[  339.441901]  [&amp;lt;ffffffffc0fcab53&amp;gt;] ldlm_handle_enqueue0+0x9d3/0x16a0 [ptlrpc]
[  339.452226]  [&amp;lt;ffffffffc0ff2e10&amp;gt;] ? lustre_swab_ldlm_lock_desc+0x30/0x30 [ptlrpc]
[  339.463035]  [&amp;lt;ffffffffc10504f2&amp;gt;] tgt_enqueue+0x62/0x210 [ptlrpc]
[  339.472295]  [&amp;lt;ffffffffc105442a&amp;gt;] tgt_request_handle+0x92a/0x1370 [ptlrpc]
[  339.482324]  [&amp;lt;ffffffffc0ffce5b&amp;gt;] ptlrpc_server_handle_request+0x23b/0xaa0 [ptlrpc]
[  339.493185]  [&amp;lt;ffffffffc0ff9488&amp;gt;] ? ptlrpc_wait_event+0x98/0x340 [ptlrpc]
[  339.502977]  [&amp;lt;ffffffff8f8d67c2&amp;gt;] ? default_wake_function+0x12/0x20
[  339.512214]  [&amp;lt;ffffffff8f8cba9b&amp;gt;] ? __wake_up_common+0x5b/0x90
[  339.520955]  [&amp;lt;ffffffffc10005a2&amp;gt;] ptlrpc_main+0xa92/0x1e40 [ptlrpc]
[  339.530165]  [&amp;lt;ffffffffc0fffb10&amp;gt;] ? ptlrpc_register_service+0xe30/0xe30 [ptlrpc]
[  339.540551]  [&amp;lt;ffffffff8f8c1c31&amp;gt;] kthread+0xd1/0xe0
[  339.548088]  [&amp;lt;ffffffff8f8c1b60&amp;gt;] ? insert_kthread_work+0x40/0x40
[  339.556962]  [&amp;lt;ffffffff8ff74c37&amp;gt;] ret_from_fork_nospec_begin+0x21/0x21
[  339.566286]  [&amp;lt;ffffffff8f8c1b60&amp;gt;] ? insert_kthread_work+0x40/0x40
[    0.000000] Initializing cgroup subsys cpuset
[    0.000000] Initializing cgroup subsys cpu
[    0.000000] Initializing cgroup subsys cpuacct
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment></environment>
        <key id="54220">LU-11741</key>
            <summary>MDS hit ASSERTION( ((o)-&gt;lo_header-&gt;loh_attr &amp; LOHA_EXISTS) != 0 ) failed</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="3">Duplicate</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="sarah">Sarah Liu</reporter>
                        <labels>
                            <label>soak</label>
                    </labels>
                <created>Fri, 7 Dec 2018 17:38:13 +0000</created>
                <updated>Thu, 11 Jul 2019 17:57:57 +0000</updated>
                            <resolved>Wed, 17 Apr 2019 17:43:47 +0000</resolved>
                                    <version>Lustre 2.10.6</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>6</watches>
                                                                            <comments>
                            <comment id="238445" author="bzzz" created="Wed, 12 Dec 2018 11:34:57 +0000"  >&lt;p&gt;is there maloo logs for this? what test?&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;</comment>
                            <comment id="238563" author="sarah" created="Thu, 13 Dec 2018 21:37:55 +0000"  >&lt;p&gt;This is testing running on soak, so no Maloo logs&lt;/p&gt;</comment>
                            <comment id="245730" author="gerrit" created="Sat, 13 Apr 2019 13:54:22 +0000"  >&lt;p&gt;Andreas Dilger (adilger@whamcloud.com) uploaded a new patch: &lt;a href=&quot;https://review.whamcloud.com/34655&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/34655&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-11741&quot; title=&quot;MDS hit ASSERTION( ((o)-&amp;gt;lo_header-&amp;gt;loh_attr &amp;amp; LOHA_EXISTS) != 0 ) failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-11741&quot;&gt;&lt;del&gt;LU-11741&lt;/del&gt;&lt;/a&gt; mdt: verify object exists for intent&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: b2_10&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: 6e9e24c1fd240b6c5a317af2b3e02db1c8bc50af&lt;/p&gt;</comment>
                            <comment id="245738" author="bzzz" created="Sat, 13 Apr 2019 15:35:29 +0000"  >&lt;p&gt;on master this problem was fixed with &lt;a href=&quot;https://review.whamcloud.com/29090&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/29090&lt;/a&gt; - &lt;br/&gt;
&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-9771&quot; title=&quot;FLR1: Landing tickets for File Level Redundancy Phase 1&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-9771&quot;&gt;&lt;del&gt;LU-9771&lt;/del&gt;&lt;/a&gt; mdt: revise layout_change() to take md_layout_change&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                                                <inwardlinks description="is duplicated by">
                                                        </inwardlinks>
                                    </issuelinktype>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                                        </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i007kf:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                </customfields>
    </item>
</channel>
</rss>