<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:29:33 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-9818] replay-single test 29: lu_object_attr()) ASSERTION( ((o)-&gt;lo_header-&gt;loh_attr &amp; LOHA_EXISTS) != 0 ) failed</title>
                <link>https://jira.whamcloud.com/browse/LU-9818</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;Only got one such failure so far, while the assertion was hit before in other tickets, the stack here is unique it seems, so filing a new one.&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[69962.151605] Lustre: DEBUG MARKER: == replay-single test 29: open(O_CREAT), |X| unlink two, replay, close two (test mds_cleanup_orphans) ====================================================================================================== 08:57:55 (1501592275)
[69963.207449] Lustre: DEBUG MARKER: mds1 REPLAY BARRIER on lustre-MDT0000
[69963.214497] Lustre: DEBUG MARKER: local REPLAY BARRIER on lustre-MDT0000
[69971.727296] LustreError: 20636:0:(mgc_request.c:603:do_requeue()) failed processing log: -5
[69989.855016] Lustre: lustre-MDT0000: Imperative Recovery not enabled, recovery window 60-180
[69989.857644] Lustre: Skipped 16 previous similar messages
[69991.061406] Lustre: DEBUG MARKER: centos-69.localnet: executing wait_import_state_mount FULL mdc.lustre-MDT0000-mdc-*.mds_server_uuid
[69991.289012] Lustre: DEBUG MARKER: mdc.lustre-MDT0000-mdc-*.mds_server_uuid in FULL state after 0 sec
[69992.926591] Lustre: lustre-OST0000: deleting orphan objects from 0x0:1315 to 0x0:1345
[69992.926597] Lustre: lustre-OST0001: deleting orphan objects from 0x0:1283 to 0x0:1313
[69992.969758] LustreError: 29133:0:(lu_object.h:862:lu_object_attr()) ASSERTION( ((o)-&amp;gt;lo_header-&amp;gt;loh_attr &amp;amp; LOHA_EXISTS) != 0 ) failed: 
[69992.979293] LustreError: 29133:0:(lu_object.h:862:lu_object_attr()) LBUG
[69992.980129] Pid: 29133, comm: orph_cleanup_lu
[69992.980875] 
Call Trace:
[69992.982288]  [&amp;lt;ffffffffa027d7ce&amp;gt;] libcfs_call_trace+0x4e/0x60 [libcfs]
[69992.983218]  [&amp;lt;ffffffffa027d85c&amp;gt;] lbug_with_loc+0x4c/0xb0 [libcfs]
[69992.985387]  [&amp;lt;ffffffffa0901a59&amp;gt;] orph_declare_index_delete+0x409/0x450 [mdd]
[69992.986129]  [&amp;lt;ffffffffa13b5429&amp;gt;] ? lod_trans_create+0x39/0x50 [lod]
[69992.986803]  [&amp;lt;ffffffffa0901ed1&amp;gt;] orph_key_test_and_del+0x431/0xd20 [mdd]
[69992.987471]  [&amp;lt;ffffffffa0902d25&amp;gt;] __mdd_orphan_cleanup+0x565/0x7e0 [mdd]
[69992.990291]  [&amp;lt;ffffffffa09027c0&amp;gt;] ? __mdd_orphan_cleanup+0x0/0x7e0 [mdd]
[69992.991410]  [&amp;lt;ffffffff810a2eba&amp;gt;] kthread+0xea/0xf0
[69992.992168]  [&amp;lt;ffffffff810a2dd0&amp;gt;] ? kthread+0x0/0xf0
[69992.992935]  [&amp;lt;ffffffff8170fb98&amp;gt;] ret_from_fork+0x58/0x90
[69993.005696]  [&amp;lt;ffffffff810a2dd0&amp;gt;] ? kthread+0x0/0xf0
[69993.006502] 
[69993.007197] Kernel panic - not syncing: LBUG
[69993.007936] CPU: 9 PID: 29133 Comm: orph_cleanup_lu Tainted: P           OE  ------------   3.10.0-debug #2
[69993.009789] Hardware name: Red Hat KVM, BIOS 0.5.1 01/01/2011
[69993.010582]  ffffffffa029ced2 0000000091292d93 ffff8800a719fcb0 ffffffff816fd3e4
[69993.026450]  ffff8800a719fd30 ffffffff816f8c34 ffffffff00000008 ffff8800a719fd40
[69993.029288]  ffff8800a719fce0 0000000091292d93 0000000091292d93 ffff88033e52d948
[69993.076412] Call Trace:
[69993.077147]  [&amp;lt;ffffffff816fd3e4&amp;gt;] dump_stack+0x19/0x1b
[69993.077788]  [&amp;lt;ffffffff816f8c34&amp;gt;] panic+0xd8/0x1e7
[69993.078553]  [&amp;lt;ffffffffa027d874&amp;gt;] lbug_with_loc+0x64/0xb0 [libcfs]
[69993.079210]  [&amp;lt;ffffffffa0901a59&amp;gt;] orph_declare_index_delete+0x409/0x450 [mdd]
[69993.079900]  [&amp;lt;ffffffffa13b5429&amp;gt;] ? lod_trans_create+0x39/0x50 [lod]
[69993.080558]  [&amp;lt;ffffffffa0901ed1&amp;gt;] orph_key_test_and_del+0x431/0xd20 [mdd]
[69993.082775]  [&amp;lt;ffffffffa0902d25&amp;gt;] __mdd_orphan_cleanup+0x565/0x7e0 [mdd]
[69993.083436]  [&amp;lt;ffffffffa09027c0&amp;gt;] ? orph_key_test_and_del+0xd20/0xd20 [mdd]
[69993.084105]  [&amp;lt;ffffffff810a2eba&amp;gt;] kthread+0xea/0xf0
[69993.084726]  [&amp;lt;ffffffff810a2dd0&amp;gt;] ? kthread_create_on_node+0x140/0x140
[69993.085372]  [&amp;lt;ffffffff8170fb98&amp;gt;] ret_from_fork+0x58/0x90
[69993.086314]  [&amp;lt;ffffffff810a2dd0&amp;gt;] ? kthread_create_on_node+0x140/0x140
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;crashdump and modules in /exports/crashdumps/192.168.123.169-2017-08-01-08:58:32 on onyx-68&lt;/p&gt;

&lt;p&gt;tag in centos7 chroot master-20170801&lt;/p&gt;</description>
                <environment></environment>
        <key id="47620">LU-9818</key>
            <summary>replay-single test 29: lu_object_attr()) ASSERTION( ((o)-&gt;lo_header-&gt;loh_attr &amp; LOHA_EXISTS) != 0 ) failed</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="3">Duplicate</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="green">Oleg Drokin</reporter>
                        <labels>
                    </labels>
                <created>Wed, 2 Aug 2017 01:48:23 +0000</created>
                <updated>Thu, 11 Jul 2019 17:51:45 +0000</updated>
                            <resolved>Thu, 11 Jul 2019 17:51:45 +0000</resolved>
                                                                        <due></due>
                            <votes>0</votes>
                                    <watches>1</watches>
                                                                            <comments>
                            <comment id="251113" author="adilger" created="Thu, 11 Jul 2019 17:51:45 +0000"  >&lt;p&gt;Close as a duplicate of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-11516&quot; title=&quot;ASSERTION( ((o)-&amp;gt;lo_header-&amp;gt;loh_attr &amp;amp; LOHA_EXISTS) != 0 ) failed: LBUG&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-11516&quot;&gt;&lt;del&gt;LU-11516&lt;/del&gt;&lt;/a&gt;.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="53595">LU-11516</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzzhmf:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>