<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:17:34 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-1542] Failure on sanity.sh, subtest test_132</title>
                <link>https://jira.whamcloud.com/browse/LU-1542</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for Andreas Dilger &amp;lt;adilger@whamcloud.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/dd62db8a-b9da-11e1-86c2-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/dd62db8a-b9da-11e1-86c2-52540035b04c&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The sub-test test_132 failed with the following error:&lt;/p&gt;
&lt;blockquote&gt;
&lt;p&gt;test failed to respond and timed out&lt;/p&gt;&lt;/blockquote&gt;

&lt;p&gt;This may relate to the startup issue in &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-1541&quot; title=&quot;Failure on lustre-initialization-1: llog_origin_handle_create operation failed with -2&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-1541&quot;&gt;&lt;del&gt;LU-1541&lt;/del&gt;&lt;/a&gt;, since this subtest is remounting the servers with SOM enabled.  However, I&apos;m filing it separately for now for tracking and in case it ends up being a separate bug.&lt;/p&gt;

&lt;p&gt;Info required for matching: sanity 132&lt;/p&gt;</description>
                <environment></environment>
        <key id="14978">LU-1542</key>
            <summary>Failure on sanity.sh, subtest test_132</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Tue, 19 Jun 2012 17:06:15 +0000</created>
                <updated>Tue, 3 Sep 2013 17:10:05 +0000</updated>
                            <resolved>Tue, 3 Sep 2013 17:10:05 +0000</resolved>
                                    <version>Lustre 2.4.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>2</watches>
                                                                            <comments>
                            <comment id="41285" author="ian" created="Thu, 28 Jun 2012 17:52:48 +0000"  >&lt;p&gt;23:48:19:Lustre: MGS has stopped.&lt;br/&gt;
23:48:20:LustreError: 8956:0:(ldlm_request.c:1166:ldlm_cli_cancel_req()) Got rc -108 from cancel RPC: canceling anyway&lt;br/&gt;
23:48:20:LustreError: 8956:0:(ldlm_request.c:1792:ldlm_cli_cancel_list()) ldlm_cli_cancel_list: -108&lt;/p&gt;</comment>
                            <comment id="41286" author="ian" created="Thu, 28 Jun 2012 17:54:40 +0000"  >&lt;p&gt;&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/747c424e-c166-11e1-9055-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/747c424e-c166-11e1-9055-52540035b04c&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;From Client Console&lt;br/&gt;
08:15:04:LustreError: 6547:0:(ldlm_request.c:1166:ldlm_cli_cancel_req()) Got rc -108 from cancel RPC: canceling anyway&lt;br/&gt;
08:15:04:LustreError: 6547:0:(ldlm_request.c:1166:ldlm_cli_cancel_req()) Skipped 2 previous similar messages&lt;br/&gt;
08:15:04:LustreError: 6547:0:(ldlm_request.c:1792:ldlm_cli_cancel_list()) ldlm_cli_cancel_list: -108&lt;br/&gt;
08:15:05:LustreError: 6547:0:(ldlm_request.c:1792:ldlm_cli_cancel_list()) Skipped 2 previous similar messages&lt;br/&gt;
08:15:05:Lustre: Unmounted lustre-client&lt;br/&gt;
08:16:03:LNet: 7089:0:(debug.c:324:libcfs_debug_str2mask()) You are trying to use a numerical value for the mask - this will be deprecated in a future release.&lt;br/&gt;
08:16:03:LNet: 7089:0:(debug.c:324:libcfs_debug_str2mask()) Skipped 1 previous similar message&lt;br/&gt;
08:16:11:LustreError: 152-6: Ignoring deprecated mount option &apos;acl&apos;.&lt;br/&gt;
08:16:11:Lustre: MGC10.10.4.110@tcp: Reactivating import&lt;br/&gt;
08:16:11:Lustre: Increasing default stripe size to min 1048576&lt;br/&gt;
08:16:12:Lustre: Mounted lustre-client&lt;br/&gt;
08:16:12:LNet: 7509:0:(debug.c:324:libcfs_debug_str2mask()) You are trying to use a numerical value for the mask - this will be deprecated in a future release.&lt;br/&gt;
08:16:12:LNet: 7509:0:(debug.c:324:libcfs_debug_str2mask()) Skipped 1 previous similar message&lt;br/&gt;
08:16:14:Lustre: DEBUG MARKER: Using TIMEOUT=20&lt;br/&gt;
08:16:15:LustreError: 7803:0:(mdc_request.c:1429:mdc_quotactl()) ptlrpc_queue_wait failed, rc: -114&lt;br/&gt;
08:16:19:Lustre: DEBUG MARKER: cancel_lru_locks osc start&lt;br/&gt;
08:16:20:LustreError: 7498:0:(cl_lock.c:2171:cl_lock_hold_add()) ASSERTION( lock-&amp;gt;cll_state != CLS_FREEING ) failed: &lt;br/&gt;
08:16:20:LustreError: 7498:0:(cl_lock.c:2171:cl_lock_hold_add()) LBUG&lt;br/&gt;
08:16:20:Pid: 7498, comm: ll_close&lt;/p&gt;</comment>
                            <comment id="42490" author="liwei" created="Tue, 31 Jul 2012 09:12:05 +0000"  >&lt;p&gt;&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/fb5f0dea-daf8-11e1-9ebb-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/fb5f0dea-daf8-11e1-9ebb-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="43142" author="ian" created="Mon, 13 Aug 2012 16:40:47 +0000"  >&lt;p&gt;&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/53d27ff8-e561-11e1-ae4e-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/53d27ff8-e561-11e1-ae4e-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="51897" author="keith" created="Wed, 6 Feb 2013 13:54:25 +0000"  >&lt;p&gt;&lt;a href=&quot;https://maloo.whamcloud.com/test_sessions/13798a36-6f5a-11e2-93c1-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sessions/13798a36-6f5a-11e2-93c1-52540035b04c&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Well this may not be 100% the same issue, but it is an assertion failure in the same spot that causes the MDS to reboot while the test_132 times out.&lt;/p&gt;

&lt;p&gt;The logs tell me 4/100 failures Feb06.   &lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;14:06:17:LustreError: 11-0: lustre-OST0004-osc-MDT0000: Communicating with 10.10.4.195@tcp, operation ost_connect failed with -19.
14:06:18:Lustre: DEBUG MARKER: lctl get_param -n timeout
14:06:19:Lustre: DEBUG MARKER: /usr/sbin/lctl mark Using TIMEOUT=20
14:06:19:Lustre: DEBUG MARKER: Using TIMEOUT=20
14:06:19:Lustre: DEBUG MARKER: lctl dl | grep &apos; IN osc &apos; 2&amp;gt;/dev/null | wc -l
14:06:19:Lustre: DEBUG MARKER: /usr/sbin/lctl conf_param lustre.sys.jobid_var=procname_uid
14:07:12:Lustre: MGS: haven&apos;t heard from client 2b7f8516-fc0a-afb9-790c-1965aaaa46c2 (at 10.10.4.197@tcp) in 50 seconds. I think it&apos;s dead, and I am evicting it. exp ffff880078f2e800, cur 1360015629 expire 1360015599 last 1360015579
14:07:23:Lustre: lustre-MDT0000: haven&apos;t heard from client 5fcc94dc-d9c0-7c5c-7665-6b8afe791bb0 (at 10.10.4.197@tcp) in 50 seconds. I think it&apos;s dead, and I am evicting it. exp ffff88007832ec00, cur 1360015634 expire 1360015604 last 1360015584
14:07:23:LustreError: 17820:0:(lu_object.c:1982:lu_ucred_assert()) ASSERTION( uc != ((void *)0) ) failed: 
14:07:23:LustreError: 17820:0:(lu_object.c:1982:lu_ucred_assert()) LBUG
14:07:23:Pid: 17820, comm: ll_evictor
14:07:23:
14:07:23:Call Trace:
14:07:23: [&amp;lt;ffffffffa04d7895&amp;gt;] libcfs_debug_dumpstack+0x55/0x80 [libcfs]
14:07:23: [&amp;lt;ffffffffa04d7e97&amp;gt;] lbug_with_loc+0x47/0xb0 [libcfs]
14:07:23: [&amp;lt;ffffffffa0664755&amp;gt;] lu_ucred_assert+0x45/0x50 [obdclass]
14:07:23: [&amp;lt;ffffffffa0c52c66&amp;gt;] mdd_xattr_sanity_check+0x36/0x1f0 [mdd]
14:07:23: [&amp;lt;ffffffffa0c58221&amp;gt;] mdd_xattr_del+0xf1/0x540 [mdd]
14:07:23: [&amp;lt;ffffffffa0e3fe0a&amp;gt;] mdt_som_attr_set+0xfa/0x390 [mdt]
14:07:23: [&amp;lt;ffffffffa0e401ec&amp;gt;] mdt_ioepoch_close_on_eviction+0x14c/0x170 [mdt]
14:07:23: [&amp;lt;ffffffffa0f100c9&amp;gt;] ? osp_key_init+0x59/0x1a0 [osp]
14:07:23: [&amp;lt;ffffffffa0e40c4b&amp;gt;] mdt_ioepoch_close+0x2ab/0x3b0 [mdt]
14:07:23: [&amp;lt;ffffffffa0e411fe&amp;gt;] mdt_mfd_close+0x4ae/0x6e0 [mdt]
14:07:23: [&amp;lt;ffffffffa0e1297e&amp;gt;] mdt_obd_disconnect+0x3ae/0x4d0 [mdt]
14:07:23: [&amp;lt;ffffffffa061cd78&amp;gt;] class_fail_export+0x248/0x580 [obdclass]
14:07:23: [&amp;lt;ffffffffa07f9079&amp;gt;] ping_evictor_main+0x249/0x640 [ptlrpc]
14:07:23: [&amp;lt;ffffffff8105fa40&amp;gt;] ? default_wake_function+0x0/0x20
14:07:23: [&amp;lt;ffffffffa07f8e30&amp;gt;] ? ping_evictor_main+0x0/0x640 [ptlrpc]
14:07:23: [&amp;lt;ffffffff8100c0ca&amp;gt;] child_rip+0xa/0x20
14:07:23: [&amp;lt;ffffffffa07f8e30&amp;gt;] ? ping_evictor_main+0x0/0x640 [ptlrpc]
14:07:23: [&amp;lt;ffffffffa07f8e30&amp;gt;] ? ping_evictor_main+0x0/0x640 [ptlrpc]
14:07:23: [&amp;lt;ffffffff8100c0c0&amp;gt;] ? child_rip+0x0/0x20
14:07:23:
14:07:23:Kernel panic - not syncing: LBUG
.....
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="51904" author="keith" created="Wed, 6 Feb 2013 14:11:01 +0000"  >&lt;p&gt;It seems the above may be caused by the patch being tested. &lt;a href=&quot;http://review.whamcloud.com/5222&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/5222&lt;/a&gt; &lt;/p&gt;</comment>
                            <comment id="65629" author="adilger" created="Tue, 3 Sep 2013 17:10:05 +0000"  >&lt;p&gt;Closing this old Orion bug for now.  I don&apos;t think the last comments were related to this problem.&lt;/p&gt;</comment>
                    </comments>
                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzv3k7:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>4076</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>