<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:21:16 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
<language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-8870] conf-sanity test_32b: lov.: error writing proc entry &apos;stripesize&apos;: rc = -22</title>
                <link>https://jira.whamcloud.com/browse/LU-8870</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for Saurabh Tandan &amp;lt;saurabh.tandan@intel.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/4cd2a468-b32e-11e6-85c4-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/4cd2a468-b32e-11e6-85c4-5254006e85c2&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The sub-test test_32b failed with the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;test failed to respond and timed out
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;client console logs:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;17:11:42:[24917.250751] Lustre: DEBUG MARKER: == conf-sanity test 32b: Upgrade with writeconf ====================================================== 16:54:55 (1480035295)
17:11:42:[25068.948171] LustreError: 119721:0:(obd_config.c:1393:class_process_proc_param()) lov.: error writing proc entry &apos;stripesize&apos;: rc = -22
17:11:42:[25163.969833] LustreError: 11-0: t32fs-MDT0000-mdc-ffff880c09ec2800: operation mds_connect to node 192.168.5.144@o2ib failed: rc = -11
17:11:42:[25313.971651] LustreError: 11-0: t32fs-MDT0000-mdc-ffff880c09ec2800: operation mds_connect to node 192.168.5.144@o2ib failed: rc = -11
17:11:42:[25463.973132] LustreError: 11-0: t32fs-MDT0000-mdc-ffff880c09ec2800: operation mds_connect to node 192.168.5.144@o2ib failed: rc = -11
17:11:42:[25613.974891] LustreError: 11-0: t32fs-MDT0000-mdc-ffff880c09ec2800: operation mds_connect to node 192.168.5.144@o2ib failed: rc = -11
17:11:42:[25763.976465] LustreError: 11-0: t32fs-MDT0000-mdc-ffff880c09ec2800: operation mds_connect to node 192.168.5.144@o2ib failed: rc = -11
17:55:56:********** Timeout by autotest system **********
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Might be related to &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-8338&quot; title=&quot;lov.*-clilov-*.stripe* params not consistent with FS defaults&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-8338&quot;&gt;LU-8338&lt;/a&gt;&lt;/p&gt;</description>
                <environment>Full - EL7.3 Server/EL7.3 Client&lt;br/&gt;
b2_9, build# 21</environment>
        <key id="41827">LU-8870</key>
            <summary>conf-sanity test_32b: lov.: error writing proc entry &apos;stripesize&apos;: rc = -22</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="6" iconUrl="https://jira.whamcloud.com/images/icons/statuses/closed.png" description="The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.">Closed</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="ys">Yang Sheng</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Tue, 29 Nov 2016 14:10:27 +0000</created>
                <updated>Wed, 16 Jan 2019 07:13:13 +0000</updated>
                            <resolved>Wed, 16 Jan 2019 07:13:13 +0000</resolved>
                                    <version>Lustre 2.9.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>4</watches>
                                                                            <comments>
                            <comment id="175890" author="bogl" created="Thu, 1 Dec 2016 14:38:24 +0000">&lt;p&gt;another on master:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/cb2a99e6-b776-11e6-be4d-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/cb2a99e6-b776-11e6-be4d-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="199846" author="ys" created="Wed, 21 Jun 2017 14:29:22 +0000">&lt;p&gt;The message &apos;lov.: error writing proc entry &apos;stripesize&apos;: rc = -22&apos; is not a root cause. Many of test results marked as this ticket was cause by different issue. &lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/9310f828-5228-11e7-a749-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/9310f828-5228-11e7-a749-5254006e85c2&lt;/a&gt;&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Jun 15 22:33:56 trevis-49vm1 kernel: sha1sum         D ffff88000d59fa90     0 28001  27999 0x00000080
Jun 15 22:33:56 trevis-49vm1 kernel: ffff88000d59f930 0000000000000082 ffff880079773ec0 ffff88000d59ffd8
Jun 15 22:33:56 trevis-49vm1 kernel: ffff88000d59ffd8 ffff88000d59ffd8 ffff880079773ec0 ffff88007fc16c40
Jun 15 22:33:56 trevis-49vm1 kernel: 0000000000000000 7fffffffffffffff ffffffff8168a630 ffff88000d59fa90
Jun 15 22:33:56 trevis-49vm1 kernel: Call Trace:
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8168a630&amp;gt;] ? bit_wait+0x50/0x50
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8168c5d9&amp;gt;] schedule+0x29/0x70
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8168a019&amp;gt;] schedule_timeout+0x239/0x2c0
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff81060c1f&amp;gt;] ? kvm_clock_get_cycles+0x1f/0x30
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff810eb0dc&amp;gt;] ? ktime_get_ts64+0x4c/0xf0
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8168a630&amp;gt;] ? bit_wait+0x50/0x50
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8168bb7e&amp;gt;] io_schedule_timeout+0xae/0x130
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8168bc18&amp;gt;] io_schedule+0x18/0x20
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8168a641&amp;gt;] bit_wait_io+0x11/0x50
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8168a35f&amp;gt;] __wait_on_bit_lock+0x5f/0xc0
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff81180684&amp;gt;] __lock_page_killable+0x74/0x90
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff810b1be0&amp;gt;] ? wake_bit_function+0x40/0x40
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff81182dd8&amp;gt;] generic_file_aio_read+0x748/0x790
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffffa0cafb47&amp;gt;] vvp_io_read_start+0x4b7/0x600 [lustre]
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffffa0819b65&amp;gt;] cl_io_start+0x65/0x130 [obdclass]
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffffa081bf2e&amp;gt;] cl_io_loop+0x12e/0xc90 [obdclass]
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffffa0c5eb38&amp;gt;] ll_file_io_generic+0x498/0xc40 [lustre]
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff8121a1ff&amp;gt;] ? touch_atime+0x12f/0x160
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffffa0c5fbaa&amp;gt;] ll_file_aio_read+0x34a/0x3e0 [lustre]
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffffa0c5fd0e&amp;gt;] ll_file_read+0xce/0x1e0 [lustre]
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff811fe69e&amp;gt;] vfs_read+0x9e/0x170
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff811ff26f&amp;gt;] SyS_read+0x7f/0xe0
Jun 15 22:33:56 trevis-49vm1 kernel: [&amp;lt;ffffffff816975c9&amp;gt;] system_call_fastpath+0x16/0x1b
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/088ad91a-519e-11e7-a743-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/088ad91a-519e-11e7-a743-5254006e85c2&lt;/a&gt;&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;00:07:47:[ 2654.638556] Lustre: DEBUG MARKER: == conf-sanity test 32b: Upgrade with writeconf ====================================================== 00:07:13 (1497485233)
00:07:47:[ 2676.277933] LustreError: 16554:0:(obd_config.c:1429:class_process_proc_param()) lov.: error writing proc entry &apos;stripesize&apos;: rc = -22
00:07:47:[ 2676.355021] Lustre: Mounted t32fs-client
00:07:47:[ 2677.713520] LustreError: 16606:0:(layout.c:1882:req_capsule_filled_sizes()) ASSERTION( loc != RCL_SERVER ) failed: 
00:07:47:[ 2677.716394] LustreError: 16606:0:(layout.c:1882:req_capsule_filled_sizes()) LBUG
00:07:47:[ 2677.719036] Pid: 16606, comm: ls
00:07:47:[ 2677.721293] 
00:07:47:[ 2677.721293] Call Trace:
00:07:47:[ 2677.725433]  [&amp;lt;ffffffffa06a77ee&amp;gt;] libcfs_call_trace+0x4e/0x60 [libcfs]
00:07:47:[ 2677.727842]  [&amp;lt;ffffffffa06a787c&amp;gt;] lbug_with_loc+0x4c/0xb0 [libcfs]
00:07:47:[ 2677.730243]  [&amp;lt;ffffffffa09edb0c&amp;gt;] req_capsule_filled_sizes+0xcc/0xd0 [ptlrpc]
00:07:47:[ 2677.732593]  [&amp;lt;ffffffffa09c5dde&amp;gt;] ptlrpc_request_set_replen+0x1e/0x50 [ptlrpc]
00:07:47:[ 2677.734966]  [&amp;lt;ffffffffa0ae42b0&amp;gt;] mdc_enqueue_base+0xd20/0x1870 [mdc]
00:07:47:[ 2677.737208]  [&amp;lt;ffffffffa0ae568b&amp;gt;] mdc_intent_lock+0x26b/0x520 [mdc]
00:07:47:[ 2677.739479]  [&amp;lt;ffffffffa0c6e9b0&amp;gt;] ? ll_md_blocking_ast+0x0/0x730 [lustre]
00:07:47:[ 2677.741691]  [&amp;lt;ffffffffa099ce00&amp;gt;] ? ldlm_completion_ast+0x0/0x910 [ptlrpc]
00:07:47:[ 2677.743929]  [&amp;lt;ffffffffa095bf0f&amp;gt;] lmv_intent_lock+0x5cf/0x1b50 [lmv]
00:07:47:[ 2677.746034]  [&amp;lt;ffffffffa0c6e9b0&amp;gt;] ? ll_md_blocking_ast+0x0/0x730 [lustre]
00:07:47:[ 2677.748213]  [&amp;lt;ffffffffa0c7c6fb&amp;gt;] ll_xattr_cache_refill+0x5fb/0x1860 [lustre]
00:07:47:[ 2677.750320]  [&amp;lt;ffffffffa0c7db2b&amp;gt;] ll_xattr_cache_get+0x9b/0x4b0 [lustre]
00:07:47:[ 2677.752446]  [&amp;lt;ffffffffa0c79fe6&amp;gt;] ll_getxattr_common+0x196/0xca0 [lustre]
00:07:47:[ 2677.754525]  [&amp;lt;ffffffffa07cd319&amp;gt;] ? lprocfs_counter_add+0xf9/0x160 [obdclass]
00:07:47:[ 2677.756674]  [&amp;lt;ffffffffa0c7ac23&amp;gt;] ll_getxattr+0x133/0x1b0 [lustre]
00:07:47:[ 2677.758682]  [&amp;lt;ffffffff81223e98&amp;gt;] vfs_getxattr+0x88/0xb0
00:07:47:[ 2677.760656]  [&amp;lt;ffffffff81223fdb&amp;gt;] getxattr+0xab/0x1d0
00:07:47:[ 2677.762551]  [&amp;lt;ffffffff8120f24d&amp;gt;] ? putname+0x3d/0x60
00:07:47:[ 2677.764487]  [&amp;lt;ffffffff812103f2&amp;gt;] ? user_path_at_empty+0x72/0xc0
00:07:47:[ 2677.766459]  [&amp;lt;ffffffffa06af324&amp;gt;] ? libcfs_log_return+0x24/0x30 [libcfs]
00:07:47:[ 2677.768505]  [&amp;lt;ffffffffa0c2b6f8&amp;gt;] ? ll_ddelete+0x218/0x290 [lustre]
00:07:47:[ 2677.770439]  [&amp;lt;ffffffff8121eede&amp;gt;] ? mntput_no_expire+0x3e/0x120
00:07:47:[ 2677.772347]  [&amp;lt;ffffffff81224d04&amp;gt;] SyS_getxattr+0x64/0xc0
00:07:47:[ 2677.774125]  [&amp;lt;ffffffff816975c9&amp;gt;] system_call_fastpath+0x16/0x1b
00:07:47:[ 2677.775997] 
00:07:47:[ 2677.777471] Kernel panic - not syncing: LBUG
00:07:47:[ 2677.778463] CPU: 0 PID: 16606 Comm: ls Tainted: G        W  OE  ------------   3.10.0-514.21.1.el7.x86_64 #1
00:07:47:[ 2677.778463] Hardware name: Red Hat KVM, BIOS 0.5.1 01/01/2007
00:07:47:[ 2677.778463]  ffffffffa06c5e8b 00000000f8feb9fe ffff88007a49f870 ffffffff81686f13
00:07:47:[ 2677.778463]  ffff88007a49f8f0 ffffffff8168031a ffffffff00000008 ffff88007a49f900
00:07:47:[ 2677.778463]  ffff88007a49f8a0 00000000f8feb9fe 00000000f8feb9fe ffff88007fc0f838
00:07:47:[ 2677.778463] Call Trace:
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff81686f13&amp;gt;] dump_stack+0x19/0x1b
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff8168031a&amp;gt;] panic+0xe3/0x1f2
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa06a7894&amp;gt;] lbug_with_loc+0x64/0xb0 [libcfs]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa09edb0c&amp;gt;] req_capsule_filled_sizes+0xcc/0xd0 [ptlrpc]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa09c5dde&amp;gt;] ptlrpc_request_set_replen+0x1e/0x50 [ptlrpc]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0ae42b0&amp;gt;] mdc_enqueue_base+0xd20/0x1870 [mdc]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0ae568b&amp;gt;] mdc_intent_lock+0x26b/0x520 [mdc]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0c6e9b0&amp;gt;] ? ll_invalidate_negative_children+0x1d0/0x1d0 [lustre]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa099ce00&amp;gt;] ? ldlm_expired_completion_wait+0x240/0x240 [ptlrpc]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa095bf0f&amp;gt;] lmv_intent_lock+0x5cf/0x1b50 [lmv]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0c6e9b0&amp;gt;] ? ll_invalidate_negative_children+0x1d0/0x1d0 [lustre]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0c7c6fb&amp;gt;] ll_xattr_cache_refill+0x5fb/0x1860 [lustre]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0c7db2b&amp;gt;] ll_xattr_cache_get+0x9b/0x4b0 [lustre]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0c79fe6&amp;gt;] ll_getxattr_common+0x196/0xca0 [lustre]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa07cd319&amp;gt;] ? lprocfs_counter_add+0xf9/0x160 [obdclass]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0c7ac23&amp;gt;] ll_getxattr+0x133/0x1b0 [lustre]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff81223e98&amp;gt;] vfs_getxattr+0x88/0xb0
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff81223fdb&amp;gt;] getxattr+0xab/0x1d0
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff8120f24d&amp;gt;] ? putname+0x3d/0x60
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff812103f2&amp;gt;] ? user_path_at_empty+0x72/0xc0
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa06af324&amp;gt;] ? libcfs_log_return+0x24/0x30 [libcfs]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffffa0c2b6f8&amp;gt;] ? ll_ddelete+0x218/0x290 [lustre]
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff8121eede&amp;gt;] ? mntput_no_expire+0x3e/0x120
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff81224d04&amp;gt;] SyS_getxattr+0x64/0xc0
00:07:47:[ 2677.778463]  [&amp;lt;ffffffff816975c9&amp;gt;] system_call_fastpath+0x16/0x1b
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="240096" author="ys" created="Wed, 16 Jan 2019 07:13:13 +0000">&lt;p&gt;Please feel free to reopen it.&lt;/p&gt;</comment>
                    </comments>
                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzywpz:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>