<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:26:11 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-2554] replay-single test 80b: dd: opening `/mnt/lustre/f80b&apos;: Input/output error</title>
                <link>https://jira.whamcloud.com/browse/LU-2554</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;replay-single test 80b failed as follows:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;== replay-single test 80b: write replay with changed data (checksum resend) ========================== 03:49:14 (1356781754)
CMD: client-27vm4 lctl get_param obdfilter.lustre-OST0000.sync_journal
obdfilter.lustre-OST0000.sync_journal=1
CMD: client-27vm4 lctl set_param -n obdfilter.lustre-OST0000.sync_journal 0
CMD: client-27vm4 sync
Filesystem           1K-blocks      Used Available Use% Mounted on
client-27vm3:client-27vm7:/lustre
                      36535940   1731756  32947784   5% /mnt/lustre
CMD: client-27vm4 /usr/sbin/lctl --device %lustre-OST0000 notransno
CMD: client-27vm4 /usr/sbin/lctl --device %lustre-OST0000 readonly
CMD: client-27vm4 /usr/sbin/lctl mark ost1 REPLAY BARRIER on lustre-OST0000
error on ioctl 0x4008669a for &apos;/mnt/lustre/f80b&apos; (3): Input/output error
error: setstripe: create stripe file &apos;/mnt/lustre/f80b&apos; failed
dd: opening `/mnt/lustre/f80b&apos;: Input/output error
 replay-single test_80b: @@@@@@ FAIL: Cannot write
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Syslog on MDS client-27vm7 showed that:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Dec 29 03:49:16 client-27vm7 kernel: Lustre: DEBUG MARKER: == replay-single test 80b: write replay with changed data (checksum resend) ========================== 03:49:14 (1356781754)
Dec 29 03:49:16 client-27vm7 rshd[3181]: pam_unix(rsh:session): session closed for user root
Dec 29 03:49:16 client-27vm7 xinetd[2057]: EXIT: shell status=0 pid=3181 duration=0(sec)
Dec 29 03:49:32 client-27vm7 kernel: LustreError: 2442:0:(lov_request.c:694:lov_update_create_set()) error creating fid 0x30003c sub-object on OST idx 0/1: rc = -11
Dec 29 03:50:18 client-27vm7 kernel: Lustre: Service thread pid 2526 was inactive for 56.00s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
Dec 29 03:50:19 client-27vm7 kernel: Pid: 2526, comm: ll_mdt_02
Dec 29 03:50:19 client-27vm7 kernel:
Dec 29 03:50:19 client-27vm7 kernel: Call Trace:
Dec 29 03:50:19 client-27vm7 kernel:  [&amp;lt;ffffffff88921220&amp;gt;] lustre_pack_request+0x630/0x6f0 [ptlrpc]
Dec 29 03:50:19 client-27vm7 kernel:  [&amp;lt;ffffffff8006389f&amp;gt;] schedule_timeout+0x8a/0xad
Dec 29 03:50:19 client-27vm7 kernel:  [&amp;lt;ffffffff8009a41d&amp;gt;] process_timeout+0x0/0x5 
Dec 29 03:50:19 client-27vm7 kernel:  [&amp;lt;ffffffff889e7695&amp;gt;] osc_create+0xc75/0x13d0 [osc]
Dec 29 03:50:19 client-27vm7 kernel:  [&amp;lt;ffffffff8008ee84&amp;gt;] default_wake_function+0x0/0xe 
Dec 29 03:50:19 client-27vm7 kernel:  [&amp;lt;ffffffff88a96edb&amp;gt;] qos_remedy_create+0x45b/0x570 [lov]
Dec 29 03:50:19 client-27vm7 kernel:  [&amp;lt;ffffffff8002deea&amp;gt;] __wake_up+0x38/0x4f
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff8008e67d&amp;gt;] dequeue_task+0x18/0x37
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88a90df3&amp;gt;] lov_fini_create_set+0x243/0x11e0 [lov]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88a84b72&amp;gt;] lov_create+0x1552/0x1860 [lov]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88a857a8&amp;gt;] lov_iocontrol+0x928/0xf0f [lov]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff8008ee84&amp;gt;] default_wake_function+0x0/0xe 
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88c72b21&amp;gt;] mds_finish_open+0x1fa1/0x4370 [mds]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff80009860&amp;gt;] __d_lookup+0xb0/0xff
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff8000d543&amp;gt;] dput+0x2c/0x114
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88c52fad&amp;gt;] mds_verify_child+0x2dd/0x870 [mds]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff888f59a0&amp;gt;] ldlm_blocking_ast+0x0/0x2a0 [ptlrpc]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88c79d41&amp;gt;] mds_open+0x2f01/0x386b [mds]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff887bacfd&amp;gt;] libcfs_debug_vmsg2+0x70d/0x970 [libcfs]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff888d886c&amp;gt;] _ldlm_lock_debug+0x57c/0x6e0 [ptlrpc]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff8891f5f1&amp;gt;] lustre_swab_buf+0x81/0x170 [ptlrpc]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff8000d543&amp;gt;] dput+0x2c/0x114
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88c500a5&amp;gt;] mds_reint_rec+0x365/0x550 [mds]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88c7ac6e&amp;gt;] mds_update_unpack+0x1fe/0x280 [mds]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88c42eda&amp;gt;] mds_reint+0x35a/0x420 [mds]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88c41dea&amp;gt;] fixup_handle_for_resent_req+0x5a/0x2c0 [mds]
Dec 29 03:50:20 client-27vm7 kernel:  [&amp;lt;ffffffff88c4cbee&amp;gt;] mds_intent_policy+0x49e/0xc10 [mds]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff888e0270&amp;gt;] ldlm_resource_putref_internal+0x230/0x460 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff888ddeb6&amp;gt;] ldlm_lock_enqueue+0x186/0xb20 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff888da7fd&amp;gt;] ldlm_lock_create+0x9bd/0x9f0 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff88902870&amp;gt;] ldlm_server_blocking_ast+0x0/0x83d [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff888ffb39&amp;gt;] ldlm_handle_enqueue+0xc09/0x1210 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff88c4bb2e&amp;gt;] mds_handle+0x40ce/0x4cf0 [mds]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff887b7868&amp;gt;] libcfs_ip_addr2str+0x38/0x40 [libcfs]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff887b7c7e&amp;gt;] libcfs_nid2str+0xbe/0x110 [libcfs]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff8892aaf5&amp;gt;] ptlrpc_server_log_handling_request+0x105/0x130 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff8892d874&amp;gt;] ptlrpc_server_handle_request+0x984/0xe00 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff8892dfd5&amp;gt;] ptlrpc_wait_event+0x2e5/0x310 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff8008d2a9&amp;gt;] __wake_up_common+0x3e/0x68
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff8892ef16&amp;gt;] ptlrpc_main+0xf16/0x10e0 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff8005dfb1&amp;gt;] child_rip+0xa/0x11
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff8892e000&amp;gt;] ptlrpc_main+0x0/0x10e0 [ptlrpc]
Dec 29 03:50:21 client-27vm7 kernel:  [&amp;lt;ffffffff8005dfa7&amp;gt;] child_rip+0x0/0x11
Dec 29 03:50:21 client-27vm7 kernel:
Dec 29 03:50:21 client-27vm7 kernel: LustreError: dumping log to /tmp/lustre-log.1356781818.2526
Dec 29 03:50:32 client-27vm7 kernel: LustreError: 2526:0:(lov_request.c:694:lov_update_create_set()) error creating fid 0x30003c sub-object on OST idx 0/1: rc = -5
Dec 29 03:50:32 client-27vm7 kernel: LustreError: 2526:0:(mds_open.c:440:mds_create_objects()) error creating objects for inode 3145788: rc = -5
Dec 29 03:50:32 client-27vm7 kernel: LustreError: 2526:0:(mds_open.c:825:mds_finish_open()) mds_create_objects: rc = -5
Dec 29 03:50:32 client-27vm7 kernel: Lustre: Service thread pid 2526 completed after 69.90s. This indicates the system was overloaded (too many service threads, or there were not enough hardware resources).
Dec 29 03:50:42 client-27vm7 kernel: LustreError: 2442:0:(lov_request.c:694:lov_update_create_set()) error creating fid 0x30003c sub-object on OST idx 1/1: rc = -11
Dec 29 03:51:42 client-27vm7 kernel: LustreError: 2532:0:(lov_request.c:694:lov_update_create_set()) error creating fid 0x30003c sub-object on OST idx 1/1: rc = -5
Dec 29 03:51:42 client-27vm7 kernel: LustreError: 2532:0:(mds_open.c:440:mds_create_objects()) error creating objects for inode 3145788: rc = -5
Dec 29 03:51:42 client-27vm7 kernel: LustreError: 2532:0:(mds_open.c:825:mds_finish_open()) mds_create_objects: rc = -5
Dec 29 03:51:42 client-27vm7 rshd[3223]: root@client-27vm1.lab.whamcloud.com as root: cmd=&apos;/usr/sbin/lctl mark &quot;/usr/sbin/lctl mark  replay-single test_80b: @@@@@@ FAIL: Cannot write &quot;;echo XXRETCODE:$?&apos;
Dec 29 03:51:42 client-27vm7 kernel: Lustre: DEBUG MARKER: /usr/sbin/lctl mark  replay-single test_80b: @@@@@@ FAIL: Cannot write
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Maloo report: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/56e3c084-51b9-11e2-a904-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/56e3c084-51b9-11e2-a904-52540035b04c&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Test 81a,81b,82,83 also failed with the same issue.&lt;/p&gt;</description>
                <environment>Lustre Branch: b1_8&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b1_8/236/&quot;&gt;http://build.whamcloud.com/job/lustre-b1_8/236/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL5.8/x86_64 (kernel version: 2.6.18-308.11.1.el5)&lt;br/&gt;
Network: TCP (1GigE)&lt;br/&gt;
Test Group: failover&lt;br/&gt;
&lt;br/&gt;
ENABLE_QUOTA=yes&lt;br/&gt;
FAILURE_MODE=HARD&lt;br/&gt;
&lt;br/&gt;
MGS/MDS Nodes: client-27vm3 (active), client-27vm7(passive)&lt;br/&gt;
&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;\ /&lt;br/&gt;
&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;1 combined MGS/MDT&lt;br/&gt;
&lt;br/&gt;
OSS Nodes: client-27vm4 (active), client-27vm8(active)&lt;br/&gt;
</environment>
        <key id="17057">LU-2554</key>
            <summary>replay-single test 80b: dd: opening `/mnt/lustre/f80b&apos;: Input/output error</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="1" iconUrl="https://jira.whamcloud.com/images/icons/priorities/blocker.svg">Blocker</priority>
                        <status id="6" iconUrl="https://jira.whamcloud.com/images/icons/statuses/closed.png" description="The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.">Closed</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="2">Won&apos;t Fix</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="yujian">Jian Yu</reporter>
                        <labels>
                    </labels>
                <created>Mon, 31 Dec 2012 10:10:00 +0000</created>
                <updated>Sun, 14 Aug 2016 17:21:48 +0000</updated>
                            <resolved>Sun, 14 Aug 2016 17:21:48 +0000</resolved>
                                    <version>Lustre 1.8.8</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>4</watches>
                                                                            <comments>
                            <comment id="49925" author="yujian" created="Thu, 3 Jan 2013 22:23:33 +0000"  >&lt;p&gt;The replay-single test 80b, 81a, 81b, 82, 83 passed in another failover test run on the same Lustre b1_8 build #236:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/f9b78b36-5503-11e2-9b6a-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/f9b78b36-5503-11e2-9b6a-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="50197" author="yujian" created="Wed, 9 Jan 2013 05:38:45 +0000"  >&lt;p&gt;One more instance:&lt;/p&gt;

&lt;p&gt;Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b1_8/238&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b1_8/238&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/e19843b4-5a2c-11e2-bcf5-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/e19843b4-5a2c-11e2-bcf5-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="161844" author="simmonsja" created="Sun, 14 Aug 2016 17:21:48 +0000"  >&lt;p&gt;Old blocker for unsupported version&lt;/p&gt;</comment>
                    </comments>
                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzvefb:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>5978</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>