<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:10:32 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-802] Test failure on test suite parallel-scale, subtest test_write_append_truncate</title>
                <link>https://jira.whamcloud.com/browse/LU-802</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for Chris Gearing &amp;lt;chris@whamcloud.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/0219bc62-00f6-11e1-bb4f-52540025f9af&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/0219bc62-00f6-11e1-bb4f-52540025f9af&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;See dmesg client vm1&lt;/p&gt;

&lt;p&gt;The sub-test test_write_append_truncate failed with the following error:&lt;/p&gt;
&lt;blockquote&gt;
&lt;p&gt;test failed to respond and timed out&lt;/p&gt;&lt;/blockquote&gt;

&lt;p&gt;Info required for matching: parallel-scale write_append_truncate&lt;/p&gt;</description>
                <environment></environment>
        <key id="12276">LU-802</key>
            <summary>Test failure on test suite parallel-scale, subtest test_write_append_truncate</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="1" iconUrl="https://jira.whamcloud.com/images/icons/priorities/blocker.svg">Blocker</priority>
                        <status id="6" iconUrl="https://jira.whamcloud.com/images/icons/statuses/closed.png" description="The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.">Closed</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="chris">Chris Gearing</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Fri, 28 Oct 2011 04:01:50 +0000</created>
                <updated>Thu, 31 Oct 2013 09:33:31 +0000</updated>
                            <resolved>Thu, 31 Oct 2013 09:33:31 +0000</resolved>
                                    <version>Lustre 2.2.0</version>
                    <version>Lustre 2.1.1</version>
                    <version>Lustre 2.1.2</version>
                    <version>Lustre 1.8.8</version>
                    <version>Lustre 2.4.1</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>5</watches>
                                                                            <comments>
                            <comment id="28486" author="yujian" created="Mon, 13 Feb 2012 07:16:16 +0000"  >&lt;p&gt;Lustre Tag: v2_1_1_0_RC2&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b2_1/41/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b2_1/41/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL6/x86_64(server), RHEL6/i686(client)&lt;br/&gt;
Network: TCP (1GigE)&lt;br/&gt;
ENABLE_QUOTA=yes&lt;/p&gt;

&lt;p&gt;The same issue occurred: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/e04b00fe-55d3-11e1-9aa8-5254004bbbd3&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/e04b00fe-55d3-11e1-9aa8-5254004bbbd3&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The stack trace on Client 1 (client-11vm1) showed that:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;12:26:40:write_append_ S 0002b90d     0 23901  23898 0x00000080
12:26:40: f63c5570 00000086 4f381feb 0002b90d 00000000 000003c8 00005d5d 0000a53a
12:26:40: 00000000 c3318200 00002643 00000000 00002643 c0b0f0c0 c0b0f0c0 f63c5818
12:26:40: c0b0f0c0 c0b0aaa4 c0b0f0c0 f63c5818 c69b4000 f98351ed f982663f 0000008f
12:26:40:Call Trace:
12:26:40: [&amp;lt;f980de2e&amp;gt;] ? cl_lock_mutex_put+0x3e/0x80 [obdclass]
12:26:40: [&amp;lt;f980e328&amp;gt;] ? cl_lock_state_wait+0x1d8/0x340 [obdclass]
12:26:40: [&amp;lt;f980b8f5&amp;gt;] ? cl_lock_used_mod+0x25/0x60 [obdclass]
12:26:40: [&amp;lt;f980bbad&amp;gt;] ? cl_lock_user_add+0x4d/0x200 [obdclass]
12:26:40: [&amp;lt;c044d4d0&amp;gt;] ? default_wake_function+0x0/0x10
12:26:40: [&amp;lt;f9813969&amp;gt;] ? cl_enqueue_locked+0x1a9/0x2d0 [obdclass]
12:26:40: [&amp;lt;f93faee6&amp;gt;] ? lov_io_call+0x76/0x250 [lov]
12:26:40: [&amp;lt;f9813d69&amp;gt;] ? cl_lock_request+0x89/0x2d0 [obdclass]
12:26:40: [&amp;lt;f93fccc2&amp;gt;] ? lov_io_lock+0x72/0x260 [lov]
12:26:40: [&amp;lt;f89ec572&amp;gt;] ? vvp_io_init+0xe2/0x380 [lustre]
12:26:40: [&amp;lt;f84c4eaf&amp;gt;] ? libcfs_debug_vmsg2+0x34f/0x820 [libcfs]
12:26:40: [&amp;lt;f981bbe0&amp;gt;] ? cl_io_lock+0x4e0/0xa10 [obdclass]
12:26:40: [&amp;lt;f9814fee&amp;gt;] ? cl_io_iter_init+0x6e/0x240 [obdclass]
12:26:41: [&amp;lt;f981c232&amp;gt;] ? cl_io_loop+0x122/0x2a0 [obdclass]
12:26:41: [&amp;lt;f896497a&amp;gt;] ? ll_file_io_generic+0x41a/0x6c0 [lustre]
12:26:41: [&amp;lt;f9801af6&amp;gt;] ? cl_env_get+0x16/0x480 [obdclass]
12:26:41: [&amp;lt;f84c8ce2&amp;gt;] ? cfs_hash_dual_bd_unlock+0x22/0x50 [libcfs]
12:26:41: [&amp;lt;f84ccc4e&amp;gt;] ? cfs_hash_find_or_add+0x7e/0x160 [libcfs]
12:26:41: [&amp;lt;f8964d41&amp;gt;] ? ll_file_aio_read+0x121/0x4d0 [lustre]
12:26:41: [&amp;lt;c052ee2e&amp;gt;] ? cp_new_stat64+0xee/0x100
12:26:41: [&amp;lt;f8970c65&amp;gt;] ? ll_file_read+0x165/0x440 [lustre]
12:26:41: [&amp;lt;c05a24fc&amp;gt;] ? security_file_permission+0xc/0x10
12:26:41: [&amp;lt;c052a656&amp;gt;] ? rw_verify_area+0x66/0xe0
12:26:41: [&amp;lt;f8970b00&amp;gt;] ? ll_file_read+0x0/0x440 [lustre]
12:26:41: [&amp;lt;c052b01d&amp;gt;] ? vfs_read+0x9d/0x190
12:26:41: [&amp;lt;c04afccc&amp;gt;] ? audit_syscall_entry+0x21c/0x240
12:26:41: [&amp;lt;c052b151&amp;gt;] ? sys_read+0x41/0x70
12:26:41: [&amp;lt;c0409a9f&amp;gt;] ? sysenter_do_call+0x12/0x28
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="28787" author="pjones" created="Wed, 15 Feb 2012 15:13:39 +0000"  >&lt;p&gt;Jinshan&lt;/p&gt;

&lt;p&gt;Could you please look at this one?&lt;/p&gt;

&lt;p&gt;Thanks&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="28792" author="jay" created="Wed, 15 Feb 2012 16:19:29 +0000"  >&lt;p&gt;From the debug log, it seems everything was fine. How much time was set for this test? Let&apos;s try longer time before timing it out.&lt;/p&gt;</comment>
                            <comment id="28869" author="yujian" created="Thu, 16 Feb 2012 08:27:51 +0000"  >&lt;p&gt;&amp;gt; From the debug log, it seems everything was fine. How much time was set for this test? Let&apos;s try longer time before timing it out.&lt;/p&gt;

&lt;p&gt;It seems the TIMEOUT value set by the autotest system is 3600.&lt;/p&gt;

&lt;p&gt;Per the following search result on Maloo:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/sub_tests/query?utf8=%E2%9C%93&amp;amp;test_set[test_set_script_id]=b10ed7ea-55b4-11e0-bb3d-52540025f9af&amp;amp;sub_test[sub_test_script_id]=6c0ca7dc-55d0-11e0-bb3d-52540025f9af&amp;amp;sub_test[status]=&amp;amp;sub_test[query_bugs]=&amp;amp;test_session[test_host]=&amp;amp;test_session[test_group]=&amp;amp;test_session[user_id]=&amp;amp;test_session[query_date]=&amp;amp;test_session[query_recent_period]=1209600&amp;amp;test_node[os_type_id]=&amp;amp;test_node[distribution_type_id]=&amp;amp;test_node[architecture_type_id]=&amp;amp;test_node[file_system_type_id]=&amp;amp;test_node[lustre_branch_id]=&amp;amp;test_node_network[network_type_id]=&amp;amp;commit=Update+results&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/sub_tests/query?utf8=%E2%9C%93&amp;amp;test_set[test_set_script_id]=b10ed7ea-55b4-11e0-bb3d-52540025f9af&amp;amp;sub_test[sub_test_script_id]=6c0ca7dc-55d0-11e0-bb3d-52540025f9af&amp;amp;sub_test[status]=&amp;amp;sub_test[query_bugs]=&amp;amp;test_session[test_host]=&amp;amp;test_session[test_group]=&amp;amp;test_session[user_id]=&amp;amp;test_session[query_date]=&amp;amp;test_session[query_recent_period]=1209600&amp;amp;test_node[os_type_id]=&amp;amp;test_node[distribution_type_id]=&amp;amp;test_node[architecture_type_id]=&amp;amp;test_node[file_system_type_id]=&amp;amp;test_node[lustre_branch_id]=&amp;amp;test_node_network[network_type_id]=&amp;amp;commit=Update+results&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The write_append_truncate test took very long time (large than 2000s) to run on the VMs, but took less than 10mins to run on the physical nodes with real devices:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/sub_tests/e59823a4-584c-11e1-9df1-5254004bbbd3&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/sub_tests/e59823a4-584c-11e1-9df1-5254004bbbd3&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/sub_tests/4f4f93d0-579a-11e1-99fa-5254004bbbd3&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/sub_tests/4f4f93d0-579a-11e1-99fa-5254004bbbd3&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Per the parallel-scale test script:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;#
# write_append_truncate
#
# threads per client
write_THREADS=${write_THREADS:-8}
write_REP=${write_REP:-10000}
[ &quot;$SLOW&quot; = &quot;no&quot; ] &amp;amp;&amp;amp; write_REP=100
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;I think we could specify small values to write_REP/write_THREADS to reduce the run time on VMs by the autotest system.&lt;/p&gt;</comment>
                            <comment id="28948" author="jay" created="Thu, 16 Feb 2012 12:50:33 +0000"  >&lt;p&gt;Thank you, yujian. Please let me know if you meet this problem again.&lt;/p&gt;</comment>
                            <comment id="30564" author="sarah" created="Mon, 5 Mar 2012 17:51:24 +0000"  >&lt;p&gt;got the similar issue: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/9ff40214-6701-11e1-a9a4-5254004bbbd3&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/9ff40214-6701-11e1-a9a4-5254004bbbd3&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;build lustre-master/493/RHEL6-x86_64&lt;/p&gt;</comment>
                            <comment id="38618" author="yujian" created="Fri, 11 May 2012 08:27:14 +0000"  >&lt;p&gt;Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b1_8/194/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b1_8/194/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL5.8/x86_64(server), RHEL6.2/x86_64(client)&lt;br/&gt;
Network: TCP (1GigE)&lt;br/&gt;
ENABLE_QUOTA=yes&lt;/p&gt;

&lt;p&gt;The similar issue occurred: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/3c4bbb96-9b3e-11e1-a0a0-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/3c4bbb96-9b3e-11e1-a0a0-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="39703" author="yujian" created="Thu, 31 May 2012 04:23:59 +0000"  >&lt;p&gt;Lustre Tag: v2_1_2_RC2&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b2_1/86/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b2_1/86/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL6.2/x86_64(server), RHEL6.2/i686(client)&lt;br/&gt;
Network: TCP (1GigE)                             &lt;br/&gt;
ENABLE_QUOTA=yes&lt;/p&gt;

&lt;p&gt;The same issue occurred: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/19c1ae10-aac9-11e1-bd84-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/19c1ae10-aac9-11e1-bd84-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="49355" author="yujian" created="Tue, 18 Dec 2012 01:11:53 +0000"  >&lt;p&gt;Lustre Client: 2.3.0&lt;br/&gt;
Lustre Server: v2_1_4_RC1&lt;br/&gt;
Distro/Arch: RHEL6.3/x86_64&lt;br/&gt;
Network: TCP (1GigE)&lt;br/&gt;
ENABLE_QUOTA=yes&lt;/p&gt;

&lt;p&gt;The same issue occurred: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/4f1c6314-4864-11e2-8cdc-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/4f1c6314-4864-11e2-8cdc-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="66028" author="yujian" created="Mon, 9 Sep 2013 03:10:37 +0000"  >&lt;p&gt;The timeout issue keeps occurring while testing Lustre 2.4.1 RC2:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/8e327ac8-1796-11e3-8f44-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/8e327ac8-1796-11e3-8f44-52540035b04c&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/6edeb926-17b3-11e3-8f44-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/6edeb926-17b3-11e3-8f44-52540035b04c&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/da608a9e-1830-11e3-b39a-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/da608a9e-1830-11e3-b39a-52540035b04c&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The test timed out in 3600s. However, the same test passed on Lustre 2.4.1 RC1 in about 5570s several days ago.&lt;/p&gt;

&lt;p&gt;Chris, could you please check whether the timeout mechanism in autotest was changed or not? Thanks.&lt;/p&gt;

&lt;p&gt;The timeout failure is preventing the remaining sub-tests in parallel-scale.sh from running. So, I flagged this ticket as a blocker.&lt;/p&gt;</comment>
                            <comment id="70341" author="yujian" created="Thu, 31 Oct 2013 09:33:17 +0000"  >&lt;p&gt;The parallel-scale write_append_truncate test passed in the latest full group test sessions without timeout issue. Let&apos;s close this ticket. If it hit timeout failure again, please feel free to reopen the ticket.&lt;/p&gt;</comment>
                    </comments>
                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzv3qv:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>4130</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>