<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 03:29:43 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-16753] replay-single: test_135 timeout</title>
                <link>https://jira.whamcloud.com/browse/LU-16753</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for Lai Siyao &amp;lt;lai.siyao@whamcloud.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.whamcloud.com/test_sets/92be109d-b483-4d74-9042-171b3c44c8d3&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/92be109d-b483-4d74-9042-171b3c44c8d3&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Test session details:&lt;br/&gt;
clients: &lt;a href=&quot;https://build.whamcloud.com/job/lustre-reviews/94117&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://build.whamcloud.com/job/lustre-reviews/94117&lt;/a&gt; - 4.18.0-425.10.1.el8_7.x86_64&lt;br/&gt;
servers: &lt;a href=&quot;https://build.whamcloud.com/job/lustre-reviews/94117&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://build.whamcloud.com/job/lustre-reviews/94117&lt;/a&gt; - 4.18.0-425.10.1.el8_lustre.x86_64&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;== replay-single test 135: Server failure in lock replay phase ========================================================== 17:17:59 (1681924679)
...
Failover ost1 to onyx-78vm3
CMD: onyx-78vm3 hostname
CMD: onyx-78vm3 /usr/sbin/lctl set_param fail_val=20
fail_val=20
CMD: onyx-78vm3 /usr/sbin/lctl set_param fail_loc=0x32d
fail_loc=0x32d
CMD: onyx-78vm3 dmsetup status /dev/mapper/ost1_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
CMD: onyx-78vm3 dmsetup status /dev/mapper/ost1_flakey 2&amp;gt;&amp;amp;1
CMD: onyx-78vm3 dmsetup table /dev/mapper/ost1_flakey
CMD: onyx-78vm3 dmsetup suspend --nolockfs --noflush /dev/mapper/ost1_flakey
CMD: onyx-78vm3 dmsetup load /dev/mapper/ost1_flakey --table \&quot;0 20111360 linear 252:0 0\&quot;
CMD: onyx-78vm3 dmsetup resume /dev/mapper/ost1_flakey
CMD: onyx-78vm3 test -b /dev/mapper/ost1_flakey
CMD: onyx-78vm3 e2label /dev/mapper/ost1_flakey
Starting ost1: -o localrecov  /dev/mapper/ost1_flakey /mnt/lustre-ost1
CMD: onyx-78vm3 mkdir -p /mnt/lustre-ost1; mount -t lustre -o localrecov  /dev/mapper/ost1_flakey /mnt/lustre-ost1
CMD: onyx-78vm3 e2label /dev/mapper/ost1_flakey 2&amp;gt;/dev/null
CMD: onyx-78vm3 /usr/sbin/lctl set_param 				seq.cli-lustre-OST0000-super.width=0x3fff
seq.cli-lustre-OST0000-super.width=0x3fff
CMD: onyx-78vm3 /usr/sbin/lctl get_param -n health_check
CMD: onyx-78vm3 PATH=/usr/lib64/lustre/tests:/usr/lib/lustre/tests:/usr/lib64/lustre/tests:/opt/iozone/bin:/opt/iozone/bin:/opt/iozone/bin:/usr/lib64/lustre/tests/mpi:/usr/lib64/lustre/tests/racer:/usr/lib64/lustre/../lustre-iokit/sgpdd-survey:/usr/lib64/lustre/tests:/usr/lib64/lustre/utils/gss:/usr/lib64/lustre/utils:/usr/lib64/openmpi/bin:/usr/share/Modules/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/usr/sbin:/sbin:/bin::/sbin:/bin:/usr/sbin: NAME=autotest_config 		TESTLOG_PREFIX=/autotest/autotest-1/2023-04-19/lustre-reviews_review-dne-part-6_94117_8_7aa47c47-d979-4023-84f3-0d3eb4670464//replay-single TESTNAME=test_135 		bash rpc.sh set_default_debug \&quot;vfstrace rpctrace dlmtrace neterror ha config ioctl super lfsck\&quot; \&quot;all\&quot; 4 
onyx-78vm3: onyx-78vm3.onyx.whamcloud.com: executing set_default_debug vfstrace rpctrace dlmtrace neterror ha config ioctl super lfsck all 4
CMD: onyx-78vm3 e2label /dev/mapper/ost1_flakey 				2&amp;gt;/dev/null | grep -E &apos;:[a-zA-Z]{3}[0-9]{4}&apos;
pdsh@onyx-78vm1: onyx-78vm3: ssh exited with exit code 1
CMD: onyx-78vm3 e2label /dev/mapper/ost1_flakey 2&amp;gt;/dev/null
Started lustre-OST0000
CMD: onyx-78vm3 grep -c /mnt/lustre-ost1&apos; &apos; /proc/mounts || true
Stopping /mnt/lustre-ost1 (opts:) on onyx-78vm3
CMD: onyx-78vm3 umount -d /mnt/lustre-ost1
CMD: onyx-78vm3 lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp;
lctl dl | grep &apos; ST &apos; || true
Failover ost1 to onyx-78vm3
CMD: onyx-78vm3 hostname
CMD: onyx-78vm3 /usr/sbin/lctl set_param fail_loc=0
fail_loc=0
CMD: onyx-78vm3 dmsetup status /dev/mapper/ost1_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
CMD: onyx-78vm3 dmsetup status /dev/mapper/ost1_flakey 2&amp;gt;&amp;amp;1
CMD: onyx-78vm3 test -b /dev/mapper/ost1_flakey
CMD: onyx-78vm3 e2label /dev/mapper/ost1_flakey
Starting ost1: -o localrecov  /dev/mapper/ost1_flakey /mnt/lustre-ost1
CMD: onyx-78vm3 mkdir -p /mnt/lustre-ost1; mount -t lustre -o localrecov  /dev/mapper/ost1_flakey /mnt/lustre-ost1
CMD: onyx-78vm3 e2label /dev/mapper/ost1_flakey 2&amp;gt;/dev/null
CMD: onyx-78vm3 /usr/sbin/lctl set_param 				seq.cli-lustre-OST0000-super.width=0x3fff
seq.cli-lustre-OST0000-super.width=0x3fff
CMD: onyx-78vm3 /usr/sbin/lctl get_param -n health_check
CMD: onyx-78vm3 PATH=/usr/lib64/lustre/tests:/usr/lib/lustre/tests:/usr/lib64/lustre/tests:/opt/iozone/bin:/opt/iozone/bin:/opt/iozone/bin:/usr/lib64/lustre/tests/mpi:/usr/lib64/lustre/tests/racer:/usr/lib64/lustre/../lustre-iokit/sgpdd-survey:/usr/lib64/lustre/tests:/usr/lib64/lustre/utils/gss:/usr/lib64/lustre/utils:/usr/lib64/openmpi/bin:/usr/share/Modules/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/usr/sbin:/sbin:/bin::/sbin:/bin:/usr/sbin: NAME=autotest_config 		TESTLOG_PREFIX=/autotest/autotest-1/2023-04-19/lustre-reviews_review-dne-part-6_94117_8_7aa47c47-d979-4023-84f3-0d3eb4670464//replay-single TESTNAME=test_135 		bash rpc.sh set_default_debug \&quot;vfstrace rpctrace dlmtrace neterror ha config ioctl super lfsck\&quot; \&quot;all\&quot; 4 
onyx-78vm3: onyx-78vm3.onyx.whamcloud.com: executing set_default_debug vfstrace rpctrace dlmtrace neterror ha config ioctl super lfsck all 4
CMD: onyx-78vm3 e2label /dev/mapper/ost1_flakey 				2&amp;gt;/dev/null | grep -E &apos;:[a-zA-Z]{3}[0-9]{4}&apos;
pdsh@onyx-78vm1: onyx-78vm3: ssh exited with exit code 1
CMD: onyx-78vm3 e2label /dev/mapper/ost1_flakey 2&amp;gt;/dev/null
Started lustre-OST0000
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment></environment>
        <key id="75645">LU-16753</key>
            <summary>replay-single: test_135 timeout</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="1" iconUrl="https://jira.whamcloud.com/images/icons/statuses/open.png" description="The issue is open and ready for the assignee to start work on it.">Open</status>
                    <statusCategory id="2" key="new" colorName="default"/>
                                    <resolution id="-1">Unresolved</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Thu, 20 Apr 2023 08:04:00 +0000</created>
                <updated>Thu, 25 Jan 2024 09:13:08 +0000</updated>
                                                                                <due></due>
                            <votes>0</votes>
                                    <watches>3</watches>
                                                                            <comments>
                            <comment id="376859" author="adilger" created="Thu, 29 Jun 2023 04:13:42 +0000"  >&lt;p&gt;Possibly also related to &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-16536&quot; title=&quot;MDS umount can get stuck due to LDLM locks&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-16536&quot;&gt;&lt;del&gt;LU-16536&lt;/del&gt;&lt;/a&gt;.&lt;/p&gt;</comment>
                            <comment id="394723" author="arshad512" created="Wed, 29 Nov 2023 12:18:13 +0000"  >&lt;p&gt;+1 on master (&lt;a href=&quot;https://testing.whamcloud.com/test_sets/12362470-f71f-41f4-be55-ea42efb827b0&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/12362470-f71f-41f4-be55-ea42efb827b0&lt;/a&gt;)&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="74367">LU-16536</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i03jd3:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>