<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:37:38 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-10723] Interop 2.10.3&lt;-&gt;2.11 sanity test_232b: OSS hung</title>
                <link>https://jira.whamcloud.com/browse/LU-10723</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;sanity test_232b - Timeout occurred after 168 mins, last suite running was sanity, restarting cluster to continue tests&lt;br/&gt;
^^^^^^^^^^^^^ DO NOT REMOVE LINE ABOVE ^^^^^^^^^^^^^&lt;/p&gt;

&lt;p&gt;This issue was created by maloo for sarah_lw &amp;lt;wei3.liu@intel.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/4c514328-12aa-11e8-a6ad-52540065bddc&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/4c514328-12aa-11e8-a6ad-52540065bddc&lt;/a&gt;&lt;br/&gt;
test_232b failed with the following error:&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Timeout occurred after 168 mins, last suite running was sanity, restarting cluster to continue tests
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;client: lustre-master tag-2.10.58&lt;br/&gt;
server: 2.10.3&lt;/p&gt;

&lt;p&gt;OSS console&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[ 6415.456311] Lustre: DEBUG MARKER: /usr/sbin/lctl mark == sanity test 232b: failed data version lock should not block umount ================================ 22:43:59 \(1518648239\)
[ 6415.640059] Lustre: DEBUG MARKER: == sanity test 232b: failed data version lock should not block umount ================================ 22:43:59 (1518648239)
[ 6416.022119] Lustre: DEBUG MARKER: /usr/sbin/lctl set_param fail_loc=0x31c
[ 6416.188210] Lustre: *** cfs_fail_loc=31c, val=0***
[ 6416.188807] LustreError: 5316:0:(ldlm_request.c:469:ldlm_cli_enqueue_local()) ### delayed lvb init failed (rc -12) ns: filter-lustre-OST0000_UUID lock: ffff88005e9f6400/0xf4fc5b448cfd123c lrc: 2/0,0 mode: --/PR res: [0x99c2:0x0:0x0].0x0 rrc: 2 type: EXT [0-&amp;gt;0] (req 0-&amp;gt;0) flags: 0x40000000000000 nid: local remote: 0x0 expref: -99 pid: 5316 timeout: 0 lvb_type: 0
[ 6416.345912] Lustre: DEBUG MARKER: /usr/sbin/lctl set_param fail_loc=0
[ 6416.882703] Lustre: DEBUG MARKER: grep -c /mnt/lustre-ost1&apos; &apos; /proc/mounts
[ 6417.199191] Lustre: DEBUG MARKER: umount -d /mnt/lustre-ost1
[ 6417.366042] Lustre: Failing over lustre-OST0000
[ 6417.427264] LustreError: 24697:0:(ldlm_resource.c:1100:ldlm_resource_complain()) filter-lustre-OST0000_UUID: namespace resource [0x99c2:0x0:0x0].0x0 (ffff88005d0809c0) refcount nonzero (1) after lock cleanup; forcing cleanup.
[ 6417.429304] LustreError: 24697:0:(ldlm_resource.c:1682:ldlm_resource_dump()) --- Resource: [0x99c2:0x0:0x0].0x0 (ffff88005d0809c0) refcount = 2
[ 6418.378000] Lustre: lustre-OST0000: Not available for connect from 10.2.8.127@tcp (stopping)
[ 6421.517980] Lustre: lustre-OST0000: Not available for connect from 10.2.8.125@tcp (stopping)
[ 6422.430852] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
[ 6426.515485] Lustre: lustre-OST0000: Not available for connect from 10.2.8.125@tcp (stopping)
[ 6426.516597] Lustre: Skipped 1 previous similar message
[ 6427.431867] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
[ 6431.515551] Lustre: lustre-OST0000: Not available for connect from 10.2.8.125@tcp (stopping)
[ 6431.516800] Lustre: Skipped 2 previous similar messages
[ 6432.432858] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
[ 6436.515436] Lustre: lustre-OST0000: Not available for connect from 10.2.8.125@tcp (stopping)
[ 6436.516663] Lustre: Skipped 2 previous similar messages
[ 6437.433853] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
[ 6442.434854] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
[ 6446.515570] Lustre: lustre-OST0000: Not available for connect from 10.2.8.125@tcp (stopping)
[ 6446.516867] Lustre: Skipped 5 previous similar messages
[ 6452.435856] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
[ 6452.437151] LustreError: Skipped 1 previous similar message
[ 6466.515613] Lustre: lustre-OST0000: Not available for connect from 10.2.8.125@tcp (stopping)
[ 6466.516623] Lustre: Skipped 11 previous similar messages
[ 6472.436880] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
[ 6472.438283] LustreError: Skipped 3 previous similar messages
[ 6501.515401] Lustre: lustre-OST0000: Not available for connect from 10.2.8.125@tcp (stopping)
[ 6501.516599] Lustre: Skipped 20 previous similar messages
[ 6507.438850] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
[ 6507.440165] LustreError: Skipped 6 previous similar messages
[ 6530.986984] LustreError: 24702:0:(client.c:1166:ptlrpc_import_delay_req()) @@@ IMP_CLOSED   req@ffff880007849200 x1592411468623712/t0(0) o101-&amp;gt;lustre-MDT0000-lwp-OST0000@10.2.8.127@tcp:23/10 lens 456/496 e 0 to 0 dl 0 ref 2 fl Rpc:/0/ffffffff rc 0/-1
[ 6530.989429] LustreError: 24702:0:(qsd_reint.c:56:qsd_reint_completion()) lustre-OST0000: failed to enqueue global quota lock, glb fid:[0x200000006:0x20000:0x0], rc:-5
[ 6530.990942] LustreError: 24702:0:(qsd_reint.c:56:qsd_reint_completion()) Skipped 1 previous similar message
[ 6566.515348] Lustre: lustre-OST0000: Not available for connect from 10.2.8.125@tcp (stopping)
[ 6566.516397] Lustre: Skipped 38 previous similar messages
[ 6572.439881] LustreError: 0-0: Forced cleanup waiting for filter-lustre-OST0000_UUID namespace with 1 resources in use, (rc=-110)
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment></environment>
        <key id="50959">LU-10723</key>
            <summary>Interop 2.10.3&lt;-&gt;2.11 sanity test_232b: OSS hung</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="bougetq">Quentin Bouget</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Mon, 26 Feb 2018 18:13:32 +0000</created>
                <updated>Thu, 8 Mar 2018 17:42:35 +0000</updated>
                            <resolved>Thu, 8 Mar 2018 17:42:35 +0000</resolved>
                                    <version>Lustre 2.11.0</version>
                                    <fixVersion>Lustre 2.11.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>4</watches>
                                                                            <comments>
                            <comment id="221946" author="jamesanunez" created="Wed, 28 Feb 2018 17:15:23 +0000"  >&lt;p&gt;sanity test 232b was added by patch &lt;a href=&quot;https://review.whamcloud.com/30477&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/30477&lt;/a&gt; for &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-10302&quot; title=&quot;hsm: obscure bug with multi-mountpoints and ldlm&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-10302&quot;&gt;&lt;del&gt;LU-10302&lt;/del&gt;&lt;/a&gt; and landed to master 2.10.57~70. We should skip this test for servers with version number less than 2.10.58.&lt;/p&gt;</comment>
                            <comment id="222118" author="gerrit" created="Fri, 2 Mar 2018 08:26:40 +0000"  >&lt;p&gt;Quentin Bouget (quentin.bouget@cea.fr) uploaded a new patch: &lt;a href=&quot;https://review.whamcloud.com/31487&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/31487&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-10723&quot; title=&quot;Interop 2.10.3&amp;lt;-&amp;gt;2.11 sanity test_232b: OSS hung&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-10723&quot;&gt;&lt;del&gt;LU-10723&lt;/del&gt;&lt;/a&gt; tests: disable sanity 232b before 2.10.58&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: 0c3be07197ccd9ca87fd28276c8896701c35c12c&lt;/p&gt;</comment>
                            <comment id="222837" author="gerrit" created="Thu, 8 Mar 2018 17:36:19 +0000"  >&lt;p&gt;Oleg Drokin (oleg.drokin@intel.com) merged in patch &lt;a href=&quot;https://review.whamcloud.com/31487/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/31487/&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-10723&quot; title=&quot;Interop 2.10.3&amp;lt;-&amp;gt;2.11 sanity test_232b: OSS hung&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-10723&quot;&gt;&lt;del&gt;LU-10723&lt;/del&gt;&lt;/a&gt; tests: disable sanity 232b before 2.10.58&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: &lt;br/&gt;
Commit: 2bf0f9873af6524b974851abf680866e06d26505&lt;/p&gt;</comment>
                            <comment id="222848" author="pjones" created="Thu, 8 Mar 2018 17:42:35 +0000"  >&lt;p&gt;Landed for 2.11&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="49492">LU-10302</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzztdb:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>