<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:06:17 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-7133] Interop 2.7.0 &lt;-&gt; master- conf-sanity test_43: check lustre-MDTall.mdt.nosquash_nids failed!</title>
                <link>https://jira.whamcloud.com/browse/LU-7133</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for Saurabh Tandan &amp;lt;saurabh.tandan@intel.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/d46e0c62-514d-11e5-9f68-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/d46e0c62-514d-11e5-9f68-5254006e85c2&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The sub-test test_43 failed with the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;check lustre-MDTall.mdt.nosquash_nids failed!
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Test log:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Setting lustre.mdt.root_squash from 0:0 to 500:500
CMD: shadow-18vm12 /usr/sbin/lctl conf_param lustre.mdt.root_squash=&apos;500:500&apos;
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.root_squash
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.root_squash
Waiting 90 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.root_squash
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.root_squash
Updated after 2s: wanted &apos;500:500&apos; got &apos;500:500&apos;
CMD: shadow-18vm5.shadow.whamcloud.com /usr/sbin/lctl get_param -n llite.lustre*.root_squash
CMD: shadow-18vm5.shadow.whamcloud.com /usr/sbin/lctl get_param -n llite.lustre*.root_squash
/mnt/lustre/f43.conf-sanity-userfile: owner uid 500 (-rw-------): root read permission is granted - ok
/mnt/lustre/f43.conf-sanity-userfile: owner uid 500 (-rw-------): root write permission is granted - ok
/mnt/lustre/f43.conf-sanity-rootfile: owner uid 0 (-rw-------): root read permission is denied - ok
/mnt/lustre/f43.conf-sanity-rootfile: owner uid 0 (-rw-------): root write permission is denied - ok
/mnt/lustre/d43.conf-sanity-rootdir: owner uid 0 (drwx------): root unlink permission is denied - ok
/mnt/lustre/d43.conf-sanity-rootdir: owner uid 0 (drwx------): root create permission is denied - ok
/mnt/lustre/f43.conf-sanity-user1file: owner uid 501 (-rw-------): root read permission is denied - ok
/mnt/lustre/f43.conf-sanity-user1file: owner uid 501 (-rw-------): root write permission is denied - ok
/usr/lib64/lustre/tests/conf-sanity.sh: line 2844: 29182 Terminated              runas -u $ID1 tail -f $DIR/$tfile-user1file &amp;gt; /dev/null 2&amp;gt;&amp;amp;1
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Setting lustre-MDTall.mdt.nosquash_nids from NONE to 2@elan 0@lo 10.1.4.215@tcp 192.168.0.[2,10]@tcp
CMD: shadow-18vm12 /usr/sbin/lctl conf_param lustre-MDTall.mdt.nosquash_nids=&apos;2@elan 0@lo 10.1.4.215@tcp 192.168.0.[2,10]@tcp&apos;
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 90 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 80 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 70 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 60 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 50 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 40 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 30 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 20 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Waiting 10 secs for update
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
CMD: shadow-18vm12 /usr/sbin/lctl get_param -n mdt.lustre-MDT0000.nosquash_nids
Update not seen after 90s: wanted &apos;2@elan 0@lo 10.1.4.215@tcp 192.168.0.[2,10]@tcp&apos; got &apos;NONE&apos;
 conf-sanity test_43: @@@@@@ FAIL: check lustre-MDTall.mdt.nosquash_nids failed! 
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Console :&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;09:31:40:Lustre: DEBUG MARKER: == conf-sanity test 43: check root_squash and nosquash_nids == 09:28:26 (1441099706)
09:31:40:Lustre: DEBUG MARKER: mkdir -p /mnt/lustre
09:31:40:Lustre: DEBUG MARKER: mount -t lustre -o user_xattr,flock shadow-18vm12@tcp:/lustre /mnt/lustre
09:31:40:LustreError: 28945:0:(obd_config.c:1322:class_process_proc_param()) llite: lustre-client-ffff8800795b0800 unknown param some_wrong_param=10
09:31:40:Lustre: Mounted lustre-client
09:31:40:Lustre: DEBUG MARKER: /usr/sbin/lctl get_param -n llite.lustre*.root_squash
09:31:40:Lustre: DEBUG MARKER: /usr/sbin/lctl get_param -n llite.lustre*.root_squash
09:31:40:Lustre: DEBUG MARKER: /usr/sbin/lctl get_param -n llite.lustre*.nosquash_nids
09:31:40:Lustre: DEBUG MARKER: /usr/sbin/lctl get_param -n llite.lustre*.nosquash_nids
09:31:40:Lustre: lustre: nosquash_nids is cleared
09:31:40:Lustre: lustre: root_squash is set to 500:500
09:31:40:Lustre: DEBUG MARKER: /usr/sbin/lctl get_param -n llite.lustre*.root_squash
09:31:40:Lustre: DEBUG MARKER: /usr/sbin/lctl get_param -n llite.lustre*.root_squash
09:31:40:Lustre: lustre: nosquash_nids set to 2@elan 0@lo 10.1.4.215@tcp 192.168.0.[2,10]@tcp
09:31:40:Lustre: DEBUG MARKER: /usr/sbin/lctl mark  conf-sanity test_43: @@@@@@ FAIL: check lustre-MDTall.mdt.nosquash_nids failed! 
09:31:40:Lustre: DEBUG MARKER: conf-sanity test_43: @@@@@@ FAIL: check lustre-MDTall.mdt.nosquash_nids failed!
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>Client: 2.7.0&lt;br/&gt;
Server: lustre-master# 3166 , RHEL 7</environment>
        <key id="32041">LU-7133</key>
            <summary>Interop 2.7.0 &lt;-&gt; master- conf-sanity test_43: check lustre-MDTall.mdt.nosquash_nids failed!</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="3" iconUrl="https://jira.whamcloud.com/images/icons/priorities/major.svg">Major</priority>
                        <status id="4" iconUrl="https://jira.whamcloud.com/images/icons/statuses/reopened.png" description="This issue was once resolved, but the resolution was deemed incorrect. From here issues are either marked assigned or resolved.">Reopened</status>
                    <statusCategory id="2" key="new" colorName="default"/>
                                    <resolution id="-1">Unresolved</resolution>
                                        <assignee username="bogl">Bob Glossman</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Thu, 10 Sep 2015 19:14:04 +0000</created>
                <updated>Mon, 10 Sep 2018 16:41:57 +0000</updated>
                                            <version>Lustre 2.7.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>8</watches>
                                                                            <comments>
                            <comment id="128714" author="adilger" created="Tue, 29 Sep 2015 08:03:14 +0000"  >&lt;p&gt;This is one of the top failing autotests:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/a0ca191c-664e-11e5-ba6e-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/a0ca191c-664e-11e5-ba6e-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/0d7fd0e6-65ce-11e5-997c-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/0d7fd0e6-65ce-11e5-997c-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/18cccdd8-65af-11e5-997c-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/18cccdd8-65af-11e5-997c-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/dfbe33d6-65b5-11e5-997c-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/dfbe33d6-65b5-11e5-997c-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="128771" author="pjones" created="Tue, 29 Sep 2015 17:52:50 +0000"  >&lt;p&gt;Bob&lt;/p&gt;

&lt;p&gt;Could you please look into this one?&lt;/p&gt;

&lt;p&gt;Thanks&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="128772" author="adilger" created="Tue, 29 Sep 2015 17:54:28 +0000"  >&lt;p&gt;This just started before 2.7.59, so it may be possible to trace this to a specific patch landing. It might just be a test failure due to a feature, but it needs to be verified that it isn&apos;t an interop regression. &lt;/p&gt;</comment>
                            <comment id="129193" author="bogl" created="Fri, 2 Oct 2015 21:26:12 +0000"  >&lt;p&gt;here&apos;s the problem.  from dmesg log of mds1, running new (master) version:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[29879.051694] LNet: 14647:0:(nidstrings.c:271:parse_nidrange()) can&apos;t parse nidrange: &quot;2@elan&quot;
[29879.053687] Lustre: 14647:0:(lprocfs_status.c:1981:lprocfs_wr_nosquash_nids()) lustre-MDT0000: failed to set nosquash_nids to &quot;2@elan 0@lo 10.1.4.215@tcp 192.168.0.[2,10]@tcp&quot;, can&apos;t parse rc = -22
[29879.057391] LustreError: 14647:0:(obd_config.c:1389:class_process_proc_param()) mdt.: error writing proc entry &apos;nosquash_nids&apos;: rc = -22
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;elan is one of the obsolete lnds eliminated from master.  however it&apos;s still used in example test nidlist in old version of conf-sanity.sh in v2.7.0.   master server code can&apos;t parse it, so just throws up its hands and complains.   I don&apos;t see this as easily fixable on the server side in master. could be fixed by moving part of the master fix in conf-sanity.sh into b2_7, but that won&apos;t fix the problem with interop of current released 2.7 with master.&lt;/p&gt;</comment>
                            <comment id="129210" author="bogl" created="Fri, 2 Oct 2015 22:32:10 +0000"  >&lt;p&gt;from the commit header of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-6210&quot; title=&quot;change positional struct initializers to C99 initializers&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-6210&quot;&gt;&lt;del&gt;LU-6210&lt;/del&gt;&lt;/a&gt; mod that removed the obsolete LNDs:&lt;/p&gt;

&lt;p&gt;Remove old LND types from the netstrfns table, as they are&lt;br/&gt;
long obsolete and shouldn&apos;t be needed even for interop anymore.&lt;/p&gt;

&lt;p&gt;Clearly this was a misstatement.  At least one obsolete LND is still needed for interop, as there&apos;s a reference to it embedded in old conf-sanity.sh&lt;/p&gt;</comment>
                            <comment id="129211" author="bogl" created="Fri, 2 Oct 2015 22:42:36 +0000"  >&lt;p&gt;A possible fix might be to just put back an entry to the otherwise unsupported elan LND in the libcfs_netstrfns[] table.  This would allow it to be parsed.  However I&apos;m unclear if putting an unsupported nidlist entry into lnet data structures might have bad side effects.  It might get referenced and assume a functional LND is really there underneath.&lt;/p&gt;</comment>
                            <comment id="129212" author="simmonsja" created="Fri, 2 Oct 2015 22:46:43 +0000"  >&lt;p&gt;You are correct putting the elan LND support back will have negative effects. The proper fix is to update the test like we did for master to test for gnilnd instead of elan.&lt;/p&gt;</comment>
                            <comment id="129214" author="simmonsja" created="Fri, 2 Oct 2015 22:56:20 +0000"  >&lt;p&gt;I pushed a patch : &lt;a href=&quot;http://review.whamcloud.com/#/c/16717&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/#/c/16717&lt;/a&gt;.  I assume we need a patch for 2.6 and 2.5 as well? Lets land this to 2.7.1 before it is officially released, then we will have no further interop issues.&lt;/p&gt;</comment>
                            <comment id="129217" author="pjones" created="Fri, 2 Oct 2015 23:05:27 +0000"  >&lt;p&gt;James&lt;/p&gt;

&lt;p&gt;We only test 2.8 interop with 2.5.x and 2.7.x releases, so I think that is the limit of what is needed.&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="129218" author="simmonsja" created="Fri, 2 Oct 2015 23:05:31 +0000"  >&lt;p&gt;I see you pushed a patch Bob so I will abandon my patch.&lt;/p&gt;</comment>
                            <comment id="129219" author="pjones" created="Fri, 2 Oct 2015 23:08:59 +0000"  >&lt;p&gt;To summarize though, I think that we can discount this from a fix version 2.8 and just plan to tidy up the tests on the maintenance branches for future interop testing. As such I think that we can close this ticket and track that effort separately.&lt;/p&gt;</comment>
                            <comment id="129246" author="adilger" created="Sun, 4 Oct 2015 01:52:15 +0000"  >&lt;p&gt;The patch for b2_7 still needs to land. &lt;/p&gt;</comment>
                            <comment id="129247" author="pjones" created="Sun, 4 Oct 2015 03:29:18 +0000"  >&lt;p&gt;..which will be tracked separately &lt;img class=&quot;emoticon&quot; src=&quot;https://jira.whamcloud.com/images/icons/emoticons/smile.png&quot; height=&quot;16&quot; width=&quot;16&quot; align=&quot;absmiddle&quot; alt=&quot;&quot; border=&quot;0&quot;/&gt;&lt;/p&gt;</comment>
                            <comment id="132107" author="standan" created="Thu, 29 Oct 2015 22:29:05 +0000"  >&lt;p&gt;Encountered same issue for interop testing for 2.7.62 Tag.&lt;br/&gt;
Server: master, 2.7.62, build #3225&lt;br/&gt;
Client: 2.5.5, b2_5_fe/62&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/44cc8dd8-7b67-11e5-a83c-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/44cc8dd8-7b67-11e5-a83c-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="136458" author="standan" created="Tue, 15 Dec 2015 23:36:48 +0000"  >&lt;p&gt;Another instance for following interop config&lt;br/&gt;
Server: Master, Build# 3266, Tag 2.7.64 &lt;br/&gt;
Client: 2.5.5, b2_5_fe/62&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/bc333cda-9fcc-11e5-a33d-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/bc333cda-9fcc-11e5-a33d-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="136553" author="standan" created="Wed, 16 Dec 2015 16:19:07 +0000"  >&lt;p&gt;Server: Master, Build# 3266, Tag 2.7.64 , RHEL 7 &lt;br/&gt;
Client: 2.5.5, b2_5_fe/62&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/f8bb27de-9fff-11e5-a33d-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/f8bb27de-9fff-11e5-a33d-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="139346" author="standan" created="Tue, 19 Jan 2016 23:23:01 +0000"  >&lt;p&gt;Another instance found for interop : EL6.7 Server/2.5.5 Client&lt;br/&gt;
Server: master, build# 3303, RHEL 6.7&lt;br/&gt;
Client: 2.5.5, b2_5_fe/62&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/2f6cb0c2-bad6-11e5-9137-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/2f6cb0c2-bad6-11e5-9137-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="141555" author="standan" created="Mon, 8 Feb 2016 22:13:14 +0000"  >&lt;p&gt;This issue is seen 21 times in past 30 days.&lt;/p&gt;</comment>
                            <comment id="141868" author="standan" created="Wed, 10 Feb 2016 22:18:06 +0000"  >&lt;p&gt;Another instance found for interop tag 2.7.66 - EL6.7 Server/2.5.5 Client, build# 3316&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/bdea5946-cc9f-11e5-963e-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/bdea5946-cc9f-11e5-963e-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Another instance found for interop tag 2.7.66 - EL7 Server/2.5.5 Client, build# 3316&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/79a03aac-cc46-11e5-901d-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/79a03aac-cc46-11e5-901d-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="143602" author="standan" created="Wed, 24 Feb 2016 17:26:17 +0000"  >&lt;p&gt;Another instance found for interop - EL6.7 Server/2.5.5 Client, tag 2.7.90. &lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/f99a2d60-d567-11e5-bc47-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/f99a2d60-d567-11e5-bc47-5254006e85c2&lt;/a&gt;&lt;br/&gt;
Another instance found for interop - EL7 Server/2.5.5 Client, tag 2.7.90. &lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/93baffee-d2ae-11e5-8697-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/93baffee-d2ae-11e5-8697-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="233255" author="simmonsja" created="Mon, 10 Sep 2018 16:41:57 +0000"  >&lt;p&gt;Can we close this?&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                                        </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzxn67:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>