<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:08:53 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
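<!--
A concrete example of such a request (the base URL pattern below is the standard JIRA
XML issue view and is an assumption of this note, not taken from this export):
https://jira.whamcloud.com/si/jira.issueviews:issue-xml/LU-7436/LU-7436.xml?field=key&field=summary
-->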
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-7436] conf-sanity test_91: @@@@@@ FAIL: found cc0b3805-41ce-ef63-799a-a55708b119b7 192.168.113.19@tcp on MDT</title>
                <link>https://jira.whamcloud.com/browse/LU-7436</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>
&lt;p&gt;Configuration : 4 node - 1 MDS/1OSS/2 clients&lt;br/&gt;
Release&lt;br/&gt;
2.6.32_431.17.1.x86_64&lt;br/&gt;
2.6.32_431.29.2.el6.x86_64_g70e90c3 &lt;/p&gt;

&lt;p&gt;Server 2.5.1.x6&lt;br/&gt;
Client 2.7.62&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;
stdout.log
== conf-sanity test 91: evict-by-nid support == 05:55:39 (1447566939)
../libcfs/libcfs/libcfs options: &apos;cpu_npartitions=2&apos;
Loading modules from /usr/lib64/lustre/tests/..
detected 2 online CPUs by sysfs
Force libcfs to create 2 CPU partitions
debug=-1
subsystem_debug=all -lnet -lnd -pinger
../lnet/lnet/lnet options: &apos;accept=all&apos;
../lnet/klnds/socklnd/ksocklnd options: &apos;sock_timeout=10&apos;
gss/krb5 is not supported
start mds service on fre1317
Starting mds1: -o rw,user_xattr  /dev/vdb /mnt/mds1
fre1317: mount.lustre: set /sys/block/vdb/queue/max_sectors_kb to 2147483647
fre1317: 
pdsh@fre1319: fre1317: ssh exited with exit code 1
pdsh@fre1319: fre1317: ssh exited with exit code 1
Started lustre-MDT0000
start ost1 service on fre1318
Starting ost1: -o user_xattr  /dev/vdb /mnt/ost1
fre1318: mount.lustre: set /sys/block/vdb/queue/max_sectors_kb to 2147483647
fre1318: 
pdsh@fre1319: fre1318: ssh exited with exit code 1
pdsh@fre1319: fre1318: ssh exited with exit code 1
Started lustre-OST0000
mount lustre on /mnt/lustre.....
Starting client: fre1319:  -o user_xattr,flock fre1317@tcp:/lustre /mnt/lustre
setup single mount lustre success
list nids on mdt:
mdt.lustre-MDT0000.exports.0@lo
mdt.lustre-MDT0000.exports.192.168.113.18@tcp
mdt.lustre-MDT0000.exports.192.168.113.19@tcp
mdt.lustre-MDT0000.exports.clear
uuid from 192\.168\.113\.19@tcp:
mdt.lustre-MDT0000.exports.192.168.113.19@tcp.uuid=cc0b3805-41ce-ef63-799a-a55708b119b7
manual umount lustre on /mnt/lustre....
evict 192\.168\.113\.19@tcp
 conf-sanity test_91: @@@@@@ FAIL: found cc0b3805-41ce-ef63-799a-a55708b119b7 192\.168\.113\.19@tcp on MDT 
  Trace dump:
  = /usr/lib64/lustre/tests/../tests/test-framework.sh:4812:error_noexit()
  = /usr/lib64/lustre/tests/../tests/test-framework.sh:4843:error()
  = /usr/lib64/lustre/tests/conf-sanity.sh:6091:test_91()
  = /usr/lib64/lustre/tests/../tests/test-framework.sh:5090:run_one()
  = /usr/lib64/lustre/tests/../tests/test-framework.sh:5127:run_one_logged()
  = /usr/lib64/lustre/tests/../tests/test-framework.sh:4944:run_test()
  = /usr/lib64/lustre/tests/conf-sanity.sh:6104:main()
Dumping lctl log to /tmp/test_logs/1447566903/conf-sanity.test_91.*.1447566962.log
fre1320: Warning: Permanently added &apos;fre1319,192.168.113.19&apos; (RSA) to the list of known hosts.
fre1317: Warning: Permanently added &apos;fre1319,192.168.113.19&apos; (RSA) to the list of known hosts.
fre1318: Warning: Permanently added &apos;fre1319,192.168.113.19&apos; (RSA) to the list of known hosts.
FAIL 91 (24s)



stderr.log
fre1317: mount.lustre: set /sys/block/vdb/queue/max_sectors_kb to 2147483647
fre1317: 
pdsh@fre1319: fre1317: ssh exited with exit code 1
fre1318: mount.lustre: set /sys/block/vdb/queue/max_sectors_kb to 2147483647
fre1318: 
pdsh@fre1319: fre1318: ssh exited with exit code 1
pdsh@fre1319: fre1317: ssh exited with exit code 3



&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>interop 2.5.x &amp;lt;-&amp;gt; master client</environment>
        <key id="33163">LU-7436</key>
            <summary>conf-sanity test_91: @@@@@@ FAIL: found cc0b3805-41ce-ef63-799a-a55708b119b7 192.168.113.19@tcp on MDT</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="bzzz">Alex Zhuravlev</assignee>
                                    <reporter username="parinay">parinay v kondekar</reporter>
                        <labels>
                    </labels>
                <created>Tue, 17 Nov 2015 05:36:50 +0000</created>
                <updated>Fri, 26 Aug 2016 13:17:30 +0000</updated>
                            <resolved>Tue, 24 Nov 2015 21:44:20 +0000</resolved>
                                    <version>Lustre 2.8.0</version>
                                    <fixVersion>Lustre 2.8.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>3</watches>
                    <comments>
                            <comment id="133664" author="parinay" created="Tue, 17 Nov 2015 05:52:34 +0000"  >&lt;ul class=&quot;alternate&quot; type=&quot;square&quot;&gt;
	&lt;li&gt;The test fails on interop 2.5.1 server &amp;lt;-&amp;gt; master client.&lt;/li&gt;
	&lt;li&gt;The test was newly added by the following patch:
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Author:     Alex Zhuravlev &amp;lt;alexey.zhuravlev@intel.com&amp;gt;
AuthorDate: Mon Oct 19 13:18:44 2015 +0300
Commit:     Oleg Drokin &amp;lt;oleg.drokin@intel.com&amp;gt;
CommitDate: Wed Nov 11 15:53:13 2015 +0000

    LU-2222 mdt: restore evict-by-nid functionality
    
    Writing a NID or UUID to mdt.*.evict_tgt_nids will evict clients
    with NID or UUID specified all the targets (OSTs and MDTs).
    
    Change-Id: I66a60a6c81fbac1571f5685111df7b00a306be36
    Signed-off-by: Alex Zhuravlev &amp;lt;alexey.zhuravlev@intel.com&amp;gt;
    Reviewed-on: http://review.whamcloud.com/16867
    Tested-by: Jenkins
    Reviewed-by: Andreas Dilger &amp;lt;andreas.dilger@intel.com&amp;gt;
    Tested-by: Maloo &amp;lt;hpdd-maloo@intel.com&amp;gt;
    Reviewed-by: Niu Yawei &amp;lt;yawei.niu@intel.com&amp;gt;
    Reviewed-by: Oleg Drokin &amp;lt;oleg.drokin@intel.com&amp;gt;

&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;&lt;/li&gt;
&lt;/ul&gt;
</comment>
                            <comment id="133667" author="gerrit" created="Tue, 17 Nov 2015 06:40:02 +0000"  >&lt;p&gt;Alex Zhuravlev (alexey.zhuravlev@intel.com) uploaded a new patch: &lt;a href=&quot;http://review.whamcloud.com/17222&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/17222&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-7436&quot; title=&quot;conf-sanity test_91: @@@@@@ FAIL: found cc0b3805-41ce-ef63-799a-a55708b119b7 192.168.113.19@tcp on MDT&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-7436&quot;&gt;&lt;del&gt;LU-7436&lt;/del&gt;&lt;/a&gt; tests: skip conf-sanity/91 with old servers&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: 224adf32d1e858903c0b92834d6b377e306e42db&lt;/p&gt;</comment>
                            <comment id="134427" author="gerrit" created="Tue, 24 Nov 2015 18:03:19 +0000"  >&lt;p&gt;Oleg Drokin (oleg.drokin@intel.com) merged in patch &lt;a href=&quot;http://review.whamcloud.com/17222/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/17222/&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-7436&quot; title=&quot;conf-sanity test_91: @@@@@@ FAIL: found cc0b3805-41ce-ef63-799a-a55708b119b7 192.168.113.19@tcp on MDT&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-7436&quot;&gt;&lt;del&gt;LU-7436&lt;/del&gt;&lt;/a&gt; tests: skip conf-sanity/91 with old servers&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: &lt;br/&gt;
Commit: 8438f2a7b56fd97aa2207aac375a880df668a61d&lt;/p&gt;</comment>
                            <comment id="134458" author="pjones" created="Tue, 24 Nov 2015 21:44:20 +0000"  >&lt;p&gt;Landed for 2.8&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                                                <inwardlinks description="is duplicated by">
                                                        </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                            <attachment id="19636" name="91.lctl.tgz" size="1360841" author="parinay" created="Tue, 17 Nov 2015 05:36:50 +0000"/>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzxt8f:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                <customfields>
    </item>
</channel>
</rss>
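<!--
Editorial sketch, not part of the exported issue: the log and the LU-2222 commit
message quoted above describe evicting a client from the MDT by writing its NID to
the mdt.*.evict_tgt_nids parameter, which is what conf-sanity test_91 verifies. The
following minimal walk-through assumes an lctl-based interface as described in that
commit message; the parameter paths are taken from the log above, while the target
name and NID are illustrative only. evict_tgt_nids exists only on servers carrying
the LU-2222 patch (Lustre 2.8 and later), which is why this interop run against
2.5.x servers fails and why the fix skips the test with old servers.

    # List the client exports known to the MDT (the "list nids on mdt" step in the log).
    lctl list_param mdt.lustre-MDT0000.exports.*

    # Read the UUID registered for a given client NID; dots inside the NID are
    # escaped so lctl does not treat them as path separators, as in the log above.
    lctl get_param mdt.lustre-MDT0000.exports.192\.168\.113\.19@tcp.uuid

    # Evict that client on all targets by writing its NID to evict_tgt_nids
    # (the interface restored by LU-2222).
    lctl set_param mdt.lustre-MDT0000.evict_tgt_nids=192.168.113.19@tcp

    # After a successful eviction the export should no longer be listed; test_91
    # fails at this point in the interop run because the 2.5.x server does not
    # implement evict_tgt_nids and the UUID is still reported.
    lctl get_param mdt.lustre-MDT0000.exports.192\.168\.113\.19@tcp.uuid
-->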