<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:39:55 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-10982] Interop 2.11.0&lt;-&gt;master obdfilter-survey test_3a: (sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff8800586341a0(obdecho)</title>
                <link>https://jira.whamcloud.com/browse/LU-10982</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for sarah_lw &amp;lt;wei3.liu@intel.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/3ebf5b1c-475e-11e8-b45c-52540065bddc&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/3ebf5b1c-475e-11e8-b45c-52540065bddc&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;test_3a failed with the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Timeout occurred after 1487 mins, last suite running was obdfilter-survey, restarting cluster to continue tests
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;OST dmesg shows&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[81964.390516] Lustre: DEBUG MARKER: /usr/sbin/lctl mark == obdfilter-survey test 3a: Network survey ========================================================== 20:32:51 \(1524515571\)
[81964.583105] Lustre: DEBUG MARKER: == obdfilter-survey test 3a: Network survey ========================================================== 20:32:51 (1524515571)
[81970.186119] Lustre: lustre-MDT0000-lwp-OST0001: Connection to lustre-MDT0000 (at 10.9.4.92@tcp) was lost; in progress operations using this service will wait for recovery to complete
[81970.190412] Lustre: Skipped 13 previous similar messages
[81980.855116] Lustre: DEBUG MARKER: grep -c /mnt/lustre-ost1&apos; &apos; /proc/mounts || true
[81981.193727] Lustre: DEBUG MARKER: umount -d -f /mnt/lustre-ost1
[81981.447527] Lustre: server umount lustre-OST0000 complete
[81981.449516] Lustre: Skipped 1 previous similar message
[81981.652873] Lustre: DEBUG MARKER: lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp;
lctl dl | grep &apos; ST &apos; || true
[81982.017942] Lustre: DEBUG MARKER: modprobe dm-flakey;
			 dmsetup targets | grep -q flakey
[81982.185728] Lustre: 30952:0:(client.c:2100:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1524515582/real 1524515582]  req@ffff880002e87000 x1598513769219936/t0(0) o400-&amp;gt;MGC10.9.4.92@tcp@10.9.4.92@tcp:26/25 lens 224/224 e 0 to 1 dl 1524515589 ref 1 fl Rpc:XN/0/ffffffff rc 0/-1
[81982.193976] LustreError: 166-1: MGC10.9.4.92@tcp: Connection to MGS (at 10.9.4.92@tcp) was lost; in progress operations using this service will fail
[81982.424258] Lustre: DEBUG MARKER: dmsetup status /dev/mapper/ost1_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81982.777779] Lustre: DEBUG MARKER: dmsetup table /dev/mapper/ost1_flakey
[81983.125293] Lustre: DEBUG MARKER: dmsetup remove /dev/mapper/ost1_flakey
[81983.480619] Lustre: DEBUG MARKER: dmsetup mknodes &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81983.828936] Lustre: DEBUG MARKER: modprobe -r dm-flakey
[81984.169370] Lustre: DEBUG MARKER: grep -c /mnt/lustre-ost2&apos; &apos; /proc/mounts || true
[81984.520893] Lustre: DEBUG MARKER: umount -d -f /mnt/lustre-ost2
[81984.912239] Lustre: DEBUG MARKER: lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp;
lctl dl | grep &apos; ST &apos; || true
[81985.273224] Lustre: DEBUG MARKER: modprobe dm-flakey;
			 dmsetup targets | grep -q flakey
[81985.661512] Lustre: DEBUG MARKER: dmsetup status /dev/mapper/ost2_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81985.999795] Lustre: DEBUG MARKER: dmsetup table /dev/mapper/ost2_flakey
[81986.338097] Lustre: DEBUG MARKER: dmsetup remove /dev/mapper/ost2_flakey
[81986.697529] Lustre: DEBUG MARKER: dmsetup mknodes &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81987.063945] Lustre: DEBUG MARKER: modprobe -r dm-flakey
[81987.414873] Lustre: DEBUG MARKER: grep -c /mnt/lustre-ost3&apos; &apos; /proc/mounts || true
[81987.760777] Lustre: DEBUG MARKER: umount -d -f /mnt/lustre-ost3
[81988.156561] Lustre: DEBUG MARKER: lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp;
lctl dl | grep &apos; ST &apos; || true
[81988.504258] Lustre: DEBUG MARKER: modprobe dm-flakey;
			 dmsetup targets | grep -q flakey
[81988.863600] Lustre: DEBUG MARKER: dmsetup status /dev/mapper/ost3_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81989.211827] Lustre: DEBUG MARKER: dmsetup table /dev/mapper/ost3_flakey
[81989.552982] Lustre: DEBUG MARKER: dmsetup remove /dev/mapper/ost3_flakey
[81989.916366] Lustre: DEBUG MARKER: dmsetup mknodes &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81990.269341] Lustre: DEBUG MARKER: modprobe -r dm-flakey
[81990.616850] Lustre: DEBUG MARKER: grep -c /mnt/lustre-ost4&apos; &apos; /proc/mounts || true
[81990.957487] Lustre: DEBUG MARKER: umount -d -f /mnt/lustre-ost4
[81991.334436] Lustre: DEBUG MARKER: lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp;
lctl dl | grep &apos; ST &apos; || true
[81991.676495] Lustre: DEBUG MARKER: modprobe dm-flakey;
			 dmsetup targets | grep -q flakey
[81992.020599] Lustre: DEBUG MARKER: dmsetup status /dev/mapper/ost4_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81992.382610] Lustre: DEBUG MARKER: dmsetup table /dev/mapper/ost4_flakey
[81992.731881] Lustre: DEBUG MARKER: dmsetup remove /dev/mapper/ost4_flakey
[81993.091887] Lustre: DEBUG MARKER: dmsetup mknodes &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81993.430868] Lustre: DEBUG MARKER: modprobe -r dm-flakey
[81993.785945] Lustre: DEBUG MARKER: grep -c /mnt/lustre-ost5&apos; &apos; /proc/mounts || true
[81994.140587] Lustre: DEBUG MARKER: umount -d -f /mnt/lustre-ost5
[81994.569250] Lustre: DEBUG MARKER: lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp;
lctl dl | grep &apos; ST &apos; || true
[81994.944802] Lustre: DEBUG MARKER: modprobe dm-flakey;
			 dmsetup targets | grep -q flakey
[81995.316076] Lustre: DEBUG MARKER: dmsetup status /dev/mapper/ost5_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81995.679724] Lustre: DEBUG MARKER: dmsetup table /dev/mapper/ost5_flakey
[81996.047102] Lustre: DEBUG MARKER: dmsetup remove /dev/mapper/ost5_flakey
[81996.428055] Lustre: DEBUG MARKER: dmsetup mknodes &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81996.818046] Lustre: DEBUG MARKER: modprobe -r dm-flakey
[81997.175548] Lustre: DEBUG MARKER: grep -c /mnt/lustre-ost6&apos; &apos; /proc/mounts || true
[81997.545522] Lustre: DEBUG MARKER: umount -d -f /mnt/lustre-ost6
[81997.962737] Lustre: DEBUG MARKER: lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp;
lctl dl | grep &apos; ST &apos; || true
[81998.322180] Lustre: DEBUG MARKER: modprobe dm-flakey;
			 dmsetup targets | grep -q flakey
[81998.680191] Lustre: DEBUG MARKER: dmsetup status /dev/mapper/ost6_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[81999.048424] Lustre: DEBUG MARKER: dmsetup table /dev/mapper/ost6_flakey
[81999.413955] Lustre: DEBUG MARKER: dmsetup remove /dev/mapper/ost6_flakey
[81999.784411] Lustre: DEBUG MARKER: dmsetup mknodes &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[82000.149521] Lustre: DEBUG MARKER: modprobe -r dm-flakey
[82000.529580] Lustre: DEBUG MARKER: grep -c /mnt/lustre-ost7&apos; &apos; /proc/mounts || true
[82000.886363] Lustre: DEBUG MARKER: umount -d -f /mnt/lustre-ost7
[82001.118309] Lustre: server umount lustre-OST0006 complete
[82001.119962] Lustre: Skipped 5 previous similar messages
[82001.298997] Lustre: DEBUG MARKER: lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp;
lctl dl | grep &apos; ST &apos; || true
[82001.627448] Lustre: DEBUG MARKER: modprobe dm-flakey;
			 dmsetup targets | grep -q flakey
[82001.978028] Lustre: DEBUG MARKER: dmsetup status /dev/mapper/ost7_flakey &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[82002.302621] Lustre: DEBUG MARKER: dmsetup table /dev/mapper/ost7_flakey
[82002.621593] Lustre: DEBUG MARKER: dmsetup remove /dev/mapper/ost7_flakey
[82002.961859] Lustre: DEBUG MARKER: dmsetup mknodes &amp;gt;/dev/null 2&amp;gt;&amp;amp;1
[82003.296140] Lustre: DEBUG MARKER: modprobe -r dm-flakey
[82006.237824] Lustre: DEBUG MARKER: /usr/sbin/lctl list_nids | grep tcp | cut -f 1 -d @
[82009.201799] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82009.203860] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82009.205719] Lustre: Skipped 6 previous similar messages
[82009.207235] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880004b67450 x1598570486562832/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515636 ref 1 fl Interpret:/0/0 rc 0/0
[82034.201197] Lustre: echo_srv: Client echotmp_UUID (at 10.9.4.89@tcp) reconnecting
[82034.203080] Lustre: Skipped 1 previous similar message
[82034.204691] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82034.206731] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82034.208851] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880004b65450 x1598570486562864/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515661 ref 1 fl Interpret:/0/0 rc 0/0
[82059.201216] Lustre: echo_srv: Client echotmp_UUID (at 10.9.4.89@tcp) reconnecting
[82059.203412] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82059.205599] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82059.207993] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880004b67c50 x1598570486562880/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515686 ref 1 fl Interpret:/0/0 rc 0/0
[82084.201186] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82084.204460] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82084.207488] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880004b66450 x1598570486562896/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515711 ref 1 fl Interpret:/0/0 rc 0/0
[82109.201281] Lustre: echo_srv: Client echotmp_UUID (at 10.9.4.89@tcp) reconnecting
[82109.204094] Lustre: Skipped 1 previous similar message
[82109.206826] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82109.209506] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82109.212281] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880003a3dc50 x1598570486562912/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515736 ref 1 fl Interpret:/0/0 rc 0/0
[82134.201230] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82134.203993] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82134.206547] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880003a3d450 x1598570486562928/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515761 ref 1 fl Interpret:/0/0 rc 0/0
[82159.201265] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82159.204184] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82159.206875] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880003a3ec50 x1598570486562944/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515786 ref 1 fl Interpret:/0/0 rc 0/0
[82184.201328] Lustre: echo_srv: Client echotmp_UUID (at 10.9.4.89@tcp) reconnecting
[82184.204149] Lustre: Skipped 2 previous similar messages
[82209.201239] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82209.204274] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) Skipped 1 previous similar message
[82209.207097] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82209.209922] Lustre: Skipped 1 previous similar message
[82209.212557] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880003a3f450 x1598570486562976/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515836 ref 1 fl Interpret:/0/0 rc 0/0
[82209.220624] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) Skipped 1 previous similar message
[82284.201211] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82284.204632] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) Skipped 2 previous similar messages
[82284.207893] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)
[82284.210845] Lustre: Skipped 2 previous similar messages
[82284.213351] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) @@@ echo_srv: No target for connected export
  req@ffff880003a3c050 x1598570486563024/t0(0) o8-&amp;gt;echotmp_UUID@10.9.4.89@tcp:0/0 lens 520/416 e 0 to 0 dl 1524515911 ref 1 fl Interpret:/0/0 rc 0/0
[82284.221387] LustreError: 30423:0:(tgt_handler.c:739:tgt_request_handle()) Skipped 2 previous similar messages
[82334.201204] Lustre: echo_srv: Client echotmp_UUID (at 10.9.4.89@tcp) reconnecting
[82334.203983] Lustre: Skipped 5 previous similar messages
[82434.201254] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff880067cca0d0(obdecho)
[82434.204327] LustreError: 30423:0:(sec_config.c:84:sptlrpc_target_sec_part()) Skipped 5 previous similar messages
[82434.207157] Lustre: echo_srv: Connection restored to 27842367-7172-2018-dd01-a259e7c19659 (at 10.9.4.89@tcp)&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;VVVVVVV DO NOT REMOVE LINES BELOW, Added by Maloo for auto-association VVVVVVV&lt;br/&gt;
 obdfilter-survey test_3a - Timeout occurred after 1487 mins, last suite running was obdfilter-survey, restarting cluster to continue tests&lt;/p&gt;</description>
                <environment></environment>
        <key id="52067">LU-10982</key>
            <summary>Interop 2.11.0&lt;-&gt;master obdfilter-survey test_3a: (sec_config.c:84:sptlrpc_target_sec_part()) unknown target ffff8800586341a0(obdecho)</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="1" iconUrl="https://jira.whamcloud.com/images/icons/statuses/open.png" description="The issue is open and ready for the assignee to start work on it.">Open</status>
                    <statusCategory id="2" key="new" colorName="default"/>
                                    <resolution id="-1">Unresolved</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Tue, 1 May 2018 19:48:18 +0000</created>
                <updated>Wed, 19 Sep 2018 20:50:48 +0000</updated>
                                            <version>Lustre 2.12.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>1</watches>
                                                                                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzzwnb:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>