<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:46:59 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-11794] recovery-mds-scale test failover_mds crashes with &apos;&lt;hostname&gt; crashed during recovery-mds-scale test_failover_mds&apos;</title>
                <link>https://jira.whamcloud.com/browse/LU-11794</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;recovery-mds-scale test_failover_mds crashes with &apos;trevis-16vm3 crashed during recovery-mds-scale test_failover_mds&apos;&lt;/p&gt;

&lt;p&gt;Looking at the kernel crash from &lt;a href=&quot;https://testing.whamcloud.com/test_sets/4db0c228-fd1b-11e8-8512-52540065bddc&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/4db0c228-fd1b-11e8-8512-52540065bddc&lt;/a&gt; , we see&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[  733.722957] Lustre: DEBUG MARKER: mds1 has failed over 1 times, and counting...
[  746.732703] Lustre: Evicted from MGS (at MGC10.9.4.191@tcp_1) after server handle changed from 0xd73c2b185ff898b4 to 0x7b37c6365056206
[  746.734484] Lustre: MGC10.9.4.191@tcp: Connection restored to MGC10.9.4.191@tcp_1 (at 10.9.4.192@tcp)
[  746.736017] LustreError: 13384:0:(client.c:3023:ptlrpc_replay_interpret()) @@@ status 301, old was 0  req@ffff88006542b980 x1619485946545808/t4294967305(4294967305) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007ae8a000@10.9.4.192@tcp:12/10 lens 712/560 e 0 to 0 dl 1544462286 ref 2 fl Interpret:RP/4/0 rc 301/301
[  747.973965] Lustre: lustre-MDT0000-mdc-ffff88007ae8a000: Connection restored to 10.9.4.192@tcp (at 10.9.4.192@tcp)
[  794.635505] Lustre: 13385:0:(client.c:2132:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1544462204/real 1544462204]  req@ffff88003aa1acc0 x1619485946562912/t0(0) o400-&amp;gt;lustre-MDT0000-mdc-ffff88007ae8a000@10.9.4.192@tcp:12/10 lens 224/224 e 0 to 1 dl 1544462212 ref 1 fl Rpc:XN/0/ffffffff rc 0/-1
[  794.635528] Lustre: 13385:0:(client.c:2132:ptlrpc_expire_one_request()) Skipped 1 previous similar message
[ 1438.045823] kworker/0:2H: page allocation failure: order:0, mode:0x1284020(GFP_ATOMIC|__GFP_COMP|__GFP_NOTRACK)
[ 1438.045839] CPU: 0 PID: 1516 Comm: kworker/0:2H Tainted: G           OE   N  4.4.162-94.69-default #1
[ 1438.045840] Hardware name: Red Hat KVM, BIOS 0.5.1 01/01/2011
[ 1438.045875] Workqueue: kblockd blk_mq_run_work_fn
[ 1438.045878]  0000000000000000 ffffffff8132cdc0 0000000000000000 ffff88007b6479a8
[ 1438.045879]  ffffffff8119ddc2 0128402000000030 0000000000000046 002c422000000000
[ 1438.045881]  00000001005d4200 0000000000000000 0000000000000000 0000000000000020
[ 1438.045881] Call Trace:
[ 1438.045928]  [&amp;lt;ffffffff81019b09&amp;gt;] dump_trace+0x59/0x340
[ 1438.045934]  [&amp;lt;ffffffff81019eda&amp;gt;] show_stack_log_lvl+0xea/0x170
[ 1438.045936]  [&amp;lt;ffffffff8101acb1&amp;gt;] show_stack+0x21/0x40
[ 1438.045944]  [&amp;lt;ffffffff8132cdc0&amp;gt;] dump_stack+0x5c/0x7c
[ 1438.045964]  [&amp;lt;ffffffff8119ddc2&amp;gt;] warn_alloc_failed+0xe2/0x150
[ 1438.045975]  [&amp;lt;ffffffff8119e23b&amp;gt;] __alloc_pages_nodemask+0x40b/0xb70
[ 1438.045988]  [&amp;lt;ffffffff811edbed&amp;gt;] kmem_getpages+0x4d/0xf0
[ 1438.045995]  [&amp;lt;ffffffff811ef3fb&amp;gt;] fallback_alloc+0x19b/0x240
[ 1438.045998]  [&amp;lt;ffffffff811f13fa&amp;gt;] __kmalloc+0x26a/0x4b0
[ 1438.046022]  [&amp;lt;ffffffffa0184708&amp;gt;] alloc_indirect.isra.4+0x18/0x50 [virtio_ring]
[ 1438.046031]  [&amp;lt;ffffffffa0184a02&amp;gt;] virtqueue_add_sgs+0x2c2/0x410 [virtio_ring]
[ 1438.046044]  [&amp;lt;ffffffffa0053417&amp;gt;] __virtblk_add_req+0xa7/0x170 [virtio_blk]
[ 1438.046048]  [&amp;lt;ffffffffa00535fa&amp;gt;] virtio_queue_rq+0x11a/0x270 [virtio_blk]
[ 1438.046051]  [&amp;lt;ffffffff813073e5&amp;gt;] blk_mq_dispatch_rq_list+0xd5/0x1e0
[ 1438.046057]  [&amp;lt;ffffffff8130761e&amp;gt;] blk_mq_process_rq_list+0x12e/0x150
[ 1438.046065]  [&amp;lt;ffffffff8109ad14&amp;gt;] process_one_work+0x154/0x420
[ 1438.046073]  [&amp;lt;ffffffff8109b906&amp;gt;] worker_thread+0x116/0x4a0
[ 1438.046079]  [&amp;lt;ffffffff810a0e29&amp;gt;] kthread+0xc9/0xe0
[ 1438.046099]  [&amp;lt;ffffffff8161e1f5&amp;gt;] ret_from_fork+0x55/0x80
[ 1438.048865] DWARF2 unwinder stuck at ret_from_fork+0x55/0x80
[ 1438.048865] 
[ 1438.048868] Leftover inexact backtrace:
               
[ 1438.048877]  [&amp;lt;ffffffff810a0d60&amp;gt;] ? kthread_park+0x50/0x50
[ 1438.048878] Mem-Info:
[ 1438.048882] active_anon:961 inactive_anon:982 isolated_anon:64
                active_file:179814 inactive_file:247899 isolated_file:32
                unevictable:20 dirty:3366 writeback:60306 unstable:0
                slab_reclaimable:3317 slab_unreclaimable:29811
                mapped:7232 shmem:752 pagetables:890 bounce:0
                free:2433 free_pcp:169 free_cma:0
[ 1438.048896] Node 0 DMA free:364kB min:376kB low:468kB high:560kB active_anon:76kB inactive_anon:80kB active_file:1836kB inactive_file:5360kB unevictable:0kB isolated(anon):0kB isolated(file):0kB present:15992kB managed:15904kB mlocked:0kB dirty:40kB writeback:920kB mapped:292kB shmem:80kB slab_reclaimable:68kB slab_unreclaimable:7712kB kernel_stack:16kB pagetables:16kB unstable:0kB bounce:0kB free_pcp:0kB local_pcp:0kB free_cma:0kB writeback_tmp:0kB pages_scanned:45812 all_unreclaimable? yes
[ 1438.048898] lowmem_reserve[]: 0 1843 1843 1843 1843
[ 1438.048907] Node 0 DMA32 free:9368kB min:44676kB low:55844kB high:67012kB active_anon:3768kB inactive_anon:3848kB active_file:717420kB inactive_file:986236kB unevictable:80kB isolated(anon):256kB isolated(file):128kB present:2080744kB managed:1900752kB mlocked:80kB dirty:13424kB writeback:240304kB mapped:28636kB shmem:2928kB slab_reclaimable:13200kB slab_unreclaimable:111532kB kernel_stack:2608kB pagetables:3544kB unstable:0kB bounce:0kB free_pcp:676kB local_pcp:676kB free_cma:0kB writeback_tmp:0kB pages_scanned:103620 all_unreclaimable? no
[ 1438.048908] lowmem_reserve[]: 0 0 0 0 0
[ 1438.048915] Node 0 DMA: 2*4kB (M) 0*8kB 0*16kB 1*32kB (U) 1*64kB (U) 0*128kB 1*256kB (U) 0*512kB 0*1024kB 0*2048kB 0*4096kB = 360kB
[ 1438.048921] Node 0 DMA32: 140*4kB (UME) 69*8kB (UM) 42*16kB (UM) 77*32kB (UM) 29*64kB (UM) 2*128kB (M) 4*256kB (M) 4*512kB (M) 0*1024kB 0*2048kB 0*4096kB = 9432kB
[ 1438.048932] Node 0 hugepages_total=0 hugepages_free=0 hugepages_surp=0 hugepages_size=1048576kB
[ 1438.048939] Node 0 hugepages_total=0 hugepages_free=0 hugepages_surp=0 hugepages_size=2048kB
[ 1438.048940] 76857 total pagecache pages
[ 1438.048941] 344 pages in swap cache
[ 1438.048943] Swap cache stats: add 7967, delete 7623, find 157/240
[ 1438.048944] Free swap  = 14306948kB
[ 1438.048944] Total swap = 14338044kB
[ 1438.048945] 524184 pages RAM
[ 1438.048945] 0 pages HighMem/MovableOnly
[ 1438.048945] 45020 pages reserved
[ 1438.048946] 0 pages hwpoisoned
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;There are no other recovery-mds-scale test_failover_mds crashes with the same stack trace in the past month.&lt;/p&gt;</description>
                <environment></environment>
        <key id="54320">LU-11794</key>
            <summary>recovery-mds-scale test failover_mds crashes with &apos;&lt;hostname&gt; crashed during recovery-mds-scale test_failover_mds&apos;</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="jamesanunez">James Nunez</reporter>
                        <labels>
                            <label>failover</label>
                    </labels>
                <created>Mon, 17 Dec 2018 16:02:07 +0000</created>
                <updated>Mon, 27 Jun 2022 23:15:10 +0000</updated>
                            <resolved>Fri, 21 Jan 2022 00:07:20 +0000</resolved>
                                    <version>Lustre 2.12.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>1</watches>
                                                                                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i0086n:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>